forked from OSSInnovation/mindspore
Compare commits
No commits in common. "master" and "r0.1" have entirely different histories.
|
@ -52,7 +52,7 @@ ConstructorInitializerAllOnOneLineOrOnePerLine: true
|
|||
ConstructorInitializerIndentWidth: 4
|
||||
ContinuationIndentWidth: 2
|
||||
Cpp11BracedListStyle: true
|
||||
DerivePointerAlignment: false
|
||||
DerivePointerAlignment: true
|
||||
DisableFormat: false
|
||||
ExperimentalAutoDetectBinPacking: false
|
||||
FixNamespaceComments: true
|
||||
|
@ -94,7 +94,7 @@ PenaltyBreakString: 1000
|
|||
PenaltyBreakTemplateDeclaration: 10
|
||||
PenaltyExcessCharacter: 1000000
|
||||
PenaltyReturnTypeOnItsOwnLine: 200
|
||||
PointerAlignment: Right
|
||||
PointerAlignment: Left
|
||||
RawStringFormats:
|
||||
- Language: Cpp
|
||||
Delimiters:
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
<!-- Thanks for sending a pull request! Here are some tips for you:
|
||||
|
||||
If this is your first time, please read our contributor guidelines: https://gitee.com/mindspore/mindspore/blob/master/CONTRIBUTING.md
|
||||
-->
|
||||
|
||||
**What type of PR is this?**
|
||||
> Uncomment only one ` /kind <>` line, hit enter to put that in a new line, and remove leading whitespaces from that line:
|
||||
>
|
||||
> /kind bug
|
||||
> /kind task
|
||||
> /kind feature
|
||||
|
||||
|
||||
**What does this PR do / why do we need it**:
|
||||
|
||||
|
||||
**Which issue(s) this PR fixes**:
|
||||
<!--
|
||||
*Automatically closes linked issue when PR is merged.
|
||||
Usage: `Fixes #<issue number>`, or `Fixes (paste link of issue)`.
|
||||
-->
|
||||
Fixes #
|
||||
|
||||
**Special notes for your reviewers**:
|
||||
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
---
|
||||
name: RFC
|
||||
about: Use this template for the new feature or enhancement
|
||||
labels: kind/feature or kind/enhancement
|
||||
|
||||
---
|
||||
|
||||
## Background
|
||||
- Describe the status of the problem you wish to solve
|
||||
- Attach the relevant issue if have
|
||||
|
||||
## Introduction
|
||||
- Describe the general solution, design and/or pseudo-code
|
||||
|
||||
## Trail
|
||||
| No. | Task Description | Related Issue(URL) |
|
||||
| --- | ---------------- | ------------------ |
|
||||
| 1 | | |
|
||||
| 2 | | |
|
|
@ -1,43 +0,0 @@
|
|||
---
|
||||
name: Bug Report
|
||||
about: Use this template for reporting a bug
|
||||
labels: kind/bug
|
||||
|
||||
---
|
||||
|
||||
<!-- Thanks for sending an issue! Here are some tips for you:
|
||||
|
||||
If this is your first time, please read our contributor guidelines: https://github.com/mindspore-ai/mindspore/blob/master/CONTRIBUTING.md
|
||||
-->
|
||||
|
||||
## Environment
|
||||
### Hardware Environment(`Ascend`/`GPU`/`CPU`):
|
||||
> Uncomment only one ` /device <>` line, hit enter to put that in a new line, and remove leading whitespaces from that line:
|
||||
>
|
||||
> `/device ascend`</br>
|
||||
> `/device gpu`</br>
|
||||
> `/device cpu`</br>
|
||||
|
||||
### Software Environment:
|
||||
- **MindSpore version (source or binary)**:
|
||||
- **Python version (e.g., Python 3.7.5)**:
|
||||
- **OS platform and distribution (e.g., Linux Ubuntu 16.04)**:
|
||||
- **GCC/Compiler version (if compiled from source)**:
|
||||
|
||||
## Describe the current behavior
|
||||
|
||||
|
||||
## Describe the expected behavior
|
||||
|
||||
|
||||
## Steps to reproduce the issue
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
## Related log / screenshot
|
||||
|
||||
|
||||
## Special notes for this issue
|
||||
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
---
|
||||
name: Task
|
||||
about: Use this template for task tracking
|
||||
labels: kind/task
|
||||
|
||||
---
|
||||
|
||||
## Task Description
|
||||
|
||||
|
||||
## Task Goal
|
||||
|
||||
|
||||
## Sub Task
|
||||
| No. | Task Description | Issue ID |
|
||||
| --- | ---------------- | -------- |
|
||||
| 1 | | |
|
||||
| 2 | | |
|
||||
|
|
@ -1,24 +0,0 @@
|
|||
<!-- Thanks for sending a pull request! Here are some tips for you:
|
||||
|
||||
If this is your first time, please read our contributor guidelines: https://github.com/mindspore-ai/mindspore/blob/master/CONTRIBUTING.md
|
||||
-->
|
||||
|
||||
**What type of PR is this?**
|
||||
> Uncomment only one ` /kind <>` line, hit enter to put that in a new line, and remove leading whitespaces from that line:
|
||||
>
|
||||
> `/kind bug`</br>
|
||||
> `/kind task`</br>
|
||||
> `/kind feature`</br>
|
||||
|
||||
**What does this PR do / why do we need it**:
|
||||
|
||||
|
||||
**Which issue(s) this PR fixes**:
|
||||
<!--
|
||||
*Automatically closes linked issue when PR is merged.
|
||||
Usage: `Fixes #<issue number>`, or `Fixes (paste link of issue)`.
|
||||
-->
|
||||
Fixes #
|
||||
|
||||
**Special notes for your reviewers**:
|
||||
|
|
@ -4,20 +4,6 @@ mindspore/lib
|
|||
output
|
||||
*.ir
|
||||
|
||||
# flatbuffer
|
||||
mindspore/lite/tools/converter/parser/tflite/schema_generated.h
|
||||
mindspore/lite/tools/converter/parser/caffe/caffe.pb.cc
|
||||
mindspore/lite/tools/converter/parser/caffe/caffe.pb.h
|
||||
mindspore/lite/tools/converter/parser/onnx/onnx.pb.h
|
||||
mindspore/lite/tools/converter/parser/onnx/onnx.pb.h
|
||||
mindspore/lite/tools/converter/schema/*.h
|
||||
mindspore/lite/tools/converter/schema/inner
|
||||
mindspore/lite/schema/*.h
|
||||
mindspore/lite/schema/inner
|
||||
|
||||
mindspore/lite/src/runtime/kernel/opencl/cl/fp16/*.inc
|
||||
mindspore/lite/src/runtime/kernel/opencl/cl/fp32/*.inc
|
||||
|
||||
# Cmake files
|
||||
CMakeFiles/
|
||||
cmake_install.cmake
|
||||
|
@ -40,8 +26,6 @@ cmake-build-debug
|
|||
*_pb2.py
|
||||
*.pb.h
|
||||
*.pb.cc
|
||||
*.pb
|
||||
*_grpc.py
|
||||
|
||||
# Object files
|
||||
*.o
|
||||
|
@ -81,11 +65,9 @@ test_temp_summary_event_file/
|
|||
*.ckpt
|
||||
*.shp
|
||||
*.pkl
|
||||
*.pb
|
||||
.clangd
|
||||
mindspore/version.py
|
||||
mindspore/default_config.py
|
||||
mindspore/.commit_id
|
||||
|
||||
# lite test file
|
||||
mindspore/lite/test/do_test/
|
||||
onnx.proto
|
||||
mindspore/ccsrc/onnx.proto
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
[submodule "third_party/flatbuffers"]
|
||||
path = third_party/flatbuffers
|
||||
url = https://github.com/google/flatbuffers.git
|
||||
ignore = all
|
||||
[submodule "third_party/googletest"]
|
||||
path = third_party/googletest
|
||||
url = https://github.com/google/googletest.git
|
||||
|
@ -11,16 +10,6 @@
|
|||
[submodule "third_party/protobuf"]
|
||||
path = third_party/protobuf
|
||||
url = https://github.com/protocolbuffers/protobuf.git
|
||||
ignore = all
|
||||
[submodule "akg"]
|
||||
path = akg
|
||||
url = https://gitee.com/mindspore/akg.git
|
||||
[submodule "graphengine"]
|
||||
path = graphengine
|
||||
url = https://gitee.com/mindspore/graphengine.git
|
||||
[submodule "third_party/OpenCL-CLHPP"]
|
||||
path = third_party/OpenCL-CLHPP
|
||||
url = https://github.com/KhronosGroup/OpenCL-CLHPP.git
|
||||
[submodule "third_party/OpenCL-Headers"]
|
||||
path = third_party/OpenCL-Headers
|
||||
url = https://github.com/KhronosGroup/OpenCL-Headers.git
|
||||
|
|
|
@ -1,28 +1,12 @@
|
|||
cmake_minimum_required(VERSION 3.14)
|
||||
project (MindSpore)
|
||||
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.3.0)
|
||||
message(FATAL_ERROR "GCC version ${CMAKE_CXX_COMPILER_VERSION} must not be less than 7.3.0")
|
||||
endif ()
|
||||
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/options.cmake)
|
||||
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake/modules/")
|
||||
if (NOT CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
add_compile_definitions(_GLIBCXX_USE_CXX11_ABI=0)
|
||||
endif ()
|
||||
|
||||
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
|
||||
set(CMAKE_CXX_FLAGS_RELEASE "$ENV{CXXFLAGS} -O2 -Werror -Wno-return-std-move -Wno-unused-private-field -Wno-unused-lambda-capture -Wno-sign-compare -Wno-overloaded-virtual -Wno-unneeded-internal-declaration -Wno-unused-variable -Wno-pessimizing-move -Wno-inconsistent-missing-override -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2")
|
||||
else()
|
||||
set(CMAKE_CXX_FLAGS_RELEASE "$ENV{CXXFLAGS} -O2 -Wl,--allow-shlib-undefined -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2")
|
||||
endif()
|
||||
|
||||
if (ENABLE_PYTHON)
|
||||
add_compile_definitions(ENABLE_PYTHON)
|
||||
endif()
|
||||
|
||||
set(CMAKE_CXX_FLAGS_DEBUG "$ENV{CXXFLAGS} -O0 -g2 -ggdb -fno-inline-functions -fno-omit-frame-pointer -Wl,--allow-shlib-undefined -D_LIBCPP_INLINE_VISIBILITY='' -D'_LIBCPP_EXTERN_TEMPLATE(...)=' -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2 -Wno-cpp")
|
||||
|
||||
set(CMAKE_CXX_FLAGS_RELEASE "$ENV{CXXFLAGS} -O2 -Wl,--allow-shlib-undefined -DHALF_ENABLE_CPP11_USER_LITERALS=0 -D_FORTIFY_SOURCE=2")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I/usr/local/include -std=c++17 -Werror -Wall -Wno-deprecated-declarations -fPIC")
|
||||
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
|
||||
|
||||
|
@ -30,46 +14,16 @@ set(PYBIND11_CPP_STANDARD -std=c++17)
|
|||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OPTION_CXX_FLAGS}")
|
||||
|
||||
find_package(Threads)
|
||||
if (DEFINED ENV{MS_PATCH_PATH})
|
||||
find_program(Patch_EXECUTABLE patch PATHS $ENV{MS_PATCH_PATH})
|
||||
set(Patch_FOUND ${Patch_EXECUTABLE})
|
||||
else ()
|
||||
find_package(Patch)
|
||||
endif ()
|
||||
if (NOT Patch_FOUND)
|
||||
message(FATAL_ERROR "Patch not found, please set env variable MS_PATCH_PATH, "
|
||||
"usually locate in GIT_PATH/usr/bin in windows")
|
||||
endif ()
|
||||
message(PATCH_EXECUTABLE = ${Patch_EXECUTABLE})
|
||||
|
||||
if (ENABLE_AKG AND (ENABLE_D OR ENABLE_GPU))
|
||||
add_subdirectory("${CMAKE_SOURCE_DIR}/akg")
|
||||
endif()
|
||||
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/mind_expression.cmake)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR})
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/third_party/flatbuffers/include)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/third_party/flatbuffers/include/flatbuffers)
|
||||
|
||||
if (NOT ENABLE_ACL)
|
||||
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/dependency_utils.cmake)
|
||||
find_package(Python3 3.7 COMPONENTS Interpreter Development)
|
||||
find_package(Python3 COMPONENTS Interpreter Development)
|
||||
if(Python3_FOUND)
|
||||
set(PYTHON_INCLUDE_DIRS "${Python3_INCLUDE_DIRS}")
|
||||
set(PYTHON_LIBRARIES "${Python3_LIBRARIES}")
|
||||
if (WIN32)
|
||||
if (Python3_DIR)
|
||||
message("Python3_DIR set already: " ${Python3_DIR})
|
||||
else()
|
||||
string(LENGTH ${PYTHON_LIBRARIES} PYTHON_LIBRARIES_LEN)
|
||||
string(LENGTH "libpythonxx.a" Python3_NAME_LEN)
|
||||
math(EXPR Python3_DIR_LEN ${PYTHON_LIBRARIES_LEN}-${Python3_NAME_LEN})
|
||||
string(SUBSTRING ${Python3_LIBRARIES} 0 ${Python3_DIR_LEN} Python3_DIR)
|
||||
message("Python3_DIR: " ${Python3_DIR})
|
||||
endif()
|
||||
link_directories(${Python3_DIR})
|
||||
endif()
|
||||
else()
|
||||
find_python_package(py_inc py_lib)
|
||||
set(PYTHON_INCLUDE_DIRS "${py_inc}")
|
||||
|
@ -84,31 +38,18 @@ set(MS_CCSRC_BUILD_PATH ${BUILD_PATH}/mindspore/mindspore/ccsrc)
|
|||
|
||||
if (ENABLE_GE)
|
||||
link_directories(${CMAKE_SOURCE_DIR}/third_party/ge/lib)
|
||||
elseif(ENABLE_D OR ENABLE_TESTCASES)
|
||||
else()
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/dependency_graphengine.cmake)
|
||||
endif()
|
||||
|
||||
if (ENABLE_GE OR ENABLE_D OR ENABLE_TESTCASES)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc/external)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc/framework)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/third_party/fwkacllib/inc)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/third_party/fwkacllib/inc/toolchain)
|
||||
endif()
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc/external)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/inc/framework)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/third_party/fwkacllib/inc)
|
||||
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/graphengine/third_party/fwkacllib/inc/toolchain)
|
||||
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden")
|
||||
add_subdirectory(mindspore/ccsrc)
|
||||
if (ENABLE_TESTCASES)
|
||||
add_subdirectory(tests)
|
||||
endif()
|
||||
|
||||
endif() # NOT ENABLE_ACL
|
||||
|
||||
if (ENABLE_SERVING)
|
||||
add_subdirectory(serving)
|
||||
add_subdirectory(serving/example/cpp_client)
|
||||
endif()
|
||||
|
||||
if (NOT ENABLE_ACL)
|
||||
include(cmake/package.cmake)
|
||||
endif() # NOT ENABLE_ACL
|
||||
|
|
|
@ -78,7 +78,7 @@ Please follow this style to make MindSpore easy to review, maintain and develop.
|
|||
|
||||
* Pull a request to MindSpore repository
|
||||
|
||||
In the last step, you need to pull a compare request between your new branch and MindSpore `master` branch. After finishing the pull request, the Jenkins CI will be automatically set up for building test.
|
||||
In the last step, you need to pull a compare request between your new branch and MindSpore `master` branch. After finishing the pull request, the Jenkins CI will be automatically set up for building test.
|
||||
|
||||
### Report issues
|
||||
|
||||
|
@ -105,11 +105,11 @@ When reporting issues, refer to this format:
|
|||
* If it is a new feature that needs lots of design details, a design proposal should also be submitted.
|
||||
* After reaching consensus in the issue discussions and design proposal reviews, complete the development on the forked repo and submit a PR.
|
||||
* No PR is permitted to merge until it receives **2+ LGTM** from approvers. Please NOTICE that approvers are NOT allowed to add *LGTM* on their own PRs.
|
||||
* After PR is sufficiently discussed, it will get merged, abandoned or rejected depending on the outcome of the discussion.
|
||||
* After PR is sufficiently discussed, it will get merged, abandoned or rejected depending on the outcome of the discussion.
|
||||
|
||||
**PRs advisory:**
|
||||
|
||||
- Any irrelevant changes should be avoided.
|
||||
- Make sure your commit history being ordered.
|
||||
- Always keep your branch up with the master branch.
|
||||
- For bug-fix PRs, make sure all related issues being linked.
|
||||
- For bug-fix PRs, make sure all related issues being linked.
|
||||
|
|
121
README.md
121
README.md
|
@ -1,13 +1,12 @@
|
|||
![MindSpore Logo](docs/MindSpore-logo.png "MindSpore logo")
|
||||
============================================================
|
||||
|
||||
- [What Is MindSpore?](#what-is-mindspore)
|
||||
- [What is MindSpore?](#what-is-mindspore)
|
||||
- [Automatic Differentiation](#automatic-differentiation)
|
||||
- [Automatic Parallel](#automatic-parallel)
|
||||
- [Installation](#installation)
|
||||
- [Binaries](#binaries)
|
||||
- [From Source](#from-source)
|
||||
- [Docker Image](#docker-image)
|
||||
- [Quickstart](#quickstart)
|
||||
- [Docs](#docs)
|
||||
- [Community](#community)
|
||||
|
@ -29,7 +28,7 @@ enrichment of the AI software/hardware application ecosystem.
|
|||
|
||||
<img src="docs/MindSpore-architecture.png" alt="MindSpore Architecture" width="600"/>
|
||||
|
||||
For more details please check out our [Architecture Guide](https://www.mindspore.cn/docs/en/master/architecture.html).
|
||||
For more details please check out our [Architecture Guide](https://www.mindspore.cn/docs/en/0.1.0-alpha/architecture.html).
|
||||
|
||||
### Automatic Differentiation
|
||||
|
||||
|
@ -53,7 +52,7 @@ The goal of MindSpore automatic parallel is to build a training method that comb
|
|||
|
||||
<img src="docs/Automatic-parallel.png" alt="Automatic Parallel" width="600"/>
|
||||
|
||||
At present, MindSpore uses a fine-grained parallel strategy of splitting operators, that is, each operator in the figure is split into a cluster to complete parallel operations. The splitting strategy during this period may be very complicated, but as a developer advocating Pythonic, you don't need to care about the underlying implementation, as long as the top-level API compute is efficient.
|
||||
At present, MindSpore uses a fine-grained parallel strategy of splitting operators, that is, each operator in the figure is split into a cluster to complete parallel operations. The splitting strategy during this period may be very complicated, but as a developer advocating Pythonic, you don't need to care about the underlying implementation, as long as the top-level API compute is efficient.
|
||||
|
||||
## Installation
|
||||
|
||||
|
@ -66,127 +65,31 @@ MindSpore offers build options across multiple backends:
|
|||
| Ascend910 | Ubuntu-x86 | ✔️ |
|
||||
| | EulerOS-x86 | ✔️ |
|
||||
| | EulerOS-aarch64 | ✔️ |
|
||||
| GPU CUDA 9.2 | Ubuntu-x86 | ✔️ |
|
||||
| GPU CUDA 10.1 | Ubuntu-x86 | ✔️ |
|
||||
| CPU | Ubuntu-x86 | ✔️ |
|
||||
| | Windows-x86 | ✔️ |
|
||||
|
||||
For installation using `pip`, take `CPU` and `Ubuntu-x86` build version as an example:
|
||||
For installation using pip, take `Ubuntu-x86` and `CPU` build version as an example:
|
||||
|
||||
1. Download whl from [MindSpore download page](https://www.mindspore.cn/versions/en), and install the package.
|
||||
1. Download whl from [MindSpore website](https://www.mindspore.cn/), and install the package.
|
||||
|
||||
```
|
||||
pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.6.0-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.6.0-cp37-cp37m-linux_x86_64.whl
|
||||
pip install https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/cpu/ubuntu-x86/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl
|
||||
```
|
||||
|
||||
2. Run the following command to verify the install.
|
||||
|
||||
```python
|
||||
import numpy as np
|
||||
import mindspore.context as context
|
||||
import mindspore.nn as nn
|
||||
from mindspore import Tensor
|
||||
from mindspore.ops import operations as P
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
|
||||
|
||||
class Mul(nn.Cell):
|
||||
def __init__(self):
|
||||
super(Mul, self).__init__()
|
||||
self.mul = P.Mul()
|
||||
|
||||
def construct(self, x, y):
|
||||
return self.mul(x, y)
|
||||
|
||||
x = Tensor(np.array([1.0, 2.0, 3.0]).astype(np.float32))
|
||||
y = Tensor(np.array([4.0, 5.0, 6.0]).astype(np.float32))
|
||||
|
||||
mul = Mul()
|
||||
print(mul(x, y))
|
||||
```
|
||||
```
|
||||
[ 4. 10. 18.]
|
||||
python -c 'import mindspore'
|
||||
```
|
||||
|
||||
### From Source
|
||||
|
||||
[Install MindSpore](https://www.mindspore.cn/install/en).
|
||||
|
||||
### Docker Image
|
||||
|
||||
MindSpore docker image is hosted on [Docker Hub](https://hub.docker.com/r/mindspore),
|
||||
currently the containerized build options are supported as follows:
|
||||
|
||||
| Hardware Platform | Docker Image Repository | Tag | Description |
|
||||
| :---------------- | :---------------------- | :-- | :---------- |
|
||||
| CPU | `mindspore/mindspore-cpu` | `x.y.z` | Production environment with pre-installed MindSpore `x.y.z` CPU release. |
|
||||
| | | `devel` | Development environment provided to build MindSpore (with `CPU` backend) from the source, refer to https://www.mindspore.cn/install/en for installation details. |
|
||||
| | | `runtime` | Runtime environment provided to install MindSpore binary package with `CPU` backend. |
|
||||
| GPU | `mindspore/mindspore-gpu` | `x.y.z` | Production environment with pre-installed MindSpore `x.y.z` GPU release. |
|
||||
| | | `devel` | Development environment provided to build MindSpore (with `GPU CUDA10.1` backend) from the source, refer to https://www.mindspore.cn/install/en for installation details. |
|
||||
| | | `runtime` | Runtime environment provided to install MindSpore binary package with `GPU CUDA10.1` backend. |
|
||||
| Ascend | <center>—</center> | <center>—</center> | Coming soon. |
|
||||
|
||||
> **NOTICE:** For GPU `devel` docker image, it's NOT suggested to directly install the whl package after building from the source, instead we strongly RECOMMEND you transfer and install the whl package inside GPU `runtime` docker image.
|
||||
|
||||
* CPU
|
||||
|
||||
For `CPU` backend, you can directly pull and run the latest stable image using the below command:
|
||||
```
|
||||
docker pull mindspore/mindspore-cpu:0.6.0-beta
|
||||
docker run -it mindspore/mindspore-cpu:0.6.0-beta /bin/bash
|
||||
```
|
||||
|
||||
* GPU
|
||||
|
||||
For `GPU` backend, please make sure the `nvidia-container-toolkit` has been installed in advance, here are some install guidelines for `Ubuntu` users:
|
||||
```
|
||||
DISTRIBUTION=$(. /etc/os-release; echo $ID$VERSION_ID)
|
||||
curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | apt-key add -
|
||||
curl -s -L https://nvidia.github.io/nvidia-docker/$DISTRIBUTION/nvidia-docker.list | tee /etc/apt/sources.list.d/nvidia-docker.list
|
||||
|
||||
sudo apt-get update && sudo apt-get install -y nvidia-container-toolkit nvidia-docker2
|
||||
sudo systemctl restart docker
|
||||
```
|
||||
|
||||
Then you can pull and run the latest stable image using the below command:
|
||||
```
|
||||
docker pull mindspore/mindspore-gpu:0.6.0-beta
|
||||
docker run -it --runtime=nvidia --privileged=true mindspore/mindspore-gpu:0.6.0-beta /bin/bash
|
||||
```
|
||||
|
||||
To test if the docker image works, please execute the python code below and check the output:
|
||||
```python
|
||||
import numpy as np
|
||||
import mindspore.context as context
|
||||
from mindspore import Tensor
|
||||
from mindspore.ops import functional as F
|
||||
|
||||
context.set_context(device_target="GPU")
|
||||
|
||||
x = Tensor(np.ones([1,3,3,4]).astype(np.float32))
|
||||
y = Tensor(np.ones([1,3,3,4]).astype(np.float32))
|
||||
print(F.tensor_add(x, y))
|
||||
```
|
||||
```
|
||||
[[[ 2. 2. 2. 2.],
|
||||
[ 2. 2. 2. 2.],
|
||||
[ 2. 2. 2. 2.]],
|
||||
|
||||
[[ 2. 2. 2. 2.],
|
||||
[ 2. 2. 2. 2.],
|
||||
[ 2. 2. 2. 2.]],
|
||||
|
||||
[[ 2. 2. 2. 2.],
|
||||
[ 2. 2. 2. 2.],
|
||||
[ 2. 2. 2. 2.]]]
|
||||
```
|
||||
|
||||
If you want to learn more about the building process of MindSpore docker images,
|
||||
please check out [docker](docker/README.md) repo for the details.
|
||||
|
||||
## Quickstart
|
||||
|
||||
See the [Quick Start](https://www.mindspore.cn/tutorial/en/master/quick_start/quick_start.html)
|
||||
See the [Quick Start](https://www.mindspore.cn/tutorial/en/0.1.0-alpha/quick_start/quick_start.html)
|
||||
to implement the image classification.
|
||||
|
||||
## Docs
|
||||
|
@ -202,10 +105,10 @@ Check out how MindSpore Open Governance [works](https://gitee.com/mindspore/comm
|
|||
|
||||
### Communication
|
||||
|
||||
- [MindSpore Slack](https://join.slack.com/t/mindspore/shared_invite/zt-dgk65rli-3ex4xvS4wHX7UDmsQmfu8w) - Communication platform for developers.
|
||||
- [MindSpore Slack](https://join.slack.com/t/mindspore/shared_invite/enQtOTcwMTIxMDI3NjM0LTNkMWM2MzI5NjIyZWU5ZWQ5M2EwMTQ5MWNiYzMxOGM4OWFhZjI4M2E5OGI2YTg3ODU1ODE2Njg1MThiNWI3YmQ) - Communication platform for developers.
|
||||
- IRC channel at `#mindspore` (only for meeting minutes logging purpose)
|
||||
- Video Conferencing: TBD
|
||||
- Mailing-list: <https://mailweb.mindspore.cn/postorius/lists>
|
||||
- Video Conferencing: meet.jit.si
|
||||
- Mailing-list: https://mailweb.mindspore.cn/postorius/lists
|
||||
|
||||
## Contributing
|
||||
|
||||
|
|
302
RELEASE.md
302
RELEASE.md
|
@ -1,303 +1,3 @@
|
|||
# Release 0.6.0-beta
|
||||
|
||||
## Major Features and Improvements
|
||||
### Ascend 910 Training and Inference Framework
|
||||
* New models
|
||||
* There are official, research and community under modelzoo.
|
||||
* Official is maintained with the newest APIs by MindSpore team, MaskRCNN are added.
|
||||
* Research is uploaded by researchers for official review, and APIs may not be updated in time.
|
||||
* Community reprints the relevant links of partner research results.
|
||||
* Hub added on the same level as modelzoo, synchronous storage of materials needed for official hub web pages which will be launched soon.
|
||||
* Support pre-trained models, few lines of code can be used to download and load pre-trained models, supporting inference or transfer learning.
|
||||
* Frontend and user interface
|
||||
* Supports user side operator compilation and graph execution error rendering.
|
||||
* Uniform definition dynamic learning rate behavior in optimizers.
|
||||
* Support IndexSlice in sparse expression.
|
||||
* Support use parent construct method during construct.
|
||||
* Support asynchronous execution save checkpoint file.
|
||||
* Support implicit type conversion in pynative mode.
|
||||
* User interfaces change log
|
||||
* uniform learning rate behavior in optimizers([!2755](https://gitee.com/mindspore/mindspore/pulls/2755))
|
||||
* rename operator of sparse optimizer([!3217](https://gitee.com/mindspore/mindspore/pulls/3217))
|
||||
* move profiler module from mindinsight to mindspore([!3075](https://gitee.com/mindspore/mindspore/pulls/3075))
|
||||
* VOCDataset output change to multi-columns([!3093](https://gitee.com/mindspore/mindspore/pulls/3093))
|
||||
* GetDatasize feature([!3212](https://gitee.com/mindspore/mindspore/pulls/3212))
|
||||
* dataset: modify config api([!2936](https://gitee.com/mindspore/mindspore/pulls/2936))
|
||||
* Executor and performance optimization
|
||||
* Decouple C++ and python, so make the architecture more extensible.
|
||||
* Parameter Server for distributed deep learning supported.
|
||||
* Serving: a flexible service deployment framework for deep learning models.
|
||||
* Memory reuse is enhanced, and the batch size of Bert large model is increased from 96 to 160 on a single server.
|
||||
* Data processing, augmentation, and save format
|
||||
* Support MindRecord save operator after data processing
|
||||
* Support automatic fusion operator, such as decode/resize/crop
|
||||
* Support CSV dataset loading
|
||||
### Other Hardware Support
|
||||
* GPU platform
|
||||
* New model supported: ResNext50, WarpCTC and GoogLeNet.
|
||||
* Support hyperparametric search and data enhanced automl on GPU.
|
||||
* Support Resnet50 automatic parallel in GPU backend.
|
||||
|
||||
## Bugfixes
|
||||
* Models
|
||||
* Improved the performance and accuracy on ResNet50([!3456](https://gitee.com/mindspore/mindspore/pulls/3456))
|
||||
* Fixed the performance test case of bert([!3486](https://gitee.com/mindspore/mindspore/pulls/3486))
|
||||
* Python API
|
||||
* Fix assign used in while loop([!2720](https://gitee.com/mindspore/mindspore/pulls/2720))
|
||||
* Revert optimize the graph output of all nop node.([!2857](https://gitee.com/mindspore/mindspore/pulls/2857))
|
||||
* Print tensor as numpy.([!2859](https://gitee.com/mindspore/mindspore/pulls/2859))
|
||||
* Support weight decay for sparse optimizer([!2668](https://gitee.com/mindspore/mindspore/pulls/2668))
|
||||
* Fix BatchToSpaceND([!2741](https://gitee.com/mindspore/mindspore/pulls/2741))
|
||||
* Fixing type check mistakes of InplaceAdd and InplaceSub ops([!2744](https://gitee.com/mindspore/mindspore/pulls/2744))
|
||||
* Change order param only equal to group param([!2748](https://gitee.com/mindspore/mindspore/pulls/2748))
|
||||
* Executor
|
||||
* The performance of graph with control flow is optimized([!2931](https://gitee.com/mindspore/mindspore/pulls/2931))
|
||||
* Fix bug of wrong number of tuple layers([!3390](https://gitee.com/mindspore/mindspore/pulls/3390))
|
||||
* Fix cpu multi graph memory exception([!3631](https://gitee.com/mindspore/mindspore/pulls/3631))
|
||||
* Enable data sync when calling operator without defining a cell([!3081](https://gitee.com/mindspore/mindspore/pulls/3081))
|
||||
* Fix ArgMaxWithValue error in pynative mode on GPU([!3082](https://gitee.com/mindspore/mindspore/pulls/3082))
|
||||
* Fix precision error with fp16 input on pynative mode([!3196](https://gitee.com/mindspore/mindspore/pulls/3196))
|
||||
* Data processing
|
||||
* Fix bug of RandomColor and RandomSharpness default parameter checking ([!2833](https://gitee.com/mindspore/mindspore/pulls/2833))
|
||||
* Fix process hung when training and eval ([!3469](https://gitee.com/mindspore/mindspore/pulls/3469))
|
||||
|
||||
## Contributors
|
||||
Thanks goes to these wonderful people:
|
||||
|
||||
Alexey Shevlyakov, avakh, baihuawei, BowenK, buxue, caifubi, caojian05, Cathy Wong, changzherui, chenfei, chengxianbin, chenhaozhe, chenjianping, chentingting, chenzomi, chujinjin, Danish Farid, dayschan, dengwentao, dinghao, etone-chan, fangzehua, fary86, geekun, Giancarlo Colmenares, gong chen, gukecai, guohongzilong, hangangqiang, heleiwang, hesham, He Wei, hexia, hongxing, huangdongrun, huanghui, islam_amin, Jamie Nisbet, Jesse Lee, jiangjinsheng, jiangzhiwen, jinyaohui, jjfeing, jojobugfree, Jonathan Yan, jonyguo, Junhan Hu, Kang, kingfo, kouzhenzhong, kpy, kswang, laiyongqiang, leopz, liangzelang, lichenever, lihongkang, Li Hongzhang, lilei, limingqi107, lirongzhen1, liubuyu, liuchongming74, liuwenhao4, liuxiao, Lixia Chen, liyanliu, liyong, lizhenyu, lvliang, Mahdi, Margaret_wangrui, meixiaowei, ms_yan, nhussain, ougongchang, panfengfeng, panyifeng, peilinwang, Peilin Wang, pkuliuliu, qianlong, rick_sanchez, shibeiji, Shida He, shijianning, simson, sunsuodong, suteng, Tinazhang, Tron Zhang, unknown, VectorSL, wandongdong, wangcong, wangdongxu, wangdongxu6, wanghua, wangnan39, Wei Luning, wenchunjiang, wenkai, wilfChen, WilliamLian, wukesong, Xian Weizhao, Xiaoda Zhang, xiefangqi, xulei2020, xunxue, xutianchun, Yang, yanghaitao, yanghaitao1, yanghaoran, yangjie, yangjie159, YangLuo, Yanjun Peng, yankai, yanzhenxiang2020, yao_yf, Yi Huaijie, yoonlee666, yuchaojie, yujianfeng, zhangzhongpeng, zhangdengcheng, Zhang Qinghua, zhangyinxia, zhangz0911gm, zhaojichen, zhaoting, zhaozhenlong, zhoufeng, zhouneng, zhousiyi, Zirui Wu, Ziyan, zjun, ZPaC, lihongzhang, wangdongxu
|
||||
|
||||
Contributions of any kind are welcome!
|
||||
|
||||
|
||||
# Release 0.5.0-beta
|
||||
|
||||
## Major Features and Improvements
|
||||
|
||||
### Ascend 910 Training and Inference Framework
|
||||
* New models
|
||||
* ResNext50: a simple, highly modularized network architecture using aggregated residual transformations for image classification on ImageNet 2012 dataset.
|
||||
* MASS: a pre-training method for sequence to sequence based language generation tasks on Text Summarization and Conversational Response Generation using News Crawls 2007-2017 dataset, Gigaword corpus and Cornell movie dialog corpus.
|
||||
* Transformer: a neural network architecture for language understanding on WMT 2014 English-German dataset.
|
||||
* GCN: Graph Convolutional Networks for the task of classification of nodes in a graph on Cora and Citeseer datasets.
|
||||
* GAT: an attention-based graph neural network for node classification on Cora and CiteSeer dataset.
|
||||
* Frontend and user interface
|
||||
* Support tensor value and assignment of mixed tensor index in graph mode.
|
||||
* Support tensor comparison, len operator, constexpr syntax, value and assignment of tensor index in pynative mode.
|
||||
* Support converting MindSpore IR to pb format for infer model.
|
||||
* Support print operator to write data directly on the hard disk.
|
||||
* Add the double recursive programming solution for very high speed parallel strategy search in automatic parallel.
|
||||
* User interfaces change log
|
||||
* Allow the learning rate of AdamWeightDecayDynamicLR and Lamb to be 0([!1826](https://gitee.com/mindspore/mindspore/pulls/1826))
|
||||
* Restricting the entire network input parameter is Tensor([!1967](https://gitee.com/mindspore/mindspore/pulls/1967))
|
||||
* Turn shape and dtype into attributes instead of interfaces([!1919](https://gitee.com/mindspore/mindspore/pulls/1919))
|
||||
* Delete multitypefungraph([!2116](https://gitee.com/mindspore/mindspore/pulls/2116))
|
||||
* Refactor the callback module in an encapsulated way, use _CallbackManager instead of _build_callbacks([!2236](https://gitee.com/mindspore/mindspore/pulls/2236))
|
||||
* Delete EmbeddingLookup([!2163](https://gitee.com/mindspore/mindspore/pulls/2163))
|
||||
* Checkpoint add model_type([!2517](https://gitee.com/mindspore/mindspore/pulls/2517))
|
||||
* Executor and performance optimization
|
||||
* Heterogeneous execution on CPU and Ascend devices supported, and is verified in Wide&Deep model.
|
||||
* Quantitative training of MobileNetV2, Lenet and Resnet50 on Ascend-910 are supported.
|
||||
* Support new fusion architecture, which can do fusion optimization across graphs and kernels to improve execution speed.
|
||||
* Data processing, augmentation, and save format
|
||||
* Support data processing pipeline performance profiling.
|
||||
* Support public dataset loading, such as CLUE and Coco.
|
||||
* Support more text processing, such as more tokenizers and vocab data.
|
||||
* Support MindRecord padded data.
|
||||
### Other Hardware Support
|
||||
* GPU platform
|
||||
* New model supported: Bert / Wide&Deep.
|
||||
* Support setting max device memory.
|
||||
* CPU platform
|
||||
* New model supported: LSTM.
|
||||
|
||||
## Bugfixes
|
||||
* Models
|
||||
* Bert, Move Bert from `example` to `model_zoo`, optimize network for better performance. ([!1902](https://gitee.com/mindspore/mindspore/pulls/1902))
|
||||
* VGG16, Move VGG16 from `example` to `model_zoo`, optimize network for better accuracy. ([!2645](https://gitee.com/mindspore/mindspore/pulls/2645))
|
||||
* Alexnet, modify parameter setting to improve accuracy ([!1364](https://gitee.com/mindspore/mindspore/pulls/2370))
|
||||
* Wide&Deep, Move Wide&Deep from `example` to `model_zoo`, optimize network for better performance. ([!2221](https://gitee.com/mindspore/mindspore/pulls/2221))
|
||||
* Python API
|
||||
* Fix bug in auto cast([!1766](https://gitee.com/mindspore/mindspore/pulls/1766))
|
||||
* Fix bug of register_backward_hook([!2148](https://gitee.com/mindspore/mindspore/pulls/2148))
|
||||
* Fix bug of tuple args in pynative mode([!1878](https://gitee.com/mindspore/mindspore/pulls/1878))
|
||||
* Fix bug of checking numbers of arguments and graph parameters([!1701](https://gitee.com/mindspore/mindspore/pulls/1701))
|
||||
* Executor
|
||||
* Fix bug of loading input data repeatedly in pynative mode([!1966](https://gitee.com/mindspore/mindspore/pulls/1966))
|
||||
* Fix bug of list cannot be used as input in pynative mode([!1765](https://gitee.com/mindspore/mindspore/pulls/1765))
|
||||
* Fix bug of kernel select ([!2103](https://gitee.com/mindspore/mindspore/pulls/2103))
|
||||
* Fix bug of pattern matching for batchnorm fusion in the case of auto mix precision.([!1851](https://gitee.com/mindspore/mindspore/pulls/1851))
|
||||
* Fix bug of generate hccl's kernel info.([!2393](https://gitee.com/mindspore/mindspore/pulls/2393))
|
||||
* GPU platform
|
||||
* Fix bug of summary feature invalid([!2173](https://gitee.com/mindspore/mindspore/pulls/2173))
|
||||
* Data processing
|
||||
* Fix bug of Cifar dataset reading([!2096](https://gitee.com/mindspore/mindspore/pulls/2096))
|
||||
* Fix bug of C++ behavior in RandomCropAndResize([!2026](https://gitee.com/mindspore/mindspore/pulls/2026))
|
||||
* Fix the bug of mindrecord shuffle([!2420](https://gitee.com/mindspore/mindspore/pulls/2420))
|
||||
|
||||
## Contributors
|
||||
Thanks goes to these wonderful people:
|
||||
|
||||
Alexey Shevlyakov, avakh, baihuawei, BowenK, buxue, caifubi, caojian05, Cathy Wong, changzherui, chenfei, chengxianbin, chenhaozhe, chenjianping, chentingting, chenzomi, chujinjin, Danish Farid, dayschan, dengwentao, dinghao, etone-chan, fangzehua, fary86, geekun, Giancarlo Colmenares, gong chen, gukecai, guohongzilong, hangangqiang, heleiwang, hesham, He Wei, hexia, hongxing, huangdongrun, huanghui, islam_amin, Jamie Nisbet, Jesse Lee, jiangjinsheng, jiangzhiwen, jinyaohui, jjfeing, jojobugfree, Jonathan Yan, jonyguo, Junhan Hu, Kang, kingfo, kouzhenzhong, kpy, kswang, laiyongqiang, leopz, liangzelang, lichenever, lihongkang, Li Hongzhang, lilei, limingqi107, lirongzhen1, liubuyu, liuchongming74, liuwenhao4, liuxiao, Lixia Chen, liyanliu, liyong, lizhenyu, lvliang, Mahdi, Margaret_wangrui, meixiaowei, ms_yan, nhussain, ougongchang, panfengfeng, panyifeng, peilinwang, Peilin Wang, pkuliuliu, qianlong, rick_sanchez, shibeiji, Shida He, shijianning, simson, sunsuodong, suteng, Tinazhang, Tron Zhang, unknown, VectorSL, wandongdong, wangcong, wangdongxu, wangdongxu6, wanghua, wangnan39, Wei Luning, wenchunjiang, wenkai, wilfChen, WilliamLian, wukesong, Xian Weizhao, Xiaoda Zhang, xiefangqi, xulei2020, xunxue, xutianchun, Yang, yanghaitao, yanghaitao1, yanghaoran, yangjie, yangjie159, YangLuo, Yanjun Peng, yankai, yanzhenxiang2020, yao_yf, Yi Huaijie, yoonlee666, yuchaojie, yujianfeng, zhangzhongpeng, zhangdengcheng, Zhang Qinghua, zhangyinxia, zhangz0911gm, zhaojichen, zhaoting, zhaozhenlong, zhoufeng, zhouneng, zhousiyi, Zirui Wu, Ziyan, zjun, ZPaC, lihongzhang, wangdongxu
|
||||
|
||||
Contributions of any kind are welcome!
|
||||
|
||||
# Release 0.3.1-alpha
|
||||
|
||||
## Major Features and Improvements
|
||||
|
||||
### Ascend 910 Training and Inference Framework
|
||||
* Frontend and User Interface
|
||||
* Independent model init interface.
|
||||
* Data processing, augmentation, and save format
|
||||
* Support sample padding for minddataset.
|
||||
|
||||
## Bugfixes
|
||||
* Python API
|
||||
* Fix bugs in the lars optimizer([!1894](https://gitee.com/mindspore/mindspore/pulls/1894))
|
||||
* Data processing
|
||||
* Fix accuracy problem of RandomCropDecodeResize ([!2340](https://gitee.com/mindspore/mindspore/pulls/2340))
|
||||
|
||||
# Release 0.3.0-alpha
|
||||
|
||||
## Major Features and Improvements
|
||||
|
||||
### Ascend 910 Training and Inference Framework
|
||||
* New models
|
||||
* DeepFM: a factorization-machine based neural network for CTR prediction on Criteo dataset.
|
||||
* DeepLabV3: significantly improves over our previous DeepLab versions without DenseCRF post-processing and attains comparable performance with other state-of-art models on the PASCAL VOC 2007 semantic image segmentation benchmark.
|
||||
* Faster-RCNN: towards real-time object detection with region proposal networks on COCO 2017 dataset.
|
||||
* SSD: a single stage object detection methods on COCO 2017 dataset.
|
||||
* GoogLeNet: a deep convolutional neural network architecture codenamed Inception V1 for classification and detection on CIFAR-10 dataset.
|
||||
* Wide&Deep: jointly trained wide linear models and deep neural networks for recommender systems on Criteo dataset.
|
||||
* Frontend and User Interface
|
||||
* Complete numpy advanced indexing method. Supports value and assignment through tensor index.
|
||||
* Some optimizers support separating parameter groups. Different parameter groups can set different `learning_rate` and `weight_decay`.
|
||||
* Support setting submodule's logging level independently, e.g. you can set logging level of module `A` to warning and set logging level of module `B` to info.
|
||||
* Support weights to be compiled according to shape to solve the problem of large memory overhead.
|
||||
* Add some operators implement and grammar support in pynative mode. To be consistent with graph mode.
|
||||
* User interfaces change log
|
||||
* Learning rate and weight decay making group params([!637](https://gitee.com/mindspore/mindspore/pulls/637))
|
||||
* Support weights to be compiled according to shape([!1015](https://gitee.com/mindspore/mindspore/pulls/1015))
|
||||
* delete some context param([!1100](https://gitee.com/mindspore/mindspore/pulls/1100))
|
||||
* ImageSummary/ScalarSummary/TensorSummary/HistogramSummary([!1329](https://gitee.com/mindspore/mindspore/pulls/1329))([!1425](https://gitee.com/mindspore/mindspore/pulls/1425))
|
||||
* Executor and Performance Optimization
|
||||
* Support doing evaluation while in training process, so that the accuracy of training can be easily obtained.
|
||||
* Enable second-order optimization for resnet50, which can achieve 75.9% accuracy in 45 epochs (Resnet50 @ImageNet).
|
||||
* Optimize pynative implementation and improve its execution performance.
|
||||
* Optimize summary record implementation and improve its performance.
|
||||
* Data processing, augmentation, and save format
|
||||
* Support simple text processing, such as tokenizer/buildvocab/lookup.
|
||||
* Support padding batch.
|
||||
* Support split or concat dataset.
|
||||
* Support MindDataset reading from file list.
|
||||
|
||||
### Other Hardware Support
|
||||
* GPU platform
|
||||
* New models supported: MobileNetV2, MobileNetV3.
|
||||
* Support mixed precision training.
|
||||
* Support device memory swapping.
|
||||
|
||||
## Bugfixes
|
||||
* Python API
|
||||
* An exception to the broadcast input data type check([!712](https://gitee.com/mindspore/mindspore/pulls/712))
|
||||
* Fix issues assignsub return value 0([!1036](https://gitee.com/mindspore/mindspore/pulls/1036))
|
||||
* Fix issue Conv2dBackpropInput bprop should return 3 instead of 2 items([!1001](https://gitee.com/mindspore/mindspore/pulls/1001))
|
||||
* Fix sens shape error of TrainOneStepWithLossScaleCell([!1050](https://gitee.com/mindspore/mindspore/pulls/1050))
|
||||
* Fix BatchNormGrad operator([!1344](https://gitee.com/mindspore/mindspore/pulls/1344))
|
||||
* Executor
|
||||
* Fix dropout,topK and addn errors in PyNative mode ([!1285](https://gitee.com/mindspore/mindspore/pulls/1285), [!1138](https://gitee.com/mindspore/mindspore/pulls/1138), [!1033](https://gitee.com/mindspore/mindspore/pulls/1033)).
|
||||
* Fix memory leaks after execution in PyNative mode ([!1201](https://gitee.com/mindspore/mindspore/pulls/1201)).
|
||||
* Fix HCCL failure in some special scenes ([!1204](https://gitee.com/mindspore/dashboard/projects/mindspore/mindspore/pulls/1204), [!1252](https://gitee.com/mindspore/dashboard/projects/mindspore/mindspore/pulls/1252)).
|
||||
* Fix SSD network when Select failed, can't find kernel info([!1449](https://gitee.com/mindspore/dashboard/projects/mindspore/mindspore/pulls/1449)).
|
||||
* Fix Topk operator selection strategy bug between aicore and aicpu([!1367](https://gitee.com/mindspore/dashboard/projects/mindspore/mindspore/pulls/1367)).
|
||||
* Fix input memory size of 'assign' op unequal in control sink mode when assigning a data from one child graph to another child graph([!802](https://gitee.com/mindspore/dashboard/projects/mindspore/mindspore/pulls/802)).
|
||||
* Fix allreduce ir inconsistency([!989](https://gitee.com/mindspore/dashboard/projects/mindspore/mindspore/pulls/989)).
|
||||
* GPU platform
|
||||
* Fix summary for gradient collection ([!1364](https://gitee.com/mindspore/mindspore/pulls/1364))
|
||||
* Fix the slice operator ([!1489](https://gitee.com/mindspore/mindspore/pulls/1489))
|
||||
* Data processing
|
||||
* Fix memory problems of GeneratorDataset of sub-process ([!907](https://gitee.com/mindspore/mindspore/pulls/907))
|
||||
* Fix getting data timeout when training the cifar10 dataset under the lenet([!1391](https://gitee.com/mindspore/mindspore/pulls/1391))
|
||||
|
||||
## Contributors
|
||||
Thanks goes to these wonderful people:
|
||||
|
||||
Alexey Shevlyakov, Amir Lashkari, anthony, baihuawei, biffex, buxue, caifubi, candanzg, caojian05, Cathy Wong, changzherui, chenfei, chengxianbin, chenhaozhe, chenzomi, chujinjin, cristoval, dengwentao, eric, etone-chan, fary86, gaojing, gengdongjie, gongchen, guohongzilong, guozhijian, heleiwang, hesham, He Wei, Hoai Linh Tran, hongxing, huangdongrun, huanghui, Jamie Nisbet, Jesse Lee, jiangjinsheng, jiangzhiwen, jinyaohui, jjfeing, jonwe, jonyguo, Junhan Hu, Kang, kingfo, kswang, laiyongqiang, leopz, lichenever, lihongkang, limingqi107, liubuyu, liuliyan2, liuwenhao4, liuxiao, liuxiao, liyong, lizhenyu, lvliang, Margaret_wangrui, meixiaowei, ms_yan, Nat Sutyanyong, ougongchang, panfengfeng, panyifeng, Peilin Wang, peixu_ren, qianlong, rick_sanchez, seatea, sheng, shijianning, simson, sunsuodong, Tinazhang, VectorSL, wandongdong, wangcong, wanghua, wangnan39, Wei Luning, wenchunjiang, wilfChen, WilliamLian, wsc, wukesong, wuxuejian, Xiaoda Zhang, xiefangqi, xulei2020, Yang, yangjie159, yangruoqi713, yangyongjie, yangzhenzhang, Yanjun Peng, yanzhenxiang2020, yao_yf, Yi Huaijie, yoonlee666, yujianfeng, YuJianfeng, yvetteliu, zhangdengcheng, Zhang Qinghua, zhangz0911gm, zhaojichen, zhaoting, zhaozhenlong, zhoufeng, zhouneng, zhousiyi, zhouyuanshen, Zirui Wu, Ziyan, zjun, ZPaC, lihongzhang
|
||||
|
||||
Contributions of any kind are welcome!
|
||||
|
||||
# Release 0.2.0-alpha
|
||||
|
||||
## Major Features and Improvements
|
||||
|
||||
### Ascend 910 Training and Inference Framework
|
||||
* New models
|
||||
* MobileNetV2: Inverted Residuals and Linear Bottlenecks.
|
||||
* ResNet101: Deep Residual Learning for Image Recognition.
|
||||
|
||||
* Frontend and User Interface
|
||||
* Support for all python comparison operators.
|
||||
* Support for math operators **,//,%. Support for other python operators like and/or/not/is/is not/ in/ not in.
|
||||
* Support for the gradients of function with variable arguments.
|
||||
* Support for tensor indexing assignment for certain indexing type.
|
||||
* Support for dynamic learning rate.
|
||||
* User interfaces change log
|
||||
* DepthwiseConv2dNative, DepthwiseConv2dNativeBackpropFilter, DepthwiseConv2dNativeBackpropInput([!424](https://gitee.com/mindspore/mindspore/pulls/424))
|
||||
* ReLU6, ReLU6Grad([!224](https://gitee.com/mindspore/mindspore/pulls/224))
|
||||
* GeneratorDataset([!183](https://gitee.com/mindspore/mindspore/pulls/183))
|
||||
* VOCDataset([!477](https://gitee.com/mindspore/mindspore/pulls/477))
|
||||
* MindDataset, PKSampler([!514](https://gitee.com/mindspore/mindspore/pulls/514))
|
||||
* map([!506](https://gitee.com/mindspore/mindspore/pulls/506))
|
||||
* Conv([!226](https://gitee.com/mindspore/mindspore/pulls/226))
|
||||
* Adam([!253](https://gitee.com/mindspore/mindspore/pulls/253))
|
||||
* _set_fusion_strategy_by_idx, _set_fusion_strategy_by_size([!189](https://gitee.com/mindspore/mindspore/pulls/189))
|
||||
* CheckpointConfig([!122](https://gitee.com/mindspore/mindspore/pulls/122))
|
||||
* Constant([!54](https://gitee.com/mindspore/mindspore/pulls/54))
|
||||
* Executor and Performance Optimization
|
||||
* Support parallel execution of data prefetching and forward/backward computing.
|
||||
* Support parallel execution of gradient aggregation and forward/backward computing in distributed training scenarios.
|
||||
* Support operator fusion optimization.
|
||||
* Optimize compilation process and improve the performance.
|
||||
* Data processing, augmentation, and save format
|
||||
* Support multi-process of GeneratorDataset/PyFunc for high performance
|
||||
* Support variable batchsize
|
||||
* Support new Dataset operators, such as filter,skip,take,TextLineDataset
|
||||
|
||||
### Other Hardware Support
|
||||
* GPU platform
|
||||
* Use dynamic memory pool by default on GPU.
|
||||
* Support parallel execution of computation and communication.
|
||||
* Support continuous address allocation by memory pool.
|
||||
* CPU platform
|
||||
* Support for windows 10 OS.
|
||||
|
||||
## Bugfixes
|
||||
* Models
|
||||
* Fix mixed precision bug for VGG16 model ([!629](https://gitee.com/mindspore/mindspore/pulls/629)).
|
||||
* Python API
|
||||
* Fix ControlDepend operator bugs on CPU and GPU ([!396](https://gitee.com/mindspore/mindspore/pulls/396)).
|
||||
* Fix ArgMinWithValue operator bugs ([!338](https://gitee.com/mindspore/mindspore/pulls/338)).
|
||||
* Fix Dense operator bugs on PyNative mode ([!276](https://gitee.com/mindspore/mindspore/pulls/276)).
|
||||
* Fix MatMul operator bugs on PyNative mode ([!288](https://gitee.com/mindspore/mindspore/pulls/288)).
|
||||
* Executor
|
||||
* Fix operator selection bugs and make it general ([!300](https://gitee.com/mindspore/mindspore/pulls/300)).
|
||||
* Fix memory reuse bug for GetNext op ([!291](https://gitee.com/mindspore/mindspore/pulls/291)).
|
||||
* GPU platform
|
||||
* Fix memory allocation in multi-graph scenarios ([!444](https://gitee.com/mindspore/mindspore/pulls/444)).
|
||||
* Fix bias_add_grad under fp16 precision ([!598](https://gitee.com/mindspore/mindspore/pulls/598)).
|
||||
* Fix support for fp16 kernels on nvidia 1080Ti([!571](https://gitee.com/mindspore/mindspore/pulls/571)).
|
||||
* Fix parsing of tuple type parameters ([!316](https://gitee.com/mindspore/mindspore/pulls/316)).
|
||||
* Data processing
|
||||
* Fix TypeErrors about can't pickle mindspore._c_dataengine.DEPipeline objects([!434](https://gitee.com/mindspore/mindspore/pulls/434)).
|
||||
* Add TFRecord file verification([!406](https://gitee.com/mindspore/mindspore/pulls/406)).
|
||||
|
||||
## Contributors
|
||||
Thanks goes to these wonderful people:
|
||||
|
||||
Alexey_Shevlyakov, Cathy, Chong, Hoai, Jonathan, Junhan, JunhanHu, Peilin, SanjayChan, StrawNoBerry, VectorSL, Wei, WeibiaoYu, Xiaoda, Yanjun, YuJianfeng, ZPaC, Zhang, ZhangQinghua, ZiruiWu, amongo, anthonyaje, anzhengqi, biffex, caifubi, candanzg, caojian05, casgj, cathwong, ch-l, chang, changzherui, chenfei, chengang, chenhaozhe, chenjianping, chentingting, chenzomi, chujinjin, dengwentao, dinghao, fanglei, fary86, flywind, gaojing, geekun, gengdongjie, ghzl, gong, gongchen, gukecai, guohongzilong, guozhijian, gziyan, h.farahat, hesham, huangdongrun, huanghui, jiangzhiwen, jinyaohui, jjfeing, jojobugfree, jonathan_yan, jonyguo, jzw, kingfo, kisnwang, laiyongqiang, leonwanghui, lianliguang, lichen, lichenever, limingqi107, liubuyu, liuxiao, liyong, liyong126, lizhenyu, lupengcheng, lvliang, maoweiyong, ms_yan, mxm, ougongchang, panfengfeng, panyifeng, pengyanjun, penn, qianlong, seatea, simson, suteng, thlinh, vlne-v1, wangchengke, wanghua, wangnan39, wangqiuliang, wenchunjiang, wenkai, wukesong, xiefangqi, xulei, yanghaitao, yanghaoran, yangjie159, yangzhenzhang, yankai10, yanzhenxiang2020, yao_yf, yoonlee666, zhangbuxue, zhangz0911gm, zhangzheng, zhaojichen, zhaoting, zhaozhenlong, zhongligeng, zhoufeng, zhousiyi, zjun, zyli2020, yuhuijun, limingqi107, lizhenyu, chenweifeng.
|
||||
|
||||
Contributions of any kind are welcome!
|
||||
|
||||
# Release 0.1.0-alpha
|
||||
|
||||
## Main Features
|
||||
|
@ -370,4 +70,4 @@ Contributions of any kind are welcome!
|
|||
* [MindSpore Official Website] (https://www.mindspore.cn/)
|
||||
* [MindInsight Visualization Debugging and Optimization] (https://gitee.com/mindspore/mindinsight)
|
||||
* [MindArmour Model Security Hardening Package] (https://gitee.com/mindspore/mindarmour)
|
||||
* [GraphEngine Computational Graph Engine] (https://gitee.com/mindspore/graphengine)
|
||||
* [GraphEngine Computational Graph Engine] (https://gitee.com/mindspore/graphengine)
|
|
@ -368,7 +368,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|||
THE SOFTWARE.
|
||||
|
||||
|
||||
Software: oneDNN 1.1.2
|
||||
Software: MKL-DNN 1.1.2
|
||||
Copyright (c) 2009-2018 The MathJax Consortium
|
||||
Copyright 2018 Intel Corporation
|
||||
Copyright 2019 Intel Corporation
|
||||
|
@ -2245,14 +2245,14 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
Please also refer to the file CONTRIBUTING.md, which clarifies licensing of
|
||||
external contributions to this project including patches, pull requests, etc.
|
||||
|
||||
Software: SQLite 3.32.2
|
||||
Software: SQLite 3.31.1
|
||||
Copyright notice:
|
||||
Copyright (c) 1991-2011 Unicode, Inc.
|
||||
Copyright 2008 D. Richard Hipp and Hipp, Wyrick & Company, Inc.
|
||||
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 2008 Free Software Foundation, Inc.
|
||||
(c) The page number is greater than the largest page that existed in
|
||||
Copyright (c) 1991-2011 Unicode, Inc.
|
||||
Copyright (c) 2002 by David Gravereaux.
|
||||
Copyright (c) 2006 by Pat Thoyts
|
||||
(c) The page number is greater than the largest page that existed in
|
||||
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 2008 Free Software Foundation, Inc.
|
||||
|
||||
License: Public Domain
|
||||
Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means.
|
||||
|
@ -3042,602 +3042,6 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS", AND
|
|||
Why Three Licenses?
|
||||
The zlib License could have been used instead of the Modified (3-clause) BSD License, and since the IJG License effectively subsumes the distribution conditions of the zlib License, this would have effectively placed libjpeg-turbo binary distributions under the IJG License. However, the IJG License specifically refers to the Independent JPEG Group and does not extend attribution and endorsement protections to other entities. Thus, it was desirable to choose a license that granted us the same protections for new code that were granted to the IJG for code derived from their software.
|
||||
|
||||
Software: cppjieba 5.0.3
|
||||
Copyright notice:
|
||||
Copyright 2005, Google Inc.
|
||||
Copyright 2008, Google Inc.
|
||||
Copyright 2007, Google Inc.
|
||||
Copyright 2008 Google Inc.
|
||||
Copyright 2006, Google Inc.
|
||||
Copyright 2003 Google Inc.
|
||||
Copyright 2009 Google Inc.
|
||||
Copyright (C) 1991-2, RSA Data Security, Inc. Created 1991. All
|
||||
|
||||
Software: tinyxml2 8.0.0
|
||||
Copyright 2011, John Resig.
|
||||
Copyright 2011, The Dojo Foundation.
|
||||
|
||||
Software: icu 67.1
|
||||
Copyright (C) 2000-2004, International Business Machines Corporation
|
||||
Copyright (C) 2002-2014, International Business Machines(C) Copyright IBM Corp. 1998-2011 - All Rights Reserved
|
||||
Copyright (C) 2003-2008, International Business Machines
|
||||
Copyright (C) 2005-2006, International Business Machines
|
||||
Copyright (C) 2016 and later: Unicode, Inc. and others.
|
||||
Copyright (c) 2001-2010 International Business Machines
|
||||
Copyright (C) 2009, International Business Machines
|
||||
Copyright (c) 2010-2015 International Business Machines Corporation and others. All rights reserved.
|
||||
Copyright (C) 2002-2015, International Business Machines verbatim (minus copyright and #include) and copied together into this file.
|
||||
Copyright (c) 1997-2014, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (c) 1997-2008, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2003, International Business Machines Corporation and
|
||||
Copyright (c) 1996-2012, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2016, International Business Machines
|
||||
Copyright (c) 1997-2013 International Business Machines
|
||||
Copyright (c) 1997-2016, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2001, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2012, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2005, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2010, International Business Machines Corporation and
|
||||
Copyright (c) 2011-2016, International Business Machines Corporation
|
||||
Copyright (c) 1997-2009, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2002,2008, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2009,2014, International Business Machines
|
||||
Copyright (C) 2000-2009, International Business Machines
|
||||
Copyright (c) 1997-2015, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2013, International Business Machines Corporation and
|
||||
Copyright (c) 2001-2016, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2016, International Business Machines Corporation
|
||||
Copyright (c) 1997-2003, 2007-2009 International Business Machines Corporation and
|
||||
Copyright (c) 2011-2014, International Business Machines Corporation
|
||||
Copyright (c) 2003-2009, International Business Machines
|
||||
Copyright (c) 2016, International Business Machines Corporation
|
||||
Copyright (c) 1997-2004, International Business Machines Corporation and
|
||||
Copyright (C) 2002-2016, International Business Machines
|
||||
Copyright (C) 1998-2014, International Business Machines Corporation
|
||||
Copyright (c) 2003-2013, International Business Machines Corporation and
|
||||
Copyright (c) 2005-2016, International Business Machines Corporation and
|
||||
Copyright (c) 1999-2013, International Business Machines Corporation and
|
||||
Copyright (c) 2003-2015, International Business Machines Corporation and
|
||||
Copyright (C) 2003-2016, International Business Machines
|
||||
Copyright (C) 2003-2014, International Business Machines
|
||||
Copyright (C) 2003, International Business Machines
|
||||
Copyright (c) 1998-2016, International Business Machines Corporation and
|
||||
Copyright (c) 2004-2015, International Business Machines Corporation and
|
||||
Copyright (c) 2009-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2003-2012, International Business Machines
|
||||
Copyright (c) 2000-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2014, International Business Machines
|
||||
Copyright (C) 2001-2016, International Business Machines
|
||||
Copyright (c) 1997-2014, International Business Machines © 2017 and later: Unicode, Inc. and others.
|
||||
Copyright (C) 2007-2016, International Business Machines © 2018 and later: Unicode, Inc. and others.
|
||||
Copyright (c) 2015, International Business Machines Corporation
|
||||
Copyright (c) 2014-2016, International Business Machines Corporation
|
||||
Copyright (c) 2002-2016, International Business Machines
|
||||
Copyright (c) 2001-2011,2015 International Business Machines
|
||||
Copyright (c) 2001-2016 International Business Machines
|
||||
Copyright (c) 2005-2013, International Business Machines Corporation and
|
||||
Copyright (c) 1998-2014, International Business Machines Corporation and
|
||||
Copyright (C) 1997-2016 International Business Machines
|
||||
Copyright (C) 2009-2014, International Business Machines Corporation and
|
||||
Copyright (c) 2002-2014, International Business Machines Corporation
|
||||
Copyright (c) 2002-2007, International Business Machines Corporation
|
||||
Copyright (C) 1996-2012, International Business Machines Corporation
|
||||
Copyright (C) 1996-2008, International Business Machines Corporation
|
||||
Copyright (C) 2007-2013, International Business Machines Corporation and
|
||||
Copyright (C) 2008-2015, International Business Machines
|
||||
Copyright (C) 2003-2013, International Business Machines Corporation and
|
||||
Copyright (C) 2003-2013, International Business Machines Corporation
|
||||
Copyright (C) 1997-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2011, International Business Machines
|
||||
Copyright (C) 2001-2008, International Business Machines
|
||||
Copyright (C) 2003 - 2009, International Business Machines Corporation and
|
||||
Copyright (C) 2003 - 2008, International Business Machines Corporation and
|
||||
Copyright (C) 2007-2014, International Business Machines Corporation
|
||||
Copyright (C) 2007-2013, International Business Machines Corporation
|
||||
Copyright (C) 1997-2013, International Business Machines Corporation and
|
||||
Copyright (C) 1996-2014, International Business Machines Corporation and
|
||||
Copyright (C) 2010-2014, International Business Machines
|
||||
Copyright (C) 2010-2015, International Business Machines
|
||||
Copyright (C) 2013-2014, International Business Machines
|
||||
Copyright (C) 1996-2015, International Business Machines
|
||||
Copyright (C) 1996-2014, International Business Machines
|
||||
Copyright (C) 2012-2015, International Business Machines
|
||||
Copyright (C) 2012-2014, International Business Machines
|
||||
Copyright (C) 2013-2015, International Business Machines
|
||||
Copyright (C) 2013-2016, International Business Machines
|
||||
Copyright (C) 1999-2016, International Business Machines
|
||||
Copyright (C) 1999-2015, International Business Machines
|
||||
Copyright (C) 1999-2014, International Business Machines
|
||||
Copyright (C) 2015-2016, International Business Machines Corporation and others.
|
||||
Copyright (C) 2003 - 2013, International Business Machines Corporation and
|
||||
Copyright (C) 1999-2011, International Business Machines
|
||||
Copyright (C) 2005-2016, International Business Machines
|
||||
Copyright (C) 2005-2012, International Business Machines
|
||||
Copyright (C) 2005-2015, International Business Machines
|
||||
Copyright (C) 2005-2013, International Business Machines
|
||||
Copyright (C) 2005-2014, International Business Machines
|
||||
Copyright (c) 2004, International Business Machines
|
||||
Copyright (c) 2004-2014 International Business Machines
|
||||
Copyright (c) 2004-2014, International Business Machines
|
||||
Copyright (C) 2013, International Business Machines Corporation
|
||||
Copyright (C) 1997-2015, International Business Machines Corporation and
|
||||
Copyright (C) 2016, International Business Machines
|
||||
Copyright (c) IBM Corporation, 2000-2012. All rights reserved.
|
||||
Copyright (c) IBM Corporation, 2000-2011. All rights reserved.
|
||||
Copyright (c) IBM Corporation, 2000-2014. All rights reserved.
|
||||
Copyright (c) IBM Corporation, 2000-2010. All rights reserved.
|
||||
Copyright (c) IBM Corporation, 2000-2016. All rights reserved.
|
||||
Copyright 2010 the V8 project authors. All rights reserved.
|
||||
Copyright 2006-2008 the V8 project authors. All rights reserved.
|
||||
Copyright 2012 the V8 project authors. All rights reserved.
|
||||
Copyright (C) 2008-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2007-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2007-2012, International Business Machines Corporation and
|
||||
Copyright (c) 2001-2011, International Business Machines
|
||||
Copyright (c) 2001-2007, International Business Machines
|
||||
Copyright (C) 2010-2014, International Business Machines Corporation and
|
||||
Copyright (C) 1997-2010, International Business Machines Corporation and
|
||||
Copyright (C) 1997-2012, International Business Machines Corporation and
|
||||
Copyright (C) 2009-2015, International Business Machines Corporation and
|
||||
Copyright (C) 2009-2012, International Business Machines Corporation and
|
||||
Copyright (c) 2002-2012, International Business Machines Corporation
|
||||
Copyright (c) 2002-2011, International Business Machines Corporation
|
||||
Copyright (C) 2008-2013, International Business Machines Corporation and
|
||||
Copyright (c) 2003-2008, International Business Machines
|
||||
Copyright (C) 2003-2016, International Business Machines Corporation
|
||||
Copyright (C) 2003-2014, International Business Machines Corporation
|
||||
Copyright (C) 2003-2008, International Business Machines Corporation
|
||||
Copyright (C) 2005-2008, International Business Machines
|
||||
Copyright (C) 2003-2015, International Business Machines Corporation
|
||||
Copyright (C) 2003-2009,2012,2016 International Business Machines Corporation and
|
||||
Copyright (c) 2004-2016, International Business Machines © 2020 and later: Unicode, Inc. and others.
|
||||
Copyright (C) 2007-2008, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2007, International Business Machines
|
||||
Copyright (C) 1997-2012, International Business Machines
|
||||
Copyright (C) 1997-2015, International Business Machines
|
||||
Copyright (C) 2001-2010, International Business Machines
|
||||
Copyright (c) 2000-2005, International Business Machines
|
||||
Copyright (c) 2000-2007, International Business Machines © 2019 and later: Unicode, Inc. and others.
|
||||
Copyright (C) 2010-2015, International Business Machines Corporation and
|
||||
Copyright (C) 2015, International Business Machines Corporation and
|
||||
Copyright (c) 2003-2013, International Business Machines
|
||||
Copyright (C) 2001-2012, International Business Machines
|
||||
Copyright (C) 2001-2011, International Business Machines Corporation
|
||||
Copyright (C) 2014-2016, International Business Machines
|
||||
Copyright (C) 1997-2015, International Business Machines Corporation
|
||||
Copyright (C) 1999-2007, International Business Machines
|
||||
Copyright (C) 1999-2007, International Business Machines Corporation
|
||||
Copyright (C) 1999-2011, International Business Machines Corporation
|
||||
Copyright (C) {1999-2001}, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2002-2016 International Business Machines Corporation and others.
|
||||
Copyright (C) 2002-2016, International Business Machines Corporation and others.
|
||||
Copyright (C) 2002-2016 International Business Machines Corporation
|
||||
Copyright (C) 2002-2015, International Business Machines Corporation and others.
|
||||
Copyright (C) 2012 International Business Machines Corporation
|
||||
Copyright (C) 2002-2015 International Business Machines Corporation
|
||||
Copyright (C) 2004-2015, International Business Machines Corporation and others.
|
||||
Copyright (C) 2003-2010, International Business Machines Corporation and others.
|
||||
Copyright (c) 2008-2011, International Business Machines Corporation and
|
||||
Copyright (c) 2008-2010, International Business Machines Corporation and
|
||||
Copyright (C) 2014-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2013, International Business Machines Corporation and
|
||||
Copyright (c) 2014, International Business Machines
|
||||
Copyright (C) 2014, International Business Machines
|
||||
Copyright (C) 2013, International Business Machines
|
||||
Copyright (C) 2001-2008,2010 IBM and others. All rights reserved.
|
||||
Copyright (C) 2010 , Yahoo! Inc.
|
||||
Copyright (c) 1997-2011, International Business Machines Corporation and
|
||||
Copyright (C) 2013-2014, International Business Machines Corporation and
|
||||
Copyright (C) 2009-2013, International Business Machines Corporation and
|
||||
Copyright (C) 1996-2012, International Business Machines Corporation and
|
||||
Copyright (C) 2015, International Business Machines Corporation
|
||||
Copyright (c) 2001-2012, International Business Machines Corporation
|
||||
Copyright (C) 2001-2014 IBM and others. All rights reserved.
|
||||
Copyright (C) 2008-2014, Google, International Business Machines Corporation and
|
||||
Copyright (C) 2008, Google, International Business Machines Corporation and
|
||||
Copyright (C) 2008-2015, Google, International Business Machines Corporation
|
||||
Copyright (c) 2001-2014, International Business Machines
|
||||
Copyright (c) 2002-2010, International Business Machines Corporation
|
||||
Copyright (C) 2011-2015, International Business Machines Corporation and
|
||||
Copyright (C) 2011-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2011-2012, International Business Machines Corporation and
|
||||
Copyright (C) 1996-2016, International Business Machines
|
||||
Copyright (C) 1998-2014, International Business Machines
|
||||
Copyright (C) 2004-2016, International Business Machines
|
||||
Copyright (C) 2010-2011, International Business Machines
|
||||
Copyright (C) 2009-2015, International Business Machines
|
||||
Copyright (C) 2015, International Business Machines
|
||||
Copyright (C) 2012-2016, International Business Machines
|
||||
Copyright (C) 1999-2012, International Business Machines
|
||||
Copyright (C) 2001, International Business Machines
|
||||
Copyright (C) 2013, International Business Machines Corporation and others.
|
||||
Copyright (C) 2010-2012, International Business Machines
|
||||
Copyright (C) 2004-2015, International Business Machines
|
||||
Copyright (C) 2003-2006, International Business Machines
|
||||
Copyright (C) 2013-2015, International Business Machines Corporation and others.
|
||||
Copyright (C) 2001-2015 IBM and others. All rights reserved.
|
||||
Copyright (C) 2008-2015, International Business Machines Corporation
|
||||
Copyright (C) 2008-2016, International Business Machines
|
||||
Copyright (C) 2008-2013, International Business Machines Corporation
|
||||
Copyright (C) 2004-2012, International Business Machines Corporation and
|
||||
Copyright (C) 1997-2009,2014 International Business Machines
|
||||
Copyright (C) 2009-2011, International Business Machines Corporation and
|
||||
Copyright (C) 2009-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2009-2013, International Business Machines
|
||||
Copyright (C) 2008-2011, International Business Machines
|
||||
Copyright (C) 2007-2014, International Business Machines Corporation and
|
||||
Copyright (C) 2009-2010, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2016 International Business Machines Corporation
|
||||
Copyright (c) 2002-2011, International Business Machines
|
||||
Copyright (C) 2001-2012 IBM, Inc. All Rights Reserved.
|
||||
Copyright (c) 2013-2016 International Business Machines Corporation and others. All rights reserved.
|
||||
Copyright (c) 2013-2015 International Business Machines Corporation and others. All rights reserved.
|
||||
Copyright (c) 2007-2012, International Business Machines Corporation and
|
||||
Copyright (c) 2007-2012, International Business Machines
|
||||
Copyright (C) 2010, International Business Machines
|
||||
Copyright (C) 1997-2011, International Business Machines
|
||||
Copyright (C) 1997-2005, International Business Machines
|
||||
Copyright (C) 2009-2011, International Business Machines
|
||||
Copyright (C) 2003-2015, International Business Machines
|
||||
Copyright (C) 2009-2016, International Business Machines
|
||||
Copyright (C) 2008-2012, International Business Machines
|
||||
Copyright (C) 2008, International Business Machines
|
||||
Copyright (C) 2011-2014, International Business Machines
|
||||
Copyright (C) 2011-2013, International Business Machines
|
||||
Copyright (C) 2005, International Business Machines
|
||||
Copyright (C) 1999-2013, International Business Machines
|
||||
Copyright (C) 1998-2016, International Business Machines
|
||||
Copyright (c) 2007-2014, International Business Machines Corporation and
|
||||
Copyright (C) 2003-2013, International Business Machines
|
||||
Copyright (c) 2007-2016, International Business Machines Corporation and
|
||||
Copyright (c) 2008-2015, International Business Machines
|
||||
Copyright (C) 1999-2010, International Business Machines
|
||||
Copyright (C) 2000-2015, International Business Machines
|
||||
Copyright (C) 2000-2011, International Business Machines
|
||||
Copyright (C) 2000-2012, International Business Machines
|
||||
Copyright (C) 2000-2010, International Business Machines
|
||||
Copyright (C) 2004-2010, International Business Machines
|
||||
Copyright (C) 2004-2005, International Business Machines
|
||||
Copyright (c) 2013-2014, International Business Machines
|
||||
Copyright (c) 1991-2013 Unicode, Inc. © 2019 Unicode®, Inc.
|
||||
Copyright (C) 2018 and later: Unicode, Inc. and others.
|
||||
Copyright (c) 2008-2013 International Business Machines
|
||||
Copyright (C) 2002-2010, International Business Machines
|
||||
Copyright (c) 2012-2015 International Business Machines © 2020 Unicode®, Inc.
|
||||
Copyright (c) 2005-2013 IBM Corporation and others. All rights reserved
|
||||
Copyright (c) 2011-2012, International Business Machines Corporation and
|
||||
Copyright (C) 1998-2000, International Business Machines © 2017 Unicode®, Inc.
|
||||
Copyright (c) 2007-2015 International Business Machines
|
||||
Copyright (C) 2004-2006, International Business Machines
|
||||
Copyright (C) 2003-2005, International Business Machines
|
||||
Copyright (c) 1999-2014 International Business Machines
|
||||
Copyright (c) 2003, International Business Machines
|
||||
Copyright (C) 2014 International Business Machines
|
||||
Copyright (c) 2001-2003 International Business Machines
|
||||
Copyright (c) 2004-2011 International Business Machines
|
||||
Copyright (C) 2015-2016, International Business Machines
|
||||
Copyright (c) 2001-2015 International Business Machines
|
||||
Copyright (C) 2003-2012, International Business Machines Corporation and COPYRIGHT AND PERMISSION NOTICE
|
||||
Copyright (c) 2003 National Electronics and Computer Technology Center and others
|
||||
Copyright (C) 2005-2010, International Business Machines
|
||||
Copyright (c) 2007-2009 IBM Corporation and others. All rights reserved
|
||||
Copyright (C) 2004-2016 International Business Machines
|
||||
Copyright (C) 1998-2013, International Business Machines
|
||||
Copyright (C) 1998-2010, International Business Machines
|
||||
Copyright (c) 1999-2004, International Business Machines
|
||||
Copyright (C) 2002-2006 International Business Machines Corporation
|
||||
Copyright (C) 1999-2006, International Business Machines
|
||||
Copyright (C) 2002-2016 IBM, Inc. All Rights Reserved.
|
||||
Copyright (c) 2002-2006, International Business Machines(C) Copyright IBM Corp. 1998-2007 - All Rights Reserved
|
||||
Copyright (C) 1999-2003, International Business Machines
|
||||
Copyright (C) 1998-2006, International Business Machines Corporation and
|
||||
Copyright (C) 1998-2003, International Business Machines Corporation and
|
||||
Copyright (C) 2003 - 2008, International Business Machines
|
||||
Copyright (C) 1999-2008, International Business Machines
|
||||
Copyright (C) 1999-2001, International Business Machines
|
||||
Copyright (C) 1999-2005, International Business Machines
|
||||
Copyright (C) 2016 and later: Unicode, Inc. and others.
|
||||
Copyright (c) 2001-2010 IBM Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 1998-2005, International Business Machines Corporation and
|
||||
Copyright (C) 1998-2001, International Business Machines Corporation and
|
||||
Copyright (c) 2002-2005, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2000-2014, International Business Machines
|
||||
Copyright (C) 1996-2013, International Business Machines
|
||||
Copyright (c) 2002-2006, International Business Machines Corporation and
|
||||
Copyright (c) 2004-2010, International Business Machines Corporation and
|
||||
Copyright (C) 2004-2011, International Business Machines
|
||||
Copyright (c) 2002-2005, International Business Machines Corporation and
|
||||
Copyright (c) 2002-2014, International Business Machines
|
||||
Copyright (c) 1997-2012, International Business Machines
|
||||
Copyright (c) 2002-2008, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2011-2013, Apple Inc.; Unicode, Inc.; and others. All Rights Reserved.
|
||||
Copyright (C) 2011-2013, Apple Inc. and others. All Rights Reserved.
|
||||
Copyright (c) 2005-2007,2010 Apple Inc., Unicode Inc.,and others. All Rights Reserved.
|
||||
Copyright (c) 1999-2003, International Business Machines Corporation and
|
||||
Copyright (c) 2003-2014, International Business Machines
|
||||
Copyright (c) 2002-2010, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (c) 1999-2010, International Business Machines Corporation and
|
||||
Copyright (c) 1999-2002, International Business Machines Corporation and
|
||||
Copyright (C) 2002-2003, International Business Machines
|
||||
Copyright (C) 2002, International Business Machines
|
||||
Copyright (c) 2007, International Business Machines Corporation and
|
||||
Copyright (C) 2007, International Business Machines
|
||||
Copyright (C) 2001-2006, International Business Machines
|
||||
Copyright (C) 2010-2014, International Business Machines Corporation and others.
|
||||
Copyright (C) 2005-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2015-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2008-2012, International Business Machines Corporation
|
||||
Copyright (c) 2006-2015 International Business Machines Corporation and others. All rights reserved.
|
||||
Copyright (c) 2014-2015 International Business Machines Corporation and others. All rights reserved.
|
||||
Copyright (C) 2002-2011, International Business Machines
|
||||
Copyright (c) 2003-2010, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2012 IBM Corporation and Others. All Rights Reserved.
|
||||
Copyright (C) 1998-2012, International Business Machines Corporation
|
||||
Copyright (c) 2009, International Business Machines Corporation and
|
||||
Copyright (C) The Internet Society (2002). All Rights Reserved.
|
||||
Copyright (c) 2015, International Business Machines Corporation and
|
||||
Copyright (c) 2002, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 1998-2016, International Business Machines Corporation
|
||||
Copyright (c) 2011-2016,International Business Machines
|
||||
Copyright (C) 2012 International Business Machines Corporation and Others. All Rights Reserved.
|
||||
Copyright (C) 2011, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2011, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (c) 2011-2012,International Business Machines
|
||||
Copyright (c) 2007, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2007-2007, International Business Machines(C) Copyright IBM Corp. 1998-2014 - All Rights Reserved
|
||||
Copyright (C) 1998-2002, International Business Machines
|
||||
Copyright (c) 2001-2007, International Business Machines Corporation and others. All Rights Reserved.(C) Copyright IBM Corp. 1998-2013 - All Rights Reserved
|
||||
Copyright (C) 1998-2015, International Business Machines
|
||||
Copyright (C) 2001-2014 International Business Machines
|
||||
Copyright (C) 2011-2016, International Business Machines
|
||||
Copyright (C) 2011-2015, International Business Machines
|
||||
Copyright (c) 1999-2014, International Business Machines Corporation and
|
||||
Copyright (c) 1999-2009, International Business Machines Corporation and
|
||||
Copyright (c) 2010,International Business Machines
|
||||
Copyright (c) 2010-2016,International Business Machines
|
||||
Copyright (c) 2002-2005, International Business Machines
|
||||
Copyright (C) 2000-2003, International Business Machines
|
||||
Copyright (c) 2008-2014, International Business Machines Corporation and
|
||||
Copyright (C) 2001 - 2005, International Business Machines
|
||||
Copyright (C) 2001-2005, International Business Machines
|
||||
Copyright (C) 1995-2014, International Business Machines
|
||||
Copyright (c) 2000-2004 IBM, Inc. and Others.
|
||||
Copyright (c) 2002-2014, International Business Machines Corporation and
|
||||
Copyright (c) 2007-2013, International Business Machines Corporation and
|
||||
Copyright (c) 2002-2012, International Business Machines Corporation and
|
||||
Copyright (C) 2002-2012, International Business Machines
|
||||
Copyright (C) 2009-2011, International Business Machines Corporation, Google and Others.
|
||||
Copyright (c) 2002, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2009-2014, International Business Machines
|
||||
Copyright (C) 2008, International Business Machines Corporation and others.
|
||||
Copyright (C) 2000-2016, International Business Machines
|
||||
Copyright (C) 2011-2014 International Business Machines
|
||||
Copyright (C) 1997-2014, International Business Machines
|
||||
Copyright (C) 1997-2013, International Business Machines
|
||||
Copyright (c) 2004-2006, International Business Machines
|
||||
Copyright (C) 1997-2016, International Business Machines
|
||||
Copyright (C) 1997-2006, International Business Machines
|
||||
Copyright (C) 1997-2011, International Business Machines Corporation and others.
|
||||
Copyright (C) 1997-2013, International Business Machines Corporation and others.
|
||||
Copyright (c) 2004-2015, International Business Machines
|
||||
Copyright (C) 2009-2017, International Business Machines Corporation,Google, and others. All Rights Reserved.
|
||||
Copyright (C) 1997-2016, International Business Machines Corporation and others.
|
||||
Copyright (C) 2008-2015, International Business Machines Corporation and
|
||||
Copyright (C) 1997-2015, International Business Machines Corporation and others.
|
||||
Copyright (C) 2014-2016, International Business Machines Corporation and others.
|
||||
Copyright (c) 2014-2016, International Business Machines
|
||||
Copyright (C) 2001-2011 IBM and others. All rights reserved.
|
||||
Copyright (C) 1996-2014, International Business Machines Corporation and others.
|
||||
Copyright (C) 1996-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2009-2016, International Business Machines Corporation,
|
||||
Copyright (C) 2009-2010, Google, International Business Machines Corporation and
|
||||
Copyright (C) 2008-2014, Google, International Business Machines Corporation
|
||||
Copyright (C) 1996-2015, International Business Machines Corporation and
|
||||
Copyright (c) 1996-2015, International Business Machines Corporation and others.
|
||||
Copyright (C) 2010-2012,2015 International Business Machines
|
||||
Copyright (C) 2007-2015, International Business Machines
|
||||
Copyright (C) 2013-2014, International Business Machines Corporation and others.
|
||||
Copyright (C) 2010-2013, International Business Machines
|
||||
Copyright (c) 2002-2005, International Business Machines Corporation
|
||||
Copyright (C) 2001-2011,2014 IBM and others. All rights reserved.
|
||||
Copyright (C) 2008-2016, International Business Machines Corporation
|
||||
Copyright (C) 2004 - 2008, International Business Machines Corporation and
|
||||
Copyright (C) 1997-2011,2014-2015 International Business Machines
|
||||
Copyright (C) 2001-2003, International Business Machines
|
||||
Copyright (C) 1999-2009, International Business Machines
|
||||
Copyright (C) 2020 and later: Unicode, Inc. and others.
|
||||
Copyright (c) 2002, International Business Machines Corporation and
|
||||
Copyright (C) 2000-2008, International Business Machines
|
||||
Copyright (C) 1998-2006, International Business Machines
|
||||
Copyright (C) 1998-2001, International Business Machines Corporation
|
||||
Copyright (C) 1998-2004, International Business Machines Corporation
|
||||
Copyright (C) 2000, International Business Machines
|
||||
Copyright (c) 1999-2016, International Business Machines Corporation and
|
||||
Copyright (c) 2015, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (c) 1999-2012, International Business Machines Corporation and
|
||||
Copyright (C) 1998-2011, International Business Machines
|
||||
Copyright (C) 2008-2014, International Business Machines Corporation and
|
||||
Copyright (C) 2003-2004, International Business Machines
|
||||
Copyright (c) 2003-2005, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2002-2006 IBM, Inc. All Rights Reserved.
|
||||
Copyright (C) 2004-2008, International Business Machines
|
||||
Copyright (c) 2002-2016 International Business Machines Corporation and
|
||||
Copyright (c) 2002-2015, International Business Machines Corporation and
|
||||
Copyright (C) 2002-2016, International Business Machines Corporation
|
||||
Copyright (c) 2002-2010,International Business Machines
|
||||
Copyright (c) 2002-2014,International Business Machines
|
||||
Copyright (c) 2002-2016,International Business Machines
|
||||
Copyright (C) 2016 International Business Machines Corporation
|
||||
Copyright © 2019 and later: Unicode, Inc. and others.
|
||||
Copyright (c) 2016, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (c) 2016 International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (c) 2015-2016, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (c) 2005-2006, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2004, International Business Machines Corporation
|
||||
Copyright (c) 2012-2016, International Business Machines Corporation
|
||||
Copyright (c) 2012-2014, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2014, International Business Machines Corporation
|
||||
Copyright (c) 1996-2016, International Business Machines Corporation and
|
||||
Copyright (c) 2003-2013, International Business Machines Corporation
|
||||
Copyright (c) 2003-2008, International Business Machines Corporation
|
||||
Copyright (c) 1997-2015, International Business Machines Corporation
|
||||
Copyright (c) 2002-2016, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2002, International Business Machines Corporation and
|
||||
Copyright (C) 1996-2012, International Business Machines
|
||||
Copyright (c) 1997-2013 International Business Machines Corporation and
|
||||
Copyright (c) 2010-2012, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2011, International Business Machines Corporation
|
||||
Copyright (c) 1997-2006, International Business Machines Corporation and
|
||||
Copyright (c) 2008-2016 International Business Machines Corporation and
|
||||
Copyright (c) 2008-2016, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2016 International Business Machines Corporation and
|
||||
Copyright (c) 2007-2011, International Business Machines
|
||||
Copyright (c) 2007-2010, International Business Machines
|
||||
Copyright (C) 2001-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2003, International Business Machines Corporation and
|
||||
Copyright (C) 2003-2011, International Business Machines
|
||||
Copyright (c) 1997-2007, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2015, International Business Machines
|
||||
Copyright (C) 2004-2009, International Business Machines Corporation and
|
||||
Copyright (C) 2004, International Business Machines Corporation and
|
||||
Copyright (C) 1996-2009, International Business Machines Corporation and
|
||||
Copyright (C) 1996-2006, International Business Machines Corporation and
|
||||
Copyright (C) 2011-2013, International Business Machines Corporation
|
||||
Copyright (C) 2000-2007, International Business Machines
|
||||
Copyright (c) 2001, International Business Machines Corporation and
|
||||
Copyright (C) 2012-2013, International Business Machines
|
||||
Copyright (c) 2010-2016, International Business Machines Corporation and
|
||||
Copyright (c) 2010-2016, International Business Machines Corporation
|
||||
Copyright (c) 1997-2010, International Business Machines Corporation
|
||||
Copyright (c) 1997-2003, International Business Machines
|
||||
Copyright (C) 2014-2015, International Business Machines Corporation and
|
||||
Copyright (c) 1997-2013, International Business Machines Corporation
|
||||
Copyright (c) 1999-2016, International Business Machines
|
||||
Copyright (c) 1999-2016 International Business Machines Corporation and
|
||||
Copyright (c) 2016, International Business Machines Corporation and
|
||||
Copyright (c) 2016, International Business Machines
|
||||
Copyright (c) 2013-2016, International Business Machines Corporation
|
||||
Copyright (c) 2013, International Business Machines Corporation
|
||||
Copyright (C) 2013-2016, International Business Machines Corporation and
|
||||
Copyright (c) 2001-2010, International Business Machines Corporation and
|
||||
Copyright (C) 2014, International Business Machines Corporation and
|
||||
Copyright (c) 1999-2015, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2016, International Business Machines orporation
|
||||
Copyright (c) 2001-2008, International Business Machines Corporation and others
|
||||
Copyright (C) 2003-2016, International Business Machines Corporation and
|
||||
Copyright (c) 2004, International Business Machines Corporation
|
||||
Copyright (C) 2001-2009, International Business Machines
|
||||
Copyright (c) 2004,2011 International Business Machines
|
||||
Copyright (c) 2004-2011, International Business Machines
|
||||
Copyright (c) 2000-2016, International Business Machines Corporation
|
||||
Copyright (c) 2001-2005, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2004, International Business Machines
|
||||
Copyright (c) 2001-2009, International Business Machines
|
||||
Copyright (c) 1997-2009, International Business Machines Corporation
|
||||
Copyright (c) 1997-2013, International Business Machines
|
||||
Copyright (c) 1997-2012, International Business Machines Corporation
|
||||
Copyright (C) 2007-2015, International Business Machines Corporation and
|
||||
Copyright (C) 2007-2011, International Business Machines Corporation and
|
||||
Copyright (C) 2007, International Business Machines Corporation and
|
||||
Copyright (c) 1998-2005, International Business Machines Corporation and
|
||||
Copyright (c) 2002-2010, International Business Machines Corporation and
|
||||
Copyright (C) 1999-2016 International Business Machines Corporation and
|
||||
Copyright (c) 2004-2011, International Business Machines Corporation and
|
||||
Copyright (c) 2002-2007, International Business Machines Corporation and
|
||||
Copyright (C) 2003, International Business Machines Corporation and
|
||||
Copyright (C) 2005-2011, International Business Machines
|
||||
Copyright (C) 2011-2012, International Business Machines
|
||||
Copyright (C) 2007-2012, International Business Machines
|
||||
Copyright (C) 2006-2016, International Business Machines Corporation
|
||||
Copyright (C) 2006-2012, International Business Machines Corporation and others.
|
||||
Copyright 2007 Google Inc. All Rights Reserved.
|
||||
Copyright (c) 2001-2015, International Business Machines
|
||||
Copyright (C) 2006-2014, International Business Machines Corporation
|
||||
Copyright (C) 2008, International Business Machines Corporation and
|
||||
Copyright (C) 2009-2012, International Business Machines
|
||||
Copyright (C) 2006 International Business Machines Corporation
|
||||
Copyright (C) 2010-2016, International Business Machines Corporation and
|
||||
Copyright (C) 2002-2014, International Business Machines Corporation and
|
||||
Copyright (C) 2002-2005, International Business Machines Corporation and
|
||||
Copyright (C) 2011, International Business Machines
|
||||
Copyright (c) 2003-2010 International Business Machines
|
||||
Copyright (C) 2003-2003, International Business Machines
|
||||
Copyright (C) 1999-2016 International Business Machines Corporation
|
||||
Copyright (C) 1999-2014 International Business Machines Corporation
|
||||
Copyright (C) 1999-2014 International Business Machines
|
||||
Copyright (C) 2002-2011, International Business Machines Corporation and others.
|
||||
Copyright (C) 2002-2008, International Business Machines Corporation and others.
|
||||
Copyright (C) 2002-2008 International Business Machines Corporation
|
||||
Copyright (c) 2001-2005, International Business Machines
|
||||
Copyright (C) 2002-2014 International Business Machines Corporation
|
||||
Copyright (c) 2003-2011, International Business Machines
|
||||
Copyright (C) 1998-2012, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2014, International Business Machines Corporation.
|
||||
Copyright (C) 2001-2011, International Business Machines Corporation.
|
||||
Copyright (C) 2001-2014, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2011, International Business Machines Corporation and
|
||||
Copyright (C) 2001-2012, International Business Machines Corporation and
|
||||
Copyright 2004 and onwards Google Inc.
|
||||
Copyright (C) 2004-2014, International Business Machines
|
||||
Copyright (C) 2006, International Business Machines
|
||||
Copyright (C) 2004-2012, International Business Machines
|
||||
Copyright (C) 2001-2013, International Business Machines
|
||||
Copyright (C) 1998-2004, International Business Machines
|
||||
Copyright (C) 2000-2013, International Business Machines
|
||||
Copyright (C) 1999-2015 International Business Machines
|
||||
Copyright (C) 2000-2006, International Business Machines
|
||||
Copyright (C) 1999-2004, International Business Machines
|
||||
Copyright (C) 2003-2007, International Business Machines
|
||||
Copyright (C) 2002-2006, International Business Machines
|
||||
Copyright (C) 2001-2015, International Business Machines
|
||||
Copyright (c) 2001-2012, International Business Machines
|
||||
Copyright (c) 2002-2004, International Business Machines
|
||||
Copyright (C) 1999-2016, International Business Machines Corporation and
|
||||
Copyright (c) 1996-2014, International Business Machines
|
||||
Copyright (C) 1999-2016, International Business Machines Corporation
|
||||
Copyright (C) 2009-2014 International Business Machines
|
||||
Copyright (C) 2004-2007, International Business Machines
|
||||
Copyright (c) 2001-2016, International Business Machines
|
||||
Copyright (C) 2003-2009, International Business Machines
|
||||
Copyright (C) 1999-2013, International Business Machines Corporation and
|
||||
Copyright (C) 1999-2015, International Business Machines Corporation and
|
||||
Copyright (c) 2002-2011, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 2001-2016 IBM, Inc. All Rights Reserved.
|
||||
Copyright (C) 1999-2016 International Business Machines
|
||||
Copyright (C) 2009-2010 IBM Corporation and Others. All Rights Reserved.
|
||||
Copyright (C) 1998-2012, International Business Machines
|
||||
Copyright (C) 1991 and later: Unicode, Inc. and others.
|
||||
Copyright (C) 1997-2000, International Business Machines
|
||||
Copyright (c) 1999-2007, International Business Machines Corporation and
|
||||
Copyright (c) 2000 IBM, Inc. and Others.
|
||||
Copyright (C) 2008-2013, International Business Machines
|
||||
Copyright (C) 1998-2003, 2006, International Business Machines Corporation
|
||||
Copyright (c) 2002-2003,International Business Machines
|
||||
Copyright (C) 2009 International Business Machines
|
||||
Copyright (C) 2010-2016 International Business Machines
|
||||
Copyright (C) 2008-2012 IBM, Inc. All Rights Reserved.
|
||||
Copyright (C) 1998-2008, International Business Machines
|
||||
Copyright (C) 2010-2016, International Business Machines
|
||||
Copyright (C) 1999-2006,2013 IBM Corp. All rights reserved.
|
||||
Copyright (C) 2008-2009, International Business Machines Corporation and
|
||||
Copyright (C) 2012,2014 International Business Machines
|
||||
Copyright (c) 1996-2015, International Business Machines Corporation and
|
||||
Copyright (C) 1997-2005, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 1999-2012, International Business Machines Corporation and
|
||||
Copyright (C) 1996-2013, International Business Machines Corporation
|
||||
Copyright (C) 1998-2005, International Business Machines
|
||||
Copyright 2001 and onwards Google Inc.
|
||||
Copyright (C) 2010-2012,2014, International Business Machines
|
||||
Copyright (C) 1996-2015, International Business Machines Corporation and others.
|
||||
Copyright (c) 2003-2004, International Business Machines
|
||||
Copyright (C) 2000-2004, International Business Machines
|
||||
Copyright (C) 2002-2013, International Business Machines
|
||||
Copyright (C) 2002-2011 International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (C) 1999-2010, International Business Machines Corporation and others.
|
||||
Copyright (C) 2001-2005, International Business Machines Corporation and others. All Rights Reserved.
|
||||
Copyright (c) 1996-2016, International Business Machines Corporation
|
||||
Copyright (C) 1997-2010, International Business Machines
|
||||
|
||||
Software: libtiff 4.1.0
|
||||
Copyright notice:
|
||||
Copyright © 2015 Open Microscopy Environment / University of Dundee
|
||||
|
|
1
akg
1
akg
|
@ -1 +0,0 @@
|
|||
Subproject commit 949a45538ccb7ae94ad73386b5e3e77005112eea
|
|
@ -0,0 +1,19 @@
|
|||
#!/bin/bash
|
||||
# Copyright 2019 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
git submodule update --init --recursive
|
||||
|
||||
|
56
build.bat
56
build.bat
|
@ -1,56 +0,0 @@
|
|||
@rem Copyright 2020 Huawei Technologies Co., Ltd
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem http://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem ============================================================================
|
||||
@echo off
|
||||
@title mindspore_build
|
||||
|
||||
SET BASEPATH=%CD%
|
||||
IF NOT EXIST %BASEPATH%/build (
|
||||
md "build"
|
||||
)
|
||||
|
||||
cd %BASEPATH%/build
|
||||
SET BUILD_PATH=%CD%
|
||||
|
||||
IF NOT EXIST %BUILD_PATH%/mindspore (
|
||||
md "mindspore"
|
||||
)
|
||||
|
||||
cd %CD%/mindspore
|
||||
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -DENABLE_CPU=ON -DENABLE_MINDDATA=ON -DUSE_GLOG=ON -G "CodeBlocks - MinGW Makefiles" ../..
|
||||
IF NOT %errorlevel% == 0 (
|
||||
echo "cmake fail."
|
||||
goto run_fail
|
||||
)
|
||||
|
||||
IF "%1%" == "" (
|
||||
cmake --build . --target package -- -j6
|
||||
) ELSE (
|
||||
cmake --build . --target package -- -j%1%
|
||||
)
|
||||
IF NOT %errorlevel% == 0 (
|
||||
echo "build fail."
|
||||
goto run_fail
|
||||
)
|
||||
|
||||
cd %BASEPATH%
|
||||
|
||||
goto run_eof
|
||||
|
||||
:run_fail
|
||||
cd %BASEPATH%
|
||||
set errorlevel=1
|
||||
|
||||
:run_eof
|
525
build.sh
525
build.sh
|
@ -16,6 +16,7 @@
|
|||
|
||||
set -e
|
||||
BASEPATH=$(cd "$(dirname $0)"; pwd)
|
||||
PROJECT_PATH="${BASEPATH}"
|
||||
CUDA_PATH=""
|
||||
CUDNN_PATH=""
|
||||
export BUILD_PATH="${BASEPATH}/build/"
|
||||
|
@ -23,42 +24,37 @@ export BUILD_PATH="${BASEPATH}/build/"
|
|||
usage()
|
||||
{
|
||||
echo "Usage:"
|
||||
echo "bash build.sh [-d] [-r] [-v] [-c on|off] [-t on|off] [-g on|off] [-h] [-b ge] [-m infer|train] \\"
|
||||
echo " [-a on|off] [-Q on|off] [-p on|off] [-i] [-L] [-R] [-D on|off] [-j[n]] [-e gpu|d|cpu] \\"
|
||||
echo " [-P on|off] [-z [on|off]] [-M on|off] [-V 9.2|10.1] [-I arm64|arm32|x86_64] [-K] \\"
|
||||
echo " [-B on|off] [-w on|off] [-E] [-l on|off]"
|
||||
echo "bash build.sh [-d] [-r] [-v] [-c on|off] [-t on|off] [-g on|off] [-h] [-s] [-b ge|cpu] [-m infer|train] \\"
|
||||
echo " [-a on|off] [-g on|off] [-p on|off] [-i] [-L] [-R] [-D on|off] [-j[n]] [-e gpu|d|cpu] \\"
|
||||
echo " [-P on|off] [-z] [-M on|off] [-V 9.2|10.1] [-I] [-K]"
|
||||
echo ""
|
||||
echo "Options:"
|
||||
echo " -d Debug mode"
|
||||
echo " -r Release mode, default mode"
|
||||
echo " -v Display build command"
|
||||
echo " -c Enable code coverage, default off"
|
||||
echo " -t Run testcases, default on"
|
||||
echo " -c Enable code coverage switch, default off"
|
||||
echo " -t Run testcases switch, default on"
|
||||
echo " -g Use glog to output log, default on"
|
||||
echo " -h Print usage"
|
||||
echo " -s Install or setup"
|
||||
echo " -b Select other backend, available: \\"
|
||||
echo " ge:graph engine"
|
||||
echo " -m Select graph engine backend mode, available: infer, train, default is infer"
|
||||
echo " ge:graph engine, cpu"
|
||||
echo " -m Select mode, available: infer, train, default is infer "
|
||||
echo " -a Enable ASAN, default off"
|
||||
echo " -p Enable pipeline profile, print to stdout, default off"
|
||||
echo " -R Enable pipeline profile, record to json, default off"
|
||||
echo " -p Enable pipeline profile, default off"
|
||||
echo " -i Enable increment building, default off"
|
||||
echo " -L Enable load ANF-IR as input of 'infer', default off"
|
||||
echo " -R Enable the time_line record, default off"
|
||||
echo " -j[n] Set the threads when building (Default: -j8)"
|
||||
echo " -e Use gpu, d or cpu"
|
||||
echo " -P Enable dump anf graph to file in ProtoBuffer format, default on"
|
||||
echo " -Q Enable dump memory, default off"
|
||||
echo " -Q Enable dump end to end, default off"
|
||||
echo " -D Enable dumping of function graph ir, default on"
|
||||
echo " -z Compile dataset & mindrecord, default on"
|
||||
echo " -M Enable MPI and NCCL for GPU training, gpu default on"
|
||||
echo " -V Specify the minimum required cuda version, default CUDA 10.1"
|
||||
echo " -I Compile lite"
|
||||
echo " -K Compile with AKG, default on"
|
||||
echo " -s Enable serving module, default off"
|
||||
echo " -w Enable acl module, default off"
|
||||
echo " -B Enable debugger, default off"
|
||||
echo " -E Enable IBVERBS for parameter server, default off"
|
||||
echo " -l Compile with python dependency, default on"
|
||||
echo " -z Compile dataset & mindrecord, default off"
|
||||
echo " -M Enable MPI and NCCL for GPU training, default off"
|
||||
echo " -V Specify the minimum required cuda version, default CUDA 9.2"
|
||||
echo " -I Compile predict, default off"
|
||||
echo " -K Compile with AKG, default off"
|
||||
}
|
||||
|
||||
# check value of input is 'on' or 'off'
|
||||
|
@ -81,6 +77,7 @@ checkopts()
|
|||
VERBOSE=""
|
||||
ENABLE_COVERAGE="off"
|
||||
RUN_TESTCASES="off"
|
||||
EXECUTE_SETUP="off"
|
||||
ENABLE_BACKEND=""
|
||||
TRAIN_MODE="INFER"
|
||||
ENABLE_ASAN="off"
|
||||
|
@ -91,22 +88,16 @@ checkopts()
|
|||
ENABLE_DUMP2PROTO="on"
|
||||
ENABLE_DUMPE2E="off"
|
||||
ENABLE_DUMP_IR="on"
|
||||
COMPILE_MINDDATA="on"
|
||||
COMPILE_MINDDATA="off"
|
||||
ENABLE_MPI="off"
|
||||
CUDA_VERSION="10.1"
|
||||
COMPILE_LITE="off"
|
||||
LITE_PLATFORM=""
|
||||
SUPPORT_TRAIN="off"
|
||||
CUDA_VERSION="9.2"
|
||||
COMPILE_PREDICT="off"
|
||||
USE_GLOG="on"
|
||||
ENABLE_AKG="on"
|
||||
ENABLE_SERVING="off"
|
||||
ENABLE_ACL="off"
|
||||
ENABLE_DEBUGGER="off"
|
||||
ENABLE_IBVERBS="off"
|
||||
ENABLE_PYTHON="on"
|
||||
PREDICT_PLATFORM=""
|
||||
ENABLE_AKG="off"
|
||||
|
||||
# Process the options
|
||||
while getopts 'drvj:c:t:hsb:a:g:p:ie:m:l:I:LRP:Q:D:zM:V:K:swB:E' opt
|
||||
while getopts 'drvj:c:t:hsb:a:g:p:ie:m:I:LRP:Q:D:zM:V:K' opt
|
||||
do
|
||||
OPTARG=$(echo ${OPTARG} | tr '[A-Z]' '[a-z]')
|
||||
case "${opt}" in
|
||||
|
@ -138,6 +129,9 @@ checkopts()
|
|||
usage
|
||||
exit 0
|
||||
;;
|
||||
s)
|
||||
EXECUTE_SETUP="on"
|
||||
;;
|
||||
b)
|
||||
if [[ "X$OPTARG" != "Xge" && "X$OPTARG" != "Xcpu" ]]; then
|
||||
echo "Invalid value ${OPTARG} for option -b"
|
||||
|
@ -145,6 +139,9 @@ checkopts()
|
|||
exit 1
|
||||
fi
|
||||
ENABLE_BACKEND=$(echo "$OPTARG" | tr '[a-z]' '[A-Z]')
|
||||
if [[ "X$ENABLE_BACKEND" == "XGE" ]]; then
|
||||
ENABLE_GE="on"
|
||||
fi
|
||||
if [[ "X$ENABLE_BACKEND" != "XCPU" ]]; then
|
||||
ENABLE_CPU="on"
|
||||
fi
|
||||
|
@ -157,10 +154,6 @@ checkopts()
|
|||
check_on_off $OPTARG p
|
||||
ENABLE_PROFILE="$OPTARG"
|
||||
;;
|
||||
l)
|
||||
check_on_off $OPTARG l
|
||||
ENABLE_PYTHON="$OPTARG"
|
||||
;;
|
||||
i)
|
||||
INC_BUILD="on"
|
||||
;;
|
||||
|
@ -184,11 +177,9 @@ checkopts()
|
|||
if [[ "X$OPTARG" == "Xgpu" ]]; then
|
||||
ENABLE_GPU="on"
|
||||
ENABLE_CPU="on"
|
||||
ENABLE_MPI="on"
|
||||
elif [[ "X$OPTARG" == "Xd" || "X$OPTARG" == "Xascend" ]]; then
|
||||
elif [[ "X$OPTARG" == "Xd" ]]; then
|
||||
ENABLE_D="on"
|
||||
ENABLE_CPU="on"
|
||||
ENABLE_SERVING="on"
|
||||
elif [[ "X$OPTARG" == "Xcpu" ]]; then
|
||||
ENABLE_CPU="on"
|
||||
else
|
||||
|
@ -207,10 +198,6 @@ checkopts()
|
|||
usage
|
||||
exit 1
|
||||
fi
|
||||
if [[ "X$OPTARG" == "X9.2" ]]; then
|
||||
echo "Unsupported CUDA version 9.2"
|
||||
exit 1
|
||||
fi
|
||||
CUDA_VERSION="$OPTARG"
|
||||
;;
|
||||
P)
|
||||
|
@ -229,29 +216,16 @@ checkopts()
|
|||
echo "enable dump function graph ir"
|
||||
;;
|
||||
z)
|
||||
eval ARG=\$\{$OPTIND\}
|
||||
if [[ -n $ARG && $ARG != -* ]]; then
|
||||
OPTARG=$ARG
|
||||
check_on_off $OPTARG z
|
||||
OPTIND=$((OPTIND + 1))
|
||||
else
|
||||
OPTARG=""
|
||||
fi
|
||||
if [[ "X$OPTARG" == "Xoff" ]]; then
|
||||
COMPILE_MINDDATA="off"
|
||||
fi
|
||||
COMPILE_MINDDATA="on"
|
||||
;;
|
||||
I)
|
||||
COMPILE_LITE="on"
|
||||
COMPILE_PREDICT="on"
|
||||
if [[ "$OPTARG" == "arm64" ]]; then
|
||||
LITE_PLATFORM="arm64"
|
||||
elif [[ "$OPTARG" == "arm32" ]]; then
|
||||
LITE_PLATFORM="arm32"
|
||||
PREDICT_PLATFORM="arm64"
|
||||
elif [[ "$OPTARG" == "x86_64" ]]; then
|
||||
ENABLE_CONVERTER="on"
|
||||
LITE_PLATFORM="x86_64"
|
||||
PREDICT_PLATFORM="x86_64"
|
||||
else
|
||||
echo "-I parameter must be arm64、arm32 or x86_64"
|
||||
echo "-I parameter must be arm64 or x86_64"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
|
@ -259,23 +233,6 @@ checkopts()
|
|||
ENABLE_AKG="on"
|
||||
echo "enable compile with akg"
|
||||
;;
|
||||
s)
|
||||
ENABLE_SERVING="on"
|
||||
echo "enable serving"
|
||||
;;
|
||||
w)
|
||||
ENABLE_ACL="on"
|
||||
echo "enable acl"
|
||||
;;
|
||||
B)
|
||||
check_on_off $OPTARG B
|
||||
ENABLE_DEBUGGER="on"
|
||||
echo "enable debugger"
|
||||
;;
|
||||
E)
|
||||
ENABLE_IBVERBS="on"
|
||||
echo "enable IBVERBS for parameter server"
|
||||
;;
|
||||
*)
|
||||
echo "Unknown option ${opt}!"
|
||||
usage
|
||||
|
@ -284,15 +241,9 @@ checkopts()
|
|||
done
|
||||
}
|
||||
checkopts "$@"
|
||||
if [[ "X$ENABLE_GPU" = "Xon" ]] && [[ "X$ENABLE_DUMPE2E" = "Xon" ]]; then
|
||||
ENABLE_DEBUGGER="on"
|
||||
fi
|
||||
echo "---------------- MindSpore: build start ----------------"
|
||||
echo "---------------- mindspore: build start ----------------"
|
||||
mkdir -pv "${BUILD_PATH}/package/mindspore/lib"
|
||||
git submodule update --init graphengine
|
||||
if [[ "X$ENABLE_AKG" = "Xon" ]] && [[ "X$ENABLE_D" = "Xon" || "X$ENABLE_GPU" = "Xon" ]]; then
|
||||
git submodule update --init --recursive akg
|
||||
fi
|
||||
|
||||
build_exit()
|
||||
{
|
||||
|
@ -336,8 +287,7 @@ build_mindspore()
|
|||
if [[ "X$ENABLE_DUMPE2E" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_DUMP_E2E=ON"
|
||||
fi
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_DUMP_IR=${ENABLE_DUMP_IR}"
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_PYTHON=${ENABLE_PYTHON}"
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_DUMP_IR=${ENABLE_DUMP_IR^^}"
|
||||
if [[ "X$ENABLE_MPI" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_MPI=ON"
|
||||
fi
|
||||
|
@ -345,7 +295,7 @@ build_mindspore()
|
|||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_D=ON"
|
||||
fi
|
||||
if [[ "X$ENABLE_GPU" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_GPU=ON -DUSE_CUDA=ON -DCUDA_PATH=$CUDA_PATH -DCUDNN_PATH=$CUDNN_PATH -DMS_REQUIRE_CUDA_VERSION=${CUDA_VERSION}"
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_GPU=ON -DCUDA_PATH=$CUDA_PATH -DCUDNN_PATH=$CUDNN_PATH -DMS_REQUIRE_CUDA_VERSION=${CUDA_VERSION}"
|
||||
fi
|
||||
if [[ "X$ENABLE_CPU" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_CPU=ON"
|
||||
|
@ -356,298 +306,163 @@ build_mindspore()
|
|||
if [[ "X$USE_GLOG" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DUSE_GLOG=ON"
|
||||
fi
|
||||
if [[ "X$ENABLE_AKG" = "Xon" ]] && [[ "X$ENABLE_D" = "Xon" || "X$ENABLE_GPU" = "Xon" ]]; then
|
||||
if [[ "X$ENABLE_AKG" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_AKG=ON"
|
||||
fi
|
||||
if [[ "X$ENABLE_SERVING" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_SERVING=ON"
|
||||
fi
|
||||
if [[ "X$ENABLE_ACL" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_ACL=ON"
|
||||
fi
|
||||
if [[ "X$ENABLE_DEBUGGER" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_DEBUGGER=ON"
|
||||
fi
|
||||
|
||||
if [[ "X$ENABLE_IBVERBS" = "Xon" ]]; then
|
||||
CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_IBVERBS=ON"
|
||||
fi
|
||||
echo "${CMAKE_ARGS}"
|
||||
if [[ "X$INC_BUILD" = "Xoff" ]]; then
|
||||
cmake ${CMAKE_ARGS} ../..
|
||||
fi
|
||||
if [[ -n "$VERBOSE" ]]; then
|
||||
CMAKE_VERBOSE="--verbose"
|
||||
fi
|
||||
if [[ "X$ENABLE_ACL" = "Xon" ]]; then
|
||||
cmake --build . ${CMAKE_VERBOSE} -j$THREAD_NUM
|
||||
else
|
||||
cmake --build . --target package ${CMAKE_VERBOSE} -j$THREAD_NUM
|
||||
make ${VERBOSE} -j$THREAD_NUM
|
||||
if [[ "X$EXECUTE_SETUP" = "Xon" ]]; then
|
||||
make install
|
||||
fi
|
||||
echo "success to build mindspore project!"
|
||||
}
|
||||
|
||||
checkndk() {
|
||||
if [ "${ANDROID_NDK}" ]; then
|
||||
echo -e "\e[31mANDROID_NDK_PATH=$ANDROID_NDK \e[0m"
|
||||
else
|
||||
echo -e "\e[31mplease set ANDROID_NDK_PATH in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r20b/ \e[0m"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
gene_flatbuffer() {
|
||||
FLAT_DIR="${BASEPATH}/mindspore/lite/schema"
|
||||
cd ${FLAT_DIR} && rm -rf "${FLAT_DIR}/inner" && mkdir -p "${FLAT_DIR}/inner"
|
||||
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b
|
||||
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api -o "${FLAT_DIR}/inner"
|
||||
|
||||
FLAT_DIR="${BASEPATH}/mindspore/lite/tools/converter/parser/tflite"
|
||||
cd ${FLAT_DIR}
|
||||
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api -o "${FLAT_DIR}/"
|
||||
}
|
||||
|
||||
build_flatbuffer() {
|
||||
cd ${BASEPATH}
|
||||
FLATC="${BASEPATH}"/third_party/flatbuffers/build/flatc
|
||||
if [[ ! -f "${FLATC}" ]]; then
|
||||
git submodule update --init --recursive third_party/flatbuffers
|
||||
cd ${BASEPATH}/third_party/flatbuffers
|
||||
rm -rf build && mkdir -pv build && cd build && cmake .. && make -j$THREAD_NUM
|
||||
gene_flatbuffer
|
||||
fi
|
||||
if [[ "${INC_BUILD}" == "off" ]]; then
|
||||
gene_flatbuffer
|
||||
fi
|
||||
}
|
||||
|
||||
gene_protobuf() {
|
||||
PROTO_SRC_DIR="${BASEPATH}/mindspore/lite/tools/converter/parser/caffe"
|
||||
find ${PROTO_SRC_DIR} -name "*.proto" -print0 | xargs -0 "${PROTOC}" -I"${PROTO_SRC_DIR}" --cpp_out="${PROTO_SRC_DIR}"
|
||||
PROTO_SRC_DIR="${BASEPATH}/mindspore/lite/tools/converter/parser/onnx"
|
||||
find ${PROTO_SRC_DIR} -name "*.proto" -print0 | xargs -0 "${PROTOC}" -I"${PROTO_SRC_DIR}" --cpp_out="${PROTO_SRC_DIR}"
|
||||
}
|
||||
|
||||
build_protobuf() {
|
||||
cd ${BASEPATH}
|
||||
PROTOC="${BASEPATH}"/third_party/protobuf/build/bin/protoc
|
||||
if [[ ! -f "${PROTOC}" ]]; then
|
||||
git submodule update --init --recursive third_party/protobuf
|
||||
cd ${BASEPATH}/third_party/protobuf
|
||||
rm -rf build && mkdir -pv build && ./autogen.sh
|
||||
./configure --prefix=${BASEPATH}/third_party/protobuf/build
|
||||
make clean && make -j$THREAD_NUM && make install
|
||||
gene_protobuf
|
||||
fi
|
||||
if [[ "${INC_BUILD}" == "off" ]]; then
|
||||
gene_protobuf
|
||||
fi
|
||||
}
|
||||
|
||||
build_gtest() {
|
||||
cd ${BASEPATH}
|
||||
git submodule update --init --recursive third_party/googletest
|
||||
}
|
||||
|
||||
gene_clhpp() {
|
||||
CL_SRC_DIR="${BASEPATH}/mindspore/lite/src/runtime/kernel/opencl/cl"
|
||||
for sub_dir in "${CL_SRC_DIR}"/*
|
||||
do
|
||||
data_type="$(basename ${sub_dir})"
|
||||
if [ ! -d ${CL_SRC_DIR}/${data_type} ]; then
|
||||
continue
|
||||
fi
|
||||
cd ${CL_SRC_DIR}/${data_type}
|
||||
rm -rf *.inc
|
||||
echo "$(cd "$(dirname $0)"; pwd)"
|
||||
for file_path in "${CL_SRC_DIR}/${data_type}"/*
|
||||
do
|
||||
file="$(basename ${file_path})"
|
||||
inc_file=`echo ${CL_SRC_DIR}/${data_type}/${file} | sed 's/$/.inc/'`
|
||||
sed 's/^/\"/;s/$/ \\n\" \\/' ${CL_SRC_DIR}/${data_type}/${file} > ${inc_file}
|
||||
kernel_name=`echo ${file} | sed s'/.\{3\}$//'`
|
||||
sed -i "1i\static const char *${kernel_name}_source_${data_type} =\"\\n\" \\" ${inc_file}
|
||||
sed -i '$a\;' ${inc_file}
|
||||
done
|
||||
done
|
||||
}
|
||||
|
||||
gene_ocl_program() {
|
||||
CL_SRC_DIR="${BASEPATH}/mindspore/lite/src/runtime/kernel/opencl/cl"
|
||||
SPIRV_DIR=build/spirv
|
||||
rm -rf ${SPIRV_DIR}
|
||||
mkdir -pv ${SPIRV_DIR}
|
||||
for sub_dir in "${CL_SRC_DIR}"/*
|
||||
do
|
||||
data_type="$(basename ${sub_dir})"
|
||||
if [ ! -d ${CL_SRC_DIR}/${data_type} ]; then
|
||||
continue
|
||||
fi
|
||||
#echo $(cd "$(dirname $0)"; pwd)
|
||||
for file_path in "${CL_SRC_DIR}/${data_type}"/*
|
||||
do
|
||||
file="$(basename ${file_path})"
|
||||
if [ "${file##*.}" != "cl" ]; then
|
||||
continue
|
||||
fi
|
||||
clang -Xclang -finclude-default-header -cl-std=CL2.0 --target=spir64-unknown-unknown -emit-llvm \
|
||||
-c -O0 -o ${SPIRV_DIR}/${file%.*}.bc ${CL_SRC_DIR}/${data_type}/${file}
|
||||
done
|
||||
done
|
||||
|
||||
bcs=`ls ${SPIRV_DIR}/*.bc`
|
||||
llvm-link ${bcs} -o ${SPIRV_DIR}/program.bc
|
||||
llvm-spirv -o ${SPIRV_DIR}/program.spv ${SPIRV_DIR}/program.bc
|
||||
|
||||
CL_PROGRAM_PATH="${BASEPATH}/mindspore/lite/src/runtime/kernel/opencl/cl/program.inc"
|
||||
echo "#include <vector>" > ${CL_PROGRAM_PATH}
|
||||
echo "std::vector<unsigned char> g_program_binary = {" >> ${CL_PROGRAM_PATH}
|
||||
#hexdump -v -e '16/1 "0x%02x, " "\n"' ${SPIRV_DIR}/program.spv >> ${CL_PROGRAM_PATH}
|
||||
hexdump -v -e '1/1 "0x%02x, "' ${SPIRV_DIR}/program.spv >> ${CL_PROGRAM_PATH}
|
||||
echo "};" >> ${CL_PROGRAM_PATH}
|
||||
echo "Compile SPIRV done"
|
||||
}
|
||||
|
||||
build_opencl() {
|
||||
cd ${BASEPATH}
|
||||
if [[ ! -d "third_party/OpenCL-Headers" ]]; then
|
||||
git submodule update --init third_party/OpenCL-Headers
|
||||
fi
|
||||
if [[ ! -d "third_party/OpenCL-CLHPP" ]]; then
|
||||
git submodule update --init third_party/OpenCL-CLHPP
|
||||
fi
|
||||
if [[ "${OPENCL_OFFLINE_COMPILE}" == "on" ]]; then
|
||||
gene_ocl_program
|
||||
else
|
||||
gene_clhpp
|
||||
fi
|
||||
}
|
||||
|
||||
build_lite()
|
||||
build_predict()
|
||||
{
|
||||
echo "start build mindspore lite project"
|
||||
git submodule update --init --recursive third_party/incubator-tvm
|
||||
echo "start build predict project"
|
||||
|
||||
if [[ "${ENABLE_GPU}" == "on" ]]; then
|
||||
echo "start build opencl"
|
||||
build_opencl
|
||||
fi
|
||||
if [[ "${LITE_PLATFORM}" == "x86_64" ]]; then
|
||||
build_protobuf
|
||||
fi
|
||||
build_flatbuffer
|
||||
build_gtest
|
||||
git submodule update --init --recursive third_party/flatbuffers
|
||||
git submodule update --init --recursive third_party/googletest
|
||||
git submodule update --init --recursive third_party/protobuf
|
||||
|
||||
cd "${BASEPATH}/mindspore/lite"
|
||||
if [[ "${INC_BUILD}" == "off" ]]; then
|
||||
rm -rf build
|
||||
fi
|
||||
mkdir -pv build
|
||||
cd build
|
||||
BUILD_TYPE="Release"
|
||||
if [[ "${DEBUG_MODE}" == "on" ]]; then
|
||||
BUILD_TYPE="Debug"
|
||||
rm -rf "${BASEPATH}/predict/build"
|
||||
mkdir -pv "${BASEPATH}/predict/build"
|
||||
rm -rf "${BASEPATH}/predict/output"
|
||||
mkdir -pv "${BASEPATH}/predict/output"
|
||||
|
||||
if [[ "$PREDICT_PLATFORM" == "arm64" ]]; then
|
||||
if [ "${ANDROID_NDK}" ]; then
|
||||
echo -e "\e[31mANDROID_NDK_PATH=$ANDROID_NDK \e[0m"
|
||||
else
|
||||
echo -e "\e[31mplease set ANDROID_NDK_PATH in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r16b/ \e[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "${LITE_PLATFORM}" == "arm64" ]]; then
|
||||
checkndk
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" -DANDROID_NATIVE_API_LEVEL="19" \
|
||||
-DANDROID_NDK="${ANDROID_NDK}" -DANDROID_ABI="arm64-v8a" -DANDROID_TOOLCHAIN_NAME="aarch64-linux-android-clang" \
|
||||
-DANDROID_STL="c++_shared" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DSUPPORT_TRAIN=${SUPPORT_TRAIN} \
|
||||
-DBUILD_DEVICE=on -DPLATFORM_ARM64=on -DBUILD_CONVERTER=off -DENABLE_NEON=on -DENABLE_FP16="off" \
|
||||
-DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OPENCL_OFFLINE_COMPILE} "${BASEPATH}/mindspore/lite"
|
||||
elif [[ "${LITE_PLATFORM}" == "arm32" ]]; then
|
||||
checkndk
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" -DANDROID_NATIVE_API_LEVEL="19" \
|
||||
-DANDROID_NDK="${ANDROID_NDK}" -DANDROID_ABI="armeabi-v7a" -DANDROID_TOOLCHAIN_NAME="clang" \
|
||||
-DANDROID_STL="c++_shared" -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
|
||||
-DBUILD_DEVICE=on -DPLATFORM_ARM32=on -DENABLE_NEON=on -DSUPPORT_TRAIN=${SUPPORT_TRAIN} -DBUILD_CONVERTER=off \
|
||||
-DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OPENCL_OFFLINE_COMPILE} "${BASEPATH}/mindspore/lite"
|
||||
else
|
||||
cmake -DBUILD_DEVICE=on -DPLATFORM_ARM64=off -DBUILD_CONVERTER=${ENABLE_CONVERTER} -DSUPPORT_TRAIN=${SUPPORT_TRAIN} \
|
||||
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DSUPPORT_GPU=${ENABLE_GPU} -DOFFLINE_COMPILE=${OPENCL_OFFLINE_COMPILE} "${BASEPATH}/mindspore/lite"
|
||||
fi
|
||||
VERBOSE=2 make -j$THREAD_NUM
|
||||
COMPILE_RET=$?
|
||||
#build flatbuf
|
||||
cd "${BASEPATH}/third_party/flatbuffers"
|
||||
rm -rf build && mkdir -p build && cd build && cmake .. && make -j$THREAD_NUM
|
||||
FLATC="${BASEPATH}"/third_party/flatbuffers/build/flatc
|
||||
cd "${BASEPATH}"/predict/schema && mkdir -p "${BASEPATH}"/predict/schema/inner
|
||||
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b
|
||||
find . -name "*.fbs" -print0 | xargs -0 "${FLATC}" -c -b --reflect-types --gen-mutable --reflect-names --gen-object-api -o ${BASEPATH}/predict/schema/inner
|
||||
|
||||
if [[ "${COMPILE_RET}" -ne 0 ]]; then
|
||||
echo "---------------- mindspore lite: build failed ----------------"
|
||||
else
|
||||
mkdir -pv ${BASEPATH}/mindspore/lite/output/
|
||||
if [[ "$LITE_PLATFORM" == "x86_64" ]]; then
|
||||
OUTPUT_DIR=${BASEPATH}/output/MSLite-0.6.0-linux_x86_64
|
||||
rm -rf ${OUTPUT_DIR} && mkdir -p ${OUTPUT_DIR} && cd ${OUTPUT_DIR}
|
||||
mkdir -p ${OUTPUT_DIR}/converter && mkdir -p ${OUTPUT_DIR}/time_profile
|
||||
mkdir -p ${OUTPUT_DIR}/benchmark && mkdir -p ${OUTPUT_DIR}/include && mkdir -p ${OUTPUT_DIR}/lib
|
||||
mkdir -p ${OUTPUT_DIR}/third_party
|
||||
cp ${BASEPATH}/mindspore/lite/build/tools/converter/converter_lite ${OUTPUT_DIR}/converter/
|
||||
cp ${BASEPATH}/mindspore/lite/build/tools/benchmark/benchmark ${OUTPUT_DIR}/benchmark/
|
||||
cp ${BASEPATH}/mindspore/lite/include/*.h ${OUTPUT_DIR}/include/
|
||||
mkdir -p ${OUTPUT_DIR}/include/ir/dtype/
|
||||
cp ${BASEPATH}/mindspore/core/ir/dtype/type_id.h ${OUTPUT_DIR}/include/ir/dtype/
|
||||
mkdir -p ${OUTPUT_DIR}/include/schema/
|
||||
cp ${BASEPATH}/mindspore/lite/schema/*.h ${OUTPUT_DIR}/include/schema/
|
||||
cp ${BASEPATH}/mindspore/lite/build/src/libmindspore-lite.so ${OUTPUT_DIR}/lib/
|
||||
mkdir -p ${OUTPUT_DIR}/third_party/protobuf/lib
|
||||
cp -r ${BASEPATH}/third_party/protobuf/build/include/ ${OUTPUT_DIR}/third_party/protobuf/
|
||||
cp -r ${BASEPATH}/third_party/protobuf/build/lib/libprotobuf.so.19 ${OUTPUT_DIR}/third_party/protobuf/lib/
|
||||
cp -r ${BASEPATH}/third_party/protobuf/build/lib/libprotobuf.so.19.0.0 ${OUTPUT_DIR}/third_party/protobuf/lib/
|
||||
mkdir -p ${OUTPUT_DIR}/third_party/flatbuffers
|
||||
cp -r ${BASEPATH}/third_party/flatbuffers/include/ ${OUTPUT_DIR}/third_party/flatbuffers/
|
||||
cd ..
|
||||
tar -czf MSLite-0.6.0-linux_x86_64.tar.gz MSLite-0.6.0-linux_x86_64/ --warning=no-file-changed
|
||||
sha256sum MSLite-0.6.0-linux_x86_64.tar.gz > MSLite-0.6.0-linux_x86_64.tar.gz.sha256
|
||||
rm -rf MSLite-0.6.0-linux_x86_64/
|
||||
elif [[ "$LITE_PLATFORM" == "arm64" ]]; then
|
||||
OUTPUT_DIR=${BASEPATH}/output/MSLite-0.6.0-linux_arm64
|
||||
rm -rf ${OUTPUT_DIR} && mkdir -p ${OUTPUT_DIR} && cd ${OUTPUT_DIR}
|
||||
mkdir -p ${OUTPUT_DIR}/time_profile && mkdir -p ${OUTPUT_DIR}/benchmark
|
||||
mkdir -p ${OUTPUT_DIR}/include && mkdir -p ${OUTPUT_DIR}/lib
|
||||
mkdir -p ${OUTPUT_DIR}/third_party
|
||||
cp ${BASEPATH}/mindspore/lite/build/tools/benchmark/benchmark ${OUTPUT_DIR}/benchmark/
|
||||
cp ${BASEPATH}/mindspore/lite/include/*.h ${OUTPUT_DIR}/include/
|
||||
mkdir -p ${OUTPUT_DIR}/include/ir/dtype/
|
||||
cp ${BASEPATH}/mindspore/core/ir/dtype/type_id.h ${OUTPUT_DIR}/include/ir/dtype/
|
||||
mkdir -p ${OUTPUT_DIR}/include/schema/
|
||||
cp ${BASEPATH}/mindspore/lite/schema/*.h ${OUTPUT_DIR}/include/schema/
|
||||
cp ${BASEPATH}/mindspore/lite/build/src/libmindspore-lite.so ${OUTPUT_DIR}/lib/
|
||||
mkdir -p ${OUTPUT_DIR}/third_party/flatbuffers
|
||||
cp -r ${BASEPATH}/third_party/flatbuffers/include/ ${OUTPUT_DIR}/third_party/flatbuffers/
|
||||
cd ..
|
||||
tar -czf MSLite-0.6.0-linux_arm64.tar.gz MSLite-0.6.0-linux_arm64/ --warning=no-file-changed
|
||||
sha256sum MSLite-0.6.0-linux_arm64.tar.gz > MSLite-0.6.0-linux_arm64.tar.gz.sha256
|
||||
rm -rf MSLite-0.6.0-linux_arm64/
|
||||
elif [[ "$LITE_PLATFORM" == "arm32" ]]; then
|
||||
OUTPUT_DIR=${BASEPATH}/output/MSLite-0.6.0-linux_arm32
|
||||
rm -rf ${OUTPUT_DIR} && mkdir -p ${OUTPUT_DIR} && cd ${OUTPUT_DIR}
|
||||
mkdir -p ${OUTPUT_DIR}/time_profile && mkdir -p ${OUTPUT_DIR}/benchmark
|
||||
mkdir -p ${OUTPUT_DIR}/include && mkdir -p ${OUTPUT_DIR}/lib
|
||||
mkdir -p ${OUTPUT_DIR}/third_party
|
||||
cp ${BASEPATH}/mindspore/lite/build/tools/benchmark/benchmark ${OUTPUT_DIR}/benchmark/
|
||||
cp ${BASEPATH}/mindspore/lite/include/*.h ${OUTPUT_DIR}/include/
|
||||
mkdir -p ${OUTPUT_DIR}/include/ir/dtype/
|
||||
cp ${BASEPATH}/mindspore/core/ir/dtype/type_id.h ${OUTPUT_DIR}/include/ir/dtype/
|
||||
mkdir -p ${OUTPUT_DIR}/include/schema/
|
||||
cp ${BASEPATH}/mindspore/lite/schema/*.h ${OUTPUT_DIR}/include/schema/
|
||||
cp ${BASEPATH}/mindspore/lite/build/src/libmindspore-lite.so ${OUTPUT_DIR}/lib/
|
||||
mkdir -p ${OUTPUT_DIR}/third_party/flatbuffers
|
||||
cp -r ${BASEPATH}/third_party/flatbuffers/include/ ${OUTPUT_DIR}/third_party/flatbuffers/
|
||||
cd ..
|
||||
tar -czf MSLite-0.6.0-linux_arm32.tar.gz MSLite-0.6.0-linux_arm32/ --warning=no-file-changed
|
||||
sha256sum MSLite-0.6.0-linux_arm32.tar.gz > MSLite-0.6.0-linux_arm32.tar.gz.sha256
|
||||
rm -rf MSLite-0.6.0-linux_arm32/
|
||||
# check LLVM_PATH
|
||||
if [ "${LLVM_PATH}" == "" ]; then
|
||||
echo "Please set LLVM_PATH in env for example export LLVM_PATH=/xxxx/bin/llvm-config"
|
||||
exit
|
||||
fi
|
||||
|
||||
#build tvm
|
||||
tvm_open_source="${BASEPATH}/third_party/incubator-tvm"
|
||||
tvm_kernel_build="${BASEPATH}/predict/module/tvm_kernel"
|
||||
if [ ! -f "${tvm_kernel_build}"/incubator-tvm/build/libtvm.so ]; then
|
||||
rm -fr "${tvm_kernel_build}"/incubator-tvm
|
||||
cp -fr "${tvm_open_source}" "${tvm_kernel_build}"
|
||||
mkdir -p "${tvm_kernel_build}"/incubator-tvm/build
|
||||
patch -d "${tvm_kernel_build}"/incubator-tvm -p1 < "${BASEPATH}"/third_party/patch/predict/0001-RetBugFix-CustomRuntime_v06.patch
|
||||
cp "${tvm_kernel_build}"/lite/src/codegen/llvm/lite_rtfunc_reset.cc "${tvm_kernel_build}"/incubator-tvm/src/codegen/llvm/
|
||||
cp "${tvm_open_source}"/cmake/config.cmake "${tvm_kernel_build}"/incubator-tvm
|
||||
if [ "${LLVM_PATH}" ]; then
|
||||
sed -i "s#set(USE_LLVM .*)#set(USE_LLVM \"${LLVM_PATH}\")#g" "${tvm_kernel_build}"/incubator-tvm/config.cmake
|
||||
else
|
||||
echo "need set LLVM_PATH in env for example export LLVM_PATH=/xxxx/bin/llvm-config"
|
||||
fi
|
||||
echo "---------------- mindspore lite: build success ----------------"
|
||||
cd "${tvm_kernel_build}"/incubator-tvm/build
|
||||
cmake ..
|
||||
make -j$THREAD_NUM
|
||||
else
|
||||
cd "${tvm_kernel_build}"/incubator-tvm/build
|
||||
make -j$THREAD_NUM
|
||||
fi
|
||||
|
||||
#gen op
|
||||
predict_tvm_op_lib_path="${BASEPATH}/predict/module/tvm_kernel/build/lib_x86"
|
||||
predict_platform="x86"
|
||||
if [[ "$PREDICT_PLATFORM" == "arm64" ]]; then
|
||||
predict_tvm_op_lib_path="${BASEPATH}/predict/module/tvm_kernel/build/lib_arm64"
|
||||
predict_platform="arm64"
|
||||
fi
|
||||
|
||||
need_get_libs=true
|
||||
if [ -d "${predict_tvm_op_lib_path}" ]; then
|
||||
file_list=$(ls "${predict_tvm_op_lib_path}")
|
||||
if [ -n "${file_list}" ]; then
|
||||
libstime=$(stat -c %Y "${predict_tvm_op_lib_path}"/* | sort -u | tail -n1)
|
||||
pythontime=$(find "${BASEPATH}"/predict/module/tvm_kernel/lite/python/ -name "*.py" -exec stat -c %Y {} \; |
|
||||
sort -u | tail -n1)
|
||||
if [ "${libstime}" -ge "${pythontime}" ]; then
|
||||
need_get_libs=false
|
||||
else
|
||||
rm -fr "${predict_tvm_op_lib_path}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if $need_get_libs; then
|
||||
PYTHONPATH_OLD=${PYTHONPATH}
|
||||
export PYTHONPATH="${tvm_kernel_build}/incubator-tvm/python:${tvm_kernel_build}/incubator-tvm/topi/python:${tvm_kernel_build}/incubator-tvm/nnvm/python:${tvm_kernel_build}/lite/python:"
|
||||
cd "${BASEPATH}"/predict/module/tvm_kernel/lite/python/at_ops
|
||||
python3 at_gen_strip.py ${predict_platform}
|
||||
export PYTHONPATH=${PYTHONPATH_OLD}
|
||||
fi
|
||||
|
||||
cd "${BASEPATH}/predict/build"
|
||||
if [[ "$PREDICT_PLATFORM" == "arm64" ]]; then
|
||||
cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK}/build/cmake/android.toolchain.cmake" \
|
||||
-DANDROID_NATIVE_API_LEVEL=android-19 -DANDROID_NDK="${ANDROID_NDK}" \
|
||||
-DANDROID_TOOLCHAIN_NAME="aarch64-linux-android-clang" -DANDROID_STL="c++_shared" \
|
||||
-DANDROID_ABI="arm64-v8a" -DENABLE_PREDICT_ARM64=ON -DANDROID_ALLOW_UNDEFINED_SYMBOLS=TRUE ..
|
||||
elif [[ "$PREDICT_PLATFORM" == "x86_64" ]]; then
|
||||
cmake ..
|
||||
fi
|
||||
|
||||
make ${VERBOSE} -j$THREAD_NUM
|
||||
if [[ "$PREDICT_PLATFORM" == "x86_64" ]]; then
|
||||
cd "${BASEPATH}/predict/build/test" && ./run_tests.sh
|
||||
fi
|
||||
|
||||
# copy securec include files
|
||||
mkdir -p "${BASEPATH}/predict/output/include/securec/include"
|
||||
cp "${BASEPATH}"/third_party/securec/include/* "${BASEPATH}"/predict/output/include/securec/include
|
||||
|
||||
cd "${BASEPATH}/predict/output/"
|
||||
if [[ "$PREDICT_PLATFORM" == "x86_64" ]]; then
|
||||
tar -cf MSPredict-0.1.0-linux_x86_64.tar.gz include/ lib/ --warning=no-file-changed
|
||||
elif [[ "$PREDICT_PLATFORM" == "arm64" ]]; then
|
||||
tar -cf MSPredict-0.1.0-linux_aarch64.tar.gz include/ lib/ --warning=no-file-changed
|
||||
fi
|
||||
echo "success to build predict project!"
|
||||
}
|
||||
|
||||
if [[ "X$COMPILE_LITE" = "Xon" ]]; then
|
||||
build_lite
|
||||
if [[ "X$COMPILE_PREDICT" = "Xon" ]]; then
|
||||
build_predict
|
||||
echo "---------------- mindspore: build end ----------------"
|
||||
exit
|
||||
else
|
||||
build_mindspore
|
||||
fi
|
||||
|
||||
if [[ "X$INC_BUILD" = "Xoff" ]]; then
|
||||
if [[ "X$ENABLE_GE" = "Xon" ]]; then
|
||||
bash "${PROJECT_PATH}/package.sh" ge
|
||||
elif [[ "X$ENABLE_GPU" = "Xon" ]]; then
|
||||
bash "${PROJECT_PATH}/package.sh" ms gpu
|
||||
elif [[ "X$ENABLE_D" = "Xon" ]] || [[ "X$ENABLE_CPU" = "Xon" ]]; then
|
||||
bash "${PROJECT_PATH}/package.sh" ms
|
||||
else
|
||||
bash "${PROJECT_PATH}/package.sh" debug
|
||||
fi
|
||||
fi
|
||||
|
||||
cp -rf ${BUILD_PATH}/package/mindspore/lib ${BUILD_PATH}/../mindspore
|
||||
cp -rf ${BUILD_PATH}/package/mindspore/*.so ${BUILD_PATH}/../mindspore
|
||||
|
||||
if [[ -d "${BUILD_PATH}/package/build" ]]; then
|
||||
rm -rf "${BUILD_PATH}/package/build"
|
||||
fi
|
||||
echo "---------------- mindspore: build end ----------------"
|
||||
|
|
|
@ -9,13 +9,12 @@ include(${GE_SOURCE_DIR}/cmake/external_libs/eigen.cmake)
|
|||
include(${GE_SOURCE_DIR}/cmake/external_libs/gtest.cmake)
|
||||
include(${GE_SOURCE_DIR}/cmake/external_libs/protobuf.cmake)
|
||||
include(${GE_SOURCE_DIR}/cmake/external_libs/onnx.cmake)
|
||||
include(${GE_SOURCE_DIR}/cmake/external_libs/securec.cmake)
|
||||
|
||||
# for CPU/GPU mode, find slog from local prebuild
|
||||
# for CPU/GPU mode, find c_sec and slog from local prebuild
|
||||
if (NOT ENABLE_D)
|
||||
set(GE_PREBUILD_PATH ${GE_SOURCE_DIR}/third_party/prebuild/${CMAKE_HOST_SYSTEM_PROCESSOR})
|
||||
find_library(c_sec libc_sec.so ${GE_PREBUILD_PATH})
|
||||
find_library(slog libslog.so ${GE_PREBUILD_PATH})
|
||||
find_library(error_manager liberror_manager.so ${GE_PREBUILD_PATH})
|
||||
elseif (DEFINED ENV{D_LINK_PATH})
|
||||
set(GE_LIB_PATH $ENV{D_LINK_PATH})
|
||||
set(GE_SYS_ARCH "")
|
||||
|
@ -29,6 +28,7 @@ elseif (DEFINED ENV{D_LINK_PATH})
|
|||
message(FATAL_ERROR "Running on a unsupported architecture: ${SYSTEM_TYPE}, build terminated")
|
||||
endif()
|
||||
set(GE_LIB_PATH ${GE_LIB_PATH}/${GE_SYS_ARCH})
|
||||
find_library(c_sec libc_sec.so ${GE_LIB_PATH})
|
||||
find_library(slog libslog.so ${GE_LIB_PATH})
|
||||
find_library(mmpa libmmpa.so ${GE_LIB_PATH})
|
||||
find_library(runtime libruntime.so ${GE_LIB_PATH})
|
||||
|
@ -37,26 +37,20 @@ elseif (DEFINED ENV{D_LINK_PATH})
|
|||
find_library(hccl libhccl.so ${GE_LIB_PATH})
|
||||
find_library(cce libcce.so ${GE_LIB_PATH})
|
||||
find_library(resource libresource.so ${GE_LIB_PATH})
|
||||
find_library(error_manager liberror_manager.so ${GE_LIB_PATH})
|
||||
else()
|
||||
# Ascend mode
|
||||
if(DEFINED ENV{ASCEND_CUSTOM_PATH})
|
||||
set(ASCEND_PATH $ENV{ASCEND_CUSTOM_PATH})
|
||||
else()
|
||||
set(ASCEND_PATH /usr/local/Ascend)
|
||||
endif()
|
||||
set(ASCEND_DRIVER_PATH ${ASCEND_PATH}/driver/lib64/common)
|
||||
set(ASCEND_RUNTIME_PATH ${ASCEND_PATH}/fwkacllib/lib64)
|
||||
find_library(c_sec libc_sec.so ${ASCEND_DRIVER_PATH})
|
||||
find_library(slog libslog.so ${ASCEND_DRIVER_PATH})
|
||||
find_library(mmpa libmmpa.so ${ASCEND_DRIVER_PATH})
|
||||
find_library(cce libcce.so ${ASCEND_RUNTIME_PATH})
|
||||
find_library(hccl libhccl.so ${ASCEND_RUNTIME_PATH})
|
||||
find_library(runtime libruntime.so ${ASCEND_RUNTIME_PATH})
|
||||
find_library(msprof libmsprof.so ${ASCEND_RUNTIME_PATH})
|
||||
find_library(register libregister.so ${ASCEND_RUNTIME_PATH})
|
||||
find_library(resource libresource.so ${ASCEND_RUNTIME_PATH})
|
||||
find_library(error_manager liberror_manager.so ${ASCEND_RUNTIME_PATH})
|
||||
set(HIAI_INSTALLED_DIR /usr/local/HiAI)
|
||||
set(HIAI_DRIVER_DIR ${HIAI_INSTALLED_DIR}/driver/lib64)
|
||||
set(HIAI_RUNTIME_DIR ${HIAI_INSTALLED_DIR}/runtime/lib64)
|
||||
find_library(c_sec libc_sec.so ${HIAI_DRIVER_DIR})
|
||||
find_library(slog libslog.so ${HIAI_DRIVER_DIR})
|
||||
find_library(mmpa libmmpa.so ${HIAI_DRIVER_DIR})
|
||||
|
||||
find_library(cce libcce.so ${HIAI_RUNTIME_DIR})
|
||||
find_library(hccl libhccl.so ${HIAI_RUNTIME_DIR})
|
||||
find_library(runtime libruntime.so ${HIAI_RUNTIME_DIR})
|
||||
find_library(msprof libmsprof.so ${HIAI_RUNTIME_DIR})
|
||||
find_library(register libregister.so ${HIAI_RUNTIME_DIR})
|
||||
find_library(resource libresource.so ${HIAI_RUNTIME_DIR})
|
||||
endif()
|
||||
|
||||
# compile libraries from following directories
|
||||
|
@ -66,7 +60,7 @@ set(_ge_tmp_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
|
|||
string(REPLACE " -Wall" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
string(REPLACE " -Werror" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
# force __FILE__ to show relative path of file, from source directory
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D__FILE__='\"$(subst $(realpath ${CMAKE_SOURCE_DIR})/,,$(abspath $<))\"' -Wno-builtin-macro-redefined")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D__FILE__='\"$(subst ${CMAKE_SOURCE_DIR}/,,$(abspath $<))\"' -Wno-builtin-macro-redefined")
|
||||
add_subdirectory(${GE_SOURCE_DIR}/src/common/graph)
|
||||
if(ENABLE_D)
|
||||
add_subdirectory(${GE_SOURCE_DIR}/src/ge/common)
|
||||
|
|
|
@ -9,9 +9,6 @@ if (NOT TARGET securec)
|
|||
set(_ms_tmp_CMAKE_C_FLAGS ${CMAKE_C_FLAGS})
|
||||
|
||||
set(CMAKE_C_FLAGS "${SECURE_CXX_FLAGS}")
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
add_compile_definitions(SECUREC_ONLY_DECLARE_MEMSET)
|
||||
endif()
|
||||
add_subdirectory(${CMAKE_CURRENT_LIST_DIR}/../third_party/securec ${CMAKE_BINARY_DIR}/securec)
|
||||
set(CMAKE_POSITION_INDEPENDENT_CODE ${_ms_tmp_CMAKE_POSITION_INDEPENDENT_CODE})
|
||||
set(CMAKE_C_FLAGS ${_ms_tmp_CMAKE_C_FLAGS})
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
mindspore_add_pkg(absl
|
||||
VER 20200225.2
|
||||
LIBS absl_strings absl_throw_delegate absl_raw_logging_internal absl_int128 absl_bad_optional_access
|
||||
URL https://github.com/abseil/abseil-cpp/archive/20200225.2.tar.gz
|
||||
MD5 73f2b6e72f1599a9139170c29482ddc4
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE:STRING=Release -DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=TRUE)
|
||||
|
||||
include_directories(${absl_INC})
|
||||
|
||||
add_library(mindspore::absl_strings ALIAS absl::absl_strings)
|
||||
add_library(mindspore::absl_throw_delegate ALIAS absl::absl_throw_delegate)
|
||||
add_library(mindspore::absl_raw_logging_internal ALIAS absl::absl_raw_logging_internal)
|
||||
add_library(mindspore::absl_int128 ALIAS absl::absl_int128)
|
||||
add_library(mindspore::absl_bad_optional_access ALIAS absl::absl_bad_optional_access)
|
|
@ -1,12 +0,0 @@
|
|||
mindspore_add_pkg(c-ares
|
||||
VER 1.15.0
|
||||
LIBS cares
|
||||
URL https://github.com/c-ares/c-ares/releases/download/cares-1_15_0/c-ares-1.15.0.tar.gz
|
||||
MD5 d2391da274653f7643270623e822dff7
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE:STRING=Release
|
||||
-DCARES_SHARED:BOOL=OFF
|
||||
-DCARES_STATIC:BOOL=ON
|
||||
-DCARES_STATIC_PIC:BOOL=ON)
|
||||
|
||||
include_directories(${c-ares_INC})
|
||||
add_library(mindspore::cares ALIAS c-ares::cares)
|
|
@ -1,11 +0,0 @@
|
|||
set(cppjieba_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
set(cppjieba_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
mindspore_add_pkg(cppjieba
|
||||
VER 5.0.3
|
||||
HEAD_ONLY ./
|
||||
URL https://github.com/yanyiwu/cppjieba/archive/v5.0.3.tar.gz
|
||||
MD5 b8b3f7a73032c9ce9daafa4f67196c8c
|
||||
PATCHES ${CMAKE_SOURCE_DIR}/third_party/patch/cppjieba/cppjieba.patch001)
|
||||
include_directories(${cppjieba_INC}include)
|
||||
include_directories(${cppjieba_INC}deps)
|
||||
add_library(mindspore::cppjieba ALIAS cppjieba)
|
|
@ -3,5 +3,3 @@ mindspore_add_pkg(dlpack
|
|||
HEAD_ONLY ./
|
||||
URL https://github.com/dmlc/dlpack/archive/0acb731e0e43d15deee27b66f10e4c5b4e667913.zip
|
||||
MD5 6b8093f17ad4e830d3c63eb3171c4b45)
|
||||
|
||||
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
mindspore_add_pkg(dmlc-core
|
||||
mindspore_add_pkg(dmlc_core
|
||||
VER 0.3
|
||||
HEAD_ONLY ./
|
||||
URL https://github.com/dmlc/dmlc-core/archive/808f485387f9a03f78fa9f1159f387d0d91b7a28.zip
|
||||
MD5 ea36f94c57752bf40fb02dfc362f1ed9)
|
||||
|
||||
|
||||
|
|
|
@ -1,8 +1,5 @@
|
|||
set(flatbuffers_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
set(flatbuffers_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
if (WIN32)
|
||||
set(flatbuffers_USE_STATIC_LIBS ON)
|
||||
endif()
|
||||
mindspore_add_pkg(flatbuffers
|
||||
VER 1.11.0
|
||||
LIBS flatbuffers
|
||||
|
@ -11,7 +8,6 @@ mindspore_add_pkg(flatbuffers
|
|||
MD5 02c64880acb89dbd57eebacfd67200d8
|
||||
CMAKE_OPTION -DFLATBUFFERS_BUILD_TESTS=OFF )
|
||||
|
||||
include_directories(${flatbuffers_INC})
|
||||
add_library(mindspore::flatbuffers ALIAS flatbuffers::flatbuffers)
|
||||
add_executable(mindspore::flatc ALIAS flatbuffers::flatc)
|
||||
include_directories(${flatbuffers_INC})
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
set(glog_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2 ${SECURE_CXX_FLAGS} -D_GLIBCXX_USE_CXX11_ABI=0")
|
||||
set(glog_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2 ${SECURE_CXX_FLAGS}")
|
||||
set(glog_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
mindspore_add_pkg(glog
|
||||
VER 0.4.0
|
||||
|
|
|
@ -1,110 +0,0 @@
|
|||
set(grpc_USE_STATIC_LIBS ON)
|
||||
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
|
||||
set(grpc_CXXFLAGS "-fstack-protector-all -Wno-uninitialized -Wno-unused-parameter -fPIC -fvisibility=hidden -D_FORTIFY_SOURCE=2 -O2")
|
||||
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Windows")
|
||||
set(grpc_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -fPIC -fvisibility=hidden -D_FORTIFY_SOURCE=2 -O2")
|
||||
else()
|
||||
set(grpc_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -fPIC -fvisibility=hidden -D_FORTIFY_SOURCE=2 -D_GLIBCXX_USE_CXX11_ABI=0 -O2")
|
||||
endif()
|
||||
|
||||
set(grpc_LDFLAGS "-Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
|
||||
|
||||
if (EXISTS ${protobuf_ROOT}/lib64)
|
||||
set(_FINDPACKAGE_PROTOBUF_CONFIG_DIR "${protobuf_ROOT}/lib64/cmake/protobuf")
|
||||
else()
|
||||
set(_FINDPACKAGE_PROTOBUF_CONFIG_DIR "${protobuf_ROOT}/lib/cmake/protobuf")
|
||||
endif()
|
||||
message("grpc using Protobuf_DIR : " ${_FINDPACKAGE_PROTOBUF_CONFIG_DIR})
|
||||
|
||||
if (EXISTS ${absl_ROOT}/lib64)
|
||||
set(_FINDPACKAGE_ABSL_CONFIG_DIR "${absl_ROOT}/lib64/cmake/absl")
|
||||
else()
|
||||
set(_FINDPACKAGE_ABSL_CONFIG_DIR "${absl_ROOT}/lib/cmake/absl")
|
||||
endif()
|
||||
message("grpc using absl_DIR : " ${_FINDPACKAGE_ABSL_CONFIG_DIR})
|
||||
|
||||
set(_CMAKE_ARGS_OPENSSL_ROOT_DIR "")
|
||||
if (OPENSSL_ROOT_DIR)
|
||||
set(_CMAKE_ARGS_OPENSSL_ROOT_DIR "-DOPENSSL_ROOT_DIR:PATH=${OPENSSL_ROOT_DIR}")
|
||||
endif()
|
||||
|
||||
mindspore_add_pkg(grpc
|
||||
VER 1.27.3
|
||||
LIBS grpc++ grpc gpr upb address_sorting
|
||||
EXE grpc_cpp_plugin
|
||||
URL https://github.com/grpc/grpc/archive/v1.27.3.tar.gz
|
||||
MD5 0c6c3fc8682d4262dd0e5e6fabe1a7e2
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE:STRING=Release
|
||||
-DgRPC_INSTALL:BOOL=ON
|
||||
-DgRPC_BUILD_TESTS:BOOL=OFF
|
||||
-DgRPC_PROTOBUF_PROVIDER:STRING=package
|
||||
-DgRPC_PROTOBUF_PACKAGE_TYPE:STRING=CONFIG
|
||||
-DProtobuf_DIR:PATH=${_FINDPACKAGE_PROTOBUF_CONFIG_DIR}
|
||||
-DgRPC_ZLIB_PROVIDER:STRING=package
|
||||
-DZLIB_ROOT:PATH=${zlib_ROOT}
|
||||
-DgRPC_ABSL_PROVIDER:STRING=package
|
||||
-Dabsl_DIR:PATH=${_FINDPACKAGE_ABSL_CONFIG_DIR}
|
||||
-DgRPC_CARES_PROVIDER:STRING=package
|
||||
-Dc-ares_DIR:PATH=${c-ares_ROOT}/lib/cmake/c-ares
|
||||
-DgRPC_SSL_PROVIDER:STRING=package
|
||||
${_CMAKE_ARGS_OPENSSL_ROOT_DIR}
|
||||
)
|
||||
|
||||
include_directories(${grpc_INC})
|
||||
|
||||
add_library(mindspore::grpc++ ALIAS grpc::grpc++)
|
||||
|
||||
# link other grpc libs
|
||||
target_link_libraries(grpc::grpc++ INTERFACE grpc::grpc grpc::gpr grpc::upb grpc::address_sorting)
|
||||
|
||||
# link built dependencies
|
||||
target_link_libraries(grpc::grpc++ INTERFACE mindspore::z)
|
||||
target_link_libraries(grpc::grpc++ INTERFACE mindspore::cares)
|
||||
target_link_libraries(grpc::grpc++ INTERFACE mindspore::absl_strings mindspore::absl_throw_delegate
|
||||
mindspore::absl_raw_logging_internal mindspore::absl_int128 mindspore::absl_bad_optional_access)
|
||||
|
||||
# link system openssl
|
||||
find_package(OpenSSL REQUIRED)
|
||||
target_link_libraries(grpc::grpc++ INTERFACE OpenSSL::SSL OpenSSL::Crypto)
|
||||
|
||||
|
||||
function(ms_grpc_generate c_var h_var)
|
||||
if(NOT ARGN)
|
||||
message(SEND_ERROR "Error: ms_grpc_generate() called without any proto files")
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(${c_var})
|
||||
set(${h_var})
|
||||
|
||||
foreach(file ${ARGN})
|
||||
get_filename_component(abs_file ${file} ABSOLUTE)
|
||||
get_filename_component(file_name ${file} NAME_WE)
|
||||
get_filename_component(file_dir ${abs_file} PATH)
|
||||
file(RELATIVE_PATH rel_path ${CMAKE_CURRENT_SOURCE_DIR} ${file_dir})
|
||||
|
||||
list(APPEND ${c_var} "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.cc")
|
||||
list(APPEND ${h_var} "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.h")
|
||||
list(APPEND ${c_var} "${CMAKE_BINARY_DIR}/proto/${file_name}.grpc.pb.cc")
|
||||
list(APPEND ${h_var} "${CMAKE_BINARY_DIR}/proto/${file_name}.grpc.pb.h")
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.cc"
|
||||
"${CMAKE_BINARY_DIR}/proto/${file_name}.pb.h"
|
||||
"${CMAKE_BINARY_DIR}/proto/${file_name}.grpc.pb.cc"
|
||||
"${CMAKE_BINARY_DIR}/proto/${file_name}.grpc.pb.h"
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_BINARY_DIR}/proto"
|
||||
COMMAND protobuf::protoc --version
|
||||
COMMAND protobuf::protoc -I${file_dir} --cpp_out=${CMAKE_BINARY_DIR}/proto
|
||||
--grpc_out=${CMAKE_BINARY_DIR}/proto --plugin=protoc-gen-grpc=$<TARGET_FILE:grpc::grpc_cpp_plugin> ${abs_file}
|
||||
DEPENDS protobuf::protoc grpc::grpc_cpp_plugin ${abs_file}
|
||||
COMMENT "Running C++ gRPC compiler on ${file}" VERBATIM)
|
||||
endforeach()
|
||||
|
||||
set_source_files_properties(${${c_var}} ${${h_var}} PROPERTIES GENERATED TRUE)
|
||||
set(${c_var} ${${c_var}} PARENT_SCOPE)
|
||||
set(${h_var} ${${h_var}} PARENT_SCOPE)
|
||||
|
||||
endfunction()
|
|
@ -1,4 +1,4 @@
|
|||
set(gtest_CXXFLAGS "-D_FORTIFY_SOURCE=2 -D_GLIBCXX_USE_CXX11_ABI=0 -O2")
|
||||
set(gtest_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
set(gtest_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
mindspore_add_pkg(gtest
|
||||
VER 1.8.0
|
||||
|
@ -9,5 +9,5 @@ mindspore_add_pkg(gtest
|
|||
-DCMAKE_MACOSX_RPATH=TRUE -Dgtest_disable_pthreads=ON)
|
||||
include_directories(${gtest_INC})
|
||||
add_library(mindspore::gtest ALIAS gtest::gtest)
|
||||
file(COPY ${gtest_LIBPATH}/libgtest${CMAKE_SHARED_LIBRARY_SUFFIX} DESTINATION ${CMAKE_BINARY_DIR}/googletest/googlemock/gtest)
|
||||
file(COPY ${gtest_LIBPATH}/libgtest_main${CMAKE_SHARED_LIBRARY_SUFFIX} DESTINATION ${CMAKE_BINARY_DIR}/googletest/googlemock/gtest)
|
||||
file(COPY ${gtest_LIBPATH}/libgtest.so DESTINATION ${CMAKE_BINARY_DIR}/googletest/googlemock/gtest)
|
||||
file(COPY ${gtest_LIBPATH}/libgtest_main.so DESTINATION ${CMAKE_BINARY_DIR}/googletest/googlemock/gtest)
|
||||
|
|
|
@ -1,19 +0,0 @@
|
|||
set(LIB_ICU_COMMON icuuc)
|
||||
set(LIB_ICU_DATA icudata)
|
||||
set(LIB_ICU_I18N icui18n)
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
message("icu4c thirdparty do not support windows currently.")
|
||||
else()
|
||||
mindspore_add_pkg(icu4c
|
||||
VER 67.1
|
||||
LIBS ${LIB_ICU_COMMON} ${LIB_ICU_DATA} ${LIB_ICU_I18N}
|
||||
URL https://github.com/unicode-org/icu/archive/release-67-1.tar.gz
|
||||
MD5 0c2662a2b0bc80b0eb56495205247c8f
|
||||
CONFIGURE_COMMAND ${CMAKE_SOURCE_DIR}/scripts/build_icu4c.sh
|
||||
)
|
||||
include_directories(${icu4c_INC})
|
||||
add_library(mindspore::icuuc ALIAS icu4c::${LIB_ICU_COMMON})
|
||||
add_library(mindspore::icudata ALIAS icu4c::${LIB_ICU_DATA})
|
||||
add_library(mindspore::icui18n ALIAS icu4c::${LIB_ICU_I18N})
|
||||
add_definitions(-D ENABLE_ICU4C)
|
||||
endif()
|
|
@ -1,10 +1,6 @@
|
|||
set(jpeg_turbo_USE_STATIC_LIBS ON)
|
||||
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
|
||||
set(jpeg_turbo_CFLAGS "-fstack-protector-all -Wno-uninitialized -Wno-unused-parameter -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
else()
|
||||
set(jpeg_turbo_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
endif()
|
||||
|
||||
set(jpeg_turbo_USE_STATIC_LIBS ON)
|
||||
set(jpeg_turbo_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(jpeg_turbo_LDFLAGS "-Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
mindspore_add_pkg(jpeg_turbo
|
||||
VER 2.0.4
|
||||
|
@ -12,7 +8,6 @@ mindspore_add_pkg(jpeg_turbo
|
|||
URL https://github.com/libjpeg-turbo/libjpeg-turbo/archive/2.0.4.tar.gz
|
||||
MD5 44c43e4a9fb352f47090804529317c88
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE=Release -DCMAKE_SKIP_RPATH=TRUE
|
||||
PATCHES ${CMAKE_SOURCE_DIR}/third_party/patch/jpeg_turbo/jpeg_turbo.patch001
|
||||
)
|
||||
include_directories(${jpeg_turbo_INC})
|
||||
add_library(mindspore::jpeg_turbo ALIAS jpeg_turbo::jpeg)
|
||||
|
|
|
@ -1,18 +1,8 @@
|
|||
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
|
||||
set(tiff_CXXFLAGS "-fstack-protector-all -Wno-uninitialized -Wno-unused-parameter -Wno-unused-result \
|
||||
-Wno-unused-but-set-variable -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(tiff_CFLAGS "-fstack-protector-all -Wno-uninitialized -Wno-unused-parameter -Wno-unused-result \
|
||||
-Wno-unused-but-set-variable -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
else()
|
||||
set(tiff_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -Wno-unused-result \
|
||||
-Wno-unused-but-set-variable -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(tiff_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -Wno-unused-result \
|
||||
-Wno-unused-but-set-variable -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
if (${CMAKE_SYSTEM_NAME} MATCHES "Windows")
|
||||
set(tiff_CFLAGS "${tiff_CFLAGS} -Wno-int-to-pointer-cast -Wno-implicit-fallthrough -Wno-pointer-to-int-cast")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(tiff_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -Wno-unused-result \
|
||||
-Wno-unused-but-set-variable -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(tiff_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -Wno-unused-result \
|
||||
-Wno-unused-but-set-variable -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(tiff_LDFLAGS "-Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
|
||||
mindspore_add_pkg(tiff
|
||||
|
|
|
@ -1,22 +1,11 @@
|
|||
set(onednn_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
set(onednn_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
mindspore_add_pkg(onednn
|
||||
set(mkl_dnn_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
set(mkl_dnn_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
mindspore_add_pkg(mkl_dnn
|
||||
VER 1.1.1
|
||||
LIBS dnnl mkldnn
|
||||
HEAD_ONLY ./include
|
||||
RELEASE on
|
||||
URL https://github.com/oneapi-src/oneDNN/releases/download/v1.1.1/dnnl_win_1.1.1_cpu_vcomp.zip
|
||||
MD5 ecaab9ed549643067699c80e5cea1c23)
|
||||
else()
|
||||
mindspore_add_pkg(onednn
|
||||
VER 1.1.2
|
||||
LIBS dnnl mkldnn
|
||||
URL https://github.com/oneapi-src/oneDNN/archive/v1.1.2.tar.gz
|
||||
MD5 ab40d52230f3ad1d7a6f06ce0f6bc17a
|
||||
URL https://github.com/intel/mkl-dnn/archive/v1.1.1.tar.gz
|
||||
MD5 d6a422b00459600bdc22242590953f38
|
||||
CMAKE_OPTION -DDNNL_ARCH_OPT_FLAGS='' -DDNNL_CPU_RUNTIME='SEQ' -DDNNL_BUILD_EXAMPLES=OFF -DDNNL_BUILD_TESTS=OFF)
|
||||
endif()
|
||||
|
||||
include_directories(${onednn_INC})
|
||||
add_library(mindspore::dnnl ALIAS onednn::dnnl)
|
||||
add_library(mindspore::mkldnn ALIAS onednn::mkldnn)
|
||||
include_directories(${mkl_dnn_INC})
|
||||
add_library(mindspore::dnnl ALIAS mkl_dnn::dnnl)
|
||||
add_library(mindspore::mkldnn ALIAS mkl_dnn::mkldnn)
|
||||
|
|
|
@ -1,76 +1,31 @@
|
|||
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
|
||||
set(opencv_CXXFLAGS "-fstack-protector-all -Wno-uninitialized -Wno-unused-parameter -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(opencv_CFLAGS "-fstack-protector-all -Wno-uninitialized -Wno-unused-parameter -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(opencv_LDFLAGS "-Wl")
|
||||
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Windows")
|
||||
set(opencv_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(opencv_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(opencv_CXXFLAGS "${opencv_CXXFLAGS} -Wno-attributes -Wno-unknown-pragmas")
|
||||
set(opencv_CXXFLAGS "${opencv_CXXFLAGS} -Wno-unused-value -Wno-implicit-fallthrough")
|
||||
else()
|
||||
set(opencv_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -D_FORTIFY_SOURCE=2 -D_GLIBCXX_USE_CXX11_ABI=0 -O2")
|
||||
set(opencv_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(opencv_LDFLAGS "-Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
endif()
|
||||
|
||||
if (WIN32)
|
||||
mindspore_add_pkg(opencv
|
||||
VER 4.2.0
|
||||
LIBS libopencv_core420.dll.a libopencv_imgcodecs420.dll.a libopencv_imgproc420.dll.a
|
||||
LIB_PATH x64/mingw/lib
|
||||
URL https://github.com/opencv/opencv/archive/4.2.0.tar.gz
|
||||
MD5 e8cb208ce2723481408b604b480183b6
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE=Release -DWITH_PROTOBUF=OFF -DWITH_WEBP=OFF -DWITH_IPP=OFF -DWITH_ADE=OFF
|
||||
-DBUILD_ZLIB=ON
|
||||
-DBUILD_JPEG=ON
|
||||
-DBUILD_PNG=ON
|
||||
-DBUILD_OPENEXR=ON
|
||||
-DBUILD_TESTS=OFF
|
||||
-DBUILD_PERF_TESTS=OFF
|
||||
-DBUILD_opencv_apps=OFF
|
||||
-DCMAKE_SKIP_RPATH=TRUE
|
||||
-DBUILD_opencv_python3=OFF
|
||||
-DBUILD_opencv_videoio=OFF
|
||||
-DWITH_FFMPEG=OFF
|
||||
-DWITH_TIFF=ON
|
||||
-DBUILD_TIFF=OFF
|
||||
-DWITH_JASPER=OFF
|
||||
-DBUILD_JASPER=OFF
|
||||
-DTIFF_INCLUDE_DIR=${tiff_INC}
|
||||
-DTIFF_LIBRARY=${tiff_LIB})
|
||||
else()
|
||||
mindspore_add_pkg(opencv
|
||||
VER 4.2.0
|
||||
LIBS opencv_core opencv_imgcodecs opencv_imgproc
|
||||
URL https://github.com/opencv/opencv/archive/4.2.0.tar.gz
|
||||
MD5 e8cb208ce2723481408b604b480183b6
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE=Release -DWITH_PROTOBUF=OFF -DWITH_WEBP=OFF -DWITH_IPP=OFF -DWITH_ADE=OFF
|
||||
-DBUILD_ZLIB=ON
|
||||
-DBUILD_JPEG=ON
|
||||
-DBUILD_PNG=ON
|
||||
-DBUILD_OPENEXR=ON
|
||||
-DBUILD_TESTS=OFF
|
||||
-DBUILD_PERF_TESTS=OFF
|
||||
-DBUILD_opencv_apps=OFF
|
||||
-DCMAKE_SKIP_RPATH=TRUE
|
||||
-DBUILD_opencv_python3=OFF
|
||||
-DWITH_FFMPEG=OFF
|
||||
-DWITH_TIFF=ON
|
||||
-DBUILD_TIFF=OFF
|
||||
-DWITH_JASPER=OFF
|
||||
-DBUILD_JASPER=OFF
|
||||
-DTIFF_INCLUDE_DIR=${tiff_INC}
|
||||
-DTIFF_LIBRARY=${tiff_LIB})
|
||||
endif()
|
||||
set(opencv_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(opencv_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(opencv_LDFLAGS "-Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
|
||||
if (WIN32)
|
||||
include_directories(${opencv_INC})
|
||||
add_library(mindspore::opencv_core ALIAS opencv::libopencv_core420.dll.a)
|
||||
add_library(mindspore::opencv_imgcodecs ALIAS opencv::libopencv_imgcodecs420.dll.a)
|
||||
add_library(mindspore::opencv_imgproc ALIAS opencv::libopencv_imgproc420.dll.a)
|
||||
else()
|
||||
include_directories(${opencv_INC}/opencv4)
|
||||
add_library(mindspore::opencv_core ALIAS opencv::opencv_core)
|
||||
add_library(mindspore::opencv_imgcodecs ALIAS opencv::opencv_imgcodecs)
|
||||
add_library(mindspore::opencv_imgproc ALIAS opencv::opencv_imgproc)
|
||||
endif()
|
||||
mindspore_add_pkg(opencv
|
||||
VER 4.2.0
|
||||
LIBS opencv_core opencv_imgcodecs opencv_imgproc
|
||||
URL https://github.com/opencv/opencv/archive/4.2.0.tar.gz
|
||||
MD5 e8cb208ce2723481408b604b480183b6
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE=Release -DWITH_PROTOBUF=OFF -DWITH_WEBP=OFF -DWITH_IPP=OFF -DWITH_ADE=OFF
|
||||
-DBUILD_ZLIB=ON
|
||||
-DBUILD_JPEG=ON
|
||||
-DBUILD_PNG=ON
|
||||
-DBUILD_OPENEXR=ON
|
||||
-DBUILD_TESTS=OFF
|
||||
-DBUILD_PERF_TESTS=OFF
|
||||
-DBUILD_opencv_apps=OFF
|
||||
-DCMAKE_SKIP_RPATH=TRUE
|
||||
-DBUILD_opencv_python3=OFF
|
||||
-DWITH_FFMPEG=OFF
|
||||
-DWITH_TIFF=ON
|
||||
-DBUILD_TIFF=OFF
|
||||
-DWITH_JASPER=OFF
|
||||
-DBUILD_JASPER=OFF
|
||||
-DTIFF_INCLUDE_DIR=${tiff_INC}
|
||||
-DTIFF_LIBRARY=${tiff_LIB})
|
||||
include_directories(${opencv_INC}/opencv4)
|
||||
add_library(mindspore::opencv_core ALIAS opencv::opencv_core)
|
||||
add_library(mindspore::opencv_imgcodecs ALIAS opencv::opencv_imgcodecs)
|
||||
add_library(mindspore::opencv_imgproc ALIAS opencv::opencv_imgproc)
|
||||
|
|
|
@ -1,30 +1,22 @@
|
|||
set(protobuf_USE_STATIC_LIBS ON)
|
||||
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
|
||||
set(protobuf_CXXFLAGS "-fstack-protector-all -Wno-uninitialized -Wno-unused-parameter -fPIC -fvisibility=hidden -D_FORTIFY_SOURCE=2 -O2")
|
||||
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Windows")
|
||||
set(protobuf_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -fPIC -fvisibility=hidden -D_FORTIFY_SOURCE=2 -O2")
|
||||
else()
|
||||
set(protobuf_CXXFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -fPIC -fvisibility=hidden -D_FORTIFY_SOURCE=2 -D_GLIBCXX_USE_CXX11_ABI=0 -O2")
|
||||
endif()
|
||||
|
||||
set(protobuf_LDFLAGS "-Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
set(_ms_tmp_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
|
||||
set(CMAKE_CXX_FLAGS ${_ms_tmp_CMAKE_CXX_FLAGS})
|
||||
string(REPLACE " -Wall" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
string(REPLACE " -Werror" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
|
||||
mindspore_add_pkg(protobuf
|
||||
VER 3.8.0
|
||||
LIBS protobuf
|
||||
EXE protoc
|
||||
HEAD_ONLY ./
|
||||
URL https://github.com/protocolbuffers/protobuf/archive/v3.8.0.tar.gz
|
||||
MD5 3d9e32700639618a4d2d342c99d4507a
|
||||
CMAKE_PATH cmake/
|
||||
CMAKE_OPTION -Dprotobuf_BUILD_TESTS=OFF -Dprotobuf_BUILD_SHARED_LIBS=OFF)
|
||||
MD5 3d9e32700639618a4d2d342c99d4507a)
|
||||
|
||||
include_directories(${protobuf_INC})
|
||||
add_library(mindspore::protobuf ALIAS protobuf::protobuf)
|
||||
set(CMAKE_CXX_FLAGS ${_ms_tmp_CMAKE_CXX_FLAGS})
|
||||
set(protobuf_BUILD_TESTS OFF CACHE BOOL "Disable protobuf test")
|
||||
set(protobuf_BUILD_SHARED_LIBS OFF CACHE BOOL "Gen shared library")
|
||||
set(_ms_tmp_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS})
|
||||
|
||||
string(REPLACE " -Wall" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
string(REPLACE " -Werror" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
|
||||
add_subdirectory(${protobuf_DIRPATH}/cmake ${protobuf_DIRPATH}/build)
|
||||
|
||||
set(CMAKE_CXX_FLAGS ${_ms_tmp_CMAKE_CXX_FLAGS})
|
||||
|
||||
set(PROTOBUF_LIBRARY protobuf::libprotobuf)
|
||||
include_directories(${protobuf_DIRPATH}/src)
|
||||
add_library(mindspore::protobuf ALIAS libprotobuf)
|
||||
|
||||
function(ms_protobuf_generate c_var h_var)
|
||||
if(NOT ARGN)
|
||||
|
@ -41,17 +33,17 @@ function(ms_protobuf_generate c_var h_var)
|
|||
get_filename_component(file_dir ${abs_file} PATH)
|
||||
file(RELATIVE_PATH rel_path ${CMAKE_CURRENT_SOURCE_DIR} ${file_dir})
|
||||
|
||||
list(APPEND ${c_var} "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.cc")
|
||||
list(APPEND ${h_var} "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.h")
|
||||
list(APPEND ${c_var} "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}.pb.cc")
|
||||
list(APPEND ${h_var} "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}.pb.h")
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.cc"
|
||||
"${CMAKE_BINARY_DIR}/proto/${file_name}.pb.h"
|
||||
OUTPUT "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}.pb.cc"
|
||||
"${CMAKE_BINARY_DIR}/${rel_path}/${file_name}.pb.h"
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_BINARY_DIR}/proto"
|
||||
COMMAND protobuf::protoc -I${file_dir} --cpp_out=${CMAKE_BINARY_DIR}/proto ${abs_file}
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_BINARY_DIR}/${rel_path}"
|
||||
COMMAND protobuf::protoc -I${file_dir} --cpp_out=${CMAKE_BINARY_DIR}/${rel_path} ${abs_file}
|
||||
DEPENDS protobuf::protoc ${abs_file}
|
||||
COMMENT "Running C++ protocol buffer compiler on ${file}" VERBATIM)
|
||||
COMMENT "Running C++ protocol buffer compiler on ${file}" VERBATIM )
|
||||
endforeach()
|
||||
|
||||
set_source_files_properties(${${c_var}} ${${h_var}} PROPERTIES GENERATED TRUE)
|
||||
|
@ -74,40 +66,28 @@ function(ms_protobuf_generate_py c_var h_var py_var)
|
|||
get_filename_component(abs_file ${file} ABSOLUTE)
|
||||
get_filename_component(file_name ${file} NAME_WE)
|
||||
get_filename_component(file_dir ${abs_file} PATH)
|
||||
file(RELATIVE_PATH rel_path ${CMAKE_CURRENT_SOURCE_DIR} ${file_dir})
|
||||
|
||||
list(APPEND ${c_var} "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.cc")
|
||||
list(APPEND ${h_var} "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.h")
|
||||
list(APPEND ${py_var} "${CMAKE_BINARY_DIR}/proto/${file_name}_pb2.py")
|
||||
if (WIN32)
|
||||
add_custom_command(
|
||||
OUTPUT "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.cc"
|
||||
"${CMAKE_BINARY_DIR}/proto/${file_name}.pb.h"
|
||||
"${CMAKE_BINARY_DIR}/proto/${file_name}_pb2.py"
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_BINARY_DIR}/proto"
|
||||
COMMAND protobuf::protoc -I${file_dir} --cpp_out=${CMAKE_BINARY_DIR}/proto ${abs_file}
|
||||
COMMAND protobuf::protoc -I${file_dir} --python_out=${CMAKE_BINARY_DIR}/proto ${abs_file}
|
||||
COMMAND protobuf::protoc -I${file_dir} --python_out=${CMAKE_BINARY_DIR}/proto ${abs_file}
|
||||
COMMAND perl -pi.bak -e "s/import (.+_pb2.*)/from . import \\1/" "${CMAKE_BINARY_DIR}/proto/${file_name}_pb2.py"
|
||||
COMMAND ${CMAKE_COMMAND} -E copy "${CMAKE_BINARY_DIR}/proto/${file_name}_pb2.py" "${PROJECT_SOURCE_DIR}/mindspore/train/"
|
||||
DEPENDS protobuf::protoc ${abs_file}
|
||||
COMMENT "Running C++ protocol buffer compiler on ${file}" VERBATIM )
|
||||
else()
|
||||
add_custom_command(
|
||||
OUTPUT "${CMAKE_BINARY_DIR}/proto/${file_name}.pb.cc"
|
||||
"${CMAKE_BINARY_DIR}/proto/${file_name}.pb.h"
|
||||
"${CMAKE_BINARY_DIR}/proto/${file_name}_pb2.py"
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_BINARY_DIR}/proto"
|
||||
COMMAND protobuf::protoc -I${file_dir} --cpp_out=${CMAKE_BINARY_DIR}/proto ${abs_file}
|
||||
COMMAND protobuf::protoc -I${file_dir} --python_out=${CMAKE_BINARY_DIR}/proto ${abs_file}
|
||||
COMMAND protobuf::protoc -I${file_dir} --python_out=${CMAKE_BINARY_DIR}/proto ${abs_file}
|
||||
COMMAND perl -pi -e "s/import (.+_pb2.*)/from . import \\1/" "${CMAKE_BINARY_DIR}/proto/${file_name}_pb2.py"
|
||||
COMMAND cp "${CMAKE_BINARY_DIR}/proto/${file_name}_pb2.py" "${PROJECT_SOURCE_DIR}/mindspore/train/"
|
||||
DEPENDS protobuf::protoc ${abs_file}
|
||||
COMMENT "Running C++ protocol buffer compiler on ${file}" VERBATIM)
|
||||
endif()
|
||||
|
||||
list(APPEND ${c_var} "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}.pb.cc")
|
||||
list(APPEND ${h_var} "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}.pb.h")
|
||||
list(APPEND ${py_var} "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}_pb2.py")
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}.pb.cc"
|
||||
"${CMAKE_BINARY_DIR}/${rel_path}/${file_name}.pb.h"
|
||||
"${CMAKE_BINARY_DIR}/${rel_path}/${file_name}_pb2.py"
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_BINARY_DIR}/${rel_path}"
|
||||
COMMAND protobuf::protoc -I${file_dir} --cpp_out=${CMAKE_BINARY_DIR}/${rel_path} ${abs_file}
|
||||
COMMAND protobuf::protoc -I${file_dir} --python_out=${CMAKE_BINARY_DIR}/${rel_path} ${abs_file}
|
||||
COMMAND protobuf::protoc -I${file_dir} --python_out=${CMAKE_BINARY_DIR}/${rel_path} ${abs_file}
|
||||
COMMAND perl -pi -e "s/import (.+_pb2.*)/from . import \\1/" "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}_pb2.py"
|
||||
COMMAND cp "${CMAKE_BINARY_DIR}/${rel_path}/${file_name}_pb2.py" "${PROJECT_SOURCE_DIR}/mindspore/train/"
|
||||
DEPENDS protobuf::protoc ${abs_file}
|
||||
COMMENT "Running C++ protocol buffer compiler on ${file}" VERBATIM )
|
||||
endforeach()
|
||||
|
||||
set_source_files_properties(${${c_var}} ${${h_var}} ${${py_var}} PROPERTIES GENERATED TRUE)
|
||||
set(${c_var} ${${c_var}} PARENT_SCOPE)
|
||||
set(${h_var} ${${h_var}} PARENT_SCOPE)
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
set(pslite_USE_STATIC_LIBS ON)
|
||||
if (${ENABLE_IBVERBS} STREQUAL "ON")
|
||||
set(pslite_CXXFLAGS "USE_IBVERBS=1")
|
||||
endif()
|
||||
mindspore_add_pkg(pslite
|
||||
LIBS ps
|
||||
URL https://github.com/dmlc/ps-lite/archive/34fd45cae457d59850fdcb2066467778d0673f21.zip
|
||||
MD5 393c0e27b68bfaf96718caa3aa96f5a3
|
||||
PATCHES ${CMAKE_SOURCE_DIR}/third_party/patch/pslite/ps_lite.patch001
|
||||
ONLY_MAKE True
|
||||
ONLY_MAKE_INCS include/*
|
||||
ONLY_MAKE_LIBS build/*)
|
||||
include_directories(${pslite_INC})
|
||||
add_library(mindspore::pslite ALIAS pslite::ps)
|
|
@ -1,7 +1,6 @@
|
|||
|
||||
mindspore_add_pkg(rang
|
||||
VER 3.1.0
|
||||
HEAD_ONLY ./
|
||||
URL https://github.com/agauniyal/rang/archive/cabe04d6d6b05356fa8f9741704924788f0dd762.zip
|
||||
MD5 0c5c9b251fea9ee7ce32f188655be0ea)
|
||||
|
||||
|
||||
|
|
|
@ -1,25 +0,0 @@
|
|||
if (WIN32)
|
||||
set(sentencepiece_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2 -Wno-unused-result -Wno-stringop-overflow -Wno-format-extra-args -Wno-format")
|
||||
set(sentencepiece_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
mindspore_add_pkg(sentencepiece
|
||||
VER 0.1.92
|
||||
LIBS sentencepiece sentencepiece_train
|
||||
URL https://github.com/google/sentencepiece/archive/v0.1.92.tar.gz
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE=Release -DSPM_USE_BUILTIN_PROTOBUF=ON
|
||||
MD5 5dfd2241914b5598a68b2a8542ed8e91
|
||||
)
|
||||
else ()
|
||||
set(sentencepiece_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2 -Wno-unused-result -Wno-sign-compare")
|
||||
set(sentencepiece_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
mindspore_add_pkg(sentencepiece
|
||||
VER 0.1.92
|
||||
LIBS sentencepiece sentencepiece_train
|
||||
URL https://github.com/google/sentencepiece/archive/v0.1.92.tar.gz
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE=Release -DSPM_USE_BUILTIN_PROTOBUF=OFF -DSPM_ENABLE_SHARED=OFF -DPROTOBUF_INC=${protobuf_INC}
|
||||
MD5 5dfd2241914b5598a68b2a8542ed8e91
|
||||
PATCHES ${CMAKE_SOURCE_DIR}/third_party/patch/sentencepiece/sentencepiece.patch001
|
||||
)
|
||||
endif ()
|
||||
include_directories(${sentencepiece_INC})
|
||||
add_library(mindspore::sentencepiece ALIAS sentencepiece::sentencepiece)
|
||||
add_library(mindspore::sentencepiece_train ALIAS sentencepiece::sentencepiece_train)
|
|
@ -1,30 +1,15 @@
|
|||
if (WIN32)
|
||||
mindspore_add_pkg(sqlite
|
||||
VER 3.32.2
|
||||
LIBS sqlite3
|
||||
URL https://sqlite.org/2020/sqlite-amalgamation-3320200.zip
|
||||
MD5 1eccea18d248eb34c7378b2b3f63f1db
|
||||
PATCHES ${CMAKE_SOURCE_DIR}/third_party/patch/sqlite/sqlite.windows.patch001
|
||||
CMAKE_OPTION " "
|
||||
)
|
||||
|
||||
else ()
|
||||
set(sqlite_USE_STATIC_LIBS ON)
|
||||
set(sqlite_CXXFLAGS)
|
||||
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
|
||||
set(sqlite_CFLAGS "-fstack-protector-all -Wno-uninitialized -Wno-unused-parameter -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
else()
|
||||
set(sqlite_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
endif()
|
||||
set(sqlite_LDFLAGS "-Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
mindspore_add_pkg(sqlite
|
||||
VER 3.32.2
|
||||
set(sqlite_USE_STATIC_LIBS ON)
|
||||
set(sqlite_CXXFLAGS)
|
||||
set(sqlite_CFLAGS "-fstack-protector-all -Wno-maybe-uninitialized -Wno-unused-parameter -fPIC -D_FORTIFY_SOURCE=2 -O2")
|
||||
set(sqlite_LDFLAGS "-Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
|
||||
mindspore_add_pkg(sqlite
|
||||
VER 3.31.1
|
||||
LIBS sqlite3
|
||||
URL https://github.com/sqlite/sqlite/archive/version-3.32.2.tar.gz
|
||||
MD5 ea6d3b3289b4ac216fb06081a01ef101
|
||||
URL https://github.com/sqlite/sqlite/archive/version-3.31.1.tar.gz
|
||||
MD5 5f4e7b4016c15f4fb5855615279819da
|
||||
PATCHES ${CMAKE_SOURCE_DIR}/third_party/patch/sqlite/sqlite.patch001
|
||||
CONFIGURE_COMMAND ./configure --enable-shared=no --disable-tcl --disable-editline --enable-json1)
|
||||
endif ()
|
||||
|
||||
include_directories(${sqlite_INC})
|
||||
add_library(mindspore::sqlite ALIAS sqlite::sqlite3)
|
||||
add_library(mindspore::sqlite ALIAS sqlite::sqlite3)
|
|
@ -1,10 +0,0 @@
|
|||
set(tinyxml2_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2 -Wno-unused-result")
|
||||
set(tinyxml2_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
mindspore_add_pkg(tinyxml2
|
||||
VER 8.0.0
|
||||
LIBS tinyxml2
|
||||
URL https://github.com/leethomason/tinyxml2/archive/8.0.0.tar.gz
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE=Release
|
||||
MD5 5dc535c8b34ee621fe2128f072d275b5)
|
||||
include_directories(${tinyxml2_INC})
|
||||
add_library(mindspore::tinyxml2 ALIAS tinyxml2::tinyxml2)
|
|
@ -2,14 +2,7 @@ set(incubator_tvm_gpu_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
|||
set(incubator_tvm_gpu_CFLAGS "-D_FORTIFY_SOURCE=2 -O2")
|
||||
mindspore_add_pkg(incubator_tvm_gpu
|
||||
VER 0.6.0
|
||||
LIBS tvm
|
||||
HEAD_ONLY ./
|
||||
URL https://github.com/apache/incubator-tvm/archive/v0.6.0.tar.gz
|
||||
MD5 9cbbd32545a776023acabbba270449fe
|
||||
CUSTOM_CMAKE ${CMAKE_SOURCE_DIR}/third_party/patch/incubator-tvm/
|
||||
SUBMODULES ${dlpack_DIRPATH} ${dmlc-core_DIRPATH} ${rang_DIRPATH}
|
||||
SOURCEMODULES topi/python/topi python/tvm
|
||||
PATCHES ${CMAKE_SOURCE_DIR}/third_party/patch/incubator-tvm/find_library.patch
|
||||
${CMAKE_SOURCE_DIR}/third_party/patch/incubator-tvm/include.patch
|
||||
${CMAKE_SOURCE_DIR}/third_party/patch/incubator-tvm/src_pass.patch
|
||||
CMAKE_OPTION " ")
|
||||
add_library(mindspore::tvm ALIAS incubator_tvm_gpu::tvm)
|
||||
MD5 9cbbd32545a776023acabbba270449fe)
|
||||
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
mindspore_add_pkg(zeromq
|
||||
VER 4.1.4
|
||||
HEAD_ONLY ./
|
||||
URL https://raw.githubusercontent.com/mli/deps/master/build/zeromq-4.1.4.tar.gz
|
||||
MD5 a611ecc93fffeb6d058c0e6edf4ad4fb)
|
|
@ -1,9 +0,0 @@
|
|||
mindspore_add_pkg(zlib
|
||||
VER 1.2.11
|
||||
LIBS z
|
||||
URL https://github.com/madler/zlib/archive/v1.2.11.tar.gz
|
||||
MD5 0095d2d2d1f3442ce1318336637b695f
|
||||
CMAKE_OPTION -DCMAKE_BUILD_TYPE:STRING=Release)
|
||||
|
||||
include_directories(${zlib_INC})
|
||||
add_library(mindspore::z ALIAS zlib::z)
|
|
@ -1,10 +1,6 @@
|
|||
set(SECURE_CXX_FLAGS "")
|
||||
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||
if (WIN32)
|
||||
set(SECURE_CXX_FLAGS "-fstack-protector-all")
|
||||
else()
|
||||
set(SECURE_CXX_FLAGS "-fstack-protector-all -Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
endif()
|
||||
set(SECURE_CXX_FLAGS "-fstack-protector-all -Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
endif()
|
||||
set(_ms_tmp_CMAKE_CXX_FLAGS_F ${CMAKE_CXX_FLAGS})
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fvisibility=hidden")
|
||||
|
@ -14,35 +10,18 @@ include(${CMAKE_SOURCE_DIR}/cmake/external_libs/eigen.cmake)
|
|||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/json.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/dependency_securec.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/protobuf.cmake)
|
||||
|
||||
if (ENABLE_DEBUGGER OR ENABLE_SERVING)
|
||||
# build dependencies of gRPC
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/absl.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/c-ares.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/zlib.cmake)
|
||||
# build gRPC
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/grpc.cmake)
|
||||
endif()
|
||||
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/pybind11.cmake)
|
||||
MESSAGE("go to link flatbuffers")
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/flatbuffers.cmake)
|
||||
if(USE_GLOG)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/glog.cmake)
|
||||
endif()
|
||||
if (ENABLE_CPU AND (ENABLE_D OR ENABLE_GPU))
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/zeromq.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/pslite.cmake)
|
||||
endif()
|
||||
|
||||
find_package(Python3)
|
||||
include_directories(${Python3_INCLUDE_DIRS})
|
||||
include_directories(${CMAKE_SOURCE_DIR}/third_party)
|
||||
if (ENABLE_CPU)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/mkl_dnn.cmake)
|
||||
if (ENABLE_MPI)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/ompi.cmake)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if (ENABLE_GPU)
|
||||
|
@ -50,17 +29,18 @@ if (ENABLE_GPU)
|
|||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/dmlc_core.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/rang.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/tvm_gpu.cmake)
|
||||
endif()
|
||||
|
||||
if (ENABLE_MPI)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/nccl.cmake)
|
||||
endif()
|
||||
if (ENABLE_MPI)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/nccl.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/ompi.cmake)
|
||||
endif()
|
||||
|
||||
if (ENABLE_GE)
|
||||
include_directories(${CMAKE_SOURCE_DIR}/third_party/ge/include)
|
||||
include_directories(${CMAKE_SOURCE_DIR}/third_party/ge/include/external)
|
||||
include_directories(${CMAKE_SOURCE_DIR}/third_party/ge/include/external/graph)
|
||||
elseif(ENABLE_D OR ENABLE_TESTCASES)
|
||||
else()
|
||||
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc)
|
||||
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/ops)
|
||||
include_directories(${CMAKE_SOURCE_DIR}/graphengine/inc/external)
|
||||
|
@ -68,14 +48,10 @@ elseif(ENABLE_D OR ENABLE_TESTCASES)
|
|||
endif()
|
||||
|
||||
if (ENABLE_MINDDATA)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/icu4c.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/jpeg_turbo.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/libtiff.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/opencv.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/sqlite.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/tinyxml2.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/cppjieba.cmake)
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/sentencepiece.cmake)
|
||||
endif()
|
||||
|
||||
include(${CMAKE_SOURCE_DIR}/cmake/external_libs/gtest.cmake)
|
||||
|
|
|
@ -17,16 +17,9 @@ option(ENABLE_DUMP_E2E "Enable dump e2e file, default on" OFF)
|
|||
option(ENABLE_DUMP_IR "Enable dump funciton graph ir, default on" ON)
|
||||
option(ENABLE_MPI "enable mpi" OFF)
|
||||
option(ENABLE_AKG "enable akg" OFF)
|
||||
option(ENABLE_DEBUGGER "enable debugger" OFF)
|
||||
option(ENABLE_IBVERBS "enable IBVERBS for parameter server" OFF)
|
||||
option(ENABLE_PYTHON "Enable python" ON)
|
||||
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
if (WIN32)
|
||||
set(OPTION_CXX_FLAGS "${OPTION_CXX_FLAGS} -fstack-protector-all")
|
||||
else()
|
||||
set(OPTION_CXX_FLAGS "${OPTION_CXX_FLAGS} -fstack-protector-all -Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
endif()
|
||||
set(OPTION_CXX_FLAGS "${OPTION_CXX_FLAGS} -fstack-protector-all -Wl,-z,relro,-z,now,-z,noexecstack")
|
||||
endif()
|
||||
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Darwin")
|
||||
|
@ -48,8 +41,8 @@ endif()
|
|||
|
||||
if (DEBUG_MODE)
|
||||
set(CMAKE_BUILD_TYPE "Debug")
|
||||
add_compile_definitions(MEM_REUSE_DEBUG)
|
||||
else()
|
||||
add_compile_definitions(MEM_REUSE_DEBUG)
|
||||
set(CMAKE_BUILD_TYPE "Release")
|
||||
endif()
|
||||
|
||||
|
@ -67,11 +60,6 @@ endif()
|
|||
|
||||
if (ENABLE_GPU)
|
||||
set(ENABLE_GPUQUE ON)
|
||||
add_compile_definitions(ENABLE_GPU_COLLECTIVE)
|
||||
endif()
|
||||
|
||||
if (ENABLE_CPU)
|
||||
add_compile_definitions(ENABLE_CPU)
|
||||
endif()
|
||||
|
||||
if (ENABLE_GE)
|
||||
|
@ -118,8 +106,4 @@ endif()
|
|||
|
||||
if(ENABLE_DUMP_E2E)
|
||||
add_compile_definitions(ENABLE_DUMP_E2E)
|
||||
endif()
|
||||
|
||||
if(ENABLE_DEBUGGER)
|
||||
add_compile_definitions(ENABLE_DEBUGGER)
|
||||
endif()
|
||||
endif()
|
|
@ -1,293 +0,0 @@
|
|||
# include dependency
|
||||
include(CMakePackageConfigHelpers)
|
||||
include(GNUInstallDirs)
|
||||
|
||||
# set package information
|
||||
set(CPACK_PACKAGE_NAME ${PROJECT_NAME})
|
||||
set(CPACK_GENERATOR "External")
|
||||
set(CPACK_EXTERNAL_PACKAGE_SCRIPT ${CMAKE_SOURCE_DIR}/cmake/package_script.cmake)
|
||||
set(CPACK_EXTERNAL_ENABLE_STAGING true)
|
||||
set(CPACK_TEMPORARY_PACKAGE_FILE_NAME ${CMAKE_SOURCE_DIR}/build/package/mindspore)
|
||||
set(CPACK_TEMPORARY_INSTALL_DIRECTORY ${CMAKE_SOURCE_DIR}/build/package/mindspore)
|
||||
if (ENABLE_GE)
|
||||
set(CPACK_MS_BACKEND "ge")
|
||||
set(CPACK_MS_PACKAGE_NAME "mindspore")
|
||||
elseif (ENABLE_GPU)
|
||||
set(CPACK_MS_BACKEND "ms")
|
||||
set(CPACK_MS_PACKAGE_NAME "mindspore-gpu")
|
||||
elseif (ENABLE_D)
|
||||
set(CPACK_MS_BACKEND "ms")
|
||||
set(CPACK_MS_PACKAGE_NAME "mindspore-ascend")
|
||||
elseif (ENABLE_CPU)
|
||||
set(CPACK_MS_BACKEND "ms")
|
||||
set(CPACK_MS_PACKAGE_NAME "mindspore")
|
||||
else ()
|
||||
set(CPACK_MS_BACKEND "debug")
|
||||
set(CPACK_MS_PACKAGE_NAME "mindspore")
|
||||
endif ()
|
||||
include(CPack)
|
||||
|
||||
# set install path
|
||||
set(INSTALL_LIB_DIR ${CMAKE_INSTALL_LIBDIR} CACHE PATH "Installation directory for libraries")
|
||||
set(INSTALL_PY_DIR ".")
|
||||
set(INSTALL_BASE_DIR ".")
|
||||
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
set(INSTALL_LIB_DIR ".")
|
||||
set(onednn_LIBPATH ${onednn_LIBPATH}/../bin/)
|
||||
set(glog_LIBPATH ${glog_LIBPATH}/../bin/)
|
||||
set(opencv_LIBPATH ${opencv_LIBPATH}/../bin/)
|
||||
set(jpeg_turbo_LIBPATH ${jpeg_turbo_LIBPATH}/../bin/)
|
||||
set(sqlite_LIBPATH ${sqlite_LIBPATH}/../bin/)
|
||||
set(tinyxml2_LIBPATH ${tinyxml2_LIBPATH}/../bin/)
|
||||
set(sentencepiece_LIBPATH ${sentencepiece_LIBPATH}/../bin/)
|
||||
else ()
|
||||
set(INSTALL_LIB_DIR "lib")
|
||||
endif ()
|
||||
|
||||
# set package files
|
||||
install(
|
||||
TARGETS _c_expression
|
||||
DESTINATION ${INSTALL_BASE_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
|
||||
install(
|
||||
TARGETS mindspore_gvar
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
|
||||
if (USE_GLOG)
|
||||
file(GLOB_RECURSE GLOG_LIB_LIST ${glog_LIBPATH}/libglog*)
|
||||
install(
|
||||
FILES ${GLOG_LIB_LIST}
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
|
||||
if (ENABLE_MINDDATA)
|
||||
install(
|
||||
TARGETS _c_dataengine _c_mindrecord
|
||||
DESTINATION ${INSTALL_BASE_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
|
||||
file(GLOB_RECURSE OPENCV_LIB_LIST
|
||||
${opencv_LIBPATH}/libopencv_core*
|
||||
${opencv_LIBPATH}/libopencv_imgcodecs*
|
||||
${opencv_LIBPATH}/libopencv_imgproc*
|
||||
)
|
||||
install(
|
||||
FILES ${OPENCV_LIB_LIST}
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
file(GLOB_RECURSE TINYXML2_LIB_LIST
|
||||
${tinyxml2_LIBPATH}/libtinyxml2*
|
||||
)
|
||||
install(
|
||||
FILES ${TINYXML2_LIB_LIST}
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
file(GLOB_RECURSE SENTENCEPIECE_LIB_LIST
|
||||
${sentencepiece_LIBPATH}/libsentencepiece*
|
||||
)
|
||||
install(
|
||||
FILES ${SENTENCEPIECE_LIB_LIST}
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
message("icu4c does not support windows system temporarily")
|
||||
else()
|
||||
file(GLOB_RECURSE ICU4C_LIB_LIST
|
||||
${icu4c_LIBPATH}/libicuuc*
|
||||
${icu4c_LIBPATH}/libicudata*
|
||||
${icu4c_LIBPATH}/libicui18n*
|
||||
)
|
||||
install(
|
||||
FILES ${ICU4C_LIB_LIST}
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif()
|
||||
endif ()
|
||||
|
||||
if (ENABLE_CPU)
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Linux")
|
||||
file(GLOB_RECURSE DNNL_LIB_LIST ${onednn_LIBPATH}/libdnnl${CMAKE_SHARED_LIBRARY_SUFFIX}*)
|
||||
elseif (CMAKE_SYSTEM_NAME MATCHES "Darwin")
|
||||
file(GLOB_RECURSE DNNL_LIB_LIST ${onednn_LIBPATH}/libdnnl*${CMAKE_SHARED_LIBRARY_SUFFIX}*)
|
||||
elseif (CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
file(GLOB_RECURSE DNNL_LIB_LIST ${onednn_LIBPATH}/dnnl.dll)
|
||||
endif ()
|
||||
install(
|
||||
FILES ${DNNL_LIB_LIST}
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
|
||||
if (ENABLE_MPI)
|
||||
install(
|
||||
TARGETS _ms_mpi
|
||||
DESTINATION ${INSTALL_BASE_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
install(
|
||||
TARGETS mpi_adapter
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
|
||||
if (ENABLE_GPU)
|
||||
install(
|
||||
TARGETS gpu_collective
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
install(
|
||||
TARGETS gpu_queue
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
|
||||
if (NOT ENABLE_GE)
|
||||
if (ENABLE_D)
|
||||
if (DEFINED ENV{ASCEND_CUSTOM_PATH})
|
||||
set(ASCEND_PATH $ENV{ASCEND_CUSTOM_PATH})
|
||||
else ()
|
||||
set(ASCEND_PATH /usr/local/Ascend)
|
||||
endif ()
|
||||
set(ASCEND_DRIVER_PATH ${ASCEND_PATH}/driver/lib64/common)
|
||||
set(ASCEND_FWK_PATH ${ASCEND_PATH}/fwkacllib/lib64)
|
||||
|
||||
install(
|
||||
FILES
|
||||
${CMAKE_BINARY_DIR}/graphengine/src/common/graph/libgraph.so
|
||||
${CMAKE_BINARY_DIR}/graphengine/src/ge/common/libge_common.so
|
||||
${CMAKE_BINARY_DIR}/graphengine/src/ge/ge_runtime/libge_runtime.so
|
||||
${ASCEND_DRIVER_PATH}/libslog.so
|
||||
${ASCEND_DRIVER_PATH}/libc_sec.so
|
||||
${ASCEND_FWK_PATH}/liberror_manager.so
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
elseif (ENABLE_TESTCASES)
|
||||
install(
|
||||
FILES
|
||||
${CMAKE_BINARY_DIR}/graphengine/src/common/graph/libgraph.so
|
||||
${CMAKE_SOURCE_DIR}/graphengine/third_party/prebuild/${CMAKE_HOST_SYSTEM_PROCESSOR}/libslog.so
|
||||
${CMAKE_SOURCE_DIR}/graphengine/third_party/prebuild/${CMAKE_HOST_SYSTEM_PROCESSOR}/liberror_manager.so
|
||||
${CMAKE_SOURCE_DIR}/build/graphengine/libc_sec.so
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
get_filename_component(CXX_DIR ${CMAKE_CXX_COMPILER} PATH)
|
||||
file(GLOB CXX_LIB_LIST ${CXX_DIR}/*.dll)
|
||||
|
||||
string(REPLACE "\\" "/" SystemRoot $ENV{SystemRoot})
|
||||
file(GLOB VC_LIB_LIST ${SystemRoot}/System32/msvcp140.dll ${SystemRoot}/System32/vcomp140.dll)
|
||||
|
||||
file(GLOB JPEG_LIB_LIST ${jpeg_turbo_LIBPATH}/*.dll)
|
||||
file(GLOB SQLITE_LIB_LIST ${sqlite_LIBPATH}/*.dll)
|
||||
install(
|
||||
FILES ${CXX_LIB_LIST} ${JPEG_LIB_LIST} ${SQLITE_LIB_LIST} ${VC_LIB_LIST}
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
|
||||
# set python files
|
||||
file(GLOB MS_PY_LIST ${CMAKE_SOURCE_DIR}/mindspore/*.py)
|
||||
install(
|
||||
FILES ${MS_PY_LIST}
|
||||
DESTINATION ${INSTALL_PY_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
|
||||
install(
|
||||
DIRECTORY
|
||||
${CMAKE_SOURCE_DIR}/mindspore/nn
|
||||
${CMAKE_SOURCE_DIR}/mindspore/_extends
|
||||
${CMAKE_SOURCE_DIR}/mindspore/parallel
|
||||
${CMAKE_SOURCE_DIR}/mindspore/mindrecord
|
||||
${CMAKE_SOURCE_DIR}/mindspore/train
|
||||
${CMAKE_SOURCE_DIR}/mindspore/common
|
||||
${CMAKE_SOURCE_DIR}/mindspore/ops
|
||||
${CMAKE_SOURCE_DIR}/mindspore/communication
|
||||
${CMAKE_SOURCE_DIR}/mindspore/profiler
|
||||
DESTINATION ${INSTALL_PY_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
|
||||
if (ENABLE_GPU)
|
||||
install(
|
||||
DIRECTORY ${CMAKE_SOURCE_DIR}/mindspore/_akg
|
||||
DESTINATION ${INSTALL_PY_DIR}/../
|
||||
COMPONENT mindspore
|
||||
)
|
||||
if (EXISTS ${incubator_tvm_gpu_ROOT})
|
||||
file(GLOB_RECURSE GLOG_LIB_LIST ${incubator_tvm_gpu_LIBPATH}/lib*)
|
||||
install(
|
||||
FILES ${GLOG_LIB_LIST}
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
install(
|
||||
DIRECTORY
|
||||
${incubator_tvm_gpu_ROOT}/topi/python/topi
|
||||
${incubator_tvm_gpu_ROOT}/python/tvm
|
||||
DESTINATION ${INSTALL_PY_DIR}/../_akg
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if ((ENABLE_D OR ENABLE_GPU) AND ENABLE_AKG)
|
||||
set (AKG_PATH ${CMAKE_SOURCE_DIR}/build/mindspore/akg)
|
||||
install(
|
||||
DIRECTORY
|
||||
${AKG_PATH}/akg
|
||||
DESTINATION ${INSTALL_PY_DIR}/..
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
|
||||
if (EXISTS ${CMAKE_SOURCE_DIR}/mindspore/dataset)
|
||||
install(
|
||||
DIRECTORY ${CMAKE_SOURCE_DIR}/mindspore/dataset
|
||||
DESTINATION ${INSTALL_PY_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
||||
|
||||
if (ENABLE_SERVING)
|
||||
install(
|
||||
TARGETS ms_serving
|
||||
DESTINATION ${INSTALL_BASE_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
|
||||
install(
|
||||
FILES ${CMAKE_SOURCE_DIR}/build/mindspore/serving/ms_service_pb2.py
|
||||
${CMAKE_SOURCE_DIR}/build/mindspore/serving/ms_service_pb2_grpc.py
|
||||
DESTINATION ${INSTALL_PY_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
|
||||
install(
|
||||
TARGETS inference
|
||||
DESTINATION ${INSTALL_LIB_DIR}
|
||||
COMPONENT mindspore
|
||||
)
|
||||
endif ()
|
|
@ -1,93 +0,0 @@
|
|||
# find exec
|
||||
find_package(Python3 3.7 COMPONENTS Interpreter Development)
|
||||
if (NOT Python3_FOUND)
|
||||
message(FATAL_ERROR "No python3 found.")
|
||||
endif ()
|
||||
|
||||
set(PYTHON ${Python3_EXECUTABLE})
|
||||
set(PYTHON_VERSION ${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR})
|
||||
|
||||
if (NOT PYTHON_VERSION MATCHES "3.7")
|
||||
message(FATAL_ERROR "FIND PYTHON VERSION ${PYTHON_VERSION} BUT CAN NOT MATCH PYTHON VERSION 3.7")
|
||||
endif ()
|
||||
|
||||
find_package(Git)
|
||||
if (NOT GIT_FOUND)
|
||||
message("No git found.")
|
||||
return ()
|
||||
endif ()
|
||||
set(GIT ${GIT_EXECUTABLE})
|
||||
|
||||
# set path
|
||||
set(MS_ROOT_DIR ${CPACK_PACKAGE_DIRECTORY}/../../)
|
||||
set(MS_PACK_ROOT_DIR ${MS_ROOT_DIR}/build/package)
|
||||
|
||||
# set package file name
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Linux")
|
||||
if (PYTHON_VERSION MATCHES "3.7")
|
||||
set(PY_TAGS "cp37-cp37m")
|
||||
else ()
|
||||
message("Could not find 'Python 3.7'")
|
||||
return()
|
||||
endif ()
|
||||
string(TOLOWER linux_${CMAKE_HOST_SYSTEM_PROCESSOR} PLATFORM_TAG)
|
||||
elseif (CMAKE_SYSTEM_NAME MATCHES "Darwin")
|
||||
if (PYTHON_VERSION MATCHES "3.7")
|
||||
set(PY_TAGS "py37-none")
|
||||
else ()
|
||||
message("Could not find 'Python 3.7'")
|
||||
return()
|
||||
endif ()
|
||||
set(PLATFORM_TAG "any")
|
||||
elseif (CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
if (PYTHON_VERSION MATCHES "3.7")
|
||||
set(PY_TAGS "cp37-cp37m")
|
||||
else ()
|
||||
message("Could not find 'Python 3.7'")
|
||||
return()
|
||||
endif ()
|
||||
set(PLATFORM_TAG "win_amd64")
|
||||
else ()
|
||||
message(FATAL_ERROR "other platform: ${CMAKE_SYSTEM_NAME}")
|
||||
endif ()
|
||||
|
||||
# get git commit id
|
||||
set(GIT_COMMIT_ID "")
|
||||
execute_process(
|
||||
COMMAND ${GIT} log --format='[sha1]:%h,[branch]:%d' --abbrev=8 -1
|
||||
OUTPUT_VARIABLE GIT_COMMIT_ID
|
||||
WORKING_DIRECTORY ${MS_ROOT_DIR}
|
||||
ERROR_QUIET)
|
||||
string(REPLACE " " "" GIT_COMMIT_ID ${GIT_COMMIT_ID})
|
||||
|
||||
set(ENV{BACKEND_POLICY} ${CPACK_MS_BACKEND})
|
||||
set(ENV{MS_PACKAGE_NAME} ${CPACK_MS_PACKAGE_NAME})
|
||||
set(ENV{COMMIT_ID} ${GIT_COMMIT_ID})
|
||||
|
||||
execute_process(
|
||||
COMMAND ${PYTHON} ${MS_ROOT_DIR}/setup.py "bdist_wheel"
|
||||
WORKING_DIRECTORY ${MS_PACK_ROOT_DIR}
|
||||
)
|
||||
|
||||
# finally
|
||||
set(PACKAGE_NAME ${CPACK_MS_PACKAGE_NAME})
|
||||
if (NOT CMAKE_SYSTEM_NAME MATCHES "Windows")
|
||||
string(REPLACE "-" "_" PACKAGE_NAME ${PACKAGE_NAME})
|
||||
execute_process(
|
||||
COMMAND chmod -R 700 ${MS_PACK_ROOT_DIR}/mindspore/
|
||||
COMMAND chmod -R 700 ${MS_PACK_ROOT_DIR}/${PACKAGE_NAME}.egg-info/
|
||||
)
|
||||
endif ()
|
||||
|
||||
file(GLOB WHL_FILE ${MS_PACK_ROOT_DIR}/dist/*.whl)
|
||||
get_filename_component(ORIGIN_FILE_NAME ${WHL_FILE} NAME)
|
||||
string(REPLACE "-" ";" ORIGIN_FILE_NAME ${ORIGIN_FILE_NAME})
|
||||
list(GET ORIGIN_FILE_NAME 1 VERSION)
|
||||
set(NEW_FILE_NAME ${PACKAGE_NAME}-${VERSION}-${PY_TAGS}-${PLATFORM_TAG}.whl)
|
||||
file(RENAME ${WHL_FILE} ${MS_PACK_ROOT_DIR}/${NEW_FILE_NAME})
|
||||
file(REMOVE_RECURSE ${MS_ROOT_DIR}/output)
|
||||
file(MAKE_DIRECTORY ${MS_ROOT_DIR}/output)
|
||||
file(COPY ${MS_PACK_ROOT_DIR}/${NEW_FILE_NAME} DESTINATION ${MS_ROOT_DIR}/output/)
|
||||
|
||||
file(SHA256 ${MS_ROOT_DIR}/output/${NEW_FILE_NAME} SHA256_VAR)
|
||||
file(WRITE ${MS_ROOT_DIR}/output/${NEW_FILE_NAME}.sha256 ${SHA256_VAR} " " ${NEW_FILE_NAME})
|
|
@ -1,10 +1,6 @@
|
|||
include(FetchContent)
|
||||
set(FETCHCONTENT_QUIET OFF)
|
||||
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Windows" AND ${CMAKE_VERSION} VERSION_GREATER_EQUAL 3.17.0)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES .dll ${CMAKE_FIND_LIBRARY_SUFFIXES})
|
||||
endif ()
|
||||
|
||||
function(mindspore_add_submodule_obj des_submodule_objs sub_dir submodule_name_obj)
|
||||
|
||||
add_subdirectory(${sub_dir})
|
||||
|
@ -20,36 +16,16 @@ function(mindspore_add_submodule_obj des_submodule_objs sub_dir submodule_name_o
|
|||
|
||||
endfunction()
|
||||
|
||||
if (DEFINED ENV{MSLIBS_CACHE_PATH})
|
||||
set(_MS_LIB_CACHE $ENV{MSLIBS_CACHE_PATH})
|
||||
else()
|
||||
set(_MS_LIB_CACHE ${CMAKE_BINARY_DIR}/.mslib)
|
||||
endif ()
|
||||
message("MS LIBS CACHE PATH: ${_MS_LIB_CACHE}")
|
||||
|
||||
get_filename_component(_MS_LIB_CACHE ~/.mslib REALPATH)
|
||||
if (NOT EXISTS ${_MS_LIB_CACHE})
|
||||
file(MAKE_DIRECTORY ${_MS_LIB_CACHE})
|
||||
endif ()
|
||||
|
||||
# set(FETCHCONTENT_BASE_DIR ${_MS_LIB_CACHE})
|
||||
# set(CMAKE_PREFIX_PATH ${_MS_LIB_CACHE})
|
||||
if (DEFINED ENV{MSLIBS_SERVER})
|
||||
set(LOCAL_LIBS_SERVER $ENV{MSLIBS_SERVER})
|
||||
message("LOCAL_LIBS_SERVER: ${LOCAL_LIBS_SERVER}")
|
||||
endif ()
|
||||
|
||||
include(ProcessorCount)
|
||||
ProcessorCount(N)
|
||||
if (JOBS)
|
||||
set(THNUM ${JOBS})
|
||||
else()
|
||||
set(JOBS 8)
|
||||
if (${JOBS} GREATER ${N})
|
||||
set(THNUM ${N})
|
||||
else()
|
||||
set(THNUM ${JOBS})
|
||||
endif()
|
||||
endif ()
|
||||
message("set make thread num: ${THNUM}")
|
||||
|
||||
if(LOCAL_LIBS_SERVER)
|
||||
if (NOT ENV{no_proxy})
|
||||
set(ENV{no_proxy} "${LOCAL_LIBS_SERVER}")
|
||||
|
@ -82,32 +58,7 @@ function(__download_pkg pkg_name pkg_url pkg_md5)
|
|||
|
||||
endfunction()
|
||||
|
||||
function(__download_pkg_with_git pkg_name pkg_url pkg_git_commit pkg_md5)
|
||||
|
||||
if(LOCAL_LIBS_SERVER)
|
||||
set(pkg_url "http://${LOCAL_LIBS_SERVER}:8081/libs/${pkg_name}/${pkg_git_commit}")
|
||||
FetchContent_Declare(
|
||||
${pkg_name}
|
||||
URL ${pkg_url}
|
||||
URL_HASH MD5=${pkg_md5}
|
||||
)
|
||||
else()
|
||||
FetchContent_Declare(
|
||||
${pkg_name}
|
||||
GIT_REPOSITORY ${pkg_url}
|
||||
GIT_TAG ${pkg_git_commit})
|
||||
endif()
|
||||
FetchContent_GetProperties(${pkg_name})
|
||||
message("download: ${${pkg_name}_SOURCE_DIR} , ${pkg_name} , ${pkg_url}")
|
||||
if(NOT ${pkg_name}_POPULATED)
|
||||
FetchContent_Populate(${pkg_name})
|
||||
set(${pkg_name}_SOURCE_DIR ${${pkg_name}_SOURCE_DIR} PARENT_SCOPE)
|
||||
endif()
|
||||
|
||||
endfunction()
|
||||
|
||||
|
||||
function(__find_pkg_then_add_target pkg_name pkg_exe lib_path)
|
||||
function(__find_pkg_then_add_target pkg_name pkg_exe)
|
||||
|
||||
unset(${pkg_name}_LIBS)
|
||||
|
||||
|
@ -133,24 +84,15 @@ function(__find_pkg_then_add_target pkg_name pkg_exe lib_path)
|
|||
set(_LIB_TYPE STATIC)
|
||||
endif ()
|
||||
set(${_LIB_NAME}_LIB ${_LIB_NAME}_LIB-NOTFOUND)
|
||||
find_library(${_LIB_NAME}_LIB ${_LIB_SEARCH_NAME} PATHS ${${pkg_name}_BASE_DIR}/${lib_path} NO_DEFAULT_PATH)
|
||||
|
||||
find_library(${_LIB_NAME}_LIB ${_LIB_SEARCH_NAME} PATHS ${${pkg_name}_BASE_DIR}/lib NO_DEFAULT_PATH)
|
||||
if(NOT ${_LIB_NAME}_LIB)
|
||||
return()
|
||||
endif()
|
||||
|
||||
add_library(${pkg_name}::${_LIB_NAME} ${_LIB_TYPE} IMPORTED GLOBAL)
|
||||
if (WIN32 AND ${_LIB_TYPE} STREQUAL "SHARED")
|
||||
set_target_properties(${pkg_name}::${_LIB_NAME} PROPERTIES IMPORTED_IMPLIB_RELEASE ${${_LIB_NAME}_LIB})
|
||||
else()
|
||||
set_target_properties(${pkg_name}::${_LIB_NAME} PROPERTIES IMPORTED_LOCATION ${${_LIB_NAME}_LIB})
|
||||
endif()
|
||||
|
||||
if (EXISTS ${${pkg_name}_BASE_DIR}/include)
|
||||
set_target_properties(${pkg_name}::${_LIB_NAME} PROPERTIES
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${${pkg_name}_BASE_DIR}/include")
|
||||
endif ()
|
||||
|
||||
set_target_properties(${pkg_name}::${_LIB_NAME} PROPERTIES
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${${pkg_name}_BASE_DIR}/include"
|
||||
IMPORTED_LOCATION ${${_LIB_NAME}_LIB}
|
||||
)
|
||||
list(APPEND ${pkg_name}_LIBS ${pkg_name}::${_LIB_NAME})
|
||||
message("found ${${_LIB_NAME}_LIB}")
|
||||
STRING( REGEX REPLACE "(.+)/(.+)" "\\1" LIBPATH ${${_LIB_NAME}_LIB})
|
||||
|
@ -205,18 +147,10 @@ set(MS_FIND_NO_DEFAULT_PATH ${MS_FIND_NO_DEFAULT_PATH} PARENT_SCOPE)
|
|||
function(mindspore_add_pkg pkg_name )
|
||||
|
||||
set(options )
|
||||
set(oneValueArgs URL MD5 GIT_REPOSITORY GIT_TAG VER EXE DIR HEAD_ONLY CMAKE_PATH RELEASE LIB_PATH CUSTOM_CMAKE)
|
||||
set(multiValueArgs CMAKE_OPTION LIBS PRE_CONFIGURE_COMMAND CONFIGURE_COMMAND BUILD_OPTION INSTALL_INCS INSTALL_LIBS PATCHES SUBMODULES SOURCEMODULES ONLY_MAKE ONLY_MAKE_INCS ONLY_MAKE_LIBS)
|
||||
set(oneValueArgs URL MD5 VER EXE DIR HEAD_ONLY)
|
||||
set(multiValueArgs CMAKE_OPTION LIBS PRE_CONFIGURE_COMMAND CONFIGURE_COMMAND BUILD_OPTION INSTALL_INCS INSTALL_LIBS PATCHES)
|
||||
cmake_parse_arguments(PKG "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN} )
|
||||
|
||||
if (NOT PKG_LIB_PATH)
|
||||
set(PKG_LIB_PATH lib)
|
||||
endif ()
|
||||
|
||||
if(NOT PKG_EXE)
|
||||
set(PKG_EXE 0)
|
||||
endif()
|
||||
|
||||
set(__FIND_PKG_NAME ${pkg_name})
|
||||
string(TOLOWER ${pkg_name} pkg_name)
|
||||
message("pkg name:${__FIND_PKG_NAME},${pkg_name}")
|
||||
|
@ -244,17 +178,18 @@ function(mindspore_add_pkg pkg_name )
|
|||
set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/${PKG_HEAD_ONLY} PARENT_SCOPE)
|
||||
add_library(${pkg_name} INTERFACE)
|
||||
target_include_directories(${pkg_name} INTERFACE ${${pkg_name}_INC})
|
||||
if (${PKG_RELEASE})
|
||||
__find_pkg_then_add_target(${pkg_name} ${PKG_EXE} ${PKG_LIB_PATH} ${PKG_LIBS})
|
||||
endif ()
|
||||
return()
|
||||
endif ()
|
||||
|
||||
if(NOT PKG_EXE)
|
||||
set(PKG_EXE 0)
|
||||
endif()
|
||||
|
||||
set(${__FIND_PKG_NAME}_ROOT ${${pkg_name}_BASE_DIR})
|
||||
set(${__FIND_PKG_NAME}_ROOT ${${pkg_name}_BASE_DIR} PARENT_SCOPE)
|
||||
|
||||
if (PKG_LIBS)
|
||||
__find_pkg_then_add_target(${pkg_name} ${PKG_EXE} ${PKG_LIB_PATH} ${PKG_LIBS})
|
||||
__find_pkg_then_add_target(${pkg_name} ${PKG_EXE} ${PKG_LIBS})
|
||||
if(${pkg_name}_LIBS)
|
||||
set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/include PARENT_SCOPE)
|
||||
message("Found libs: ${${pkg_name}_LIBS}")
|
||||
|
@ -270,17 +205,7 @@ function(mindspore_add_pkg pkg_name )
|
|||
endif ()
|
||||
|
||||
if (NOT PKG_DIR)
|
||||
if (PKG_GIT_REPOSITORY)
|
||||
__download_pkg_with_git(${pkg_name} ${PKG_GIT_REPOSITORY} ${PKG_GIT_TAG} ${PKG_MD5})
|
||||
else()
|
||||
__download_pkg(${pkg_name} ${PKG_URL} ${PKG_MD5})
|
||||
endif()
|
||||
foreach(_SUBMODULE_FILE ${PKG_SUBMODULES})
|
||||
STRING( REGEX REPLACE "(.+)_(.+)" "\\1" _SUBMODEPATH ${_SUBMODULE_FILE})
|
||||
STRING( REGEX REPLACE "(.+)/(.+)" "\\2" _SUBMODENAME ${_SUBMODEPATH})
|
||||
file(GLOB ${pkg_name}_INSTALL_SUBMODULE ${_SUBMODULE_FILE}/*)
|
||||
file(COPY ${${pkg_name}_INSTALL_SUBMODULE} DESTINATION ${${pkg_name}_SOURCE_DIR}/3rdparty/${_SUBMODENAME})
|
||||
endforeach (_SUBMODULE_FILE)
|
||||
__download_pkg(${pkg_name} ${PKG_URL} ${PKG_MD5})
|
||||
else()
|
||||
set(${pkg_name}_SOURCE_DIR ${PKG_DIR})
|
||||
endif ()
|
||||
|
@ -288,51 +213,27 @@ function(mindspore_add_pkg pkg_name )
|
|||
message("${pkg_name}_SOURCE_DIR : ${${pkg_name}_SOURCE_DIR}")
|
||||
|
||||
foreach(_PATCH_FILE ${PKG_PATCHES})
|
||||
get_filename_component(_PATCH_FILE_NAME ${_PATCH_FILE} NAME)
|
||||
set(_LF_PATCH_FILE ${CMAKE_BINARY_DIR}/_ms_patch/${_PATCH_FILE_NAME})
|
||||
configure_file(${_PATCH_FILE} ${_LF_PATCH_FILE} NEWLINE_STYLE LF @ONLY)
|
||||
|
||||
message("patching ${${pkg_name}_SOURCE_DIR} -p1 < ${_LF_PATCH_FILE}")
|
||||
execute_process(COMMAND ${Patch_EXECUTABLE} -p1 INPUT_FILE ${_LF_PATCH_FILE}
|
||||
message("patching ${${pkg_name}_SOURCE_DIR} -p1 < ${_PATCH_FILE}")
|
||||
execute_process(COMMAND patch -p1 INPUT_FILE ${_PATCH_FILE}
|
||||
WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}
|
||||
RESULT_VARIABLE Result)
|
||||
if(NOT Result EQUAL "0")
|
||||
message(FATAL_ERROR "Failed patch: ${_LF_PATCH_FILE}")
|
||||
message(FATAL_ERROR "Failed patch: ${_PATCH_FILE}")
|
||||
endif()
|
||||
endforeach(_PATCH_FILE)
|
||||
foreach(_SOURCE_DIR ${PKG_SOURCEMODULES})
|
||||
file(GLOB ${pkg_name}_INSTALL_SOURCE ${${pkg_name}_SOURCE_DIR}/${_SOURCE_DIR}/*)
|
||||
file(COPY ${${pkg_name}_INSTALL_SOURCE} DESTINATION ${${pkg_name}_BASE_DIR}/${_SOURCE_DIR}/)
|
||||
endforeach (_SUBMODULE_FILE)
|
||||
|
||||
file(LOCK ${${pkg_name}_BASE_DIR} DIRECTORY GUARD FUNCTION RESULT_VARIABLE ${pkg_name}_LOCK_RET TIMEOUT 600)
|
||||
if(NOT ${pkg_name}_LOCK_RET EQUAL "0")
|
||||
message(FATAL_ERROR "error! when try lock ${${pkg_name}_BASE_DIR} : ${${pkg_name}_LOCK_RET}")
|
||||
endif()
|
||||
|
||||
if (PKG_CUSTOM_CMAKE)
|
||||
file(GLOB ${pkg_name}_cmake ${PKG_CUSTOM_CMAKE}/CMakeLists.txt)
|
||||
file(COPY ${${pkg_name}_cmake} DESTINATION ${${pkg_name}_SOURCE_DIR})
|
||||
endif ()
|
||||
|
||||
if(${pkg_name}_SOURCE_DIR)
|
||||
if (PKG_HEAD_ONLY)
|
||||
file(GLOB ${pkg_name}_SOURCE_SUBDIRS ${${pkg_name}_SOURCE_DIR}/*)
|
||||
file(COPY ${${pkg_name}_SOURCE_SUBDIRS} DESTINATION ${${pkg_name}_BASE_DIR})
|
||||
set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/${PKG_HEAD_ONLY} PARENT_SCOPE)
|
||||
if (NOT PKG_RELEASE)
|
||||
add_library(${pkg_name} INTERFACE)
|
||||
target_include_directories(${pkg_name} INTERFACE ${${pkg_name}_INC})
|
||||
endif ()
|
||||
|
||||
elseif (PKG_ONLY_MAKE)
|
||||
__exec_cmd(COMMAND ${CMAKE_MAKE_PROGRAM} ${${pkg_name}_CXXFLAGS} -j${THNUM}
|
||||
WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR})
|
||||
set(PKG_INSTALL_INCS ${PKG_ONLY_MAKE_INCS})
|
||||
set(PKG_INSTALL_LIBS ${PKG_ONLY_MAKE_LIBS})
|
||||
file(GLOB ${pkg_name}_INSTALL_INCS ${${pkg_name}_SOURCE_DIR}/${PKG_INSTALL_INCS})
|
||||
file(GLOB ${pkg_name}_INSTALL_LIBS ${${pkg_name}_SOURCE_DIR}/${PKG_INSTALL_LIBS})
|
||||
file(COPY ${${pkg_name}_INSTALL_INCS} DESTINATION ${${pkg_name}_BASE_DIR}/include)
|
||||
file(COPY ${${pkg_name}_INSTALL_LIBS} DESTINATION ${${pkg_name}_BASE_DIR}/lib)
|
||||
add_library(${pkg_name} INTERFACE)
|
||||
target_include_directories(${pkg_name} INTERFACE ${${pkg_name}_INC})
|
||||
|
||||
elseif (PKG_CMAKE_OPTION)
|
||||
# in cmake
|
||||
|
@ -354,10 +255,10 @@ function(mindspore_add_pkg pkg_name )
|
|||
|
||||
__exec_cmd(COMMAND ${CMAKE_COMMAND} ${PKG_CMAKE_OPTION} -G ${CMAKE_GENERATOR}
|
||||
${${pkg_name}_CMAKE_CFLAGS} ${${pkg_name}_CMAKE_CXXFLAGS} ${${pkg_name}_CMAKE_LDFLAGS}
|
||||
-DCMAKE_INSTALL_PREFIX=${${pkg_name}_BASE_DIR} ${${pkg_name}_SOURCE_DIR}/${PKG_CMAKE_PATH}
|
||||
-DCMAKE_INSTALL_PREFIX=${${pkg_name}_BASE_DIR} ..
|
||||
WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}/_build)
|
||||
|
||||
__exec_cmd(COMMAND ${CMAKE_COMMAND} --build . --target install -- -j${THNUM}
|
||||
__exec_cmd(COMMAND ${CMAKE_COMMAND} --build . --target install -- -j8
|
||||
WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}/_build)
|
||||
|
||||
else()
|
||||
|
@ -388,7 +289,7 @@ function(mindspore_add_pkg pkg_name )
|
|||
${${pkg_name}_MAKE_CFLAGS} ${${pkg_name}_MAKE_CXXFLAGS} ${${pkg_name}_MAKE_LDFLAGS})
|
||||
endif ()
|
||||
# build
|
||||
__exec_cmd(COMMAND ${CMAKE_MAKE_PROGRAM} ${${pkg_name}_BUILD_OPTION} -j${THNUM}
|
||||
__exec_cmd(COMMAND ${CMAKE_MAKE_PROGRAM} ${${pkg_name}_BUILD_OPTION} -j8
|
||||
WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR})
|
||||
|
||||
if (PKG_INSTALL_INCS OR PKG_INSTALL_LIBS)
|
||||
|
@ -403,7 +304,7 @@ function(mindspore_add_pkg pkg_name )
|
|||
endif()
|
||||
|
||||
if (PKG_LIBS)
|
||||
__find_pkg_then_add_target(${pkg_name} ${PKG_EXE} ${PKG_LIB_PATH} ${PKG_LIBS})
|
||||
__find_pkg_then_add_target(${pkg_name} ${PKG_EXE} ${PKG_LIBS})
|
||||
set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/include PARENT_SCOPE)
|
||||
if(NOT ${pkg_name}_LIBS)
|
||||
message(FATAL_ERROR "Can not find pkg: ${pkg_name}")
|
||||
|
|
|
@ -1,17 +0,0 @@
|
|||
{
|
||||
"DumpSettings": {
|
||||
"net_name": "ResNet50",
|
||||
"dump_mode": 1,
|
||||
"op_debug_mode": 3,
|
||||
"iteration": 0,
|
||||
"kernels": ["Default/Conv2D-op2", "Default/TensorAdd-op10"]
|
||||
},
|
||||
|
||||
"DumpSettingsSpec": {
|
||||
"net_name": "net name eg:ResNet50",
|
||||
"dump_mode": "0: dump all kernels, 1: dump kernels in kernels list",
|
||||
"op_debug_mode": "0: close debug, 1: debug ai-core overflow, 2: debug atomic overflow, 3: debug all overflow",
|
||||
"iteration": "specified iteration ",
|
||||
"kernels": "op's full scope name which need to be dump"
|
||||
}
|
||||
}
|
|
@ -6,17 +6,17 @@
|
|||
"net_name": "ResNet50",
|
||||
"mode": 0,
|
||||
"iteration": 0,
|
||||
"kernels": ["Default/Conv2D-op2", "Default/TensorAdd-op10"]
|
||||
"kernels": ["TensorAdd"]
|
||||
},
|
||||
|
||||
"DumpSettingsSpec": {
|
||||
"enable": "true: dump enable, false: dump disable",
|
||||
"trans_flag": "true: trans to host format, false: not trans format",
|
||||
"enable": "true: dump enable false: dump disable",
|
||||
"trans_flag": "true: trans to host format,false: not trans format",
|
||||
"path": "the dump file folder",
|
||||
"net_name": "net name eg:ResNet50",
|
||||
"mode": "0: dump all kernels, 1: dump kernels in kernels list",
|
||||
"iteration": "0: all iteration, others: specified iteration ",
|
||||
"kernels": "op's full scope name which need to be dump"
|
||||
"mode": "0: dump all kernels 1: dump kernels in kernels list",
|
||||
"iteration": "0: all iteration others: specified iteration ",
|
||||
"kernels": "kernel name list need to be dump"
|
||||
},
|
||||
"other": {}
|
||||
}
|
|
@ -6,17 +6,17 @@
|
|||
"net_name": "ResNet50",
|
||||
"mode": 0,
|
||||
"iteration": 0,
|
||||
"kernels": ["Default/Conv2D-op2", "Default/TensorAdd-op10"]
|
||||
"kernels": ["AllReduce","BiasAddGrad","Conv2DBackpropFilter","SparseSoftmaxCrossEntropyWithLogits"]
|
||||
},
|
||||
|
||||
"DumpSettingsSpec": {
|
||||
"enable": "true: dump enable, false: dump disable",
|
||||
"trans_flag": "true: trans to host format, false: not trans format",
|
||||
"enable": "true: dump enable false: dump disable",
|
||||
"trans_flag": "true: trans to host format,false: not trans format",
|
||||
"path": "the dump file folder",
|
||||
"net_name": "net name eg:ResNet50",
|
||||
"mode": "0: dump all kernels, 1: dump kernels in kernels list",
|
||||
"iteration": "0: all iteration, others: specified iteration ",
|
||||
"kernels": "op's full scope name which need to be dump"
|
||||
"mode": "0: dump all kernels 1: dump kernels in kernels list",
|
||||
"iteration": "0: all iteration others: specified iteration ",
|
||||
"kernels": "kernel name list need to be dump"
|
||||
},
|
||||
"other": {}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,17 +6,17 @@
|
|||
"net_name": "ResNet50",
|
||||
"mode": 0,
|
||||
"iteration": 0,
|
||||
"kernels": ["Default/Conv2D-op2", "Default/TensorAdd-op10"]
|
||||
"kernels": ["AllReduce","BiasAddGrad","Conv2DBackpropFilter","SparseSoftmaxCrossEntropyWithLogits"]
|
||||
},
|
||||
|
||||
"DumpSettingsSpec": {
|
||||
"enable": "true: dump enable, false: dump disable",
|
||||
"trans_flag": "true: trans to host format, false: not trans format",
|
||||
"enable": "true: dump enable false: dump disable",
|
||||
"trans_flag": "true: trans to host format,false: not trans format",
|
||||
"path": "the dump file folder",
|
||||
"net_name": "net name eg:ResNet50",
|
||||
"mode": "0: dump all kernels, 1: dump kernels in kernels list",
|
||||
"iteration": "0: all iteration, others: specified iteration ",
|
||||
"kernels": "op's full scope name which need to be dump"
|
||||
"mode": "0: dump all kernels 1: dump kernels in kernels list",
|
||||
"iteration": "0: all iteration others: specified iteration ",
|
||||
"kernels": "kernel name list need to be dump"
|
||||
},
|
||||
"other": {}
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -81,8 +81,6 @@ function checkopts()
|
|||
# check options
|
||||
checkopts "$@"
|
||||
|
||||
CUR_PATH=$(pwd)
|
||||
cd "`dirname $0`/.."
|
||||
|
||||
cd build/mindspore/
|
||||
make -j8
|
||||
|
@ -120,5 +118,3 @@ if [[ "${mode}" == "${MODE_DBG}" || "${mode}" == "${MODE_ALL}" ]]; then
|
|||
echo "MS_IR_FILE=$(pwd)/anf_ir_file.dbg MS_IR_PATH=$(pwd)/pkl_objs.dbg/ pytest -s ${UT_NAME}"
|
||||
MS_IR_FILE=$(pwd)/anf_ir_file.dbg MS_IR_PATH=$(pwd)/pkl_objs.dbg/ pytest -s "${UT_NAME}"
|
||||
fi
|
||||
|
||||
cd $CUR_PATH
|
|
@ -1,16 +0,0 @@
|
|||
## MindSpore Dockerfile Repository
|
||||
|
||||
This folder hosts all the `Dockerfile` to build MindSpore container images with various hardware platforms.
|
||||
|
||||
### MindSpore docker build command
|
||||
|
||||
| Hardware Platform | Version | Build Command |
|
||||
| :---------------- | :------ | :------------ |
|
||||
| CPU | `x.y.z` | cd mindspore-cpu/x.y.z && docker build . -t mindspore/mindspore-cpu:x.y.z |
|
||||
| | `devel` | cd mindspore-cpu/devel && docker build . -t mindspore/mindspore-cpu:devel |
|
||||
| | `runtime` | cd mindspore-cpu/runtime && docker build . -t mindspore/mindspore-cpu:runtime |
|
||||
| GPU | `x.y.z` | cd mindspore-gpu/x.y.z && docker build . -t mindspore/mindspore-gpu:x.y.z |
|
||||
| | `devel` | cd mindspore-gpu/devel && docker build . -t mindspore/mindspore-gpu:devel |
|
||||
| | `runtime` | cd mindspore-gpu/runtime && docker build . -t mindspore/mindspore-gpu:runtime |
|
||||
|
||||
> **NOTICE:** The `x.y.z` version shown above should be replaced with the real version number.
|
|
@ -1,67 +0,0 @@
|
|||
FROM ubuntu:18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV PATH /usr/local/bin:$PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install MindSpore cpu whl package
|
||||
RUN pip install --no-cache-dir https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/cpu/ubuntu-x86/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl
|
|
@ -1,67 +0,0 @@
|
|||
FROM ubuntu:18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV PATH /usr/local/bin:$PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install MindSpore cpu whl package
|
||||
RUN pip install --no-cache-dir https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/cpu/x86_ubuntu/mindspore-0.2.0-cp37-cp37m-linux_x86_64.whl
|
|
@ -1,67 +0,0 @@
|
|||
FROM ubuntu:18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV PATH /usr/local/bin:$PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install MindSpore cpu whl package
|
||||
RUN pip install --no-cache-dir https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/cpu/ubuntu_x86/mindspore-0.3.0-cp37-cp37m-linux_x86_64.whl
|
|
@ -1,67 +0,0 @@
|
|||
FROM ubuntu:18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV PATH /usr/local/bin:$PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install MindSpore cpu whl package
|
||||
RUN pip install --no-cache-dir https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/cpu/ubuntu_x86/mindspore-0.5.0-cp37-cp37m-linux_x86_64.whl
|
|
@ -1,73 +0,0 @@
|
|||
FROM ubuntu:18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV CMAKE_ROOT_PATH /usr/local/cmake-3.14.1
|
||||
ENV PATH ${PYTHON_ROOT_PATH}/bin:${CMAKE_ROOT_PATH}/bin:/usr/local/bin:$PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf \
|
||||
&& pip install --no-cache-dir wheel
|
||||
|
||||
# Install cmake (v3.14.1)
|
||||
RUN cd /tmp \
|
||||
&& wget https://github.com/Kitware/CMake/releases/download/v3.14.1/cmake-3.14.1-Linux-x86_64.sh \
|
||||
&& mkdir -p ${CMAKE_ROOT_PATH} \
|
||||
&& bash ./cmake-3.14.1-Linux-x86_64.sh --prefix=${CMAKE_ROOT_PATH} --exclude-subdir --skip-license \
|
||||
&& rm -f /tmp/cmake-3.14.1-Linux-x86_64.sh
|
|
@ -1,64 +0,0 @@
|
|||
FROM ubuntu:18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV PATH /usr/local/bin:$PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
|
@ -1,83 +0,0 @@
|
|||
FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV OMPI_ROOT_PATH /usr/local/openmpi-3.1.5
|
||||
ENV PATH ${OMPI_ROOT_PATH}/bin:/usr/local/bin:$PATH
|
||||
ENV LD_LIBRARY_PATH ${OMPI_ROOT_PATH}/lib:$LD_LIBRARY_PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex \
|
||||
libnccl2=2.4.8-1+cuda10.1 \
|
||||
libnccl-dev=2.4.8-1+cuda10.1
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install openmpi (v3.1.5)
|
||||
RUN cd /tmp \
|
||||
&& wget https://download.open-mpi.org/release/open-mpi/v3.1/openmpi-3.1.5.tar.gz \
|
||||
&& tar -xvf openmpi-3.1.5.tar.gz \
|
||||
&& cd /tmp/openmpi-3.1.5 \
|
||||
&& mkdir -p ${OMPI_ROOT_PATH} \
|
||||
&& ./configure --prefix=${OMPI_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -rf /tmp/openmpi-3.1.5 \
|
||||
&& rm -f /tmp/openmpi-3.1.5.tar.gz
|
||||
|
||||
# Install MindSpore cuda-10.1 whl package
|
||||
RUN pip install --no-cache-dir https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.1.0-alpha/MindSpore/gpu/cuda-10.1/mindspore-0.1.0-cp37-cp37m-linux_x86_64.whl
|
|
@ -1,83 +0,0 @@
|
|||
FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV OMPI_ROOT_PATH /usr/local/openmpi-3.1.5
|
||||
ENV PATH ${OMPI_ROOT_PATH}/bin:/usr/local/bin:$PATH
|
||||
ENV LD_LIBRARY_PATH ${OMPI_ROOT_PATH}/lib:$LD_LIBRARY_PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex \
|
||||
libnccl2=2.4.8-1+cuda10.1 \
|
||||
libnccl-dev=2.4.8-1+cuda10.1
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install openmpi (v3.1.5)
|
||||
RUN cd /tmp \
|
||||
&& wget https://download.open-mpi.org/release/open-mpi/v3.1/openmpi-3.1.5.tar.gz \
|
||||
&& tar -xvf openmpi-3.1.5.tar.gz \
|
||||
&& cd /tmp/openmpi-3.1.5 \
|
||||
&& mkdir -p ${OMPI_ROOT_PATH} \
|
||||
&& ./configure --prefix=${OMPI_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -rf /tmp/openmpi-3.1.5 \
|
||||
&& rm -f /tmp/openmpi-3.1.5.tar.gz
|
||||
|
||||
# Install MindSpore cuda-10.1 whl package
|
||||
RUN pip install --no-cache-dir https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.2.0-alpha/MindSpore/gpu/cuda-10.1/mindspore_gpu-0.2.0-cp37-cp37m-linux_x86_64.whl
|
|
@ -1,83 +0,0 @@
|
|||
FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV OMPI_ROOT_PATH /usr/local/openmpi-3.1.5
|
||||
ENV PATH ${OMPI_ROOT_PATH}/bin:/usr/local/bin:$PATH
|
||||
ENV LD_LIBRARY_PATH ${OMPI_ROOT_PATH}/lib:$LD_LIBRARY_PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex \
|
||||
libnccl2=2.4.8-1+cuda10.1 \
|
||||
libnccl-dev=2.4.8-1+cuda10.1
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install openmpi (v3.1.5)
|
||||
RUN cd /tmp \
|
||||
&& wget https://download.open-mpi.org/release/open-mpi/v3.1/openmpi-3.1.5.tar.gz \
|
||||
&& tar -xvf openmpi-3.1.5.tar.gz \
|
||||
&& cd /tmp/openmpi-3.1.5 \
|
||||
&& mkdir -p ${OMPI_ROOT_PATH} \
|
||||
&& ./configure --prefix=${OMPI_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -rf /tmp/openmpi-3.1.5 \
|
||||
&& rm -f /tmp/openmpi-3.1.5.tar.gz
|
||||
|
||||
# Install MindSpore cuda-10.1 whl package
|
||||
RUN pip install --no-cache-dir https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.3.0-alpha/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.3.0-cp37-cp37m-linux_x86_64.whl
|
|
@ -1,83 +0,0 @@
|
|||
FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV OMPI_ROOT_PATH /usr/local/openmpi-3.1.5
|
||||
ENV PATH ${OMPI_ROOT_PATH}/bin:/usr/local/bin:$PATH
|
||||
ENV LD_LIBRARY_PATH ${OMPI_ROOT_PATH}/lib:$LD_LIBRARY_PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex \
|
||||
libnccl2=2.4.8-1+cuda10.1 \
|
||||
libnccl-dev=2.4.8-1+cuda10.1
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install openmpi (v3.1.5)
|
||||
RUN cd /tmp \
|
||||
&& wget https://download.open-mpi.org/release/open-mpi/v3.1/openmpi-3.1.5.tar.gz \
|
||||
&& tar -xvf openmpi-3.1.5.tar.gz \
|
||||
&& cd /tmp/openmpi-3.1.5 \
|
||||
&& mkdir -p ${OMPI_ROOT_PATH} \
|
||||
&& ./configure --prefix=${OMPI_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -rf /tmp/openmpi-3.1.5 \
|
||||
&& rm -f /tmp/openmpi-3.1.5.tar.gz
|
||||
|
||||
# Install MindSpore cuda-10.1 whl package
|
||||
RUN pip install --no-cache-dir https://ms-release.obs.cn-north-4.myhuaweicloud.com/0.5.0-beta/MindSpore/gpu/ubuntu_x86/cuda-10.1/mindspore_gpu-0.5.0-cp37-cp37m-linux_x86_64.whl
|
|
@ -1,76 +0,0 @@
|
|||
FROM nvidia/cuda:10.1-cudnn7-devel-ubuntu18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV CMAKE_ROOT_PATH /usr/local/cmake-3.14.1
|
||||
ENV PATH ${CMAKE_ROOT_PATH}/bin:/usr/local/bin:$PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex
|
||||
|
||||
# Configure cuDNN (v7.6.5)
|
||||
RUN ln -s /usr/lib/x86_64-linux-gnu/libcudnn.so.7.6.5 /usr/local/cuda/lib64/libcudnn.so
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf \
|
||||
&& pip install --no-cache-dir wheel
|
||||
|
||||
# Install cmake (v3.14.1)
|
||||
RUN cd /tmp \
|
||||
&& wget https://github.com/Kitware/CMake/releases/download/v3.14.1/cmake-3.14.1-Linux-x86_64.sh \
|
||||
&& mkdir -p ${CMAKE_ROOT_PATH} \
|
||||
&& bash ./cmake-3.14.1-Linux-x86_64.sh --prefix=${CMAKE_ROOT_PATH} --exclude-subdir --skip-license \
|
||||
&& rm -f /tmp/cmake-3.14.1-Linux-x86_64.sh
|
|
@ -1,80 +0,0 @@
|
|||
FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
|
||||
|
||||
MAINTAINER leonwanghui <leon.wanghui@huawei.com>
|
||||
|
||||
# Set env
|
||||
ENV PYTHON_ROOT_PATH /usr/local/python-3.7.5
|
||||
ENV OMPI_ROOT_PATH /usr/local/openmpi-3.1.5
|
||||
ENV PATH ${OMPI_ROOT_PATH}/bin:/usr/local/bin:$PATH
|
||||
ENV LD_LIBRARY_PATH ${OMPI_ROOT_PATH}/lib:$LD_LIBRARY_PATH
|
||||
|
||||
# Install base tools
|
||||
RUN apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
vim \
|
||||
wget \
|
||||
curl \
|
||||
xz-utils \
|
||||
net-tools \
|
||||
openssh-client \
|
||||
git \
|
||||
ntpdate \
|
||||
tzdata \
|
||||
tcl \
|
||||
sudo \
|
||||
bash-completion
|
||||
|
||||
# Install compile tools
|
||||
RUN DEBIAN_FRONTEND=noninteractive apt install -y \
|
||||
gcc \
|
||||
g++ \
|
||||
zlibc \
|
||||
make \
|
||||
libgmp-dev \
|
||||
patch \
|
||||
autoconf \
|
||||
libtool \
|
||||
automake \
|
||||
flex \
|
||||
libnccl2=2.4.8-1+cuda10.1 \
|
||||
libnccl-dev=2.4.8-1+cuda10.1
|
||||
|
||||
# Set bash
|
||||
RUN echo "dash dash/sh boolean false" | debconf-set-selections
|
||||
RUN DEBIAN_FRONTEND=noninteractive dpkg-reconfigure dash
|
||||
|
||||
# Install python (v3.7.5)
|
||||
RUN apt install -y libffi-dev libssl-dev zlib1g-dev libbz2-dev libncurses5-dev \
|
||||
libgdbm-dev libgdbm-compat-dev liblzma-dev libreadline-dev libsqlite3-dev \
|
||||
&& cd /tmp \
|
||||
&& wget https://github.com/python/cpython/archive/v3.7.5.tar.gz \
|
||||
&& tar -xvf v3.7.5.tar.gz \
|
||||
&& cd /tmp/cpython-3.7.5 \
|
||||
&& mkdir -p ${PYTHON_ROOT_PATH} \
|
||||
&& ./configure --prefix=${PYTHON_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -f /usr/local/bin/python \
|
||||
&& rm -f /usr/local/bin/pip \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/python3.7 /usr/local/bin/python \
|
||||
&& ln -s ${PYTHON_ROOT_PATH}/bin/pip3.7 /usr/local/bin/pip \
|
||||
&& rm -rf /tmp/cpython-3.7.5 \
|
||||
&& rm -f /tmp/v3.7.5.tar.gz
|
||||
|
||||
# Set pip source
|
||||
RUN mkdir -pv /root/.pip \
|
||||
&& echo "[global]" > /root/.pip/pip.conf \
|
||||
&& echo "trusted-host=mirrors.aliyun.com" >> /root/.pip/pip.conf \
|
||||
&& echo "index-url=http://mirrors.aliyun.com/pypi/simple/" >> /root/.pip/pip.conf
|
||||
|
||||
# Install openmpi (v3.1.5)
|
||||
RUN cd /tmp \
|
||||
&& wget https://download.open-mpi.org/release/open-mpi/v3.1/openmpi-3.1.5.tar.gz \
|
||||
&& tar -xvf openmpi-3.1.5.tar.gz \
|
||||
&& cd /tmp/openmpi-3.1.5 \
|
||||
&& mkdir -p ${OMPI_ROOT_PATH} \
|
||||
&& ./configure --prefix=${OMPI_ROOT_PATH} \
|
||||
&& make -j4 \
|
||||
&& make install -j4 \
|
||||
&& rm -rf /tmp/openmpi-3.1.5 \
|
||||
&& rm -f /tmp/openmpi-3.1.5.tar.gz
|
Binary file not shown.
Before Width: | Height: | Size: 35 KiB After Width: | Height: | Size: 121 KiB |
|
@ -0,0 +1,55 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
"""
|
||||
network config setting, will be used in train.py
|
||||
"""
|
||||
|
||||
from easydict import EasyDict as edict
|
||||
import mindspore.common.dtype as mstype
|
||||
from mindspore.model_zoo.Bert_NEZHA import BertConfig
|
||||
bert_train_cfg = edict({
|
||||
'epoch_size': 10,
|
||||
'num_warmup_steps': 0,
|
||||
'start_learning_rate': 1e-4,
|
||||
'end_learning_rate': 0.0,
|
||||
'decay_steps': 1000,
|
||||
'power': 10.0,
|
||||
'save_checkpoint_steps': 2000,
|
||||
'keep_checkpoint_max': 10,
|
||||
'checkpoint_prefix': "checkpoint_bert",
|
||||
'DATA_DIR': "/your/path/examples.tfrecord",
|
||||
'SCHEMA_DIR': "/your/path/datasetSchema.json"
|
||||
})
|
||||
bert_net_cfg = BertConfig(
|
||||
batch_size=16,
|
||||
seq_length=128,
|
||||
vocab_size=21136,
|
||||
hidden_size=1024,
|
||||
num_hidden_layers=24,
|
||||
num_attention_heads=16,
|
||||
intermediate_size=4096,
|
||||
hidden_act="gelu",
|
||||
hidden_dropout_prob=0.0,
|
||||
attention_probs_dropout_prob=0.0,
|
||||
max_position_embeddings=512,
|
||||
type_vocab_size=2,
|
||||
initializer_range=0.02,
|
||||
use_relative_positions=True,
|
||||
input_mask_from_dataset=True,
|
||||
token_type_ids_from_dataset=True,
|
||||
dtype=mstype.float32,
|
||||
compute_type=mstype.float16,
|
||||
)
|
|
@ -0,0 +1,98 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
"""
|
||||
NEZHA (NEural contextualiZed representation for CHinese lAnguage understanding) is the Chinese pretrained language model currently based on BERT developed by Huawei.
|
||||
1. Prepare data
|
||||
Following the data preparation as in BERT, run command as below to get dataset for training:
|
||||
python ./create_pretraining_data.py \
|
||||
--input_file=./sample_text.txt \
|
||||
--output_file=./examples.tfrecord \
|
||||
--vocab_file=./your/path/vocab.txt \
|
||||
--do_lower_case=True \
|
||||
--max_seq_length=128 \
|
||||
--max_predictions_per_seq=20 \
|
||||
--masked_lm_prob=0.15 \
|
||||
--random_seed=12345 \
|
||||
--dupe_factor=5
|
||||
2. Pretrain
|
||||
First, prepare the distributed training environment, then adjust configurations in config.py, finally run train.py.
|
||||
"""
|
||||
|
||||
import os
|
||||
import pytest
|
||||
import numpy as np
|
||||
from numpy import allclose
|
||||
from config import bert_train_cfg, bert_net_cfg
|
||||
import mindspore.common.dtype as mstype
|
||||
import mindspore.dataset.engine.datasets as de
|
||||
import mindspore._c_dataengine as deMap
|
||||
from mindspore import context
|
||||
from mindspore.common.tensor import Tensor
|
||||
from mindspore.train.model import Model
|
||||
from mindspore.train.callback import Callback, ModelCheckpoint, CheckpointConfig, LossMonitor
|
||||
from mindspore.model_zoo.Bert_NEZHA import BertConfig, BertNetworkWithLoss, BertTrainOneStepCell
|
||||
from mindspore.nn.optim import Lamb
|
||||
from mindspore import log as logger
|
||||
_current_dir = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
def create_train_dataset(batch_size):
|
||||
"""create train dataset"""
|
||||
# apply repeat operations
|
||||
repeat_count = bert_train_cfg.epoch_size
|
||||
ds = de.StorageDataset([bert_train_cfg.DATA_DIR], bert_train_cfg.SCHEMA_DIR, columns_list=["input_ids", "input_mask", "segment_ids",
|
||||
"next_sentence_labels", "masked_lm_positions",
|
||||
"masked_lm_ids", "masked_lm_weights"])
|
||||
type_cast_op = deMap.TypeCastOp("int32")
|
||||
ds = ds.map(input_columns="masked_lm_ids", operations=type_cast_op)
|
||||
ds = ds.map(input_columns="masked_lm_positions", operations=type_cast_op)
|
||||
ds = ds.map(input_columns="next_sentence_labels", operations=type_cast_op)
|
||||
ds = ds.map(input_columns="segment_ids", operations=type_cast_op)
|
||||
ds = ds.map(input_columns="input_mask", operations=type_cast_op)
|
||||
ds = ds.map(input_columns="input_ids", operations=type_cast_op)
|
||||
# apply batch operations
|
||||
ds = ds.batch(batch_size, drop_remainder=True)
|
||||
ds = ds.repeat(repeat_count)
|
||||
return ds
|
||||
|
||||
|
||||
def weight_variable(shape):
|
||||
"""weight variable"""
|
||||
np.random.seed(1)
|
||||
ones = np.random.uniform(-0.1, 0.1, size=shape).astype(np.float32)
|
||||
return Tensor(ones)
|
||||
|
||||
def train_bert():
|
||||
"""train bert"""
|
||||
context.set_context(mode=context.GRAPH_MODE)
|
||||
context.set_context(device_target="Ascend")
|
||||
context.set_context(enable_task_sink=True)
|
||||
context.set_context(enable_loop_sink=True)
|
||||
context.set_context(enable_mem_reuse=True)
|
||||
ds = create_train_dataset(bert_net_cfg.batch_size)
|
||||
netwithloss = BertNetworkWithLoss(bert_net_cfg, True)
|
||||
optimizer = Lamb(netwithloss.trainable_params(), decay_steps=bert_train_cfg.decay_steps,
|
||||
start_learning_rate=bert_train_cfg.start_learning_rate, end_learning_rate=bert_train_cfg.end_learning_rate,
|
||||
power=bert_train_cfg.power, warmup_steps=bert_train_cfg.num_warmup_steps, decay_filter=lambda x: False)
|
||||
netwithgrads = BertTrainOneStepCell(netwithloss, optimizer=optimizer)
|
||||
netwithgrads.set_train(True)
|
||||
model = Model(netwithgrads)
|
||||
config_ck = CheckpointConfig(save_checkpoint_steps=bert_train_cfg.save_checkpoint_steps,
|
||||
keep_checkpoint_max=bert_train_cfg.keep_checkpoint_max)
|
||||
ckpoint_cb = ModelCheckpoint(prefix=bert_train_cfg.checkpoint_prefix, config=config_ck)
|
||||
model.train(ds.get_repeat_count(), ds, callbacks=[LossMonitor(), ckpoint_cb], dataset_sink_mode=False)
|
||||
|
||||
if __name__ == '__main__':
|
||||
train_bert()
|
|
@ -0,0 +1,31 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
network config setting, will be used in main.py
|
||||
"""
|
||||
from easydict import EasyDict as edict
|
||||
|
||||
alexnet_cfg = edict({
|
||||
'num_classes': 10,
|
||||
'learning_rate': 0.002,
|
||||
'momentum': 0.9,
|
||||
'epoch_size': 1,
|
||||
'batch_size': 32,
|
||||
'buffer_size': 1000,
|
||||
'image_height': 227,
|
||||
'image_width': 227,
|
||||
'save_checkpoint_steps': 1562,
|
||||
'keep_checkpoint_max': 10,
|
||||
})
|
|
@ -0,0 +1,56 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
AlexNet example tutorial
|
||||
Usage:
|
||||
python alexnet.py
|
||||
with --device_target=GPU: After 20 epoch training, the accuracy is up to 80%
|
||||
"""
|
||||
|
||||
import mindspore.dataset as ds
|
||||
import mindspore.dataset.transforms.c_transforms as C
|
||||
import mindspore.dataset.transforms.vision.c_transforms as CV
|
||||
from config import alexnet_cfg as cfg
|
||||
from mindspore import context
|
||||
from mindspore.common import dtype as mstype
|
||||
|
||||
|
||||
def create_dataset(data_path, batch_size=32, repeat_size=1):
|
||||
"""
|
||||
create dataset for train or test
|
||||
"""
|
||||
cifar_ds = ds.Cifar10Dataset(data_path)
|
||||
rescale = 1.0 / 255.0
|
||||
shift = 0.0
|
||||
|
||||
resize_op = CV.Resize((cfg.image_height, cfg.image_width))
|
||||
rescale_op = CV.Rescale(rescale, shift)
|
||||
normalize_op = CV.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
|
||||
random_crop_op = CV.RandomCrop([32, 32], [4, 4, 4, 4])
|
||||
random_horizontal_op = CV.RandomHorizontalFlip()
|
||||
channel_swap_op = CV.HWC2CHW()
|
||||
typecast_op = C.TypeCast(mstype.int32)
|
||||
cifar_ds = cifar_ds.map(input_columns="label", operations=typecast_op)
|
||||
cifar_ds = cifar_ds.map(input_columns="image", operations=random_crop_op)
|
||||
cifar_ds = cifar_ds.map(input_columns="image", operations=random_horizontal_op)
|
||||
cifar_ds = cifar_ds.map(input_columns="image", operations=resize_op)
|
||||
cifar_ds = cifar_ds.map(input_columns="image", operations=rescale_op)
|
||||
cifar_ds = cifar_ds.map(input_columns="image", operations=normalize_op)
|
||||
cifar_ds = cifar_ds.map(input_columns="image", operations=channel_swap_op)
|
||||
|
||||
cifar_ds = cifar_ds.shuffle(buffer_size=cfg.buffer_size)
|
||||
cifar_ds = cifar_ds.repeat(repeat_size)
|
||||
cifar_ds = cifar_ds.batch(batch_size, drop_remainder=True)
|
||||
return cifar_ds
|
|
@ -0,0 +1,56 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
AlexNet example tutorial
|
||||
Usage:
|
||||
python alexnet.py
|
||||
with --device_target=GPU: After 20 epoch training, the accuracy is up to 80%
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import mindspore.nn as nn
|
||||
from config import alexnet_cfg as cfg
|
||||
from mindspore.model_zoo.alexnet import AlexNet
|
||||
from dataset import create_dataset
|
||||
from mindspore import context
|
||||
from mindspore.train.serialization import load_checkpoint, load_param_into_net
|
||||
from mindspore.train import Model
|
||||
from mindspore.nn.metrics import Accuracy
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description='MindSpore AlexNet Example')
|
||||
parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU'],
|
||||
help='device where the code will be implemented (default: Ascend)')
|
||||
parser.add_argument('--data_path', type=str, default="./", help='path where the dataset is saved')
|
||||
parser.add_argument('--ckpt_path', type=str, default="./ckpt", help='if is test, must provide\
|
||||
path where the trained ckpt file')
|
||||
parser.add_argument('--dataset_sink_mode', type=bool, default=False, help='dataset_sink_mode is False or True')
|
||||
args = parser.parse_args()
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target, enable_mem_reuse=False)
|
||||
|
||||
network = AlexNet(cfg.num_classes)
|
||||
loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
|
||||
repeat_size = cfg.epoch_size
|
||||
opt = nn.Momentum(network.trainable_params(), cfg.learning_rate, cfg.momentum)
|
||||
model = Model(network, loss, opt, metrics={"Accuracy": Accuracy()}) # test
|
||||
|
||||
print("============== Starting Testing ==============")
|
||||
param_dict = load_checkpoint(args.ckpt_path)
|
||||
load_param_into_net(network, param_dict)
|
||||
ds_eval = create_dataset(args.data_path)
|
||||
acc = model.eval(ds_eval, dataset_sink_mode=args.dataset_sink_mode)
|
||||
print("============== Accuracy:{} ==============".format(acc))
|
|
@ -0,0 +1,59 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
AlexNet example tutorial
|
||||
Usage:
|
||||
python alexnet.py
|
||||
with --device_target=GPU: After 20 epoch training, the accuracy is up to 80%
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import mindspore.nn as nn
|
||||
from config import alexnet_cfg as cfg
|
||||
from mindspore.model_zoo.alexnet import AlexNet
|
||||
from dataset import create_dataset
|
||||
from mindspore import context
|
||||
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
|
||||
from mindspore.train import Model
|
||||
from mindspore.nn.metrics import Accuracy
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(description='MindSpore AlexNet Example')
|
||||
parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU'],
|
||||
help='device where the code will be implemented (default: Ascend)')
|
||||
parser.add_argument('--data_path', type=str, default="./", help='path where the dataset is saved')
|
||||
parser.add_argument('--ckpt_path', type=str, default="./ckpt", help='if is test, must provide\
|
||||
path where the trained ckpt file')
|
||||
parser.add_argument('--dataset_sink_mode', type=bool, default=False, help='dataset_sink_mode is False or True')
|
||||
args = parser.parse_args()
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target, enable_mem_reuse=False)
|
||||
|
||||
network = AlexNet(cfg.num_classes)
|
||||
loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
|
||||
repeat_size = cfg.epoch_size
|
||||
opt = nn.Momentum(network.trainable_params(), cfg.learning_rate, cfg.momentum)
|
||||
model = Model(network, loss, opt, metrics={"Accuracy": Accuracy()}) # test
|
||||
|
||||
print("============== Starting Training ==============")
|
||||
ds_train = create_dataset(args.data_path,
|
||||
cfg.batch_size,
|
||||
repeat_size)
|
||||
config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps,
|
||||
keep_checkpoint_max=cfg.keep_checkpoint_max)
|
||||
ckpoint_cb = ModelCheckpoint(prefix="checkpoint_alexnet", directory=args.ckpt_path, config=config_ck)
|
||||
model.train(cfg.epoch_size, ds_train, callbacks=[ckpoint_cb, LossMonitor()],
|
||||
dataset_sink_mode=args.dataset_sink_mode)
|
|
@ -0,0 +1,31 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
network config setting, will be used in main.py
|
||||
"""
|
||||
from easydict import EasyDict as edict
|
||||
|
||||
# Hyper-parameters for the LeNet/MNIST example, consumed by main.py.
# Kept in an EasyDict so callers can use dot access (cfg.batch_size).
_MNIST_SETTINGS = {
    'num_classes': 10,             # digits 0-9
    'lr': 0.01,                    # learning rate for Momentum optimizer
    'momentum': 0.9,
    'epoch_size': 1,
    'batch_size': 32,
    'buffer_size': 1000,
    'image_height': 32,            # LeNet input resolution
    'image_width': 32,
    'save_checkpoint_steps': 1875,
    'keep_checkpoint_max': 10,
}

mnist_cfg = edict(_MNIST_SETTINGS)
|
|
@ -0,0 +1,59 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
Produce the dataset
|
||||
"""
|
||||
import mindspore.dataset as ds
|
||||
import mindspore.dataset.transforms.vision.c_transforms as CV
|
||||
import mindspore.dataset.transforms.c_transforms as C
|
||||
from mindspore.dataset.transforms.vision import Inter
|
||||
from mindspore.common import dtype as mstype
|
||||
|
||||
|
||||
def create_dataset(data_path, batch_size=32, repeat_size=1,
                   num_parallel_workers=1):
    """Build the MNIST input pipeline used for both training and evaluation.

    Args:
        data_path (str): directory containing the raw MNIST files.
        batch_size (int): samples per batch. Default: 32.
        repeat_size (int): number of times the dataset is repeated. Default: 1.
        num_parallel_workers (int): parallel workers per map op. Default: 1.

    Returns:
        A shuffled, batched and repeated MindSpore dataset of (image, label).
    """
    mnist_ds = ds.MnistDataset(data_path)

    # Target geometry and normalisation constants (MNIST mean 0.1307, std 0.3081).
    target_size = (32, 32)
    scale, shift = 1.0 / 255.0, 0.0
    scale_nml, shift_nml = 1 / 0.3081, -1 * 0.1307 / 0.3081

    # Labels are cast to int32 first, as required by the loss function.
    mnist_ds = mnist_ds.map(input_columns="label",
                            operations=C.TypeCast(mstype.int32),
                            num_parallel_workers=num_parallel_workers)

    # Image transforms, applied in order:
    # resize (bilinear) -> rescale to [0, 1] -> normalise -> HWC to CHW layout.
    image_ops = [
        CV.Resize(target_size, interpolation=Inter.LINEAR),
        CV.Rescale(scale, shift),
        CV.Rescale(scale_nml, shift_nml),
        CV.HWC2CHW(),
    ]
    for image_op in image_ops:
        mnist_ds = mnist_ds.map(input_columns="image", operations=image_op,
                                num_parallel_workers=num_parallel_workers)

    # Same 10000-sample shuffle buffer as the LeNet train script; drop the
    # ragged final batch so every step sees a full batch.
    mnist_ds = mnist_ds.shuffle(buffer_size=10000)
    mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)
    mnist_ds = mnist_ds.repeat(repeat_size)

    return mnist_ds
|
|
@ -0,0 +1,61 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
######################## train and test lenet example ########################
|
||||
test lenet according to model file:
|
||||
python main.py --data_path /YourDataPath --ckpt_path Your.ckpt
|
||||
"""
|
||||
import os
|
||||
import argparse
|
||||
import mindspore.nn as nn
|
||||
from dataset import create_dataset
|
||||
from config import mnist_cfg as cfg
|
||||
from mindspore.model_zoo.lenet import LeNet5
|
||||
from mindspore import context, Tensor
|
||||
from mindspore.train.serialization import load_checkpoint, load_param_into_net
|
||||
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig
|
||||
from mindspore.train import Model
|
||||
from mindspore.nn.metrics import Accuracy
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Evaluate a trained LeNet5 checkpoint on the MNIST test split.
    parser = argparse.ArgumentParser(description='MindSpore MNIST Example')
    parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'],
                        help='device where the code will be implemented (default: Ascend)')
    parser.add_argument('--data_path', type=str, default="./MNIST_Data",
                        help='path where the dataset is saved')
    parser.add_argument('--ckpt_path', type=str, default="", help='if mode is test, must provide\
                        path where the trained ckpt file')
    # NOTE(review): argparse `type=bool` converts any non-empty string to True
    # (bool("False") is True), so only the default is reliable from the CLI --
    # confirm callers rely on the default only.
    parser.add_argument('--dataset_sink_mode', type=bool, default=False, help='dataset_sink_mode is False or True')

    args = parser.parse_args()

    # Static graph mode on the selected backend.
    context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target, enable_mem_reuse=False)

    # Network, loss (softmax cross-entropy on sparse int labels, mean-reduced)
    # and optimizer; the optimizer is unused during eval but required by Model.
    network = LeNet5(cfg.num_classes)
    net_loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
    repeat_size = cfg.epoch_size
    net_opt = nn.Momentum(network.trainable_params(), cfg.lr, cfg.momentum)
    config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps,
                                 keep_checkpoint_max=cfg.keep_checkpoint_max)
    ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", config=config_ck)
    model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()})

    print("============== Starting Testing ==============")
    # Restore trained weights from the checkpoint, then run accuracy eval on
    # the "test" subdirectory of the dataset.
    param_dict = load_checkpoint(args.ckpt_path)
    load_param_into_net(network, param_dict)
    ds_eval = create_dataset(os.path.join(args.data_path, "test"), 32, 1)
    acc = model.eval(ds_eval, dataset_sink_mode=args.dataset_sink_mode)
    print("============== Accuracy:{} ==============".format(acc))
|
|
@ -0,0 +1,57 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
######################## train lenet example ########################
|
||||
train lenet and get network model files(.ckpt) :
|
||||
python main.py --data_path /YourDataPath
|
||||
"""
|
||||
import os
|
||||
import argparse
|
||||
import mindspore.nn as nn
|
||||
from config import mnist_cfg as cfg
|
||||
from dataset import create_dataset
|
||||
from mindspore.model_zoo.lenet import LeNet5
|
||||
from mindspore import context
|
||||
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
|
||||
from mindspore.train import Model
|
||||
from mindspore.nn.metrics import Accuracy
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Train LeNet5 on MNIST and write periodic checkpoints.
    parser = argparse.ArgumentParser(description='MindSpore MNIST Example')
    parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'],
                        help='device where the code will be implemented (default: Ascend)')
    parser.add_argument('--data_path', type=str, default="./MNIST_Data",
                        help='path where the dataset is saved')
    # NOTE(review): argparse `type=bool` converts any non-empty string to True
    # (bool("False") is True), so only the default is reliable from the CLI.
    parser.add_argument('--dataset_sink_mode', type=bool, default=False, help='dataset_sink_mode is False or True')

    args = parser.parse_args()

    # Static graph mode on the selected backend.
    context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target, enable_mem_reuse=False)

    # Network, loss (softmax cross-entropy on sparse int labels, mean-reduced)
    # and Momentum optimizer configured from config.py.
    network = LeNet5(cfg.num_classes)
    net_loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True, reduction="mean")
    repeat_size = cfg.epoch_size
    net_opt = nn.Momentum(network.trainable_params(), cfg.lr, cfg.momentum)
    # Checkpoint every `save_checkpoint_steps` steps, keeping at most
    # `keep_checkpoint_max` files on disk.
    config_ck = CheckpointConfig(save_checkpoint_steps=cfg.save_checkpoint_steps,
                                 keep_checkpoint_max=cfg.keep_checkpoint_max)
    ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", config=config_ck)
    model = Model(network, net_loss, net_opt, metrics={"Accuracy": Accuracy()})

    ds_train = create_dataset(os.path.join(args.data_path, "train"), batch_size=cfg.batch_size,
                              repeat_size=repeat_size)
    print("============== Starting Training ==============")
    # Consistency fix: use attribute access (cfg.epoch_size) like every other
    # cfg read in this script, instead of the lone cfg['epoch_size'] subscript.
    model.train(cfg.epoch_size, ds_train, callbacks=[ckpoint_cb, LossMonitor()],
                dataset_sink_mode=args.dataset_sink_mode)
|
|
@ -0,0 +1,39 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
network config setting, will be used in train.py and eval.py
|
||||
"""
|
||||
from easydict import EasyDict as ed
|
||||
|
||||
# ResNet-50 / CIFAR-10 training hyper-parameters, shared by train.py and
# eval.py. EasyDict gives dot access (config.batch_size).
_RESNET_CIFAR10_SETTINGS = {
    "class_num": 10,
    "batch_size": 32,
    "loss_scale": 1024,             # fixed loss scale for mixed precision
    "momentum": 0.9,
    "weight_decay": 1e-4,
    "epoch_size": 90,
    "buffer_size": 100,             # shuffle buffer
    "image_height": 224,
    "image_width": 224,
    "save_checkpoint": True,
    "save_checkpoint_steps": 195,
    "keep_checkpoint_max": 10,
    "save_checkpoint_path": "./",
    # Learning-rate schedule parameters (see lr_generator.get_lr).
    "lr_init": 0.01,
    "lr_end": 0.00001,
    "lr_max": 0.1,
    "warmup_epochs": 5,
    "lr_decay_mode": "poly",
}

config = ed(_RESNET_CIFAR10_SETTINGS)
|
|
@ -0,0 +1,83 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
create train or eval dataset.
|
||||
"""
|
||||
import os
|
||||
import mindspore.common.dtype as mstype
|
||||
import mindspore.dataset.engine as de
|
||||
import mindspore.dataset.transforms.vision.c_transforms as C
|
||||
import mindspore.dataset.transforms.c_transforms as C2
|
||||
from config import config
|
||||
|
||||
|
||||
def create_dataset(dataset_path, do_train, repeat_num=1, batch_size=32):
    """
    create a train or eval dataset

    Args:
        dataset_path(string): the path of dataset.
        do_train(bool): whether dataset is used for train or eval.
        repeat_num(int): the repeat times of dataset. Default: 1
        batch_size(int): the batch size of dataset. Default: 32

    Returns:
        dataset
    """
    # NOTE(review): assumes DEVICE_NUM and RANK_ID are exported by the launch
    # scripts; int(os.getenv(...)) raises TypeError if either is missing --
    # confirm every entry point sets them.
    device_num = int(os.getenv("DEVICE_NUM"))
    rank_id = int(os.getenv("RANK_ID"))

    if device_num == 1:
        ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=4, shuffle=True)
    else:
        # Shard the dataset across devices for data-parallel training.
        ds = de.Cifar10Dataset(dataset_path, num_parallel_workers=4, shuffle=True,
                               num_shards=device_num, shard_id=rank_id)

    resize_height = config.image_height
    resize_width = config.image_width
    rescale = 1.0 / 255.0
    shift = 0.0

    # define map operations
    random_crop_op = C.RandomCrop((32, 32), (4, 4, 4, 4))
    # NOTE(review): flip probability rank_id / (rank_id + 1) is rank-dependent
    # (rank 0 never flips, higher ranks flip more often) -- confirm this
    # asymmetry across devices is intentional.
    random_horizontal_flip_op = C.RandomHorizontalFlip(rank_id / (rank_id + 1))

    resize_op = C.Resize((resize_height, resize_width))
    rescale_op = C.Rescale(rescale, shift)
    # CIFAR-10 per-channel mean / std in RGB order.
    normalize_op = C.Normalize([0.4914, 0.4822, 0.4465], [0.2023, 0.1994, 0.2010])

    change_swap_op = C.HWC2CHW()

    trans = []
    if do_train:
        # Augmentations (crop + flip) are applied only on the training split.
        trans += [random_crop_op, random_horizontal_flip_op]

    trans += [resize_op, rescale_op, normalize_op, change_swap_op]

    type_cast_op = C2.TypeCast(mstype.int32)

    # Cast labels to int32, then run the image transform chain.
    ds = ds.map(input_columns="label", operations=type_cast_op)
    ds = ds.map(input_columns="image", operations=trans)

    # apply shuffle operations
    ds = ds.shuffle(buffer_size=config.buffer_size)

    # apply batch operations
    ds = ds.batch(batch_size, drop_remainder=True)

    # apply dataset repeat operation
    ds = ds.repeat(repeat_num)

    return ds
|
|
@ -0,0 +1,81 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
eval.
|
||||
"""
|
||||
import os
|
||||
import argparse
|
||||
import random
|
||||
import numpy as np
|
||||
from dataset import create_dataset
|
||||
from config import config
|
||||
from mindspore import context
|
||||
from mindspore.model_zoo.resnet import resnet50
|
||||
from mindspore.parallel._auto_parallel_context import auto_parallel_context
|
||||
from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits
|
||||
from mindspore.train.model import Model, ParallelMode
|
||||
from mindspore.train.serialization import load_checkpoint, load_param_into_net
|
||||
import mindspore.dataset.engine as de
|
||||
from mindspore.communication.management import init
|
||||
|
||||
random.seed(1)
|
||||
np.random.seed(1)
|
||||
de.config.set_seed(1)
|
||||
|
||||
parser = argparse.ArgumentParser(description='Image classification')
|
||||
parser.add_argument('--run_distribute', type=bool, default=False, help='Run distribute')
|
||||
parser.add_argument('--device_num', type=int, default=1, help='Device num.')
|
||||
parser.add_argument('--do_train', type=bool, default=False, help='Do train or not.')
|
||||
parser.add_argument('--do_eval', type=bool, default=True, help='Do eval or not.')
|
||||
parser.add_argument('--checkpoint_path', type=str, default=None, help='Checkpoint file path')
|
||||
parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path')
|
||||
args_opt = parser.parse_args()
|
||||
|
||||
device_id = int(os.getenv('DEVICE_ID'))
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=False)
|
||||
context.set_context(enable_task_sink=True, device_id=device_id)
|
||||
context.set_context(enable_loop_sink=True)
|
||||
context.set_context(enable_mem_reuse=True)
|
||||
|
||||
if __name__ == '__main__':
    # HCCL (multi-device collective communication) is only needed for
    # distributed training; plain evaluation runs on a single device.
    if args_opt.do_eval:
        context.set_context(enable_hccl=False)
    else:
        if args_opt.run_distribute:
            context.set_context(enable_hccl=True)
            context.set_auto_parallel_context(device_num=args_opt.device_num, parallel_mode=ParallelMode.DATA_PARALLEL,
                                              mirror_mean=True)
            # Fuse all-reduce ops up to layer index 140 to reduce the number
            # of communication launches.
            auto_parallel_context().set_all_reduce_fusion_split_indices([140])
            init()
        else:
            context.set_context(enable_hccl=False)

    epoch_size = config.epoch_size
    net = resnet50(class_num=config.class_num)
    loss = SoftmaxCrossEntropyWithLogits(sparse=True)

    if args_opt.do_eval:
        # Evaluation pipeline: no augmentation (do_train=False).
        dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=False, batch_size=config.batch_size)
        step_size = dataset.get_dataset_size()

        # Restore trained weights when a checkpoint path was supplied.
        if args_opt.checkpoint_path:
            param_dict = load_checkpoint(args_opt.checkpoint_path)
            load_param_into_net(net, param_dict)
        net.set_train(False)

        model = Model(net, loss_fn=loss, metrics={'acc'})
        res = model.eval(dataset)
        print("result:", res, "ckpt=", args_opt.checkpoint_path)
|
|
@ -0,0 +1,77 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""learning rate generator"""
|
||||
import numpy as np
|
||||
|
||||
|
||||
def _steps_decay_lr(total_steps, lr_max):
    """Piecewise-constant schedule: lr_max scaled by 1, 0.1, 0.01, 0.001 at 30%/60%/80%."""
    decay_epoch_index = [0.3 * total_steps, 0.6 * total_steps, 0.8 * total_steps]
    lr_each_step = []
    for i in range(total_steps):
        if i < decay_epoch_index[0]:
            lr = lr_max
        elif i < decay_epoch_index[1]:
            lr = lr_max * 0.1
        elif i < decay_epoch_index[2]:
            lr = lr_max * 0.01
        else:
            lr = lr_max * 0.001
        lr_each_step.append(lr)
    return lr_each_step


def _poly_decay_lr(total_steps, warmup_steps, lr_init, lr_max):
    """Linear warmup from lr_init to lr_max, then quadratic (power-2 poly) decay to 0."""
    if warmup_steps != 0:
        inc_each_step = (float(lr_max) - float(lr_init)) / float(warmup_steps)
    else:
        inc_each_step = 0
    lr_each_step = []
    for i in range(total_steps):
        if i < warmup_steps:
            lr = float(lr_init) + inc_each_step * float(i)
        else:
            base = (1.0 - (float(i) - float(warmup_steps)) / (float(total_steps) - float(warmup_steps)))
            lr = float(lr_max) * base * base
        # Clamp against negative values from floating-point round-off.
        if lr < 0.0:
            lr = 0.0
        lr_each_step.append(lr)
    return lr_each_step


def _linear_decay_lr(total_steps, warmup_steps, lr_init, lr_end, lr_max):
    """Linear warmup from lr_init to lr_max, then linear decay to lr_end."""
    lr_each_step = []
    for i in range(total_steps):
        if i < warmup_steps:
            lr = lr_init + (lr_max - lr_init) * i / warmup_steps
        else:
            lr = lr_max - (lr_max - lr_end) * (i - warmup_steps) / (total_steps - warmup_steps)
        lr_each_step.append(lr)
    return lr_each_step


def get_lr(global_step, lr_init, lr_end, lr_max, warmup_epochs, total_epochs, steps_per_epoch, lr_decay_mode):
    """
    generate learning rate array

    Args:
        global_step(int): step to resume from; leading entries are dropped
        lr_init(float): init learning rate
        lr_end(float): end learning rate (used by the default linear mode)
        lr_max(float): max learning rate
        warmup_epochs(int): number of warmup epochs
        total_epochs(int): total epoch of training
        steps_per_epoch(int): steps of one epoch
        lr_decay_mode(string): learning rate decay mode, including steps, poly or default

    Returns:
        np.array, float32 learning rate array of length
        steps_per_epoch * total_epochs - global_step
    """
    total_steps = steps_per_epoch * total_epochs
    warmup_steps = steps_per_epoch * warmup_epochs

    # Dispatch to the per-mode schedule builder; any unrecognised mode falls
    # back to the linear warmup + linear decay schedule.
    if lr_decay_mode == 'steps':
        lr_each_step = _steps_decay_lr(total_steps, lr_max)
    elif lr_decay_mode == 'poly':
        lr_each_step = _poly_decay_lr(total_steps, warmup_steps, lr_init, lr_max)
    else:
        lr_each_step = _linear_decay_lr(total_steps, warmup_steps, lr_init, lr_end, lr_max)

    # Drop the steps already consumed when resuming from `global_step`.
    lr_each_step = np.array(lr_each_step).astype(np.float32)
    return lr_each_step[global_step:]
|
|
@ -0,0 +1,53 @@
|
|||
#!/bin/bash
|
||||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
if [ $# != 2 ]
|
||||
then
|
||||
echo "Usage: sh run_distribute_train.sh [MINDSPORE_HCCL_CONFIG_PATH] [DATASET_PATH]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f $1 ]
|
||||
then
|
||||
echo "error: DMINDSPORE_HCCL_CONFIG_PATH=$1 is not a file"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d $2 ]
|
||||
then
|
||||
echo "error: DATASET_PATH=$2 is not a directory"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ulimit -u unlimited
|
||||
export DEVICE_NUM=8
|
||||
export RANK_SIZE=8
|
||||
export MINDSPORE_HCCL_CONFIG_PATH=$1
|
||||
|
||||
for((i=0; i<${DEVICE_NUM}; i++))
|
||||
do
|
||||
export DEVICE_ID=$i
|
||||
export RANK_ID=$i
|
||||
rm -rf ./train_parallel$i
|
||||
mkdir ./train_parallel$i
|
||||
cp *.py ./train_parallel$i
|
||||
cp *.sh ./train_parallel$i
|
||||
cd ./train_parallel$i || exit
|
||||
echo "start training for rank $RANK_ID, device $DEVICE_ID"
|
||||
env > env.log
|
||||
python train.py --do_train=True --run_distribute=True --device_num=$DEVICE_NUM --dataset_path=$2 &> log &
|
||||
cd ..
|
||||
done
|
|
@ -0,0 +1,52 @@
|
|||
#!/bin/bash
|
||||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
if [ $# != 2 ]
|
||||
then
|
||||
echo "Usage: sh run_infer.sh [DATASET_PATH] [CHECKPOINT_PATH]"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -d $1 ]
|
||||
then
|
||||
echo "error: DATASET_PATH=$1 is not a directory"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f $2 ]
|
||||
then
|
||||
echo "error: CHECKPOINT_PATH=$2 is not a file"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ulimit -u unlimited
|
||||
export DEVICE_NUM=1
|
||||
export DEVICE_ID=0
|
||||
export RANK_SIZE=$DEVICE_NUM
|
||||
export RANK_ID=0
|
||||
|
||||
if [ -d "infer" ];
|
||||
then
|
||||
rm -rf ./infer
|
||||
fi
|
||||
mkdir ./infer
|
||||
cp *.py ./infer
|
||||
cp *.sh ./infer
|
||||
cd ./infer || exit
|
||||
env > env.log
|
||||
echo "start infering for device $DEVICE_ID"
|
||||
python eval.py --do_eval=True --dataset_path=$1 --checkpoint_path=$2 &> log &
|
||||
cd ..
|
|
@ -0,0 +1,45 @@
|
|||
#!/bin/bash
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

# Launch a single-device training run inside a fresh ./train working directory.
# Usage: sh run_standalone_train.sh [DATASET_PATH]

if [ $# != 1 ]
then
    echo "Usage: sh run_standalone_train.sh [DATASET_PATH]"
    exit 1
fi

# Quote "$1" so dataset paths containing spaces (or an empty argument) are
# tested as a single word instead of being split by the shell.
if [ ! -d "$1" ]
then
    echo "error: DATASET_PATH=$1 is not a directory"
    exit 1
fi

ulimit -u unlimited
export DEVICE_NUM=1
export DEVICE_ID=0
export RANK_ID=0

# Recreate a clean working directory holding copies of the current scripts.
if [ -d "train" ];
then
    rm -rf ./train
fi
mkdir ./train
cp *.py ./train
cp *.sh ./train
cd ./train || exit
echo "start training for device $DEVICE_ID"
env > env.log
# Run training in the background; stdout/stderr are captured in ./train/log.
python train.py --do_train=True --dataset_path="$1" &> log &
cd ..
|
|
@ -0,0 +1,96 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""train_imagenet."""
|
||||
import os
|
||||
import argparse
|
||||
import random
|
||||
import numpy as np
|
||||
from dataset import create_dataset
|
||||
from lr_generator import get_lr
|
||||
from config import config
|
||||
from mindspore import context
|
||||
from mindspore import Tensor
|
||||
from mindspore.model_zoo.resnet import resnet50
|
||||
from mindspore.parallel._auto_parallel_context import auto_parallel_context
|
||||
from mindspore.nn.optim.momentum import Momentum
|
||||
from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits
|
||||
|
||||
from mindspore.train.model import Model, ParallelMode
|
||||
|
||||
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
|
||||
from mindspore.train.loss_scale_manager import FixedLossScaleManager
|
||||
import mindspore.dataset.engine as de
|
||||
from mindspore.communication.management import init
|
||||
|
||||
random.seed(1)
|
||||
np.random.seed(1)
|
||||
de.config.set_seed(1)
|
||||
|
||||
parser = argparse.ArgumentParser(description='Image classification')
|
||||
parser.add_argument('--run_distribute', type=bool, default=False, help='Run distribute')
|
||||
parser.add_argument('--device_num', type=int, default=1, help='Device num.')
|
||||
parser.add_argument('--do_train', type=bool, default=True, help='Do train or not.')
|
||||
parser.add_argument('--do_eval', type=bool, default=False, help='Do eval or not.')
|
||||
parser.add_argument('--dataset_path', type=str, default=None, help='Dataset path')
|
||||
args_opt = parser.parse_args()
|
||||
|
||||
device_id = int(os.getenv('DEVICE_ID'))
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", save_graphs=False)
|
||||
context.set_context(enable_task_sink=True, device_id=device_id)
|
||||
context.set_context(enable_loop_sink=True)
|
||||
context.set_context(enable_mem_reuse=True)
|
||||
|
||||
if __name__ == '__main__':
    # HCCL (multi-device collective communication) is only enabled for
    # distributed training runs.
    if args_opt.do_eval:
        context.set_context(enable_hccl=False)
    else:
        if args_opt.run_distribute:
            context.set_context(enable_hccl=True)
            context.set_auto_parallel_context(device_num=args_opt.device_num, parallel_mode=ParallelMode.DATA_PARALLEL,
                                              mirror_mean=True)
            # Fuse all-reduce ops up to layer index 140 to reduce the number
            # of communication launches.
            auto_parallel_context().set_all_reduce_fusion_split_indices([140])
            init()
        else:
            context.set_context(enable_hccl=False)

    epoch_size = config.epoch_size
    net = resnet50(class_num=config.class_num)
    loss = SoftmaxCrossEntropyWithLogits(sparse=True)

    if args_opt.do_train:
        # Training pipeline with augmentation; the dataset is repeated
        # epoch_size times up front.
        dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=True,
                                 repeat_num=epoch_size, batch_size=config.batch_size)
        step_size = dataset.get_dataset_size()

        # Fixed loss scaling for mixed-precision training; the schedule is a
        # poly-decay learning rate built from the config parameters.
        loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False)
        lr = Tensor(get_lr(global_step=0, lr_init=config.lr_init, lr_end=config.lr_end, lr_max=config.lr_max,
                           warmup_epochs=config.warmup_epochs, total_epochs=epoch_size, steps_per_epoch=step_size,
                           lr_decay_mode='poly'))
        # Only parameters with requires_grad are handed to the optimizer.
        opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), lr, config.momentum,
                       config.weight_decay, config.loss_scale)

        model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics={'acc'})

        # Callbacks: per-step timing and loss logging, plus optional
        # checkpointing controlled by the config.
        time_cb = TimeMonitor(data_size=step_size)
        loss_cb = LossMonitor()
        cb = [time_cb, loss_cb]
        if config.save_checkpoint:
            config_ck = CheckpointConfig(save_checkpoint_steps=config.save_checkpoint_steps,
                                         keep_checkpoint_max=config.keep_checkpoint_max)
            ckpt_cb = ModelCheckpoint(prefix="resnet", directory=config.save_checkpoint_path, config=config_ck)
            cb += [ckpt_cb]
        model.train(epoch_size, dataset, callbacks=cb)
|
|
@ -0,0 +1,30 @@
|
|||
# Copyright 2019 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
network config setting, will be used in train.py and eval.py
|
||||
"""
|
||||
from easydict import EasyDict as edict
|
||||
# CIFAR-10 hyper-parameters shared by train.py and eval.py; EasyDict gives
# dot access (cfg.batch_size).
_CIFAR_SETTINGS = {
    "num_classes": 10,
    "lr_init": 0.05,
    "batch_size": 64,
    "epoch_size": 70,
    "momentum": 0.9,
    "weight_decay": 5e-4,
    "buffer_size": 10,            # shuffle buffer
    "image_height": 224,
    "image_width": 224,
    "keep_checkpoint_max": 10,
}

cifar_cfg = edict(_CIFAR_SETTINGS)
|
|
@ -0,0 +1,63 @@
|
|||
# Copyright 2019 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
Data operations, will be used in train.py and eval.py
|
||||
"""
|
||||
import mindspore.dataset as ds
|
||||
import mindspore.dataset.transforms.c_transforms as C
|
||||
import mindspore.dataset.transforms.vision.c_transforms as vision
|
||||
import mindspore.common.dtype as mstype
|
||||
from config import cifar_cfg as cfg
|
||||
|
||||
def create_dataset(data_home, repeat_num=1, training=True):
|
||||
ds.config.set_seed(1)
|
||||
data_dir = data_home + "/cifar-10-batches-bin"
|
||||
if not training:
|
||||
data_dir = data_home + "/cifar-10-verify-bin"
|
||||
data_set = ds.Cifar10Dataset(data_dir)
|
||||
resize_height = cfg.image_height
|
||||
resize_width = cfg.image_width
|
||||
rescale = 1.0 / 255.0
|
||||
shift = 0.0
|
||||
|
||||
# define map operations
|
||||
random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT
|
||||
random_horizontal_op = vision.RandomHorizontalFlip()
|
||||
resize_op = vision.Resize((resize_height, resize_width)) # interpolation default BILINEAR
|
||||
rescale_op = vision.Rescale(rescale, shift)
|
||||
normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023))
|
||||
changeswap_op = vision.HWC2CHW()
|
||||
type_cast_op = C.TypeCast(mstype.int32)
|
||||
|
||||
c_trans = []
|
||||
if training:
|
||||
c_trans = [random_crop_op, random_horizontal_op]
|
||||
c_trans += [resize_op, rescale_op, normalize_op,
|
||||
changeswap_op]
|
||||
|
||||
# apply map operations on images
|
||||
data_set = data_set.map(input_columns="label", operations=type_cast_op)
|
||||
data_set = data_set.map(input_columns="image", operations=c_trans)
|
||||
|
||||
# apply repeat operations
|
||||
data_set = data_set.repeat(repeat_num)
|
||||
|
||||
# apply shuffle operations
|
||||
data_set = data_set.shuffle(buffer_size=10)
|
||||
|
||||
# apply batch operations
|
||||
data_set = data_set.batch(batch_size=cfg.batch_size, drop_remainder=True)
|
||||
|
||||
return data_set
|
|
@ -0,0 +1,54 @@
|
|||
# Copyright 2019 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
##############test vgg16 example on cifar10#################
|
||||
python eval.py --data_path=$DATA_HOME --device_id=$DEVICE_ID
|
||||
"""
|
||||
import mindspore.nn as nn
|
||||
from mindspore.nn.optim.momentum import Momentum
|
||||
from mindspore.train.model import Model, ParallelMode
|
||||
from mindspore import context
|
||||
import argparse
|
||||
from mindspore.train.serialization import load_checkpoint, load_param_into_net
|
||||
import dataset
|
||||
from mindspore.model_zoo.vgg import vgg16
|
||||
from config import cifar_cfg as cfg
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Cifar10 classification')
|
||||
parser.add_argument('--device_target', type=str, default='Ascend', choices=['Ascend', 'GPU', 'CPU'],
|
||||
help='device where the code will be implemented. (Default: Ascend)')
|
||||
parser.add_argument('--data_path', type=str, default='./cifar', help='path where the dataset is saved')
|
||||
parser.add_argument('--checkpoint_path', type=str, default=None, help='checkpoint file path.')
|
||||
parser.add_argument('--device_id', type=int, default=None, help='device id of GPU or Ascend. (Default: None)')
|
||||
args_opt = parser.parse_args()
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target)
|
||||
if args_opt.device_target != 'CPU' and args_opt.device_id:
|
||||
context.set_context(device_id=args_opt.device_id)
|
||||
context.set_context(enable_task_sink=True, enable_loop_sink=True)
|
||||
context.set_context(enable_mem_reuse=True, enable_hccl=False)
|
||||
|
||||
net = vgg16(batch_size=cfg.batch_size, num_classes=cfg.num_classes)
|
||||
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), 0.01, cfg.momentum,
|
||||
weight_decay=cfg.weight_decay)
|
||||
model = Model(net, loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False), optimizer=opt, metrics={'acc'})
|
||||
|
||||
param_dict = load_checkpoint(args_opt.checkpoint_path)
|
||||
load_param_into_net(net, param_dict)
|
||||
net.set_train(False)
|
||||
dataset = dataset.create_dataset(args_opt.data_path, 1, training=False)
|
||||
res = model.eval(dataset)
|
||||
print("result: ", res)
|
|
@ -0,0 +1,80 @@
|
|||
# Copyright 2019 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
"""
|
||||
#################train vgg16 example on cifar10########################
|
||||
python train.py --data_path=$DATA_HOME --device_id=$DEVICE_ID
|
||||
"""
|
||||
import mindspore.nn as nn
|
||||
from mindspore import Tensor
|
||||
from mindspore.nn.optim.momentum import Momentum
|
||||
from mindspore.train.model import Model, ParallelMode
|
||||
from mindspore import context
|
||||
import numpy as np
|
||||
import argparse
|
||||
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
|
||||
import dataset
|
||||
from mindspore.model_zoo.vgg import vgg16
|
||||
from config import cifar_cfg as cfg
|
||||
import random
|
||||
random.seed(1)
|
||||
np.random.seed(1)
|
||||
|
||||
def lr_steps(global_step, lr_max=None, total_epochs=None, steps_per_epoch=None):
|
||||
lr_each_step = []
|
||||
total_steps = steps_per_epoch * total_epochs
|
||||
decay_epoch_index = [0.3 * total_steps, 0.6 * total_steps, 0.8 * total_steps]
|
||||
for i in range(total_steps):
|
||||
if i < decay_epoch_index[0]:
|
||||
lr = lr_max
|
||||
elif i < decay_epoch_index[1]:
|
||||
lr = lr_max * 0.1
|
||||
elif i < decay_epoch_index[2]:
|
||||
lr = lr_max * 0.01
|
||||
else:
|
||||
lr = lr_max * 0.001
|
||||
lr_each_step.append(lr)
|
||||
current_step = global_step
|
||||
lr_each_step = np.array(lr_each_step).astype(np.float32)
|
||||
learning_rate = lr_each_step[current_step:]
|
||||
|
||||
return learning_rate
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Cifar10 classification')
|
||||
parser.add_argument('--device_target', type=str, default='Ascend', choices=['Ascend', 'GPU', 'CPU'],
|
||||
help='device where the code will be implemented. (Default: Ascend)')
|
||||
parser.add_argument('--data_path', type=str, default='./cifar', help='path where the dataset is saved')
|
||||
parser.add_argument('--device_id', type=int, default=None, help='device id of GPU or Ascend. (Default: None)')
|
||||
args_opt = parser.parse_args()
|
||||
|
||||
context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.device_target)
|
||||
if args_opt.device_target != 'CPU' and args_opt.device_id:
|
||||
context.set_context(device_id=args_opt.device_id)
|
||||
context.set_context(enable_task_sink = True, enable_loop_sink = True)
|
||||
context.set_context(enable_mem_reuse=True, enable_hccl=False)
|
||||
|
||||
net = vgg16(batch_size=cfg.batch_size, num_classes=cfg.num_classes)
|
||||
lr = lr_steps(0, lr_max=cfg.lr_init, total_epochs=cfg.epoch_size, steps_per_epoch=50000 // cfg.batch_size)
|
||||
opt = Momentum(filter(lambda x: x.requires_grad, net.get_parameters()), Tensor(lr), cfg.momentum, weight_decay=cfg.weight_decay)
|
||||
|
||||
model = Model(net, loss_fn=nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean', is_grad=False), optimizer=opt, metrics={'acc'})
|
||||
|
||||
dataset = dataset.create_dataset(args_opt.data_path, cfg.epoch_size)
|
||||
batch_num = dataset.get_dataset_size()
|
||||
config_ck = CheckpointConfig(save_checkpoint_steps=batch_num * 5, keep_checkpoint_max=cfg.keep_checkpoint_max)
|
||||
ckpoint_cb = ModelCheckpoint(prefix="train_vgg_cifar10", directory="./", config=config_ck)
|
||||
loss_cb = LossMonitor()
|
||||
model.train(cfg.epoch_size, dataset, callbacks=[ckpoint_cb, loss_cb])
|
|
@ -0,0 +1,46 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
"""Config parameters for YOLOv3 models."""
|
||||
|
||||
|
||||
class ConfigYOLOV3ResNet18:
|
||||
"""
|
||||
Config parameters for YOLOv3.
|
||||
|
||||
Examples:
|
||||
ConfigYoloV3ResNet18.
|
||||
"""
|
||||
img_shape = [352, 640]
|
||||
feature_shape = [32, 3, 352, 640]
|
||||
num_classes = 80
|
||||
|
||||
backbone_input_shape = [64, 64, 128, 256]
|
||||
backbone_shape = [64, 128, 256, 512]
|
||||
backbone_layers = [2, 2, 2, 2]
|
||||
backbone_stride = [1, 2, 2, 2]
|
||||
|
||||
ignore_threshold = 0.5
|
||||
|
||||
anchor_scales = [(10, 13),
|
||||
(16, 30),
|
||||
(33, 23),
|
||||
(30, 61),
|
||||
(62, 45),
|
||||
(59, 119),
|
||||
(116, 90),
|
||||
(156, 198),
|
||||
(163, 326)]
|
||||
out_channel = int(len(anchor_scales) / 3 * (num_classes + 5))
|
|
@ -0,0 +1,389 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
"""YOLOv3 dataset"""
|
||||
from __future__ import division
|
||||
|
||||
import abc
|
||||
import io
|
||||
import os
|
||||
import math
|
||||
import json
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
from matplotlib.colors import rgb_to_hsv, hsv_to_rgb
|
||||
import mindspore.dataset as de
|
||||
import mindspore.dataset.transforms.vision.py_transforms as P
|
||||
from config import ConfigYOLOV3ResNet18
|
||||
|
||||
iter_cnt = 0
|
||||
_NUM_BOXES = 50
|
||||
|
||||
def preprocess_fn(image, box, is_training):
|
||||
"""Preprocess function for dataset."""
|
||||
config_anchors = [10, 13, 16, 30, 33, 23, 30, 61, 62, 45, 59, 119, 116, 90, 156, 198, 163, 326]
|
||||
anchors = np.array([float(x) for x in config_anchors]).reshape(-1, 2)
|
||||
do_hsv = False
|
||||
max_boxes = 20
|
||||
num_classes = ConfigYOLOV3ResNet18.num_classes
|
||||
|
||||
def _rand(a=0., b=1.):
|
||||
return np.random.rand() * (b - a) + a
|
||||
|
||||
def _preprocess_true_boxes(true_boxes, anchors, in_shape=None):
|
||||
"""Get true boxes."""
|
||||
num_layers = anchors.shape[0] // 3
|
||||
anchor_mask = [[6, 7, 8], [3, 4, 5], [0, 1, 2]]
|
||||
true_boxes = np.array(true_boxes, dtype='float32')
|
||||
# input_shape = np.array([in_shape, in_shape], dtype='int32')
|
||||
input_shape = np.array(in_shape, dtype='int32')
|
||||
boxes_xy = (true_boxes[..., 0:2] + true_boxes[..., 2:4]) // 2.
|
||||
boxes_wh = true_boxes[..., 2:4] - true_boxes[..., 0:2]
|
||||
true_boxes[..., 0:2] = boxes_xy / input_shape[::-1]
|
||||
true_boxes[..., 2:4] = boxes_wh / input_shape[::-1]
|
||||
|
||||
grid_shapes = [input_shape // 32, input_shape // 16, input_shape // 8]
|
||||
y_true = [np.zeros((grid_shapes[l][0], grid_shapes[l][1], len(anchor_mask[l]),
|
||||
5 + num_classes), dtype='float32') for l in range(num_layers)]
|
||||
|
||||
anchors = np.expand_dims(anchors, 0)
|
||||
anchors_max = anchors / 2.
|
||||
anchors_min = -anchors_max
|
||||
|
||||
valid_mask = boxes_wh[..., 0] >= 1
|
||||
|
||||
wh = boxes_wh[valid_mask]
|
||||
|
||||
|
||||
if len(wh) >= 1:
|
||||
wh = np.expand_dims(wh, -2)
|
||||
boxes_max = wh / 2.
|
||||
boxes_min = -boxes_max
|
||||
|
||||
intersect_min = np.maximum(boxes_min, anchors_min)
|
||||
intersect_max = np.minimum(boxes_max, anchors_max)
|
||||
intersect_wh = np.maximum(intersect_max - intersect_min, 0.)
|
||||
intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
|
||||
box_area = wh[..., 0] * wh[..., 1]
|
||||
anchor_area = anchors[..., 0] * anchors[..., 1]
|
||||
iou = intersect_area / (box_area + anchor_area - intersect_area)
|
||||
|
||||
best_anchor = np.argmax(iou, axis=-1)
|
||||
for t, n in enumerate(best_anchor):
|
||||
for l in range(num_layers):
|
||||
if n in anchor_mask[l]:
|
||||
i = np.floor(true_boxes[t, 0] * grid_shapes[l][1]).astype('int32')
|
||||
j = np.floor(true_boxes[t, 1] * grid_shapes[l][0]).astype('int32')
|
||||
k = anchor_mask[l].index(n)
|
||||
|
||||
c = true_boxes[t, 4].astype('int32')
|
||||
y_true[l][j, i, k, 0:4] = true_boxes[t, 0:4]
|
||||
y_true[l][j, i, k, 4] = 1.
|
||||
y_true[l][j, i, k, 5 + c] = 1.
|
||||
|
||||
pad_gt_box0 = np.zeros(shape=[50, 4], dtype=np.float32)
|
||||
pad_gt_box1 = np.zeros(shape=[50, 4], dtype=np.float32)
|
||||
pad_gt_box2 = np.zeros(shape=[50, 4], dtype=np.float32)
|
||||
|
||||
mask0 = np.reshape(y_true[0][..., 4:5], [-1])
|
||||
gt_box0 = np.reshape(y_true[0][..., 0:4], [-1, 4])
|
||||
gt_box0 = gt_box0[mask0 == 1]
|
||||
pad_gt_box0[:gt_box0.shape[0]] = gt_box0
|
||||
|
||||
mask1 = np.reshape(y_true[1][..., 4:5], [-1])
|
||||
gt_box1 = np.reshape(y_true[1][..., 0:4], [-1, 4])
|
||||
gt_box1 = gt_box1[mask1 == 1]
|
||||
pad_gt_box1[:gt_box1.shape[0]] = gt_box1
|
||||
|
||||
mask2 = np.reshape(y_true[2][..., 4:5], [-1])
|
||||
gt_box2 = np.reshape(y_true[2][..., 0:4], [-1, 4])
|
||||
gt_box2 = gt_box2[mask2 == 1]
|
||||
pad_gt_box2[:gt_box2.shape[0]] = gt_box2
|
||||
|
||||
return y_true[0], y_true[1], y_true[2], pad_gt_box0, pad_gt_box1, pad_gt_box2
|
||||
|
||||
def _data_aug(image, box, is_training, jitter=0.3, hue=0.1, sat=1.5, val=1.5, image_size=(352, 640)):
|
||||
"""Data augmentation function."""
|
||||
if not isinstance(image, Image.Image):
|
||||
image = Image.fromarray(image)
|
||||
|
||||
iw, ih = image.size
|
||||
ori_image_shape = np.array([ih, iw], np.int32)
|
||||
h, w = image_size
|
||||
|
||||
if not is_training:
|
||||
image = image.resize((w, h), Image.BICUBIC)
|
||||
image_data = np.array(image) / 255.
|
||||
if len(image_data.shape) == 2:
|
||||
image_data = np.expand_dims(image_data, axis=-1)
|
||||
image_data = np.concatenate([image_data, image_data, image_data], axis=-1)
|
||||
image_data = image_data.astype(np.float32)
|
||||
|
||||
# correct boxes
|
||||
box_data = np.zeros((max_boxes, 5))
|
||||
if len(box) >= 1:
|
||||
np.random.shuffle(box)
|
||||
if len(box) > max_boxes:
|
||||
box = box[:max_boxes]
|
||||
# xmin ymin xmax ymax
|
||||
box[:, [0, 2]] = box[:, [0, 2]] * float(w) / float(iw)
|
||||
box[:, [1, 3]] = box[:, [1, 3]] * float(h) / float(ih)
|
||||
box_data[:len(box)] = box
|
||||
else:
|
||||
image_data, box_data = None, None
|
||||
|
||||
# preprocess bounding boxes
|
||||
bbox_true_1, bbox_true_2, bbox_true_3, gt_box1, gt_box2, gt_box3 = \
|
||||
_preprocess_true_boxes(box_data, anchors, image_size)
|
||||
|
||||
return image_data, bbox_true_1, bbox_true_2, bbox_true_3, \
|
||||
ori_image_shape, gt_box1, gt_box2, gt_box3
|
||||
|
||||
flip = _rand() < .5
|
||||
# correct boxes
|
||||
box_data = np.zeros((max_boxes, 5))
|
||||
while True:
|
||||
# Prevent the situation that all boxes are eliminated
|
||||
new_ar = float(w) / float(h) * _rand(1 - jitter, 1 + jitter) / \
|
||||
_rand(1 - jitter, 1 + jitter)
|
||||
scale = _rand(0.25, 2)
|
||||
|
||||
if new_ar < 1:
|
||||
nh = int(scale * h)
|
||||
nw = int(nh * new_ar)
|
||||
else:
|
||||
nw = int(scale * w)
|
||||
nh = int(nw / new_ar)
|
||||
|
||||
dx = int(_rand(0, w - nw))
|
||||
dy = int(_rand(0, h - nh))
|
||||
|
||||
if len(box) >= 1:
|
||||
t_box = box.copy()
|
||||
np.random.shuffle(t_box)
|
||||
t_box[:, [0, 2]] = t_box[:, [0, 2]] * float(nw) / float(iw) + dx
|
||||
t_box[:, [1, 3]] = t_box[:, [1, 3]] * float(nh) / float(ih) + dy
|
||||
if flip:
|
||||
t_box[:, [0, 2]] = w - t_box[:, [2, 0]]
|
||||
t_box[:, 0:2][t_box[:, 0:2] < 0] = 0
|
||||
t_box[:, 2][t_box[:, 2] > w] = w
|
||||
t_box[:, 3][t_box[:, 3] > h] = h
|
||||
box_w = t_box[:, 2] - t_box[:, 0]
|
||||
box_h = t_box[:, 3] - t_box[:, 1]
|
||||
t_box = t_box[np.logical_and(box_w > 1, box_h > 1)] # discard invalid box
|
||||
|
||||
if len(t_box) >= 1:
|
||||
box = t_box
|
||||
break
|
||||
|
||||
box_data[:len(box)] = box
|
||||
# resize image
|
||||
image = image.resize((nw, nh), Image.BICUBIC)
|
||||
# place image
|
||||
new_image = Image.new('RGB', (w, h), (128, 128, 128))
|
||||
new_image.paste(image, (dx, dy))
|
||||
image = new_image
|
||||
|
||||
# flip image or not
|
||||
if flip:
|
||||
image = image.transpose(Image.FLIP_LEFT_RIGHT)
|
||||
|
||||
# convert image to gray or not
|
||||
gray = _rand() < .25
|
||||
if gray:
|
||||
image = image.convert('L').convert('RGB')
|
||||
|
||||
# when the channels of image is 1
|
||||
image = np.array(image)
|
||||
if len(image.shape) == 2:
|
||||
image = np.expand_dims(image, axis=-1)
|
||||
image = np.concatenate([image, image, image], axis=-1)
|
||||
|
||||
# distort image
|
||||
hue = _rand(-hue, hue)
|
||||
sat = _rand(1, sat) if _rand() < .5 else 1 / _rand(1, sat)
|
||||
val = _rand(1, val) if _rand() < .5 else 1 / _rand(1, val)
|
||||
image_data = image / 255.
|
||||
if do_hsv:
|
||||
x = rgb_to_hsv(image_data)
|
||||
x[..., 0] += hue
|
||||
x[..., 0][x[..., 0] > 1] -= 1
|
||||
x[..., 0][x[..., 0] < 0] += 1
|
||||
x[..., 1] *= sat
|
||||
x[..., 2] *= val
|
||||
x[x > 1] = 1
|
||||
x[x < 0] = 0
|
||||
image_data = hsv_to_rgb(x) # numpy array, 0 to 1
|
||||
image_data = image_data.astype(np.float32)
|
||||
|
||||
# preprocess bounding boxes
|
||||
bbox_true_1, bbox_true_2, bbox_true_3, gt_box1, gt_box2, gt_box3 = \
|
||||
_preprocess_true_boxes(box_data, anchors, image_size)
|
||||
|
||||
return image_data, bbox_true_1, bbox_true_2, bbox_true_3, \
|
||||
ori_image_shape, gt_box1, gt_box2, gt_box3
|
||||
|
||||
images, bbox_1, bbox_2, bbox_3, _, gt_box1, gt_box2, gt_box3 = _data_aug(image, box, is_training)
|
||||
return images, bbox_1, bbox_2, bbox_3, gt_box1, gt_box2, gt_box3
|
||||
|
||||
|
||||
def anno_parser(annos_str):
|
||||
"""Annotation parser."""
|
||||
annos = []
|
||||
for anno_str in annos_str:
|
||||
anno = list(map(int, anno_str.strip().split(',')))
|
||||
annos.append(anno)
|
||||
return annos
|
||||
|
||||
|
||||
def expand_path(path):
|
||||
"""Get file list from path."""
|
||||
files = []
|
||||
if os.path.isdir(path):
|
||||
for file in os.listdir(path):
|
||||
if os.path.isfile(os.path.join(path, file)):
|
||||
files.append(file)
|
||||
else:
|
||||
raise RuntimeError("Path given is not valid.")
|
||||
return files
|
||||
|
||||
|
||||
def read_image(img_path):
|
||||
"""Read image with PIL."""
|
||||
with open(img_path, "rb") as f:
|
||||
img = f.read()
|
||||
data = io.BytesIO(img)
|
||||
img = Image.open(data)
|
||||
return np.array(img)
|
||||
|
||||
|
||||
class BaseDataset():
|
||||
"""BaseDataset for GeneratorDataset iterator."""
|
||||
def __init__(self, image_dir, anno_path):
|
||||
self.image_dir = image_dir
|
||||
self.anno_path = anno_path
|
||||
self.cur_index = 0
|
||||
self.samples = []
|
||||
self.image_anno_dict = {}
|
||||
self._load_samples()
|
||||
|
||||
def __getitem__(self, item):
|
||||
sample = self.samples[item]
|
||||
return self._next_data(sample, self.image_dir, self.image_anno_dict)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.samples)
|
||||
|
||||
@staticmethod
|
||||
def _next_data(sample, image_dir, image_anno_dict):
|
||||
"""Get next data."""
|
||||
image = read_image(os.path.join(image_dir, sample))
|
||||
annos = image_anno_dict[sample]
|
||||
return [np.array(image), np.array(annos)]
|
||||
|
||||
@abc.abstractmethod
|
||||
def _load_samples(self):
|
||||
"""Base load samples."""
|
||||
|
||||
|
||||
class YoloDataset(BaseDataset):
|
||||
"""YoloDataset for GeneratorDataset iterator."""
|
||||
def _load_samples(self):
|
||||
"""Load samples."""
|
||||
image_files_raw = expand_path(self.image_dir)
|
||||
self.samples = self._filter_valid_data(self.anno_path, image_files_raw)
|
||||
self.dataset_size = len(self.samples)
|
||||
if self.dataset_size == 0:
|
||||
raise RuntimeError("Valid dataset is none!")
|
||||
|
||||
def _filter_valid_data(self, anno_path, image_files_raw):
|
||||
"""Filter valid data."""
|
||||
image_files = []
|
||||
anno_dict = {}
|
||||
print("Start filter valid data.")
|
||||
with open(anno_path, "rb") as f:
|
||||
lines = f.readlines()
|
||||
for line in lines:
|
||||
line_str = line.decode("utf-8")
|
||||
line_split = str(line_str).split(' ')
|
||||
anno_dict[line_split[0].split("/")[-1]] = line_split[1:]
|
||||
anno_set = set(anno_dict.keys())
|
||||
image_set = set(image_files_raw)
|
||||
for image_file in (anno_set & image_set):
|
||||
image_files.append(image_file)
|
||||
self.image_anno_dict[image_file] = anno_parser(anno_dict[image_file])
|
||||
image_files.sort()
|
||||
print("Filter valid data done!")
|
||||
return image_files
|
||||
|
||||
|
||||
class DistributedSampler():
|
||||
"""DistributedSampler for YOLOv3"""
|
||||
def __init__(self, dataset_size, batch_size, num_replicas=None, rank=None, shuffle=True):
|
||||
if num_replicas is None:
|
||||
num_replicas = 1
|
||||
if rank is None:
|
||||
rank = 0
|
||||
self.dataset_size = dataset_size
|
||||
self.num_replicas = num_replicas
|
||||
self.rank = rank % num_replicas
|
||||
self.epoch = 0
|
||||
self.num_samples = max(batch_size, int(math.ceil(dataset_size * 1.0 / self.num_replicas)))
|
||||
self.total_size = self.num_samples * self.num_replicas
|
||||
self.shuffle = shuffle
|
||||
|
||||
def __iter__(self):
|
||||
# deterministically shuffle based on epoch
|
||||
if self.shuffle:
|
||||
indices = np.random.RandomState(seed=self.epoch).permutation(self.dataset_size)
|
||||
indices = indices.tolist()
|
||||
else:
|
||||
indices = list(range(self.dataset_size))
|
||||
|
||||
# add extra samples to make it evenly divisible
|
||||
indices += indices[:(self.total_size - len(indices))]
|
||||
assert len(indices) == self.total_size
|
||||
|
||||
# subsample
|
||||
indices = indices[self.rank:self.total_size:self.num_replicas]
|
||||
assert len(indices) == self.num_samples
|
||||
|
||||
return iter(indices)
|
||||
|
||||
def __len__(self):
|
||||
return self.num_samples
|
||||
|
||||
def set_epoch(self, epoch):
|
||||
self.epoch = epoch
|
||||
|
||||
|
||||
def create_yolo_dataset(image_dir, anno_path, batch_size=32, repeat_num=10, device_num=1, rank=0,
|
||||
is_training=True, num_parallel_workers=8):
|
||||
"""Creatr YOLOv3 dataset with GeneratorDataset."""
|
||||
yolo_dataset = YoloDataset(image_dir=image_dir, anno_path=anno_path)
|
||||
distributed_sampler = DistributedSampler(yolo_dataset.dataset_size, batch_size, device_num, rank)
|
||||
ds = de.GeneratorDataset(yolo_dataset, column_names=["image", "annotation"], sampler=distributed_sampler)
|
||||
ds.set_dataset_size(len(distributed_sampler))
|
||||
compose_map_func = (lambda image, annotation: preprocess_fn(image, annotation, is_training))
|
||||
hwc_to_chw = P.HWC2CHW()
|
||||
ds = ds.map(input_columns=["image", "annotation"],
|
||||
output_columns=["image", "bbox_1", "bbox_2", "bbox_3", "gt_box1", "gt_box2", "gt_box3"],
|
||||
columns_order=["image", "bbox_1", "bbox_2", "bbox_3", "gt_box1", "gt_box2", "gt_box3"],
|
||||
operations=compose_map_func, num_parallel_workers=num_parallel_workers)
|
||||
ds = ds.map(input_columns=["image"], operations=hwc_to_chw, num_parallel_workers=num_parallel_workers)
|
||||
ds = ds.shuffle(buffer_size=256)
|
||||
ds = ds.batch(batch_size, drop_remainder=True)
|
||||
ds = ds.repeat(repeat_num)
|
||||
return ds
|
|
@ -0,0 +1,47 @@
|
|||
#!/bin/bash
|
||||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
echo "Please run the scipt as: "
|
||||
echo "sh run_distribute_train.sh DEVICE_NUM EPOCH_SIZE IMAGE_DIR ANNO_PATH MINDSPORE_HCCL_CONFIG_PATH"
|
||||
echo "for example: sh run_distribute_train.sh 8 100 ./dataset/coco/train2017 ./dataset/train.txt ./hccl.json"
|
||||
echo "After running the scipt, the network runs in the background. The log will be generated in LOGx/log.txt"
|
||||
|
||||
export RANK_SIZE=$1
|
||||
EPOCH_SIZE=$2
|
||||
IMAGE_DIR=$3
|
||||
ANNO_PATH=$4
|
||||
export MINDSPORE_HCCL_CONFIG_PATH=$5
|
||||
|
||||
|
||||
for((i=0;i<RANK_SIZE;i++))
|
||||
do
|
||||
export DEVICE_ID=$i
|
||||
rm -rf LOG$i
|
||||
mkdir ./LOG$i
|
||||
cp *.py ./LOG$i
|
||||
cd ./LOG$i || exit
|
||||
export RANK_ID=$i
|
||||
echo "start training for rank $i, device $DEVICE_ID"
|
||||
env > env.log
|
||||
python ../train.py \
|
||||
--distribute=1 \
|
||||
--device_num=$RANK_SIZE \
|
||||
--device_id=$DEVICE_ID \
|
||||
--image_dir=$IMAGE_DIR \
|
||||
--epoch_size=$EPOCH_SIZE \
|
||||
--anno_path=$ANNO_PATH > log.txt 2>&1 &
|
||||
cd ../
|
||||
done
|
|
@ -0,0 +1,21 @@
|
|||
#!/bin/bash
|
||||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
echo "Please run the scipt as: "
|
||||
echo "sh run_standalone_train.sh DEVICE_ID EPOCH_SIZE IMAGE_DIR ANNO_PATH"
|
||||
echo "for example: sh run_standalone_train.sh 0 50 ./dataset/coco/train2017 ./dataset/train.txt"
|
||||
|
||||
python train.py --device_id=$1 --epoch_size=$2 --image_dir=$3 --anno_path=$4
|
|
@ -0,0 +1,115 @@
|
|||
# Copyright 2020 Huawei Technologies Co., Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# less required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
# ============================================================================
|
||||
|
||||
"""
|
||||
######################## train YOLOv3 example ########################
|
||||
train YOLOv3 and get network model files(.ckpt) :
|
||||
python train.py --image_dir dataset/coco/coco/train2017 --anno_path dataset/coco/train_coco.txt
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import numpy as np
|
||||
import mindspore.nn as nn
|
||||
from mindspore import context, Tensor
|
||||
from mindspore.communication.management import init
|
||||
from mindspore.train.callback import CheckpointConfig, ModelCheckpoint, LossMonitor, TimeMonitor
|
||||
from mindspore.train import Model, ParallelMode
|
||||
from mindspore.train.serialization import load_checkpoint, load_param_into_net
|
||||
from mindspore.common.initializer import initializer
|
||||
|
||||
from mindspore.model_zoo.yolov3 import yolov3_resnet18, YoloWithLossCell, TrainingWrapper
|
||||
from dataset import create_yolo_dataset
|
||||
from config import ConfigYOLOV3ResNet18
|
||||
|
||||
|
||||
def get_lr(learning_rate, start_step, global_step, decay_step, decay_rate, steps=False):
|
||||
"""Set learning rate"""
|
||||
lr_each_step = []
|
||||
lr = learning_rate
|
||||
for i in range(global_step):
|
||||
if steps:
|
||||
lr_each_step.append(lr * (decay_rate ** (i // decay_step)))
|
||||
else:
|
||||
lr_each_step.append(lr * (decay_rate ** (i / decay_step)))
|
||||
lr_each_step = np.array(lr_each_step).astype(np.float32)
|
||||
lr_each_step = lr_each_step[start_step:]
|
||||
return lr_each_step
|
||||
|
||||
|
||||
def init_net_param(net, init='ones'):
    """Reinitialize the trainable parameters of `net` with `init`.

    Parameters whose names contain 'beta', 'gamma' or 'bias' (normalization
    scales/offsets and biases) are left untouched, as are parameters whose
    data is not a Tensor.
    """
    skipped_tags = ('beta', 'gamma', 'bias')
    for param in net.trainable_params():
        if not isinstance(param.data, Tensor):
            continue
        if any(tag in param.name for tag in skipped_tags):
            continue
        param.set_parameter_data(initializer(init, param.data.shape(), param.data.dtype()))
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Command-line options for YOLOv3 training.
    parser = argparse.ArgumentParser(description="YOLOv3")
    # NOTE(review): argparse `type=bool` converts any non-empty string (even
    # "False") to True; a store_true flag would be safer -- confirm intent.
    parser.add_argument("--distribute", type=bool, default=False, help="Run distribute, default is false.")
    parser.add_argument("--device_id", type=int, default=0, help="Device id, default is 0.")
    parser.add_argument("--device_num", type=int, default=1, help="Use device nums, default is 1.")
    parser.add_argument("--mode", type=str, default="graph", help="Run graph mode or feed mode, default is graph")
    parser.add_argument("--epoch_size", type=int, default=10, help="Epoch size, default is 10")
    parser.add_argument("--batch_size", type=int, default=32, help="Batch size, default is 32.")
    parser.add_argument("--checkpoint_path", type=str, default="", help="Checkpoint file path")
    parser.add_argument("--save_checkpoint_epochs", type=int, default=5, help="Save checkpoint epochs, default is 5.")
    parser.add_argument("--loss_scale", type=int, default=1024, help="Loss scale, default is 1024.")
    parser.add_argument("--image_dir", type=str, required=True, help="Dataset image dir.")
    parser.add_argument("--anno_path", type=str, required=True, help="Dataset anno path.")
    args_opt = parser.parse_args()

    # Graph-mode execution on an Ascend device with sink/memory-reuse options.
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=args_opt.device_id)
    context.set_context(enable_task_sink=True, enable_loop_sink=True, enable_mem_reuse=True)
    if args_opt.distribute:
        # Data-parallel training across `device_num` devices over HCCL;
        # this process's rank is taken from --device_id.
        device_num = args_opt.device_num
        context.reset_auto_parallel_context()
        context.set_context(enable_hccl=True)
        context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL, mirror_mean=True,
                                          device_num=device_num)
        init()
        rank = args_opt.device_id
    else:
        # Single-device training.
        context.set_context(enable_hccl=False)
        rank = 0
        device_num = 1

    loss_scale = float(args_opt.loss_scale)
    # Dataset is repeated once per epoch and sharded by (device_num, rank).
    dataset = create_yolo_dataset(args_opt.image_dir, args_opt.anno_path, repeat_num=args_opt.epoch_size,
                                  batch_size=args_opt.batch_size, device_num=device_num, rank=rank)
    dataset_size = dataset.get_dataset_size()
    # Build the YOLOv3-ResNet18 network, wrap it with its loss cell, and
    # re-initialize the non-norm/non-bias weights.
    net = yolov3_resnet18(ConfigYOLOV3ResNet18())
    net = YoloWithLossCell(net, ConfigYOLOV3ResNet18())
    init_net_param(net, "XavierUniform")

    # checkpoint: save every `save_checkpoint_epochs` epochs worth of steps.
    ckpt_config = CheckpointConfig(save_checkpoint_steps=dataset_size * args_opt.save_checkpoint_epochs)
    ckpoint_cb = ModelCheckpoint(prefix="yolov3", directory=None, config=ckpt_config)
    if args_opt.checkpoint_path != "":
        # Resume from an existing checkpoint when one is supplied.
        param_dict = load_checkpoint(args_opt.checkpoint_path)
        load_param_into_net(net, param_dict)

    # Exponentially decayed learning-rate schedule over the whole run.
    lr = Tensor(get_lr(learning_rate=0.001, start_step=0, global_step=args_opt.epoch_size * dataset_size,
                       decay_step=1000, decay_rate=0.95))
    opt = nn.Adam(filter(lambda x: x.requires_grad, net.get_parameters()), lr, loss_scale=loss_scale)
    net = TrainingWrapper(net, opt, loss_scale)
    callback = [TimeMonitor(data_size=dataset_size), LossMonitor(), ckpoint_cb]

    model = Model(net)
    # Dataset sink mode is only enabled for graph mode.
    dataset_sink_mode = False
    if args_opt.mode == "graph":
        dataset_sink_mode = True
    print("Start train YOLOv3.")
    model.train(args_opt.epoch_size, dataset, callbacks=callback, dataset_sink_mode=dataset_sink_mode)
|
|
@ -1 +1 @@
|
|||
Subproject commit 6d12411003164d88eaed62e1ead33761cbfa15ef
|
||||
Subproject commit ff80316dab61b06ea15f8621d045b7699d2f0c79
|
|
@ -1,107 +0,0 @@
|
|||
/**
|
||||
* Copyright 2019 Huawei Technologies Co., Ltd
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef MINDSPORE_INFERENCE_LOG_H_
|
||||
#define MINDSPORE_INFERENCE_LOG_H_
|
||||
|
||||
#include <stdarg.h>
|
||||
#include <stdint.h>
|
||||
#include <string>
|
||||
#include <sstream>
|
||||
#include <memory>
|
||||
#include <iostream>
|
||||
|
||||
#ifndef ENABLE_ACL
|
||||
#include "mindspore/core/utils/log_adapter.h"
|
||||
namespace mindspore::inference {
|
||||
#define MSI_LOG(level) MS_LOG(level)
|
||||
|
||||
#define MSI_LOG_DEBUG MSI_LOG(DEBUG)
|
||||
#define MSI_LOG_INFO MSI_LOG(INFO)
|
||||
#define MSI_LOG_WARNING MSI_LOG(WARNING)
|
||||
#define MSI_LOG_ERROR MSI_LOG(ERROR)
|
||||
|
||||
#define MSI_ASSERT(item) MS_ASSERT(item)
|
||||
} // namespace mindspore::inference
|
||||
|
||||
#else // ENABLE_ACL
|
||||
#include "acl/acl.h"
|
||||
namespace mindspore::inference {
|
||||
|
||||
// Accumulates streamed values into an internal string buffer; LogWriter
// (a friend) later flushes the buffered text to the ACL logger.
class LogStream {
 public:
  LogStream() : sstream_(std::make_shared<std::stringstream>()) {}
  ~LogStream() = default;

  // Append any streamable value and return *this for chaining.
  template <typename T>
  LogStream &operator<<(const T &val) noexcept {
    *sstream_ << val;
    return *this;
  }

  // Accept stream manipulators such as std::endl.
  LogStream &operator<<(std::ostream &func(std::ostream &os)) noexcept {
    *sstream_ << func;
    return *this;
  }

  friend class LogWriter;

 private:
  std::shared_ptr<std::stringstream> sstream_;
};
|
||||
|
||||
// Stream an enumerator (including enum class) as its underlying integral
// value.  The original declared this `constexpr`, but a function whose body
// performs stream I/O can never be constant-evaluated, making the
// `constexpr` ill-formed (no diagnostic required); `inline` preserves the
// linkage behavior without that defect.
template <class T, typename std::enable_if<std::is_enum<T>::value, int>::type = 0>
inline std::ostream &operator<<(std::ostream &stream, const T &value) {
  return stream << static_cast<typename std::underlying_type<T>::type>(value);
}
|
||||
|
||||
// Writes one buffered log message to the ACL (Ascend) application log,
// tagged with the capture site.  Used through the MSILOG_IF macro as
// `LogWriter(...) < LogStream() << ...`.
class LogWriter {
 public:
  // Captures the call site (file/line/function) and the ACL severity level.
  LogWriter(const char *file, int line, const char *func, aclLogLevel log_level)
      : file_(file), line_(line), func_(func), log_level_(log_level) {}
  ~LogWriter() = default;

  // Consumes a fully built LogStream (operator< binds looser than <<, so the
  // whole streamed expression is evaluated first) and forwards its text.
  void operator<(const LogStream &stream) const noexcept __attribute__((visibility("default"))) {
    std::ostringstream msg;
    msg << stream.sstream_->rdbuf();
    OutputLog(msg);
  }

 private:
  // Hands the formatted message to aclAppLog together with the call site.
  void OutputLog(const std::ostringstream &msg) const { aclAppLog(log_level_, func_, file_, line_, msg.str().c_str()); }

  const char *file_;
  int line_;
  const char *func_;
  aclLogLevel log_level_;
};
|
||||
|
||||
#define MSILOG_IF(level) inference::LogWriter(__FILE__, __LINE__, __FUNCTION__, ACL_##level) < inference::LogStream()
|
||||
|
||||
#define MSI_LOG(level) MSI_LOG_##level
|
||||
|
||||
#define MSI_LOG_DEBUG MSILOG_IF(DEBUG)
|
||||
#define MSI_LOG_INFO MSILOG_IF(INFO)
|
||||
#define MSI_LOG_WARNING MSILOG_IF(WARNING)
|
||||
#define MSI_LOG_ERROR MSILOG_IF(ERROR)
|
||||
|
||||
#define MSI_ASSERT(item)
|
||||
|
||||
} // namespace mindspore::inference
|
||||
|
||||
#endif // ENABLE_ACL
|
||||
|
||||
#endif // MINDSPORE_INFERENCE_LOG_H_
|
|
@ -1,191 +0,0 @@
|
|||
/**
|
||||
* Copyright 2020 Huawei Technologies Co., Ltd
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef MINDSPORE_INCLUDE_INFER_TENSOR_H_
|
||||
#define MINDSPORE_INCLUDE_INFER_TENSOR_H_
|
||||
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
#include <numeric>
|
||||
#include <map>
|
||||
#include <functional>
|
||||
|
||||
#include "securec/include/securec.h"
|
||||
#include "include/infer_log.h"
|
||||
|
||||
namespace mindspore {
|
||||
#define MS_API __attribute__((visibility("default")))
|
||||
namespace inference {
|
||||
|
||||
// Element data types an inference tensor can hold.
// NOTE(review): the explicit numeric values look like stable identifiers
// shared with external callers -- confirm before reordering or renumbering.
enum DataType {
  kMSI_Unknown = 0,
  kMSI_Bool = 1,
  kMSI_Int8 = 2,
  kMSI_Int16 = 3,
  kMSI_Int32 = 4,
  kMSI_Int64 = 5,
  kMSI_Uint8 = 6,
  kMSI_Uint16 = 7,
  kMSI_Uint32 = 8,
  kMSI_Uint64 = 9,
  kMSI_Float16 = 10,
  kMSI_Float32 = 11,
  kMSI_Float64 = 12,
};
|
||||
|
||||
class InferTensorBase {
|
||||
public:
|
||||
InferTensorBase() = default;
|
||||
virtual ~InferTensorBase() = default;
|
||||
|
||||
virtual DataType data_type() const = 0;
|
||||
virtual void set_data_type(DataType type) = 0;
|
||||
virtual std::vector<int64_t> shape() const = 0;
|
||||
virtual void set_shape(const std::vector<int64_t> &shape) = 0;
|
||||
virtual const void *data() const = 0;
|
||||
virtual size_t data_size() const = 0;
|
||||
virtual bool resize_data(size_t data_len) = 0;
|
||||
virtual void *mutable_data() = 0;
|
||||
|
||||
bool set_data(const void *data, size_t data_len) {
|
||||
resize_data(data_len);
|
||||
if (mutable_data() == nullptr) {
|
||||
MSI_LOG_ERROR << "set data failed, data len " << data_len;
|
||||
return false;
|
||||
}
|
||||
if (data_size() != data_len) {
|
||||
MSI_LOG_ERROR << "set data failed, tensor current data size " << data_size() << " not match data len "
|
||||
<< data_len;
|
||||
return false;
|
||||
}
|
||||
if (data_len == 0) {
|
||||
return true;
|
||||
}
|
||||
memcpy_s(mutable_data(), data_size(), data, data_len);
|
||||
return true;
|
||||
}
|
||||
|
||||
int64_t ElementNum() const {
|
||||
std::vector<int64_t> shapex = shape();
|
||||
return std::accumulate(shapex.begin(), shapex.end(), 1LL, std::multiplies<int64_t>());
|
||||
}
|
||||
|
||||
int GetTypeSize(DataType type) const {
|
||||
const std::map<DataType, size_t> type_size_map{
|
||||
{kMSI_Bool, sizeof(bool)}, {kMSI_Float64, sizeof(double)}, {kMSI_Int8, sizeof(int8_t)},
|
||||
{kMSI_Uint8, sizeof(uint8_t)}, {kMSI_Int16, sizeof(int16_t)}, {kMSI_Uint16, sizeof(uint16_t)},
|
||||
{kMSI_Int32, sizeof(int32_t)}, {kMSI_Uint32, sizeof(uint32_t)}, {kMSI_Int64, sizeof(int64_t)},
|
||||
{kMSI_Uint64, sizeof(uint64_t)}, {kMSI_Float16, sizeof(uint16_t)}, {kMSI_Float32, sizeof(float)},
|
||||
};
|
||||
auto it = type_size_map.find(type);
|
||||
if (it != type_size_map.end()) {
|
||||
return it->second;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
};
|
||||
|
||||
// Concrete tensor that owns its buffer in a std::vector<uint8_t>.
class InferTensor : public InferTensorBase {
 public:
  // Storage is intentionally public so callers can reach the raw fields.
  DataType type_;
  std::vector<int64_t> shape_;
  std::vector<uint8_t> data_;

 public:
  InferTensor() = default;
  // Builds a tensor and copies `data_len` bytes from `data` into it via
  // the base-class set_data().
  InferTensor(DataType type, std::vector<int64_t> shape, const void *data, size_t data_len) {
    set_data_type(type);
    set_shape(shape);
    set_data(data, data_len);
  }

  void set_data_type(DataType type) override { type_ = type; }
  DataType data_type() const override { return type_; }

  void set_shape(const std::vector<int64_t> &shape) override { shape_ = shape; }
  std::vector<int64_t> shape() const override { return shape_; }

  const void *data() const override { return data_.data(); }
  size_t data_size() const override { return data_.size(); }

  // Resizing a std::vector either succeeds or throws, so this always
  // reports success.
  bool resize_data(size_t data_len) override {
    data_.resize(data_len);
    return true;
  }
  void *mutable_data() override { return data_.data(); }
};
|
||||
|
||||
class RequestBase {
|
||||
public:
|
||||
virtual size_t size() const = 0;
|
||||
virtual const InferTensorBase *operator[](size_t index) const = 0;
|
||||
};
|
||||
|
||||
class ReplyBase {
|
||||
public:
|
||||
virtual size_t size() const = 0;
|
||||
virtual InferTensorBase *operator[](size_t index) = 0;
|
||||
virtual const InferTensorBase *operator[](size_t index) const = 0;
|
||||
virtual InferTensorBase *add() = 0;
|
||||
virtual void clear() = 0;
|
||||
};
|
||||
|
||||
class VectorInferTensorWrapReply : public ReplyBase {
|
||||
public:
|
||||
explicit VectorInferTensorWrapReply(std::vector<InferTensor> &tensor_list) : tensor_list_(tensor_list) {}
|
||||
|
||||
size_t size() const { return tensor_list_.size(); }
|
||||
InferTensorBase *operator[](size_t index) {
|
||||
if (index >= tensor_list_.size()) {
|
||||
MSI_LOG_ERROR << "visit invalid index " << index << " total size " << tensor_list_.size();
|
||||
return nullptr;
|
||||
}
|
||||
return &(tensor_list_[index]);
|
||||
}
|
||||
const InferTensorBase *operator[](size_t index) const {
|
||||
if (index >= tensor_list_.size()) {
|
||||
MSI_LOG_ERROR << "visit invalid index " << index << " total size " << tensor_list_.size();
|
||||
return nullptr;
|
||||
}
|
||||
return &(tensor_list_[index]);
|
||||
}
|
||||
InferTensorBase *add() {
|
||||
tensor_list_.push_back(InferTensor());
|
||||
return &(tensor_list_.back());
|
||||
}
|
||||
void clear() { tensor_list_.clear(); }
|
||||
std::vector<InferTensor> &tensor_list_;
|
||||
};
|
||||
|
||||
class VectorInferTensorWrapRequest : public RequestBase {
|
||||
public:
|
||||
explicit VectorInferTensorWrapRequest(const std::vector<InferTensor> &tensor_list) : tensor_list_(tensor_list) {}
|
||||
|
||||
size_t size() const { return tensor_list_.size(); }
|
||||
const InferTensorBase *operator[](size_t index) const {
|
||||
if (index >= tensor_list_.size()) {
|
||||
MSI_LOG_ERROR << "visit invalid index " << index << " total size " << tensor_list_.size();
|
||||
return nullptr;
|
||||
}
|
||||
return &(tensor_list_[index]);
|
||||
}
|
||||
const std::vector<InferTensor> &tensor_list_;
|
||||
};
|
||||
|
||||
} // namespace inference
|
||||
} // namespace mindspore
|
||||
#endif // MINDSPORE_INCLUDE_INFER_TENSOR_H_
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue