add src dir

wukesong 2020-06-19 12:32:51 +08:00
parent b3f91a4f22
commit 367a51aeeb
15 changed files with 30 additions and 30 deletions

View File

@@ -2,7 +2,7 @@
 ## Description
-Training AlexNet with CIFAR-10 dataset in MindSpore.
+Training AlexNet with dataset in MindSpore.
 This is the simple tutorial for training AlexNet in MindSpore.
@@ -10,19 +10,19 @@ This is the simple tutorial for training AlexNet in MindSpore.
 - Install [MindSpore](https://www.mindspore.cn/install/en).
-- Download the CIFAR-10 dataset, the directory structure is as follows:
+- Download the dataset, the directory structure is as follows:
 ```
-├─cifar-10-batches-bin
-└─cifar-10-verify-bin
+├─10-batches-bin
+└─10-verify-bin
 ```
 ## Running the example
 ```python
 # train AlexNet, hyperparameter setting in config.py
-python train.py --data_path cifar-10-batches-bin
+python train.py --data_path 10-batches-bin
 ```
 You will get the loss value of each step as following:
@@ -38,8 +38,8 @@ epoch: 1 step: 1538, loss is 1.0221305
 Then, evaluate AlexNet according to network model
 ```python
-# evaluate AlexNet, 1 epoch training accuracy is up to 51.1%; 10 epoch training accuracy is up to 81.2%
-python eval.py --data_path cifar-10-verify-bin --ckpt_path checkpoint_alexnet-1_1562.ckpt
+# evaluate AlexNet
+python eval.py --data_path 10-verify-bin --ckpt_path checkpoint_alexnet-1_1562.ckpt
 ```
 ## Note
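
For context on what `--data_path` feeds into: a minimal sketch, assuming MindSpore's standard `Cifar10Dataset` reader, of how a CIFAR-10 binary folder like the one above is typically loaded. The function name and arguments here are illustrative, not this repository's `create_dataset_mnist`.

```python
# Illustrative sketch (assumption): read the CIFAR-10 binaries found under
# --data_path and batch them for training.
import mindspore.dataset as ds

def build_cifar10_pipeline(data_path, batch_size=32):
    """Return a shuffled, batched CIFAR-10 dataset read from data_path."""
    cifar_ds = ds.Cifar10Dataset(data_path)           # expects the *-bin files
    cifar_ds = cifar_ds.shuffle(buffer_size=10000)    # shuffle before batching
    cifar_ds = cifar_ds.batch(batch_size, drop_remainder=True)
    return cifar_ds
```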

View File

@@ -19,9 +19,9 @@ python eval.py --data_path /YourDataPath --ckpt_path Your.ckpt
 """
 import argparse
-from config import alexnet_cfg as cfg
-from dataset import create_dataset_mnist
-from alexnet import AlexNet
+from src.config import alexnet_cfg as cfg
+from src.dataset import create_dataset_mnist
+from src.alexnet import AlexNet
 import mindspore.nn as nn
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
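
The `src.`-prefixed imports imply a package layout roughly like the sketch below; the file list and the `__init__.py` marker are assumptions based on the commit title, since the moved files themselves are not shown in this diff.

```python
# Assumed layout after this commit (sketch; not taken from the diff):
#
#   alexnet/
#   ├── eval.py
#   ├── train.py
#   └── src/
#       ├── __init__.py      # empty marker so "src" imports as a package
#       ├── alexnet.py
#       ├── config.py
#       ├── dataset.py
#       └── generator_lr.py
#
# "from src.config import ..." resolves when the scripts run from alexnet/.
# If a script may be launched from another directory, the model root can be
# put on sys.path before the src imports:
import os
import sys

sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))  # model root
```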

View File

@@ -19,10 +19,10 @@ python train.py --data_path /YourDataPath
 """
 import argparse
-from config import alexnet_cfg as cfg
-from dataset import create_dataset_mnist
-from generator_lr import get_lr
-from alexnet import AlexNet
+from src.config import alexnet_cfg as cfg
+from src.dataset import create_dataset_mnist
+from src.generator_lr import get_lr
+from src.alexnet import AlexNet
 import mindspore.nn as nn
 from mindspore import context
 from mindspore import Tensor
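
train.py also pulls `get_lr` from the new `src.generator_lr` module. A per-step learning-rate generator of the kind such a helper usually provides could look like the sketch below; the signature and the exponential decay rule are assumptions, only the name comes from the diff.

```python
# Hedged sketch of a per-step learning-rate schedule; the name matches the
# diff, the signature and decay rule are assumptions.
import numpy as np
from mindspore import Tensor

def get_lr(base_lr, total_epochs, steps_per_epoch):
    """Return one learning-rate value per training step, decayed each epoch."""
    lr_each_step = []
    for epoch in range(total_epochs):
        lr = base_lr * (0.9 ** epoch)              # decay by 10% every epoch
        lr_each_step.extend([lr] * steps_per_epoch)
    return Tensor(np.array(lr_each_step, dtype=np.float32))
```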

View File

@@ -2,7 +2,7 @@
 ## Description
-Training LeNet with MNIST dataset in MindSpore.
+Training LeNet with dataset in MindSpore.
 This is the simple and basic tutorial for constructing a network in MindSpore.
@@ -10,10 +10,10 @@ This is the simple and basic tutorial for constructing a network in MindSpore.
 - Install [MindSpore](https://www.mindspore.cn/install/en).
-- Download the MNIST dataset, the directory structure is as follows:
+- Download the dataset, the directory structure is as follows:
 ```
-└─MNIST_Data
+└─Data
 ├─test
 │ t10k-images.idx3-ubyte
 │ t10k-labels.idx1-ubyte
@@ -27,7 +27,7 @@ This is the simple and basic tutorial for constructing a network in MindSpore.
 ```python
 # train LeNet, hyperparameter setting in config.py
-python train.py --data_path MNIST_Data
+python train.py --data_path Data
 ```
 You will get the loss value of each step as following:
@@ -43,8 +43,8 @@ epoch: 1 step: 1741, loss is 0.05018193
 Then, evaluate LeNet according to network model
 ```python
-# evaluate LeNet, after 1 epoch training, the accuracy is up to 96.5%
-python eval.py --data_path MNIST_Data --ckpt_path checkpoint_lenet-1_1875.ckpt
+# evaluate LeNet
+python eval.py --data_path Data --ckpt_path checkpoint_lenet-1_1875.ckpt
 ```
 ## Note
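
The checkpoint name in the eval command above, `checkpoint_lenet-1_1875.ckpt`, follows MindSpore's `<prefix>-<epoch>_<step>.ckpt` convention. A sketch of the callback setup that would produce it is below; the prefix and save interval are inferred from the file name, not shown in this diff.

```python
# Sketch of the checkpoint callback behind names like checkpoint_lenet-1_1875.ckpt
# (prefix and save interval are assumptions inferred from that name).
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig

config_ck = CheckpointConfig(save_checkpoint_steps=1875,  # one epoch of 1875 steps
                             keep_checkpoint_max=10)
ckpoint_cb = ModelCheckpoint(prefix="checkpoint_lenet", config=config_ck)
# After the first epoch MindSpore writes <prefix>-<epoch>_<step>.ckpt,
# i.e. checkpoint_lenet-1_1875.ckpt.
```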

View File

@@ -20,9 +20,9 @@ python eval.py --data_path /YourDataPath --ckpt_path Your.ckpt
 import os
 import argparse
-from dataset import create_dataset
-from config import mnist_cfg as cfg
-from lenet import LeNet5
+from src.dataset import create_dataset
+from src.config import mnist_cfg as cfg
+from src.lenet import LeNet5
 import mindspore.nn as nn
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
@@ -32,10 +32,10 @@ from mindspore.nn.metrics import Accuracy
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='MindSpore MNIST Example')
+    parser = argparse.ArgumentParser(description='MindSpore Lenet Example')
     parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'],
                         help='device where the code will be implemented (default: Ascend)')
-    parser.add_argument('--data_path', type=str, default="./MNIST_Data",
+    parser.add_argument('--data_path', type=str, default="./Data",
                         help='path where the dataset is saved')
     parser.add_argument('--ckpt_path', type=str, default="", help='if mode is test, must provide\
                         path where the trained ckpt file')
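
A minimal sketch of how the `--ckpt_path` argument is typically consumed with the `load_checkpoint`/`load_param_into_net` pair imported above; the `LeNet5` constructor argument is an assumption, and `args` stands for the parsed arguments from the parser in this hunk.

```python
# Hedged sketch: restore trained weights into the network before evaluation.
# Only the imported names come from the diff; the call pattern is assumed.
from src.config import mnist_cfg as cfg
from src.lenet import LeNet5
from mindspore.train.serialization import load_checkpoint, load_param_into_net

network = LeNet5(cfg.num_classes)             # constructor argument assumed
param_dict = load_checkpoint(args.ckpt_path)  # args comes from the parser above
load_param_into_net(network, param_dict)      # copy the weights into the network
```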

View File

@@ -20,9 +20,9 @@ python train.py --data_path /YourDataPath
 import os
 import argparse
-from config import mnist_cfg as cfg
-from dataset import create_dataset
-from lenet import LeNet5
+from src.config import mnist_cfg as cfg
+from src.dataset import create_dataset
+from src.lenet import LeNet5
 import mindspore.nn as nn
 from mindspore import context
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
@@ -31,10 +31,10 @@ from mindspore.nn.metrics import Accuracy
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='MindSpore MNIST Example')
+    parser = argparse.ArgumentParser(description='MindSpore Lenet Example')
     parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'],
                         help='device where the code will be implemented (default: Ascend)')
-    parser.add_argument('--data_path', type=str, default="./MNIST_Data",
+    parser.add_argument('--data_path', type=str, default="./Data",
                         help='path where the dataset is saved')
     parser.add_argument('--dataset_sink_mode', type=bool, default=True, help='dataset_sink_mode is False or True')
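
One note on the `--dataset_sink_mode` flag above: with `type=bool`, argparse converts any non-empty string to `True`, so `--dataset_sink_mode False` still enables sink mode. This is standard argparse behaviour rather than something this commit introduces; a common workaround is an explicit converter such as the hypothetical `str2bool` below.

```python
# General argparse behaviour, independent of this repository: a hypothetical
# str2bool converter so that "--dataset_sink_mode False" really parses as False.
import argparse

def str2bool(value):
    """Map common true/false spellings onto a real bool for argparse."""
    if isinstance(value, bool):
        return value
    if value.lower() in ("yes", "true", "t", "1"):
        return True
    if value.lower() in ("no", "false", "f", "0"):
        return False
    raise argparse.ArgumentTypeError("boolean value expected")

parser = argparse.ArgumentParser(description='MindSpore Lenet Example')
parser.add_argument('--dataset_sink_mode', type=str2bool, default=True,
                    help='dataset_sink_mode is False or True')
```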