forked from mindspore-Ecosystem/mindspore
add src dir
This commit is contained in:
parent b3f91a4f22
commit 367a51aeeb
@@ -2,7 +2,7 @@
 ## Description

-Training AlexNet with CIFAR-10 dataset in MindSpore.
+Training AlexNet with dataset in MindSpore.

 This is the simple tutorial for training AlexNet in MindSpore.

@@ -10,19 +10,19 @@ This is the simple tutorial for training AlexNet in MindSpore.
 - Install [MindSpore](https://www.mindspore.cn/install/en).

-- Download the CIFAR-10 dataset, the directory structure is as follows:
+- Download the dataset, the directory structure is as follows:

 ```
-├─cifar-10-batches-bin
+├─10-batches-bin
 │
-└─cifar-10-verify-bin
+└─10-verify-bin
 ```
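The `cifar-10-batches-bin` and `cifar-10-verify-bin` folders above hold the binary CIFAR-10 files that the example's `dataset.py` (after this commit, `src/dataset.py`) is expected to read. For reference, a minimal sketch of loading them with MindSpore's built-in reader; the function name and batch size are illustrative, and the example's real pipeline (resize, normalization, batching per `config.py`) lives in `src/dataset.py`:

```python
# Minimal sketch, not the example's own code: read the CIFAR-10 binaries
# shown above with MindSpore's built-in reader and batch them.
import mindspore.dataset as ds

def load_cifar10(data_path="cifar-10-batches-bin", batch_size=32):
    # Cifar10Dataset parses the data_batch_*.bin files found in data_path.
    dataset = ds.Cifar10Dataset(data_path, shuffle=True)
    return dataset.batch(batch_size, drop_remainder=True)

if __name__ == "__main__":
    print("batches per epoch:", load_cifar10().get_dataset_size())
```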

 ## Running the example

 ```python
 # train AlexNet, hyperparameter setting in config.py
-python train.py --data_path cifar-10-batches-bin
+python train.py --data_path 10-batches-bin
 ```
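Per the comment above, the training hyperparameters are not passed on the command line but collected in `config.py` (after this commit, `src/config.py`). A rough sketch of the shape such a module typically has in these examples; every key and value below is an illustrative assumption rather than the file's actual contents:

```python
# Hypothetical sketch of src/config.py: an attribute-style dict of training
# hyperparameters read as cfg.<name> by train.py and eval.py.
from easydict import EasyDict as edict

alexnet_cfg = edict({
    "learning_rate": 0.002,   # illustrative values only
    "momentum": 0.9,
    "epoch_size": 1,
    "batch_size": 32,
    "image_height": 227,
    "image_width": 227,
})
```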

 You will get the loss value of each step as following:

@@ -38,8 +38,8 @@ epoch: 1 step: 1538, loss is 1.0221305

 Then, evaluate AlexNet according to network model
 ```python
-# evaluate AlexNet, 1 epoch training accuracy is up to 51.1%; 10 epoch training accuracy is up to 81.2%
-python eval.py --data_path cifar-10-verify-bin --ckpt_path checkpoint_alexnet-1_1562.ckpt
+# evaluate AlexNet
+python eval.py --data_path 10-verify-bin --ckpt_path checkpoint_alexnet-1_1562.ckpt
 ```
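Evaluation first restores the trained weights from the `.ckpt` file named above into the network, using the `load_checkpoint`/`load_param_into_net` functions imported in `eval.py` (shown in the hunks below). A minimal sketch of that restore step; the `num_classes` constructor argument of `AlexNet` is an assumption based on CIFAR-10's ten classes:

```python
# Sketch only: restore trained parameters into the network before evaluating.
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from src.alexnet import AlexNet  # assumes the post-commit src/ layout

network = AlexNet(num_classes=10)            # assumed ctor arg; CIFAR-10 has 10 classes
param_dict = load_checkpoint("checkpoint_alexnet-1_1562.ckpt")
load_param_into_net(network, param_dict)     # copy the saved weights into the live network
network.set_train(False)                     # inference mode for evaluation
```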

 ## Note
@@ -19,9 +19,9 @@ python eval.py --data_path /YourDataPath --ckpt_path Your.ckpt
 """

 import argparse
-from config import alexnet_cfg as cfg
-from dataset import create_dataset_mnist
-from alexnet import AlexNet
+from src.config import alexnet_cfg as cfg
+from src.dataset import create_dataset_mnist
+from src.alexnet import AlexNet
 import mindspore.nn as nn
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
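Taken together, the `src.*` imports in this and the following hunks imply that the helper modules now live one level down in a `src` package (presumably alongside an `__init__.py` so it is importable), while the entry scripts stay at the top of each example. A sketch of the assumed AlexNet example layout after the move; the top-level directory name is a placeholder and the file names are inferred from the imports, not copied from the repository tree:

```
alexnet_example/          # placeholder name
├─train.py
├─eval.py
└─src
  ├─alexnet.py
  ├─config.py
  ├─dataset.py
  └─generator_lr.py
```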
@@ -19,10 +19,10 @@ python train.py --data_path /YourDataPath
 """

 import argparse
-from config import alexnet_cfg as cfg
-from dataset import create_dataset_mnist
-from generator_lr import get_lr
-from alexnet import AlexNet
+from src.config import alexnet_cfg as cfg
+from src.dataset import create_dataset_mnist
+from src.generator_lr import get_lr
+from src.alexnet import AlexNet
 import mindspore.nn as nn
 from mindspore import context
 from mindspore import Tensor
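The `Tensor` and `get_lr` imports above suggest how the optimizer gets its learning rate in `train.py`: `get_lr` presumably builds a per-step schedule, which is wrapped in a `Tensor` and passed to `nn.Momentum`. A self-contained sketch of that wiring with a stand-in schedule and a toy network; the schedule shape, hyperparameter values, and `get_lr`'s exact signature are assumptions:

```python
# Sketch only: a per-step learning-rate schedule (what get_lr is assumed to
# produce) fed to nn.Momentum as a 1-D Tensor.
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

steps_per_epoch, epochs = 1562, 10   # 1562 matches the checkpoint name in the README
lr_schedule = Tensor(np.linspace(0.01, 0.0, steps_per_epoch * epochs).astype(np.float32))

net = nn.Dense(3 * 227 * 227, 10)    # toy stand-in for AlexNet, just to show the wiring
opt = nn.Momentum(net.trainable_params(), learning_rate=lr_schedule, momentum=0.9)
```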
@@ -2,7 +2,7 @@
 ## Description

-Training LeNet with MNIST dataset in MindSpore.
+Training LeNet with dataset in MindSpore.

 This is the simple and basic tutorial for constructing a network in MindSpore.

@@ -10,10 +10,10 @@ This is the simple and basic tutorial for constructing a network in MindSpore.
 - Install [MindSpore](https://www.mindspore.cn/install/en).

-- Download the MNIST dataset, the directory structure is as follows:
+- Download the dataset, the directory structure is as follows:

 ```
-└─MNIST_Data
+└─Data
   ├─test
   │      t10k-images.idx3-ubyte
   │      t10k-labels.idx1-ubyte
@@ -27,7 +27,7 @@ This is the simple and basic tutorial for constructing a network in MindSpore.

 ```python
 # train LeNet, hyperparameter setting in config.py
-python train.py --data_path MNIST_Data
+python train.py --data_path Data
 ```
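The directory passed to `--data_path` holds the raw MNIST `idx` files from the tree above, which the example's `create_dataset` (in `src/dataset.py` after this commit) is expected to read. A minimal sketch using MindSpore's built-in MNIST reader, assuming the usual `train`/`test` subfolder layout; the example's real pipeline (resize, normalization, batching) lives in `src/dataset.py`:

```python
# Minimal sketch, not the example's own code: read the MNIST idx files with
# MindSpore's built-in reader and batch them for training.
import mindspore.dataset as ds

def load_mnist(data_path="MNIST_Data/train", batch_size=32):
    # MnistDataset parses the *-images.idx3-ubyte / *-labels.idx1-ubyte pair.
    dataset = ds.MnistDataset(data_path, shuffle=True)
    return dataset.batch(batch_size, drop_remainder=True)

if __name__ == "__main__":
    print("batches per epoch:", load_mnist().get_dataset_size())
```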

 You will get the loss value of each step as following:

@@ -43,8 +43,8 @@ epoch: 1 step: 1741, loss is 0.05018193

 Then, evaluate LeNet according to network model
 ```python
-# evaluate LeNet, after 1 epoch training, the accuracy is up to 96.5%
-python eval.py --data_path MNIST_Data --ckpt_path checkpoint_lenet-1_1875.ckpt
+# evaluate LeNet
+python eval.py --data_path Data --ckpt_path checkpoint_lenet-1_1875.ckpt
 ```
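The accuracy figure comes from the `Accuracy` metric that `eval.py` attaches to a `Model` (see the `from mindspore.nn.metrics import Accuracy` context line in the eval.py hunk further down). A self-contained sketch of that final step with toy stand-ins for the network and data; the real example uses `LeNet5` from `src/lenet.py` and the MNIST test split, so everything below is illustrative:

```python
# Sketch of the metric pass eval.py is assumed to run: wrap the network in a
# Model with an Accuracy metric and call eval() on the test dataset.
import numpy as np
import mindspore.nn as nn
import mindspore.dataset as ds
from mindspore.nn.metrics import Accuracy
from mindspore.train import Model

# Toy stand-ins so the snippet runs on its own.
images = np.random.rand(64, 28 * 28).astype(np.float32)
labels = np.random.randint(0, 10, size=64).astype(np.int32)
ds_eval = ds.NumpySlicesDataset((images, labels), column_names=["image", "label"]).batch(32)

net = nn.Dense(28 * 28, 10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
model = Model(net, loss_fn=loss, metrics={"Accuracy": Accuracy()})
print(model.eval(ds_eval, dataset_sink_mode=False))   # e.g. {'Accuracy': ~0.1} for random weights
```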

 ## Note
@@ -20,9 +20,9 @@ python eval.py --data_path /YourDataPath --ckpt_path Your.ckpt
 import os
 import argparse
-from dataset import create_dataset
-from config import mnist_cfg as cfg
-from lenet import LeNet5
+from src.dataset import create_dataset
+from src.config import mnist_cfg as cfg
+from src.lenet import LeNet5
 import mindspore.nn as nn
 from mindspore import context
 from mindspore.train.serialization import load_checkpoint, load_param_into_net
@@ -32,10 +32,10 @@ from mindspore.nn.metrics import Accuracy

 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='MindSpore MNIST Example')
+    parser = argparse.ArgumentParser(description='MindSpore Lenet Example')
     parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'],
                         help='device where the code will be implemented (default: Ascend)')
-    parser.add_argument('--data_path', type=str, default="./MNIST_Data",
+    parser.add_argument('--data_path', type=str, default="./Data",
                         help='path where the dataset is saved')
     parser.add_argument('--ckpt_path', type=str, default="", help='if mode is test, must provide\
                         path where the trained ckpt file')
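Right after parsing, the script configures the execution backend from `--device_target` via the `context` import shown earlier. A short sketch of that step; the graph-mode setting mirrors what MindSpore examples of this era typically use and is an assumption here:

```python
# Sketch only: select the backend (Ascend/GPU/CPU) that --device_target names.
import argparse
from mindspore import context

parser = argparse.ArgumentParser(description='MindSpore Lenet Example')
parser.add_argument('--device_target', type=str, default="CPU",
                    choices=['Ascend', 'GPU', 'CPU'])
args = parser.parse_args([])   # empty list: use defaults when run standalone
context.set_context(mode=context.GRAPH_MODE, device_target=args.device_target)
```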
@@ -20,9 +20,9 @@ python train.py --data_path /YourDataPath
 import os
 import argparse
-from config import mnist_cfg as cfg
-from dataset import create_dataset
-from lenet import LeNet5
+from src.config import mnist_cfg as cfg
+from src.dataset import create_dataset
+from src.lenet import LeNet5
 import mindspore.nn as nn
 from mindspore import context
 from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor
@@ -31,10 +31,10 @@ from mindspore.nn.metrics import Accuracy

 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='MindSpore MNIST Example')
+    parser = argparse.ArgumentParser(description='MindSpore Lenet Example')
     parser.add_argument('--device_target', type=str, default="Ascend", choices=['Ascend', 'GPU', 'CPU'],
                         help='device where the code will be implemented (default: Ascend)')
-    parser.add_argument('--data_path', type=str, default="./MNIST_Data",
+    parser.add_argument('--data_path', type=str, default="./Data",
                         help='path where the dataset is saved')
     parser.add_argument('--dataset_sink_mode', type=bool, default=True, help='dataset_sink_mode is False or True')
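`--dataset_sink_mode` and the `ModelCheckpoint`/`CheckpointConfig`/`LossMonitor`/`TimeMonitor` callbacks imported a few lines up come together in the final `Model.train()` call, which is what prints the per-step loss lines and writes checkpoint files like `checkpoint_lenet-1_1875.ckpt` referenced in the README above. A self-contained sketch of that call with toy stand-ins; the real script builds `LeNet5`, `create_dataset`, and `mnist_cfg` from `src/`, and the keyword values below are illustrative:

```python
# Sketch only: the training call train.py is assumed to make, with
# checkpointing plus per-step loss/time monitoring.
import numpy as np
import mindspore.nn as nn
import mindspore.dataset as ds
from mindspore.train import Model
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor

# Toy stand-ins so the snippet runs on its own.
images = np.random.rand(64, 28 * 28).astype(np.float32)
labels = np.random.randint(0, 10, size=64).astype(np.int32)
ds_train = ds.NumpySlicesDataset((images, labels), column_names=["image", "label"]).batch(32)

net = nn.Dense(28 * 28, 10)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
opt = nn.Momentum(net.trainable_params(), learning_rate=0.01, momentum=0.9)
model = Model(net, loss_fn=loss, optimizer=opt)

ckpt_cb = ModelCheckpoint(prefix="checkpoint_lenet",
                          config=CheckpointConfig(save_checkpoint_steps=2, keep_checkpoint_max=5))
model.train(1, ds_train,
            callbacks=[ckpt_cb, LossMonitor(), TimeMonitor(data_size=ds_train.get_dataset_size())],
            dataset_sink_mode=False)   # sinking generally requires Ascend/GPU
```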