forked from mindspore-Ecosystem/mindspore
applied minor fixes
This commit is contained in:
parent 719823291b
commit aec85d299b

build.sh | 6
@@ -519,11 +519,6 @@ build_opencl() {
     fi
 }
 
-build_gtest() {
-    cd ${BASEPATH}
-    git submodule update --init --recursive third_party/googletest
-}
-
 build_opencv() {
     cd ${BASEPATH}
     if [[ "${INC_BUILD}" == "off" ]]; then
@@ -566,7 +561,6 @@ build_minddata_lite_deps()
     build_opencv
     build_eigen
     build_jpeg_turbo
-    build_gtest
 }
 
 build_lite()
@@ -46,7 +46,7 @@ std::shared_ptr<tensor::MSTensor> Execute::operator()(std::shared_ptr<tensor::MS
     MS_LOG(ERROR) << "Operation execution failed : " << rc.ToString();
     return nullptr;
   }
-  return std::shared_ptr<tensor::MSTensor>(new tensor::DETensor(std::move(de_output)));
+  return std::make_shared<tensor::DETensor>(std::move(de_output));
 }
 
 
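For context on the hunk above: both forms return an equivalent pointer from operator(), but std::make_shared performs a single allocation (object plus control block) and the resulting shared_ptr to the derived type converts implicitly to a shared_ptr of the base. A minimal standalone sketch, using a hypothetical Base/Derived pair in place of tensor::MSTensor/tensor::DETensor:

#include <iostream>
#include <memory>
#include <utility>
#include <vector>

// Hypothetical stand-ins for tensor::MSTensor / tensor::DETensor; illustrative only.
struct Base {
  virtual ~Base() = default;
};
struct Derived : Base {
  explicit Derived(std::vector<int> data) : data_(std::move(data)) {}
  std::vector<int> data_;
};

// Old form: two allocations (the Derived object, then the shared_ptr control block).
std::shared_ptr<Base> MakeOld(std::vector<int> v) {
  return std::shared_ptr<Base>(new Derived(std::move(v)));
}

// New form: one allocation holds both the object and the control block,
// and the shared_ptr<Derived> converts implicitly to shared_ptr<Base> on return.
std::shared_ptr<Base> MakeNew(std::vector<int> v) {
  return std::make_shared<Derived>(std::move(v));
}

int main() {
  auto a = MakeOld({1, 2, 3});
  auto b = MakeNew({4, 5, 6});
  std::cout << (a != nullptr) << " " << (b != nullptr) << std::endl;  // prints: 1 1
  return 0;
}

One trade-off to keep in mind: with make_shared the object's storage is not released until the last weak_ptr also expires, which is rarely a concern in code like the snippet above.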
@@ -1,6 +1,5 @@
-
-#ifndef DATASET_INCLUDE_DETENSOR_H_
-#define DATASET_INCLUDE_DETENSOR_H_
+#ifndef MINDSPORE_CCSRC_MINDDATA_DATASET_API_DETENSOR_H_
+#define MINDSPORE_CCSRC_MINDDATA_DATASET_API_DETENSOR_H_
 #include "include/ms_tensor.h"
 #include "minddata/dataset/include/tensor.h"
 #include "minddata/dataset/util/status.h"
@@ -8,46 +7,50 @@ namespace mindspore {
 namespace tensor {
 class DETensor : public MSTensor {
  public:
-  // brief Create a MSTensor pointer.
-  //
-  // param data_type DataTypeId of tensor to be created.
-  // param shape Shape of tensor to be created.
-  // return MSTensor pointer.
+  /// \brief Create a MSTensor pointer.
+  /// \param[data_type] DataTypeId of tensor to be created.
+  /// \param[shape] Shape of tensor to be created.
+  /// \return - MSTensor pointer.
   static MSTensor *CreateTensor(TypeId data_type, const std::vector<int> &shape);
 
+  /// \brief Create a MSTensor pointer.
+  /// \param[path] Path file to be read.
+  /// \return - MSTensor pointer.
   static MSTensor *CreateTensor(const std::string &path);
 
   DETensor(TypeId data_type, const std::vector<int> &shape);
 
   explicit DETensor(std::shared_ptr<dataset::Tensor> tensor_ptr);
 
   ~DETensor() = default;
 
+  /// \brief Create a duplicate instance, convert the DETensor to the LiteTensor.
+  /// \return - MSTensor pointer.
   MSTensor *ConvertToLiteTensor();
 
   std::shared_ptr<dataset::Tensor> tensor() const;
 
   TypeId data_type() const override;
 
   TypeId set_data_type(const TypeId data_type) override;
 
   std::vector<int> shape() const override;
 
   size_t set_shape(const std::vector<int> &shape) override;
 
   int DimensionSize(size_t index) const override;
 
   int ElementsNum() const override;
 
   std::size_t hash() const override;
 
   size_t Size() const override;
 
   void *MutableData() const override;
 
  protected:
   std::shared_ptr<dataset::Tensor> tensor_impl_;
 };
 }  // namespace tensor
 }  // namespace mindspore
-#endif  // DATASET_INCLUDE_DETENSOR_H_
+#endif  // MINDSPORE_CCSRC_MINDDATA_DATASET_API_DETENSOR_H_
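The new header guard appears to encode the file's path, and the comments switch to Doxygen-style /// blocks. A minimal sketch of those two conventions applied to an unrelated, hypothetical header (all names below are illustrative, not part of the MindSpore tree):

#ifndef EXAMPLE_PROJECT_UTIL_COUNTER_H_
#define EXAMPLE_PROJECT_UTIL_COUNTER_H_

#include <string>
#include <utility>

namespace example {
/// \brief Small class used only to illustrate the guard and comment style.
class Counter {
 public:
  /// \brief Create a Counter pointer.
  /// \param[name] Name of the counter to be created.
  /// \return - Counter pointer.
  static Counter *CreateCounter(const std::string &name) { return new Counter(name); }

  /// \brief Name given at construction time.
  /// \return - Counter name.
  const std::string &name() const { return name_; }

 private:
  explicit Counter(std::string name) : name_(std::move(name)) {}
  std::string name_;
};
}  // namespace example

#endif  // EXAMPLE_PROJECT_UTIL_COUNTER_H_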
@@ -1,5 +1,5 @@
 /**
- * Copyright 2019 Huawei Technologies Co., Ltd
+ * Copyright 2020 Huawei Technologies Co., Ltd
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -62,104 +62,3 @@ TEST_F(MindDataTestEager, Test1) {
   double elapsed_time_ms = std::chrono::duration<double, std::milli>(t_end-t_start).count();
   MS_LOG(INFO) << "duration: " << elapsed_time_ms << " ms\n";
 }
-
-/*
-TEST_F(MindDataTestEager, Test2) {
-  // string dir for image folder
-  std::string in_dir = datasets_root_path_ + "/testPK/data";
-  // run dataset with decode = on
-  std::shared_ptr<Dataset> ds = ImageFolder(in_dir, true, RandomSampler(false));
-  std::shared_ptr<TensorOperation> normalize_op = vision::Normalize({121.0, 115.0, 100.0}, {70.0, 68.0, 71.0});
-  EXPECT_TRUE(normalize_op != nullptr);
-  std::shared_ptr<TensorOperation> resize_op = vision::Resize({224, 224});
-  EXPECT_TRUE(resize_op != nullptr);
-  ds = ds->Map({normalize_op, resize_op});
-  EXPECT_TRUE(ds != nullptr);
-  // Create an iterator over the result of the above dataset
-  // This will trigger the creation of the Execution Tree and launch it.
-  std::shared_ptr<Iterator> iter = ds->CreateIterator();
-  EXPECT_TRUE(iter != nullptr);
-
-  // Iterate the dataset and get each row
-  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
-  iter->GetNextRow(&row);
-
-  uint64_t i = 0;
-  while (row.size() != 0) {
-    i++;
-    auto image = row["image"];
-    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
-    iter->GetNextRow(&row);
-  }
-  MS_LOG(WARNING) << i << ".";
-  iter->Stop();
-
-}
-
-TEST_F(MindDataTestEager, Test3) {
-  // string dir for image folder
-  ConfigManager cm = ConfigManager();
-  cm.set_num_parallel_workers(1);
-  std::string in_dir = datasets_root_path_ + "/testPK/data";
-  // run dataset with decode = on
-  std::shared_ptr<Dataset> ds = ImageFolder(in_dir, true, RandomSampler(false));
-  std::shared_ptr<TensorOperation> normalize_op = vision::Normalize({121.0, 115.0, 100.0}, {70.0, 68.0, 71.0});
-  EXPECT_TRUE(normalize_op != nullptr);
-  std::shared_ptr<TensorOperation> resize_op = vision::Resize({224, 224});
-  EXPECT_TRUE(resize_op != nullptr);
-  ds = ds->Map({normalize_op, resize_op});
-  EXPECT_TRUE(ds != nullptr);
-  // Create an iterator over the result of the above dataset
-  // This will trigger the creation of the Execution Tree and launch it.
-  std::shared_ptr<Iterator> iter = ds->CreateIterator();
-  EXPECT_TRUE(iter != nullptr);
-
-  // Iterate the dataset and get each row
-  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
-  iter->GetNextRow(&row);
-
-  uint64_t i = 0;
-  while (row.size() != 0) {
-    i++;
-    auto image = row["image"];
-    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
-    iter->GetNextRow(&row);
-  }
-  MS_LOG(WARNING) << i << ".";
-  iter->Stop();
-
-}
-
-TEST_F(MindDataTestEager, Test4) {
-  // string dir for image folder
-  ConfigManager cm = ConfigManager();
-  cm.set_num_parallel_workers(1);
-  std::string in_dir = datasets_root_path_ + "/testPK/data";
-  // run dataset with decode = on
-  std::shared_ptr<Dataset> ds = ImageFolder(in_dir, true, RandomSampler(false));
-  // Create an iterator over the result of the above dataset
-  // This will trigger the creation of the Execution Tree and launch it.
-  std::shared_ptr<Iterator> iter = ds->CreateIterator();
-  EXPECT_TRUE(iter != nullptr);
-
-  // Iterate the dataset and get each row
-  std::unordered_map<std::string, std::shared_ptr<Tensor>> row;
-  iter->GetNextRow(&row);
-
-  uint64_t i = 0;
-  while (row.size() != 0) {
-    i++;
-    auto image = row["image"];
-    image = Execute(vision::Normalize({121.0, 115.0, 100.0}, {70.0, 68.0, 71.0}))(image);
-    EXPECT_TRUE(image != nullptr);
-    image = Execute(vision::Resize({224, 224}))(image);
-    EXPECT_TRUE(image != nullptr);
-
-    MS_LOG(INFO) << "Tensor image shape: " << image->shape();
-    iter->GetNextRow(&row);
-  }
-  MS_LOG(WARNING) << i << ".";
-  iter->Stop();
-
-}
-*/
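The tests removed above exercised the eager call shape Execute(op)(tensor), and the earlier operator() hunk shows the error path returning nullptr. A minimal self-contained sketch of that functor pattern, with a hypothetical Tensor and Scale operation standing in for the real dataset types (the actual Execute and vision ops live in the MindSpore tree and are not reproduced here):

#include <iostream>
#include <memory>
#include <utility>
#include <vector>

// Hypothetical stand-ins; illustrative only.
struct Tensor {
  std::vector<float> data;
};
using TensorPtr = std::shared_ptr<Tensor>;

struct TensorOperation {
  virtual ~TensorOperation() = default;
  virtual bool Run(Tensor *t) const = 0;
};

struct Scale : TensorOperation {
  explicit Scale(float f) : factor(f) {}
  bool Run(Tensor *t) const override {
    for (auto &v : t->data) v *= factor;
    return true;
  }
  float factor;
};

// Callable wrapper mirroring the Execute(op)(tensor) shape: construct from an
// operation, invoke on a tensor, and return nullptr when the operation fails.
class Execute {
 public:
  explicit Execute(std::shared_ptr<TensorOperation> op) : op_(std::move(op)) {}
  TensorPtr operator()(TensorPtr input) const {
    if (input == nullptr || op_ == nullptr) return nullptr;
    auto output = std::make_shared<Tensor>(*input);
    if (!op_->Run(output.get())) return nullptr;  // error path, as in the operator() hunk
    return output;
  }

 private:
  std::shared_ptr<TensorOperation> op_;
};

int main() {
  auto image = std::make_shared<Tensor>(Tensor{{1.0f, 2.0f, 3.0f}});
  image = Execute(std::make_shared<Scale>(2.0f))(image);
  std::cout << (image != nullptr ? image->data[0] : -1.0f) << std::endl;  // prints: 2
  return 0;
}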