!6307 Conversion to and from TensorRow in C++

Merge pull request !6307 from MahdiRahmaniHanzaki/c_func
mindspore-ci-bot 2020-09-25 04:14:05 +08:00 committed by Gitee
commit 70221f5261
3 changed files with 352 additions and 0 deletions


@@ -72,6 +72,99 @@ class TensorRow {
// Destructor
~TensorRow() = default;
/// Convert a vector of primitive types to a TensorRow consisting of n single data Tensors.
/// \tparam `T` primitive type of the vector elements
/// \param[in] o input vector
/// \param[out] output TensorRow
template <typename T>
static Status ConvertToTensorRow(const std::vector<T> &o, TensorRow *output) {
DataType data_type = DataType::FromCType<T>();
if (data_type == DataType::DE_UNKNOWN) {
RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type was not recognized.");
}
if (data_type == DataType::DE_STRING) {
RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type string is not supported.");
}
for (size_t i = 0; i < o.size(); i++) {
std::shared_ptr<Tensor> tensor;
RETURN_IF_NOT_OK(Tensor::CreateEmpty(TensorShape({1}), data_type, &tensor));
RETURN_IF_NOT_OK(tensor->SetItemAt({0}, o[i]));
output->push_back(tensor);
}
return Status::OK();
}
/// Convert a single primitive type to a TensorRow consisting of one single data Tensor.
/// \tparam `T` primitive type of the input value
/// \param[in] o input
/// \param[out] output TensorRow
template <typename T>
static Status ConvertToTensorRow(const T &o, TensorRow *output) {
DataType data_type = DataType::FromCType<T>();
if (data_type == DataType::DE_UNKNOWN) {
RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type was not recognized.");
}
if (data_type == DataType::DE_STRING) {
RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type string is not supported.");
}
std::shared_ptr<Tensor> tensor;
RETURN_IF_NOT_OK(Tensor::CreateEmpty(TensorShape({1}), data_type, &tensor));
RETURN_IF_NOT_OK(tensor->SetItemAt({0}, o));
output->push_back(tensor);
return Status::OK();
}
/// Return the value in a TensorRow consisting of a single data Tensor.
/// \tparam `T` primitive type of the output value
/// \param[in] input TensorRow
/// \param[out] o the primitive variable
template <typename T>
static Status ConvertFromTensorRow(const TensorRow &input, T *o) {
DataType data_type = DataType::FromCType<T>();
if (data_type == DataType::DE_UNKNOWN) {
RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: Data type was not recognized.");
}
if (data_type == DataType::DE_STRING) {
RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type string is not supported.");
}
if (input.size() != 1) {
RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: The input TensorRow is empty.");
}
if (input.at(0)->type() != data_type) {
RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: The output type doesn't match the input tensor type.");
}
if (input.at(0)->shape() != TensorShape({1})) {
RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: The input tensors must have a shape of {1}.");
}
return input.at(0)->GetItemAt(o, {0});
}
/// Convert a TensorRow consisting of n single data tensors to a vector of size n
/// \tparam `T` primitive type of the output vector elements
/// \param[in] input TensorRow consisting of n single data tensors
/// \param[out] o output vector of primitive values
template <typename T>
static Status ConvertFromTensorRow(const TensorRow &input, std::vector<T> *o) {
DataType data_type = DataType::FromCType<T>();
if (data_type == DataType::DE_UNKNOWN) {
RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: Data type was not recognized.");
}
if (data_type == DataType::DE_STRING) {
RETURN_STATUS_UNEXPECTED("ConvertToTensorRow: Data type string is not supported.");
}
for (size_t i = 0; i < input.size(); i++) {
if (input.at(i)->shape() != TensorShape({1})) {
RETURN_STATUS_UNEXPECTED("ConvertFromTensorRow: Each input tensor must have a shape of {1}.");
}
T item;
RETURN_IF_NOT_OK(input.at(i)->GetItemAt(&item, {0}));
o->push_back(item);
}
return Status::OK();
}
// Functions to fetch/set id/vector
row_id_type getId() const { return id_; }
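
For reference (not part of this patch): a minimal usage sketch of the four new overloads, assuming only the tensor_row.h header touched above and the existing RETURN_IF_NOT_OK / Status machinery; RoundTripExample is a hypothetical name used purely for illustration.

#include <vector>

#include "minddata/dataset/core/tensor_row.h"

namespace ds = mindspore::dataset;

// Hypothetical helper: round-trips a scalar and a vector through TensorRow.
ds::Status RoundTripExample() {
  // Scalar -> TensorRow: one Tensor of shape {1} holding the value (DE_FLOAT64 for double).
  ds::TensorRow scalar_row;
  RETURN_IF_NOT_OK(ds::TensorRow::ConvertToTensorRow(3.14, &scalar_row));

  // TensorRow -> scalar: the C++ type must match the tensor's DataType exactly.
  double scalar_value = 0.0;
  RETURN_IF_NOT_OK(ds::TensorRow::ConvertFromTensorRow(scalar_row, &scalar_value));

  // Vector -> TensorRow of n single-element Tensors, and back to a vector of size n.
  std::vector<int32_t> values = {1, 2, 3};
  ds::TensorRow vector_row;
  RETURN_IF_NOT_OK(ds::TensorRow::ConvertToTensorRow(values, &vector_row));

  std::vector<int32_t> restored;
  RETURN_IF_NOT_OK(ds::TensorRow::ConvertFromTensorRow(vector_row, &restored));

  // std::string (DE_STRING) inputs and mismatched output types return a non-OK Status.
  return ds::Status::OK();
}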


@@ -71,6 +71,7 @@ SET(DE_UT_SRCS
status_test.cc
task_manager_test.cc
tensor_test.cc
tensor_row_test.cc
tensor_string_test.cc
tensorshape_test.cc
tfReader_op_test.cc


@@ -0,0 +1,258 @@
/**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "minddata/dataset/core/client.h"
#include "common/common.h"
#include "gtest/gtest.h"
#include "minddata/dataset/core/tensor.h"
#include "minddata/dataset/core/tensor_row.h"
using namespace mindspore::dataset;
class MindDataTestTensorRowDE : public UT::Common {
public:
MindDataTestTensorRowDE() {}
void SetUp() { GlobalInit(); }
};
TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowBoolTest) {
Status s;
TensorRow bool_output;
bool bool_value = true;
s = TensorRow::ConvertToTensorRow(bool_value, &bool_output);
ASSERT_EQ(s, Status::OK());
TensorRow expected_bool;
std::shared_ptr<Tensor> expected_tensor;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &expected_tensor);
expected_tensor->SetItemAt<bool>({0}, bool_value);
expected_bool.push_back(expected_tensor);
ASSERT_EQ(*(bool_output.at(0)) == *(expected_bool.at(0)), true);
}
TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowIntTest) {
Status s;
TensorRow int_output;
int32_t int_value = 12;
TensorRow expected_int;
s = TensorRow::ConvertToTensorRow(int_value, &int_output);
ASSERT_EQ(s, Status::OK());
std::shared_ptr<Tensor> expected_tensor;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_INT32), &expected_tensor);
expected_tensor->SetItemAt({0}, int_value);
expected_int.push_back(expected_tensor);
ASSERT_EQ(*(int_output.at(0)) == *(expected_int.at(0)), true);
}
TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowFloatTest) {
Status s;
TensorRow float_output;
float float_value = 12.57f;
TensorRow expected_float;
s = TensorRow::ConvertToTensorRow(float_value, &float_output);
ASSERT_EQ(s, Status::OK());
std::shared_ptr<Tensor> expected_tensor;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT32), &expected_tensor);
expected_tensor->SetItemAt({0}, float_value);
expected_float.push_back(expected_tensor);
ASSERT_EQ(*(float_output.at(0)) == *(expected_float.at(0)), true);
}
TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowBoolVectorTest) {
Status s;
TensorRow bool_output;
std::vector<bool> bool_value = {true, false};
s = TensorRow::ConvertToTensorRow(bool_value, &bool_output);
ASSERT_EQ(s, Status::OK());
TensorRow expected_bool;
std::shared_ptr<Tensor> expected_tensor, expected_tensor2;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &expected_tensor);
expected_tensor->SetItemAt<bool>({0}, bool_value[0]);
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &expected_tensor2);
expected_tensor2->SetItemAt<bool>({0}, bool_value[1]);
expected_bool.push_back(expected_tensor);
expected_bool.push_back(expected_tensor2);
ASSERT_EQ(*(bool_output.at(0)) == *(expected_bool.at(0)), true);
ASSERT_EQ(*(bool_output.at(1)) == *(expected_bool.at(1)), true);
}
TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowIntVectorTest) {
Status s;
TensorRow int_output;
std::vector<uint64_t> int_value = {12, 16};
TensorRow expected_int;
s = TensorRow::ConvertToTensorRow(int_value, &int_output);
ASSERT_EQ(s, Status::OK());
std::shared_ptr<Tensor> expected_tensor, expected_tensor2;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_UINT64), &expected_tensor);
expected_tensor->SetItemAt({0}, int_value[0]);
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_UINT64), &expected_tensor2);
expected_tensor2->SetItemAt({0}, int_value[1]);
expected_int.push_back(expected_tensor);
expected_int.push_back(expected_tensor2);
ASSERT_EQ(*(int_output.at(0)) == *(expected_int.at(0)), true);
ASSERT_EQ(*(int_output.at(1)) == *(expected_int.at(1)), true);
}
TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowFloatVectorTest) {
Status s;
TensorRow float_output;
std::vector<double> float_value = {12.57, 0.264};
TensorRow expected_float;
s = TensorRow::ConvertToTensorRow(float_value, &float_output);
ASSERT_EQ(s, Status::OK());
std::shared_ptr<Tensor> expected_tensor, expected_tensor2;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT64), &expected_tensor);
expected_tensor->SetItemAt({0}, float_value[0]);
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT64), &expected_tensor2);
expected_tensor2->SetItemAt({0}, float_value[1]);
expected_float.push_back(expected_tensor);
expected_float.push_back(expected_tensor2);
ASSERT_EQ(*(float_output.at(0)) == *(expected_float.at(0)), true);
ASSERT_EQ(*(float_output.at(1)) == *(expected_float.at(1)), true);
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowBoolTest) {
Status s;
bool bool_value = true;
bool result;
TensorRow input_tensor_row;
std::shared_ptr<Tensor> input_tensor;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &input_tensor);
input_tensor->SetItemAt<bool>({0}, bool_value);
input_tensor_row.push_back(input_tensor);
s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
ASSERT_EQ(s, Status::OK());
ASSERT_EQ(bool_value, result);
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowIntTest) {
Status s;
int32_t int_value = 12;
int32_t result;
TensorRow input_tensor_row;
std::shared_ptr<Tensor> input_tensor;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_INT32), &input_tensor);
input_tensor->SetItemAt({0}, int_value);
input_tensor_row.push_back(input_tensor);
s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
ASSERT_EQ(s, Status::OK());
ASSERT_EQ(int_value, result);
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowFloatTest) {
Status s;
float float_value = 12.57f;
float result;
TensorRow input_tensor_row;
std::shared_ptr<Tensor> input_tensor;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT32), &input_tensor);
input_tensor->SetItemAt({0}, float_value);
input_tensor_row.push_back(input_tensor);
s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
ASSERT_EQ(s, Status::OK());
ASSERT_EQ(float_value, result);
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowBoolVectorTest) {
Status s;
std::vector<bool> bool_value = {true, false};
std::vector<bool> result;
TensorRow input_tensor_row;
std::shared_ptr<Tensor> input_tensor1, input_tensor2;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &input_tensor1);
input_tensor1->SetItemAt<bool>({0}, bool_value[0]);
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &input_tensor2);
input_tensor2->SetItemAt<bool>({0}, bool_value[1]);
input_tensor_row.push_back(input_tensor1);
input_tensor_row.push_back(input_tensor2);
s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
ASSERT_EQ(s, Status::OK());
ASSERT_EQ(result, bool_value);
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowIntVectorTest) {
Status s;
std::vector<uint64_t> int_value = {12, 16};
std::vector<uint64_t> result;
TensorRow input_tensor_row;
std::shared_ptr<Tensor> input_tensor1, input_tensor2;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_UINT64), &input_tensor1);
input_tensor1->SetItemAt({0}, int_value[0]);
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_UINT64), &input_tensor2);
input_tensor2->SetItemAt({0}, int_value[1]);
input_tensor_row.push_back(input_tensor1);
input_tensor_row.push_back(input_tensor2);
s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
ASSERT_EQ(s, Status::OK());
ASSERT_EQ(result, int_value);
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowFloatVectorTest) {
Status s;
std::vector<double> float_value = {12.57, 0.264};
std::vector<double> result;
TensorRow input_tensor_row;
std::shared_ptr<Tensor> input_tensor1, input_tensor2;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT64), &input_tensor1);
input_tensor1->SetItemAt({0}, float_value[0]);
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_FLOAT64), &input_tensor2);
input_tensor2->SetItemAt({0}, float_value[1]);
input_tensor_row.push_back(input_tensor1);
input_tensor_row.push_back(input_tensor2);
s = TensorRow::ConvertFromTensorRow(input_tensor_row, &result);
ASSERT_EQ(s, Status::OK());
ASSERT_EQ(result, float_value);
}
TEST_F(MindDataTestTensorRowDE, ConvertToTensorRowInvalidDataTest) {
TensorRow output;
std::string string_input = "Bye";
ASSERT_FALSE(TensorRow::ConvertToTensorRow(string_input, &output).IsOk());
std::vector<std::string> string_vector_input = {"Hello"};
ASSERT_FALSE(TensorRow::ConvertToTensorRow(string_vector_input, &output).IsOk());
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowTypeMismatchTest) {
TensorRow input_tensor_row;
std::shared_ptr<Tensor> input_tensor1;
Tensor::CreateEmpty(TensorShape({1}), DataType(DataType::DE_BOOL), &input_tensor1);
input_tensor1->SetItemAt({0}, false);
input_tensor_row.push_back(input_tensor1);
double output;
ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output).IsOk());
std::vector<double> output_vector;
ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output_vector).IsOk());
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowInvalidShapeTest) {
TensorRow input_tensor_row;
std::shared_ptr<Tensor> input_tensor1;
Tensor::CreateEmpty(TensorShape({2}), DataType(DataType::DE_FLOAT64), &input_tensor1);
input_tensor_row.push_back(input_tensor1);
double output;
ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output).IsOk());
std::vector<double> output_vector;
ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output_vector).IsOk());
}
TEST_F(MindDataTestTensorRowDE, ConvertFromTensorRowEmptyInputTest) {
TensorRow input_tensor_row;
double output;
ASSERT_FALSE(TensorRow::ConvertFromTensorRow(input_tensor_row, &output).IsOk());
}