fix: code check

jonyguo 2022-03-16 18:50:35 +08:00
parent 62a2b7c41c
commit 11079eabbb
13 changed files with 48 additions and 26 deletions

View File

@@ -205,7 +205,7 @@ std::shared_ptr<PullIterator> Dataset::CreatePullBasedIterator(const std::vector
   }
   std::shared_ptr<PullIterator> iter = std::make_shared<PullIterator>();
-  Status rc = iter->BuildAndLaunchTree(ds);
+  Status rc = iter->BuildAndLaunchTree(ds, 1);
   if (rc.IsError()) {
     MS_LOG(ERROR) << "CreateIterator: Iterator exception caught: " << rc;
   }

View File

@@ -136,7 +136,7 @@ Status PullIterator::GetNextRow(MSTensorVec *const row) {
 // Function to build and launch the execution tree. This function kicks off a different type of consumer
 // for the tree, the reason why this is the case is due to the fact that PullBasedIterator does not need
 // to instantiate threads for each op. As such, the call to the consumer will bypass the execution tree.
-Status PullIterator::BuildAndLaunchTree(const std::shared_ptr<Dataset> &ds) {
+Status PullIterator::BuildAndLaunchTree(const std::shared_ptr<Dataset> &ds, int32_t num_epochs) {
   if (pull_consumer_ == nullptr) {
     pull_consumer_ = std::make_unique<PullBasedIteratorConsumer>();
   }
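
For context, here is a minimal usage sketch of the pull-based iterator (illustrative only, not part of this commit; headers are omitted and the dataset construction is a placeholder, so exact overloads may differ):

  // Build a dataset pipeline `ds` elsewhere, then pull rows synchronously,
  // without spawning per-op worker threads.
  std::shared_ptr<Dataset> ds = /* construct a dataset pipeline here */;
  std::shared_ptr<PullIterator> iter = ds->CreatePullBasedIterator();
  MSTensorVec row;
  Status rc = iter->GetNextRow(&row);
  while (rc.IsOk() && !row.empty()) {
    // consume `row` here
    rc = iter->GetNextRow(&row);
  }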

View File

@@ -33,7 +33,7 @@ class DatasetIteratorTracing : public Tracing {
   std::string Name() const override { return kDatasetIteratorTracingName; };
- private:
+ protected:
   Path GetFileName(const std::string &dir_path, const std::string &rank_id) override;
 };
 }  // namespace dataset

View File

@@ -34,7 +34,7 @@ class DeviceQueueTracing : public Tracing {
   std::string Name() const override { return kDeviceQueueTracingName; };
- private:
+ protected:
   Path GetFileName(const std::string &dir_path, const std::string &rank_id) override;
 };
 }  // namespace dataset

View File

@@ -26,6 +26,18 @@ Monitor::Monitor(ProfilingManager *profiling_manager) : profiling_manager_(profi
   sampling_interval_ = cfg->monitor_sampling_interval();
   tree_ = profiling_manager_->tree_;
 }
+
+Monitor::~Monitor() {
+  // Just reset the pointers to nullptr; they are not released here.
+  if (profiling_manager_) {
+    profiling_manager_ = nullptr;
+  }
+  if (tree_) {
+    tree_ = nullptr;
+  }
+}
+
 Status Monitor::operator()() {
   // Register this thread with TaskManager to receive proper interrupt signal.
   TaskManager::FindMe()->Post();

View File

@@ -35,7 +35,7 @@ class Monitor {
   // Monitor object constructor
   explicit Monitor(ProfilingManager *profiler_manager);
-  ~Monitor() = default;
+  ~Monitor();
   // Functor for Perf Monitor main loop.
   // This function will be the entry point of mindspore::Dataset::Task

View File

@@ -521,7 +521,7 @@ class ProfilingManager {
   /// Get number of epochs that have been already profiled
   /// \return number of epochs
-  int32_t GetNumOfProfiledEpochs() { return epoch_end_step_.size() - 1; }
+  int32_t GetNumOfProfiledEpochs() { return static_cast<int32_t>(epoch_end_step_.size()) - 1; }
   // Get number of steps taken in pipeline
   /// \return number of steps
@@ -529,11 +529,11 @@ class ProfilingManager {
   /// Determine if the Profiler is being used for autotuning.
   /// \return boolean
-  bool IsAutotuning() { return autotuning_; }
+  bool IsAutotuning() const { return autotuning_; }
   /// Determine if the Profiler is being used for profiling.
   /// \return boolean
-  bool IsProfiling() { return profiling_; }
+  bool IsProfiling() const { return profiling_; }
   // Registration state for the profiler
   enum ProfilingRegistrationState {
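
The static_cast above avoids unsigned-arithmetic surprises when the container is empty; a small self-contained illustration (not from this commit, element type assumed):

  #include <cstdint>
  #include <vector>

  int main() {
    std::vector<uint32_t> steps;                                // empty container
    auto wrapped = steps.size() - 1;                            // size_t underflow: a huge unsigned value
    int32_t intended = static_cast<int32_t>(steps.size()) - 1;  // -1, as intended
    (void)wrapped;
    (void)intended;
    return 0;
  }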

View File

@@ -19,6 +19,7 @@
 #include <iostream>
 #include <map>
 #include <thread>
+#include <utility>
 #include "utils/log_adapter.h"
 #include "acl/acl_tdt.h"
@@ -26,11 +27,11 @@ namespace mindspore {
 namespace dataset {
 class TdtHandle {
  public:
-  static void AddHandle(acltdtChannelHandle **handle, std::thread *use_thread);
+  static inline void AddHandle(acltdtChannelHandle **handle, std::thread *use_thread);
-  static bool DestroyHandle();
+  static inline bool DestroyHandle();
-  static void DelHandle(acltdtChannelHandle **handle);
+  static inline void DelHandle(acltdtChannelHandle **handle);
  private:
   TdtHandle() {}
@@ -39,7 +40,10 @@ class TdtHandle {
 inline void TdtHandle::AddHandle(acltdtChannelHandle **handle, std::thread *use_thread) {
   if (*handle != nullptr) {
-    acl_handle_map.insert({reinterpret_cast<void **>(handle), use_thread});
+    auto ret = acl_handle_map.emplace(reinterpret_cast<void **>(handle), use_thread);
+    if (!std::get<1>(ret)) {
+      MS_LOG(ERROR) << "Failed to add new handle to acl_handle_map." << std::endl;
+    }
   }
 }
@@ -53,7 +57,11 @@ inline bool TdtHandle::DestroyHandle() {
   for (auto &item : acl_handle_map) {
     acltdtChannelHandle **handle = reinterpret_cast<acltdtChannelHandle **>(item.first);
     if (*handle != nullptr) {
-      acltdtStopChannel(*handle);
+      aclError stop_status = acltdtStopChannel(*handle);
+      if (stop_status != ACL_SUCCESS) {
+        MS_LOG(ERROR) << "Failed to stop acl data channel, stop status: " << stop_status << std::endl;
+        return false;
+      }
       if (item.second != nullptr && item.second->joinable()) {
         item.second->join();
       }
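
For reference, std::map::emplace returns a std::pair<iterator, bool>, and the bool (what std::get<1>(ret) reads above) tells whether anything was inserted. A standalone illustration with assumed types (acl_handle_map is taken to be a std::map), not from this commit:

  #include <map>
  #include <thread>

  int main() {
    std::map<void **, std::thread *> handles;    // stand-in for acl_handle_map
    void *raw = nullptr;
    auto ret = handles.emplace(&raw, nullptr);   // pair<iterator, bool>
    if (!ret.second) {
      // the key was already present; nothing was inserted
    }
    return 0;
  }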

View File

@@ -1835,7 +1835,7 @@ class MS_API CMUArcticDataset : public Dataset {
                    const std::reference_wrapper<Sampler> &sampler, const std::shared_ptr<DatasetCache> &cache);
   /// \brief Destructor of CMUArcticDataset.
-  ~CMUArcticDataset() = default;
+  ~CMUArcticDataset() override = default;
 };
 /// \brief Function to create a CMUArcticDataset.
@@ -2789,7 +2789,7 @@ class MS_API GTZANDataset : public Dataset {
                    const std::reference_wrapper<Sampler> &sampler, const std::shared_ptr<DatasetCache> &cache);
   /// \brief Destructor of GTZANDataset.
-  ~GTZANDataset() = default;
+  ~GTZANDataset() override = default;
 };
 /// \brief Function to create a GTZANDataset.
@@ -3203,7 +3203,7 @@ class MS_API KITTIDataset : public Dataset {
                    const std::reference_wrapper<Sampler> &sampler, const std::shared_ptr<DatasetCache> &cache);
   /// \brief Destructor of KITTIDataset.
-  ~KITTIDataset() = default;
+  ~KITTIDataset() override = default;
 };
 /// \brief Function to create a KITTIDataset.
@@ -3388,7 +3388,7 @@ class MS_API LFWDataset : public Dataset {
                    const std::shared_ptr<DatasetCache> &cache);
   /// \brief Destructor of LFWDataset.
-  ~LFWDataset() = default;
+  ~LFWDataset() override = default;
 };
 /// \brief Function to create a LFWDataset.
@@ -3498,7 +3498,7 @@ class MS_API LibriTTSDataset : public Dataset {
                    const std::reference_wrapper<Sampler> &sampler, const std::shared_ptr<DatasetCache> &cache);
   /// \brief Destructor of LibriTTSDataset.
-  ~LibriTTSDataset() = default;
+  ~LibriTTSDataset() override = default;
 };
 /// \brief Function to create a LibriTTSDataset.
@@ -3670,7 +3670,7 @@ class MS_API LSUNDataset : public Dataset {
                    const std::shared_ptr<DatasetCache> &cache);
   /// \brief Destructor of LSUNDataset.
-  ~LSUNDataset() = default;
+  ~LSUNDataset() override = default;
 };
 /// \brief Function to create a LSUNDataset.
@@ -4281,7 +4281,7 @@ class MS_API Multi30kDataset : public Dataset {
                    int32_t num_shards, int32_t shard_id, const std::shared_ptr<DatasetCache> &cache);
   /// \brief Destructor of Multi30kDataset.
-  ~Multi30kDataset() = default;
+  ~Multi30kDataset() override = default;
 };
 /// \brief Function to create a Multi30kDataset.
@@ -4359,7 +4359,7 @@ class MS_API OmniglotDataset : public Dataset {
                    const std::reference_wrapper<Sampler> &sampler, const std::shared_ptr<DatasetCache> &cache);
   /// Destructor of OmniglotDataset.
-  ~OmniglotDataset() = default;
+  ~OmniglotDataset() override = default;
 };
 /// \brief Function to create an OmniglotDataset.
@@ -5138,7 +5138,7 @@ class MS_API SQuADDataset : public Dataset {
                    ShuffleMode shuffle, int32_t num_shards, int32_t shard_id, const std::shared_ptr<DatasetCache> &cache);
   /// \brief Destructor of SQuADDataset.
-  ~SQuADDataset() = default;
+  ~SQuADDataset() override = default;
 };
 /// \brief Function to create a SQuADDataset.
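
All of the destructor changes in this file follow one pattern: marking a defaulted destructor override lets the compiler verify that the base class destructor really is virtual. A minimal illustration (not from this commit):

  struct Base { virtual ~Base() = default; };
  struct Derived : Base { ~Derived() override = default; };  // fails to compile if ~Base() is not virtual

  int main() {
    Base *p = new Derived();
    delete p;  // safe: the virtual base destructor dispatches to ~Derived()
    return 0;
  }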

View File

@@ -184,8 +184,9 @@ class MS_API PullIterator : public Iterator {
   /// \brief Method for building and launching the pipeline.
   /// \note Consider making this function protected.
   /// \param[in] ds The root node that calls the function.
+  /// \param[in] num_epochs Number of epochs passed down to EpochCtrlNode (default=-1, which means infinite epochs).
   /// \return Status error code, returns OK if no error encountered.
-  Status BuildAndLaunchTree(const std::shared_ptr<Dataset> &ds);
+  Status BuildAndLaunchTree(const std::shared_ptr<Dataset> &ds, int32_t num_epochs);
  private:
   std::unique_ptr<PullBasedIteratorConsumer> pull_consumer_;

View File

@@ -153,7 +153,7 @@ class Vocab {
   /// \brief Return a read-only vocab in unordered_map type.
   /// \return An unordered_map of word2id.
-  const std::unordered_map<WordType, WordIdType> &GetVocab() { return word2id_; }
+  const std::unordered_map<WordType, WordIdType> &GetVocab() const { return word2id_; }
   /// \brief Constructor.
   Vocab() = default;
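
Adding const to the accessor lets it be called through const references and pointers; a reduced illustration with a hypothetical helper and a stand-in class (not the real Vocab), not from this commit:

  #include <iostream>
  #include <string>
  #include <unordered_map>

  struct Vocab {  // reduced stand-in for the real class
    const std::unordered_map<std::string, int> &GetVocab() const { return word2id_; }
    std::unordered_map<std::string, int> word2id_;
  };

  void PrintVocabSize(const Vocab &vocab) {
    // compiles only because GetVocab() is a const member function
    std::cout << vocab.GetVocab().size() << std::endl;
  }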

View File

@@ -499,7 +499,7 @@ inline float32x4_t round_simd(const float32x4_t &v) {
 template <typename T>
 inline void DivideImpl(const T *src0, const T *src1, T *dst, int64_t total_size) {
-  for (size_t i = 0; i < total_size; i++) {
+  for (int64_t i = 0; i < total_size; i++) {
     dst[i] = src1[i] ? src0[i] / src1[i] : 0;
   }
 }
@@ -641,7 +641,7 @@ bool Divide(const LiteMat &src_a, const LiteMat &src_b, LiteMat *dst) {
 template <typename T>
 inline void MultiplyImpl(const T *src0, const T *src1, T *dst, int64_t total_size) {
-  for (size_t i = 0; i < total_size; i++) {
+  for (int64_t i = 0; i < total_size; i++) {
     dst[i] = src0[i] * src1[i];
   }
 }
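
The loop index type now matches the signed bound, which avoids a signed/unsigned comparison warning; a condensed illustration (hypothetical function, not from this commit):

  #include <cstdint>

  void FillOnes(float *dst, int64_t total_size) {
    // for (size_t i = 0; i < total_size; i++)   // would mix an unsigned index with a signed bound (-Wsign-compare)
    for (int64_t i = 0; i < total_size; i++) {   // index type matches the bound
      dst[i] = 1.0f;
    }
  }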

View File

@ -229,3 +229,4 @@ if __name__ == "__main__":
test_reset_cifar2()
test_reset_imagenet()
test_reset_mindrecord(add_and_remove_cv_file)
test_reset_np_error()