Browse Source

Minor fix (requested by HQ)

tags/v1.1.0
Lixia Chen 5 years ago
parent
commit
b86ad055be
5 changed files with 11 additions and 5 deletions
  1. +4
    -2
      mindspore/ccsrc/minddata/dataset/engine/cache/cache_service.cc
  2. +1
    -0
      mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_base_op.cc
  3. +2
    -1
      mindspore/ccsrc/minddata/dataset/engine/ir/cache/dataset_cache_impl.h
  4. +2
    -1
      mindspore/ccsrc/minddata/dataset/include/datasets.h
  5. +2
    -1
      mindspore/dataset/engine/cache_client.py

+ 4
- 2
mindspore/ccsrc/minddata/dataset/engine/cache/cache_service.cc View File

@@ -39,8 +39,10 @@ Status CacheService::DoServiceStart() {
if (cache_mem_sz_ > 0) {
auto avail_mem = CacheServerHW::GetTotalSystemMemory();
if (cache_mem_sz_ > avail_mem) {
// Output a warning that we use more than recommended. If we fail to allocate, we will fail anyway.
MS_LOG(WARNING) << "Requesting cache size " << cache_mem_sz_ << " while available system memory " << avail_mem;
// Return an error if we use more than recommended memory.
std::string errMsg = "Requesting cache size " + std::to_string(cache_mem_sz_) +
" while available system memory " + std::to_string(avail_mem);
return Status(StatusCode::kOutOfMemory, __LINE__, __FILE__, errMsg);
}
memory_cap_ratio = static_cast<float>(cache_mem_sz_) / avail_mem;
}


+ 1
- 0
mindspore/ccsrc/minddata/dataset/engine/datasetops/cache_base_op.cc View File

@@ -308,6 +308,7 @@ Status CacheBase::Prefetcher(int32_t worker_id) {
// If we get some network error, we will attempt some retries
retry_count++;
} else if (rc.IsError()) {
MS_LOG(WARNING) << rc.ToString();
return rc;
}
} while (rc.IsNetWorkError());


+ 2
- 1
mindspore/ccsrc/minddata/dataset/engine/ir/cache/dataset_cache_impl.h View File

@@ -32,7 +32,8 @@ class DatasetCacheImpl : public DatasetCache {
///
/// \brief Constructor
/// \param id A user assigned session id for the current pipeline.
/// \param mem_sz Size of the memory set aside for the row caching (default=0 which means unlimited).
/// \param mem_sz Size of the memory set aside for the row caching (default=0 which means unlimited,
/// note that it might bring in the risk of running out of memory on the machine).
/// \param spill Spill to disk if out of memory (default=False).
/// \param hostname optional host name (default="127.0.0.1").
/// \param port optional port (default=50052).


+ 2
- 1
mindspore/ccsrc/minddata/dataset/include/datasets.h View File

@@ -1011,7 +1011,8 @@ std::shared_ptr<VOCDataset> VOC(const std::string &dataset_dir, const std::strin

/// \brief Function to create a cache to be attached to a dataset
/// \param id A user assigned session id for the current pipeline.
/// \param mem_sz Size of the memory set aside for the row caching (default=0 which means unlimited).
/// \param mem_sz Size of the memory set aside for the row caching (default=0 which means unlimited,
/// note that it might bring in the risk of running out of memory on the machine).
/// \param spill Spill to disk if out of memory (default=False).
/// \param hostname optional host name (default="127.0.0.1").
/// \param port optional port (default=50052).


+ 2
- 1
mindspore/dataset/engine/cache_client.py View File

@@ -30,7 +30,8 @@ class DatasetCache:

Args:
session_id (int): A user assigned session id for the current pipeline.
size (int, optional): Size of the memory set aside for the row caching (default=0 which means unlimited).
size (int, optional): Size of the memory set aside for the row caching (default=0 which means unlimited,
note that it might bring in the risk of running out of memory on the machine).
spilling (bool, optional): Whether or not spilling to disk if out of memory (default=False).
hostname (str, optional): Host name (default="127.0.0.1").
port (int, optional): Port to connect to server (default=50052).


Loading…
Cancel
Save