
!2894 fix out-of-memory error when the global shuffle level is set for a large BERT dataset

Merge pull request !2894 from shibeiji/master
Tag: v0.6.0-beta
mindspore-ci-bot committed 5 years ago
commit 716d0eb969
2 changed files with 3 additions and 3 deletions:
  1. mindspore/nn/optim/adam.py (+1, -1)
  2. model_zoo/bert/src/dataset.py (+2, -2)

mindspore/nn/optim/adam.py (+1, -1)

@@ -388,7 +388,7 @@ class AdamWeightDecayDynamicLR(Optimizer):
                  beta2=0.999,
                  eps=1e-6,
                  weight_decay=0.0,
-                 decay_filter=lambda x: 'beta' not in x.name and 'gamma' not in x.name):
+                 decay_filter=lambda x: 'layernorm' not in x.name.lower() and 'bias' not in x.name.lower()):
         super(AdamWeightDecayDynamicLR, self).__init__(0.0, params)
         if self.is_group:
             raise RuntimeError(f"The {self.cls_name} optimizer cannot support group setting.")
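
Note: decay_filter decides per parameter whether weight decay is applied; parameters for which it returns True are decayed, the rest are skipped. The new default keys on 'layernorm'/'bias' in the lower-cased parameter name instead of the old 'beta'/'gamma' convention. A minimal sketch of how such a filter partitions a parameter list (the Param class and the parameter names below are hypothetical, for illustration only):

# Hypothetical stand-in for a framework parameter with a .name attribute.
class Param:
    def __init__(self, name):
        self.name = name

decay_filter = lambda x: 'layernorm' not in x.name.lower() and 'bias' not in x.name.lower()

params = [Param('encoder.dense.weight'),
          Param('encoder.dense.bias'),
          Param('encoder.LayerNorm.gamma')]

decayed = [p.name for p in params if decay_filter(p)]      # weight decay applied
skipped = [p.name for p in params if not decay_filter(p)]  # excluded from decay
print(decayed)  # ['encoder.dense.weight']
print(skipped)  # ['encoder.dense.bias', 'encoder.LayerNorm.gamma']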


model_zoo/bert/src/dataset.py (+2, -2)

@@ -36,8 +36,8 @@ def create_bert_dataset(epoch_size=1, device_num=1, rank=0, do_shuffle="true", e
     ds = de.TFRecordDataset(data_files, schema_dir if schema_dir != "" else None,
                             columns_list=["input_ids", "input_mask", "segment_ids", "next_sentence_labels",
                                           "masked_lm_positions", "masked_lm_ids", "masked_lm_weights"],
-                            shuffle=(do_shuffle == "true"), num_shards=device_num, shard_id=rank,
-                            shard_equal_rows=True)
+                            shuffle=de.Shuffle.FILES if do_shuffle == "true" else False,
+                            num_shards=device_num, shard_id=rank, shard_equal_rows=True)
     ori_dataset_size = ds.get_dataset_size()
     print('origin dataset size: ', ori_dataset_size)
     new_size = ori_dataset_size
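
Note: with shuffle=True the dataset performs a global row-level shuffle, which buffers rows in memory and can run out of memory on a large BERT corpus; de.Shuffle.FILES shuffles only the order of the input TFRecord files, so memory use stays bounded. A minimal sketch of the shuffle modes involved, assuming mindspore.dataset is importable (the do_shuffle flag mirrors the script above):

import mindspore.dataset as de

# The shuffle argument of TFRecordDataset accepts, among other values:
#   True / de.Shuffle.GLOBAL -> shuffle individual rows; needs a large in-memory
#                               buffer and can OOM on very large datasets
#   de.Shuffle.FILES         -> shuffle only the order of the input files;
#                               memory use stays bounded
#   False                    -> no shuffling
do_shuffle = "true"  # string flag, as in the BERT script
shuffle_mode = de.Shuffle.FILES if do_shuffle == "true" else False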

