
!2192 fix CPU LSTM bug when hidden_size is zero

Merge pull request !2192 from baihuawei/cpulstm
Tag: v0.5.0-beta
Committed by mindspore-ci-bot, 5 years ago
Commit: 39bee74ebf
1 changed file with 3 additions and 0 deletions:
  mindspore/nn/layer/lstm.py (+3, -0)

mindspore/nn/layer/lstm.py

@@ -23,6 +23,7 @@ from mindspore.common.parameter import Parameter, ParameterTuple
 from mindspore.common.tensor import Tensor
 from mindspore.nn.cell import Cell
 from mindspore.ops import operations as P
+from ..._checkparam import Rel

 __all__ = ['LSTM', 'LSTMCell']
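
The new import brings in Rel, the comparison-relation enum used by MindSpore's parameter validators. For context, the following is a minimal sketch of the contract that validator.check_integer(name, value, bound, Rel.GT, cls_name) enforces; it is an illustration written for this note, not MindSpore's actual _checkparam implementation, and the exact error messages are assumptions:

    from enum import Enum

    class Rel(Enum):
        """Comparison relations, mirroring the role of mindspore._checkparam.Rel."""
        GT = "greater than"
        GE = "greater than or equal to"

    def check_integer(arg_name, arg_value, bound, rel, prim_name):
        """Validate that arg_value is an int satisfying `rel` against `bound`.

        Returns arg_value unchanged on success, so it can be assigned inline:
        self.hidden_size = check_integer("hidden_size", hidden_size, 0, Rel.GT, cls).
        """
        # bool is a subclass of int in Python, so reject it explicitly.
        if not isinstance(arg_value, int) or isinstance(arg_value, bool):
            raise TypeError(f"For '{prim_name}', '{arg_name}' must be an int, "
                            f"but got {type(arg_value).__name__}.")
        ok = arg_value > bound if rel is Rel.GT else arg_value >= bound
        if not ok:
            raise ValueError(f"For '{prim_name}', '{arg_name}' must be {rel.value} "
                             f"{bound}, but got {arg_value}.")
        return arg_value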

@@ -123,6 +124,8 @@ class LSTM(Cell):
         self.num_layers = num_layers
         self.has_bias = has_bias
         self.batch_first = validator.check_value_type("batch_first", batch_first, [bool], self.cls_name)
+        self.hidden_size = validator.check_integer("hidden_size", hidden_size, 0, Rel.GT, self.cls_name)
+        self.num_layers = validator.check_integer("num_layers", num_layers, 0, Rel.GT, self.cls_name)
         self.dropout = float(dropout)
         self.bidirectional = bidirectional
         if self.batch_first:
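
With both checks in place, an LSTM configured with hidden_size=0 (the case from the bug report) is rejected at construction time instead of failing later inside the CPU kernel. A minimal repro sketch against a patched build; the exact exception type and message are assumptions based on the validator's usual behavior:

    import mindspore.nn as nn

    # Before this patch, hidden_size=0 passed through __init__ unchecked and
    # the CPU LSTM kernel failed later, at execution time. After the patch,
    # the constructor raises immediately.
    try:
        net = nn.LSTM(input_size=10, hidden_size=0, num_layers=1)
    except (TypeError, ValueError) as err:
        print("Rejected at construction:", err)

    # A strictly positive hidden_size constructs normally.
    net = nn.LSTM(input_size=10, hidden_size=16, num_layers=1, batch_first=True)

Validating in __init__ rather than in the kernel follows the usual fail-fast pattern: the error surfaces at the line that supplied the bad value, not deep in graph execution.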

