From cc6ddc144fa85010b111df2b4c596c7230052080 Mon Sep 17 00:00:00 2001
From: Beacontownfc <19636977267@qq.com>
Date: Fri, 7 Jul 2023 00:33:41 +0000
Subject: [PATCH] Add AdamW optimizer

---
 src/TensorFlowNET.Keras/Optimizers/AdamW.cs | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/TensorFlowNET.Keras/Optimizers/AdamW.cs b/src/TensorFlowNET.Keras/Optimizers/AdamW.cs
index 469b8ad2..d111b5d3 100644
--- a/src/TensorFlowNET.Keras/Optimizers/AdamW.cs
+++ b/src/TensorFlowNET.Keras/Optimizers/AdamW.cs
@@ -1,4 +1,4 @@
-namespace Tensorflow.Keras.Optimizers
+namespace Tensorflow.Keras.Optimizers
 {
     public class AdamW : Adam
     {
@@ -22,9 +22,6 @@ protected Operation _decay_weights_op(IVariableV1 var, float learning_rate,
             Dictionary<DeviceDType, Dictionary<string, Tensor>> apply_state)
         {
-            var device_dtype = new DeviceDType();
-            device_dtype.DType = var.dtype;
-            device_dtype.Device = var.Device;
             bool do_decay = _do_use_weight_decay(var.Name);
             if (do_decay) return var.assign_add(
                 -learning_rate * var.AsTensor() * apply_state[deType]["weight_decay"]);
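
For context (not part of the patch): a minimal standalone C# sketch of the decoupled weight decay step that _decay_weights_op builds, per Loshchilov & Hutter's "Decoupled Weight Decay Regularization". The names AdamWDecaySketch, DecayWeights, weights, learningRate, and weightDecay are illustrative assumptions, not identifiers from TensorFlowNET.Keras; the real method returns a TensorFlow assign_add Operation rather than mutating an array in place.

using System;

class AdamWDecaySketch
{
    // Decoupled weight decay: w <- w - lr * wd * w, applied separately from
    // the Adam gradient/moment update. This mirrors the patch's
    // var.assign_add(-learning_rate * var * weight_decay).
    static void DecayWeights(double[] weights, double learningRate, double weightDecay)
    {
        for (int i = 0; i < weights.Length; i++)
            weights[i] -= learningRate * weightDecay * weights[i];
    }

    static void Main()
    {
        var w = new[] { 1.0, -2.0, 0.5 };
        DecayWeights(w, learningRate: 0.001, weightDecay: 0.004);
        // Each weight shrinks toward zero by a factor of (1 - lr * wd);
        // prints roughly: 0.999996, -1.999992, 0.499998
        Console.WriteLine(string.Join(", ", w));
    }
}

Because the decay is applied directly to the variable instead of being folded into the gradient (as L2 regularization would be), it is not rescaled by Adam's adaptive moment estimates; that separation is the defining design choice of AdamW.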