
tf.train.cs 4.9 kB

/*****************************************************************************
   Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
******************************************************************************/
using System.Collections.Generic;
using Tensorflow.Keras.Optimizers;
using Tensorflow.Train;

namespace Tensorflow
{
    public partial class tensorflow
    {
        // tf.train: training utilities (optimizers, savers, checkpoint and
        // meta-graph helpers, learning-rate schedules) exposed on the global
        // `tf` instance, mirroring TF1's tf.train namespace.
        public train_internal train { get; } = new train_internal();

        public class train_internal
        {
            // Create or look up the graph's global step counter.
            public RefVariable create_global_step(Graph graph)
                => TrainingUtil.create_global_step(graph);

            public RefVariable get_global_step(Graph graph)
                => TrainingUtil.get_global_step(graph);

            // Optimizer factories; the learning rate may be a constant or a Tensor.
            public Optimizer GradientDescentOptimizer(float learning_rate)
                => new GradientDescentOptimizer(learning_rate);

            public Optimizer GradientDescentOptimizer(Tensor learning_rate)
                => new GradientDescentOptimizer(learning_rate);

            public Optimizer AdamOptimizer(float learning_rate, float epsilon = 1e-8f, string name = "Adam")
                => new AdamOptimizer(learning_rate, epsilon: epsilon, name: name);

            public Optimizer AdamOptimizer(float learning_rate, TF_DataType dtype, string name = "Adam")
                => new AdamOptimizer(learning_rate, name: name, dtype: dtype);

            public Optimizer AdamOptimizer(Tensor learning_rate, string name = "Adam")
                => new AdamOptimizer(learning_rate, name: name);

            public ExponentialMovingAverage ExponentialMovingAverage(float decay)
                => new ExponentialMovingAverage(decay);

            // Saver for writing and restoring variable checkpoints.
            public Saver Saver(IVariableV1[] var_list = null, int max_to_keep = 5)
                => new Saver(var_list: var_list, max_to_keep: max_to_keep);
            // Serialize `graph` as a GraphDef file named `name` under `logdir`.
            public string write_graph(Graph graph, string logdir, string name, bool as_text = true)
                => graph_io.write_graph(graph, logdir, name, as_text);

            public Graph load_graph(string freeze_graph_pb)
                => saver.load_graph(freeze_graph_pb);

            public string freeze_graph(string checkpoint_dir, string output_pb_name, string[] output_node_names)
                => saver.freeze_graph(checkpoint_dir, output_pb_name, output_node_names);

            public Saver import_meta_graph(string meta_graph_or_file,
                                           bool clear_devices = false,
                                           string import_scope = "")
                => saver._import_meta_graph_with_return_elements(meta_graph_or_file,
                                                                 clear_devices,
                                                                 import_scope).Item1;
            public (MetaGraphDef, Dictionary<string, IVariableV1>) export_meta_graph(string filename = "",
                                                                                     bool as_text = false,
                                                                                     bool clear_devices = false,
                                                                                     bool clear_extraneous_savers = false,
                                                                                     bool strip_default_attrs = false)
                => meta_graph.export_scoped_meta_graph(filename: filename,
                                                       as_text: as_text,
                                                       clear_devices: clear_devices,
                                                       clear_extraneous_savers: clear_extraneous_savers,
                                                       strip_default_attrs: strip_default_attrs);

            // Checkpoint discovery helpers.
            public string latest_checkpoint(string checkpoint_dir, string latest_filename = null)
                => checkpoint_management.latest_checkpoint(checkpoint_dir, latest_filename: latest_filename);

            public CheckpointState get_checkpoint_state(string checkpoint_dir, string latest_filename = null)
                => checkpoint_management.get_checkpoint_state(checkpoint_dir, latest_filename: latest_filename);
            // Polynomial learning-rate decay from `learning_rate` down to
            // `end_learning_rate` over `decay_steps`, evaluated at `global_step`.
            public Tensor polynomial_decay(float learning_rate,
                                           RefVariable global_step,
                                           float decay_steps,
                                           float end_learning_rate = 0.0001f,
                                           float power = 1.0f,
                                           bool cycle = false,
                                           string name = null)
            {
                var decayed = new PolynomialDecay(learning_rate,
                                                  decay_steps,
                                                  end_learning_rate: end_learning_rate,
                                                  power: power,
                                                  cycle: cycle,
                                                  name: name);
                return decayed.__call__(global_step);
            }
        }
    }
}
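
For orientation, a minimal usage sketch of this tf.train surface in TensorFlow.NET's TF1-style graph mode. The global `tf` instance from `Tensorflow.Binding`, `tf.placeholder`, `tf.Session`, the tuple feed syntax of `sess.run`, and `Optimizer.minimize` all come from elsewhere in the library and are assumed here, not defined in this file:

using System;
using static Tensorflow.Binding;   // exposes the global `tf` instance (assumed)

// Fit w in y = w * x by gradient descent, using the factories defined above.
var x = tf.placeholder(tf.float32);
var y = tf.placeholder(tf.float32);
var w = tf.Variable(0.0f, name: "weight");
var loss = tf.reduce_mean(tf.square(tf.multiply(w, x) - y));

var train_op = tf.train.GradientDescentOptimizer(0.01f)    // defined in this file
                       .minimize(loss);                    // Optimizer.minimize is assumed

using (var sess = tf.Session())
{
    sess.run(tf.global_variables_initializer());
    for (int step = 0; step < 100; step++)
        sess.run(train_op, (x, new float[] { 1f, 2f, 3f }),
                           (y, new float[] { 2f, 4f, 6f }));
    Console.WriteLine($"w = {(float)sess.run(w)}");        // should approach 2.0
}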