
IOptimizerApi.cs

using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.Engine;

namespace Tensorflow.Keras
{
    public interface IOptimizerApi
    {
        /// <summary>
        /// Adam optimization is a stochastic gradient descent method that is based on
        /// adaptive estimation of first-order and second-order moments.
        /// </summary>
        /// <param name="learning_rate">The learning rate.</param>
        /// <param name="beta_1">Exponential decay rate for the first-moment estimates.</param>
        /// <param name="beta_2">Exponential decay rate for the second-moment estimates.</param>
        /// <param name="epsilon">Small constant for numerical stability.</param>
        /// <param name="amsgrad">Whether to apply the AMSGrad variant of the algorithm.</param>
        /// <param name="name">Optional name for the operations created when applying gradients.</param>
        /// <returns>A new Adam optimizer.</returns>
        IOptimizer Adam(float learning_rate = 0.001f,
            float beta_1 = 0.9f,
            float beta_2 = 0.999f,
            float epsilon = 1e-7f,
            bool amsgrad = false,
            string name = "Adam");

        /// <summary>
        /// Construct a new RMSprop optimizer.
        /// </summary>
        /// <param name="learning_rate">The learning rate.</param>
        /// <param name="rho">Discounting factor for the gradient history.</param>
        /// <param name="momentum">Momentum factor.</param>
        /// <param name="epsilon">Small constant for numerical stability.</param>
        /// <param name="centered">If true, normalize gradients by their estimated variance (centered RMSprop); if false, by the uncentered second moment.</param>
        /// <param name="name">Optional name for the operations created when applying gradients.</param>
        /// <returns>A new RMSprop optimizer.</returns>
        IOptimizer RMSprop(float learning_rate = 0.001f,
            float rho = 0.9f,
            float momentum = 0.0f,
            float epsilon = 1e-7f,
            bool centered = false,
            string name = "RMSprop");

        /// <summary>
        /// Construct a stochastic gradient descent optimizer.
        /// </summary>
        /// <param name="learning_rate">The learning rate.</param>
        /// <returns>A new SGD optimizer.</returns>
        IOptimizer SGD(float learning_rate);
    }
}
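
For context, a minimal usage sketch of this interface. It relies only on the members declared above; the `optimizers` parameter stands in for whatever IOptimizerApi instance the library exposes (in TensorFlow.NET that is typically reachable as `keras.optimizers`, but that accessor is an assumption here, not part of this file):

using Tensorflow.Keras;
using Tensorflow.Keras.Engine;

class OptimizerExample
{
    // `optimizers` is assumed to be supplied by the library
    // (e.g. keras.optimizers in TensorFlow.NET).
    static void Demo(IOptimizerApi optimizers)
    {
        // Adam with the defaults declared in the interface.
        IOptimizer adam = optimizers.Adam();

        // RMSprop with a custom discounting factor and momentum.
        IOptimizer rmsprop = optimizers.RMSprop(
            learning_rate: 0.01f,
            rho: 0.95f,
            momentum: 0.9f);

        // Plain SGD; note the interface requires an explicit learning rate.
        IOptimizer sgd = optimizers.SGD(0.01f);
    }
}

Each factory method returns an IOptimizer, so the resulting objects are interchangeable wherever the Keras engine expects an optimizer (for example, when compiling a model).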