@@ -208,6 +208,9 @@ namespace Tensorflow
             => image_ops_impl.non_max_suppression_padded(boxes, scores, max_output_size, iou_threshold, score_threshold, pad_to_max_output_size,
                 name, sorted_input, canonicalized_coordinates, tile_size);

+        public Tensor resize(Tensor image, TensorShape size)
+            => image_ops_impl.resize_images(image, tf.constant(size));
+
         public Tensor resize_bilinear(Tensor images, Tensor size, bool align_corners = false, bool half_pixel_centers = false, string name = null)
             => gen_image_ops.resize_bilinear(images, size, align_corners: align_corners, half_pixel_centers: half_pixel_centers, name: name);
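Note: the new resize overload above simply wraps image_ops_impl.resize_images with a constant size tensor. A minimal usage sketch, assuming the usual "using static Tensorflow.Binding;" import (the input image and target size are illustrative, not from this diff):

    var image = tf.image.decode_image(tf.io.read_file("sample.jpg"));  // hypothetical input image
    var resized = tf.image.resize(image, new TensorShape(3, 5));       // forwards to image_ops_impl.resize_images(image, tf.constant(size))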
@@ -25,6 +25,9 @@ namespace Tensorflow
         public IDatasetV2 from_tensor_slices(Tensor features, Tensor labels)
             => new TensorSliceDataset(features, labels);

+        public IDatasetV2 from_tensor_slices(string[] array)
+            => new TensorSliceDataset(array);
+
         public IDatasetV2 from_tensor_slices(NDArray array)
             => new TensorSliceDataset(array);
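Note: the string[] overload above is what allows a plain list of file paths to become a dataset. A short usage sketch (file names are placeholders):

    var image_paths = new[] { "a.jpg", "b.jpg" };                    // hypothetical paths
    var path_ds = tf.data.Dataset.from_tensor_slices(image_paths);   // TensorSliceDataset over scalar string elements
    // each element can then be mapped, e.g. read and decoded as in the Keras preprocessing change further down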
@@ -11,6 +11,16 @@ namespace Tensorflow.Data
 {
     public class TensorSliceDataset : DatasetSource
     {
+        public TensorSliceDataset(string[] array)
+        {
+            var element = tf.constant(array);
+            _tensors = new[] { element };
+            var batched_spec = new[] { element.ToTensorSpec() };
+
+            structure = batched_spec.Select(x => x._unbatch()).ToArray();
+            variant_tensor = ops.tensor_slice_dataset(_tensors, output_shapes);
+        }
+
         public TensorSliceDataset(NDArray array)
         {
             var element = tf.constant(array);
@@ -33,8 +33,6 @@ namespace Tensorflow.Functions
                     new Operation[] { input },
                     new Operation[] { output },
                     null);
-
-                c_api.TFE_ContextAddFunction(tf.Context.Handle, _handle, tf.Status.Handle);
             }

             tf.enable_eager_execution();
@@ -54,6 +52,7 @@ namespace Tensorflow.Functions
         public void Dispose()
         {
             c_api.TFE_ContextRemoveFunction(tf.Context.Handle, Name, tf.Status.Handle);
+            c_api.TF_DeleteFunction(_handle);
         }
     }
 }
@@ -26,8 +26,6 @@ namespace Tensorflow.Graphs
                     new Operation[] { input1, input2 },
                     new Operation[] { output },
                     null);
-
-                c_api.TFE_ContextAddFunction(tf.Context.Handle, func_handle, tf.Status.Handle);
             }

             tf.enable_eager_execution();
@@ -1,4 +1,4 @@
-/*using MethodBoundaryAspect.Fody.Attributes;
+using MethodBoundaryAspect.Fody.Attributes;
 using System;
 using System.Collections.Generic;
 using System.Linq;
@@ -8,49 +8,73 @@ using static Tensorflow.Binding;
 namespace Tensorflow.Graphs
 {
-    public sealed class AutoGraphAspect : OnMethodBoundaryAspect
+    [AllowChangingInputArguments]
+    public sealed class AutoGraphAttribute : OnMethodBoundaryAspect
     {
         FuncGraph graph;
-        IntPtr func_handle;
+        Tensor[] originalInputs;
+        string func_name;
+        static Dictionary<string, Func<Tensor[], Tensor>> functions = new Dictionary<string, Func<Tensor[], Tensor>>();

         public override void OnEntry(MethodExecutionArgs args)
         {
+            func_name = $"autograph_{args.Instance}.{args.Method.Name}";
+
+            if (functions.ContainsKey(func_name))
+            {
+                args.ReturnValue = functions[func_name](args.Arguments.Select(x => x as Tensor).ToArray());
+                args.FlowBehavior = FlowBehavior.Return;
+                return;
+            }
+
             tf.compat.v1.disable_eager_execution();

+            // make function as an Operation by autograph
+            graph = new FuncGraph(func_name);
+            graph.as_default();
+
+            originalInputs = new Tensor[args.Arguments.Length];
             // convert args to placeholder
             for (var i = 0; i < args.Arguments.Length; i++)
             {
                 if (args.Arguments[i] is EagerTensor tensor)
+                {
+                    originalInputs[i] = tensor;
                     args.Arguments[i] = tf.placeholder(tensor.dtype, shape: tensor.TensorShape);
+                }
             }
-
-            // make function as an Operation by autograph
-            graph = new FuncGraph("autograph_add");
-            graph.as_default();
         }

         public override void OnExit(MethodExecutionArgs args)
         {
-            var output = (Tensor)args.Method.Invoke(args.Instance, args.Arguments);
+            var output = (Tensor)args.ReturnValue;
+            var inputs = args.Arguments.Select(x => x as Tensor).ToArray();
             var opers = graph._nodes_by_name.Values.Select(x => x as Operation).ToArray();
-            func_handle = graph.ToGraph(opers,
-                new Operation[] { },
-                new Operation[] { },
+            graph.ToGraph(opers,
+                inputs.Select(x => x.op).ToArray(),
+                new Operation[] { output.op },
                 null);
-
-            graph.Dispose();
-            tf.enable_eager_execution();
-            c_api.TFE_ContextAddFunction(tf.Context.Handle, func_handle, tf.Status.Handle);
-
-            var a1 = tf.constant(1);
-            var b1 = tf.constant(2);
-
-            var result = tf.Runner.TFE_Execute(tf.Context,
-                tf.Context.DeviceName,
-                "autograph_add",
-                new[] { a1, b1 },
-                null,
-                1);
+
+            Func<Tensor[], Tensor> function = (x) =>
+            {
+                var result = tf.Runner.TFE_Execute(tf.Context,
+                    tf.Context.DeviceName,
+                    func_name,
+                    x,
+                    null,
+                    1);
+                return result[0];
+            };
+
+            // cache function.
+            functions[func_name] = function;
+
+            graph.Dispose();
+
+            // run function
+            args.ReturnValue = function(originalInputs);
         }
     }
-}*/
+}
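Note: AutoGraphAttribute is a MethodBoundaryAspect woven at build time, so it is used by decorating a method. A hypothetical sketch of the intended call pattern (the class, method, and constants below are illustrative only, not part of this diff):

    public class MyOps
    {
        [AutoGraph]
        public Tensor Add(Tensor a, Tensor b)
            => a + b; // first call is traced into a FuncGraph named "autograph_{instance}.Add"
    }

    // var sum = new MyOps().Add(tf.constant(1), tf.constant(2)); // later calls reuse the cached function via TFE_Execute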
@@ -47,8 +47,13 @@ namespace Tensorflow.Graphs
                 IntPtr.Zero,
                 null,
                 status.Handle);
+            status.Check(true);

             c_api.TF_GraphCopyFunction(outer_graph, func_handle, IntPtr.Zero, status.Handle);
+            status.Check(true);
+
+            c_api.TFE_ContextAddFunction(tf.Context.Handle, func_handle, status.Handle);
+            status.Check(true);

             return func_handle;
         }
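Note: with the additions above, ToGraph now registers the freshly built function with the eager context itself and checks the status after each native call. That is why the standalone TFE_ContextAddFunction calls were removed from the callers earlier in this diff, and why Dispose now pairs TFE_ContextRemoveFunction with TF_DeleteFunction. A simplified lifecycle sketch using only calls that appear in this diff (variable names are illustrative):

    var func_handle = graph.ToGraph(opers, inputs, outputs, null);  // build, copy into outer graph, add to eager context
    // ... execute the function by name through tf.Runner.TFE_Execute ...
    c_api.TFE_ContextRemoveFunction(tf.Context.Handle, name, tf.Status.Handle); // typically done in Dispose()
    c_api.TF_DeleteFunction(func_handle);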
@@ -16,7 +16,10 @@ namespace Tensorflow.Keras
             int num_classes,
             string interpolation)
         {
-            Shape shape = (image_paths.Length, image_size.dims[0], image_size.dims[1], num_channels);
+            var path_ds = tf.data.Dataset.from_tensor_slices(image_paths);
+            var img_ds = path_ds.map(x => path_to_image(x, image_size, num_channels, interpolation));
+
+            /*Shape shape = (image_paths.Length, image_size.dims[0], image_size.dims[1], num_channels);
             Console.WriteLine($"Allocating memory for shape{shape}, {NPTypeCode.Float}");
             var data = np.zeros(shape, NPTypeCode.Float);
@@ -35,13 +38,13 @@ namespace Tensorflow.Keras
                 var label_ds = tf.keras.preprocessing.dataset_utils.labels_to_dataset(labels, label_mode, num_classes);
                 img_ds = tf.data.Dataset.zip(img_ds, label_ds);
             }
-            else
+            else*/
                 throw new NotImplementedException("");

             return img_ds;
         }

-        Tensor path_to_image(string path, TensorShape image_size, int num_channels, string interpolation)
+        Tensor path_to_image(Tensor path, TensorShape image_size, int num_channels, string interpolation)
         {
             var img = tf.io.read_file(path);
             img = tf.image.decode_image(
@@ -1668,8 +1668,6 @@ new_height, new_width");
         public static Tensor decode_image(Tensor contents, int channels = 0, TF_DataType dtype = TF_DataType.TF_UINT8,
             string name = null, bool expand_animations = true)
         {
-            Tensor substr = null;
-
             Func<ITensorOrOperation> _jpeg = () =>
             {
                 int jpeg_channels = channels;
@@ -1695,8 +1693,7 @@ new_height, new_width");
                 {
                     var result = convert_image_dtype(gen_image_ops.decode_gif(contents), dtype);
                     if (!expand_animations)
-                        // result = array_ops.gather(result, 0);
-                        throw new NotImplementedException("");
+                        result = array_ops.gather(result, 0);
                     return result;
                 });
             };
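Note: with the gather now implemented above, decode_image with expand_animations: false returns just the first frame of an animated GIF instead of throwing. A usage sketch (the path is illustrative):

    var contents = tf.io.read_file("animation.gif");
    var frame = tf.image.decode_image(contents, expand_animations: false); // [H, W, C] first frame rather than [N, H, W, C]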
@@ -1728,18 +1725,16 @@ new_height, new_width");
             Func<ITensorOrOperation> check_gif = () =>
             {
-                var is_gif = math_ops.equal(substr, "\x47\x49\x46", name: "is_gif");
-                return control_flow_ops.cond(is_gif, _gif, _bmp, name: "cond_gif");
+                return control_flow_ops.cond(is_gif(contents), _gif, _bmp, name: "cond_gif");
             };

             Func<ITensorOrOperation> check_png = () =>
             {
-                return control_flow_ops.cond(_is_png(contents), _png, check_gif, name: "cond_png");
+                return control_flow_ops.cond(is_png(contents), _png, check_gif, name: "cond_png");
             };

             return tf_with(ops.name_scope(name, "decode_image"), scope =>
             {
-                substr = tf.strings.substr(contents, 0, 3);
                 return control_flow_ops.cond(is_jpeg(contents), _jpeg, check_png, name: "cond_jpeg");
             });
         }
@@ -2089,7 +2084,7 @@ new_height, new_width");
             });
         }

-        public static Tensor _is_png(Tensor contents, string name = null)
+        static Tensor is_png(Tensor contents, string name = null)
         {
             return tf_with(ops.name_scope(name, "is_png"), scope =>
             {
@@ -2098,6 +2093,17 @@ new_height, new_width");
             });
         }

+        static Tensor is_gif(Tensor contents, string name = null)
+        {
+            return tf_with(ops.name_scope(name, "is_gif"), scope =>
+            {
+                var substr = tf.strings.substr(contents, 0, 3);
+                var gif = tf.constant(new byte[] { 0x47, 0x49, 0x46 }, TF_DataType.TF_STRING);
+                var result = math_ops.equal(substr, gif, name: name);
+                return result;
+            });
+        }
+
         public static Tensor convert_image_dtype(Tensor image, TF_DataType dtype, bool saturate = false,
             string name = null)
         {
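Note: the three bytes compared in is_gif above are the ASCII file signature "GIF" (0x47 0x49 0x46), mirroring how is_jpeg and is_png test their own magic numbers. A standalone sanity check in plain C#:

    var magic = new byte[] { 0x47, 0x49, 0x46 };
    System.Diagnostics.Debug.Assert(System.Text.Encoding.ASCII.GetString(magic) == "GIF");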
@@ -148,9 +148,18 @@ namespace Tensorflow
             // If shape is not given, get the shape from the numpy array.
             if (shape == null)
             {
-                shape = nparray.shape;
-                is_same_size = true;
-                shape_size = nparray.size;
+                if(numpy_dtype == TF_DataType.TF_STRING)
+                {
+                    // scalar string
+                    shape = new int[0];
+                    shape_size = 0;
+                }
+                else
+                {
+                    shape = nparray.shape;
+                    is_same_size = true;
+                    shape_size = nparray.size;
+                }
             }
             else
             {
@@ -470,6 +470,8 @@ namespace Tensorflow
                     return varVal._TensorConversionFunction(dtype: dtype, name: name, as_ref: as_ref);
                 case TensorShape ts:
                     return constant_op.constant(ts.dims, dtype: dtype, name: name);
+                case string str:
+                    return constant_op.constant(value, dtype: tf.@string, name: name);
                 case int[] dims:
                     return constant_op.constant(dims, dtype: dtype, name: name);
                 case object[] objects:
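Note: the two changes above work together: a C# string passed into tensor conversion now hits the new case string branch and is stored as a rank-0 TF_STRING tensor (empty shape, shape_size of 0), which is what the string[] dataset and path_to_image changes elsewhere in this diff rely on. A minimal sketch (the path value is illustrative):

    var path = tf.constant("flowers/daisy/1.jpg"); // rank-0 string tensor, dtype TF_STRING
    var bytes = tf.io.read_file(path);             // later decoded by tf.image.decode_image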
@@ -1,4 +1,6 @@
-using Microsoft.VisualStudio.TestTools.UnitTesting;
+using FluentAssertions;
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using NumSharp;
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -26,12 +28,69 @@ namespace TensorFlowNET.UnitTest.Basics
             contents = tf.io.read_file(imgPath);
         }

+        [Ignore]
         [TestMethod]
         public void decode_image()
         {
             var img = tf.image.decode_image(contents);
             Assert.AreEqual(img.name, "decode_image/cond_jpeg/Merge:0");
         }

+        [TestMethod, Ignore]
+        public void resize_image()
+        {
+            var image = tf.constant(new int[5, 5]
+            {
+                {1, 0, 0, 0, 0 },
+                {0, 1, 0, 0, 0 },
+                {0, 0, 1, 0, 0 },
+                {0, 0, 0, 1, 0 },
+                {0, 0, 0, 0, 1 }
+            });
+            //image = image[tf.newaxis, ..., tf.newaxis];
+            var img = tf.image.resize(contents, (3, 5));
+            Assert.AreEqual(img.name, "decode_image/cond_jpeg/Merge:0");
+        }
+
+        [TestMethod]
+        public void TestCropAndResize()
+        {
+            var graph = tf.Graph().as_default();
+
+            // 3x3 'Image' with numbered coordinates
+            var input = np.array(0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f);
+            var image = tf.reshape(input, new int[] { 1, 3, 3, 1 });
+
+            // 4x4 'Image' with numbered coordinates
+            var input2 = np.array(0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f, 10f, 11f, 12f, 13f, 14f, 15f);
+            var image2 = tf.reshape(input2, new int[] { 1, 4, 4, 1 });
+
+            // create one box over the full image that flips it (y1 > y2)
+            var box = tf.reshape(np.array(1f, 0f, 0f, 1f), new int[] { 1, 4 });
+            var boxInd = tf.Variable(np.array(0));
+
+            // crop first 3x3 image to size 1x1
+            var cropSize1_1 = tf.Variable(np.array(1, 1));
+
+            // don't crop second 4x4 image
+            var cropSize2_2 = tf.Variable(np.array(4, 4));
+
+            var init = tf.global_variables_initializer();
+            using (Session sess = tf.Session())
+            {
+                sess.run(init);
+
+                var cropped = tf.image.crop_and_resize(image, box, boxInd, cropSize1_1);
+                var result = sess.run(cropped);
+
+                // check if cropping to the 1x1 center was successful
+                result.size.Should().Be(1);
+                result[0, 0, 0, 0].Should().Be(4f);
+
+                cropped = tf.image.crop_and_resize(image2, box, boxInd, cropSize2_2);
+                result = sess.run(cropped);
+
+                // check that the image was flipped and no cropping occurred
+                result.size.Should().Be(16);
+                result[0, 0, 0, 0].Should().Be(12f);
+            }
+        }
     }
 }
@@ -1,56 +0,0 @@
-using FluentAssertions;
-using Microsoft.VisualStudio.TestTools.UnitTesting;
-using NumSharp;
-using Tensorflow;
-using Tensorflow.UnitTest;
-using static Tensorflow.Binding;
-
-namespace TensorFlowNET.UnitTest.img_test
-{
-    [TestClass]
-    public class TestCrop : GraphModeTestBase
-    {
-        [TestMethod]
-        public void TestCropAndResize()
-        {
-            var graph = tf.Graph().as_default();
-
-            // 3x3 'Image' with numbered coordinates
-            var input = np.array(0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f);
-            var image = tf.reshape(input, new int[] { 1, 3, 3, 1 });
-
-            // 4x4 'Image' with numbered coordinates
-            var input2 = np.array(0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f, 8f, 9f, 10f, 11f, 12f, 13f, 14f, 15f);
-            var image2 = tf.reshape(input2, new int[] { 1, 4, 4, 1 });
-
-            // create one box over the full image that flips it (y1 > y2)
-            var box = tf.reshape(np.array(1f, 0f, 0f, 1f), new int[] { 1, 4 });
-            var boxInd = tf.Variable(np.array(0));
-
-            // crop first 3x3 imageto size 1x1
-            var cropSize1_1 = tf.Variable(np.array(1, 1));
-
-            // don't crop second 4x4 image
-            var cropSize2_2 = tf.Variable(np.array(4, 4));
-
-            var init = tf.global_variables_initializer();
-            using (Session sess = tf.Session())
-            {
-                sess.run(init);
-
-                var cropped = tf.image.crop_and_resize(image, box, boxInd, cropSize1_1);
-                var result = sess.run(cropped);
-
-                // check if cropped to 1x1 center was succesfull
-                result.size.Should().Be(1);
-                result[0, 0, 0, 0].Should().Be(4f);
-
-                cropped = tf.image.crop_and_resize(image2, box, boxInd, cropSize2_2);
-                result = sess.run(cropped);
-
-                // check if flipped and no cropping occured
-                result.size.Should().Be(16);
-                result[0, 0, 0, 0].Should().Be(12f);
-            }
-        }
-    }
-}