| @@ -60,13 +60,9 @@ namespace Tensorflow | |||||
| } | } | ||||
| } | } | ||||
| public Tensor this[Range slices] | |||||
| => throw new NotImplementedException(""); | |||||
// String-based slicing indexer: each spec string (e.g. "1:3", ":", "::2")
// is parsed into a Slice, then delegated to the Slice[] indexer.
public Tensor this[params string[] slices]
{
    get
    {
        var parsed = new Slice[slices.Length];
        for (int i = 0; i < slices.Length; i++)
            parsed[i] = new Slice(slices[i]);
        return this[parsed];
    }
}
| public Tensor slice(Slice slice) | public Tensor slice(Slice slice) | ||||
| { | { | ||||
| var slice_spec = new int[] { slice.Start.Value }; | var slice_spec = new int[] { slice.Start.Value }; | ||||
| @@ -0,0 +1,42 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
namespace Tensorflow.Keras.Utils
{
    public class KerasUtils
    {
        /// <summary>
        /// Downloads a file from a URL if it is not already in the cache.
        /// Thin convenience wrapper that forwards every argument unchanged to
        /// <see cref="data_utils.get_file"/>.
        /// </summary>
        /// <param name="fname">Name the downloaded file is saved under.</param>
        /// <param name="origin">Original URL of the file.</param>
        /// <param name="untar">If true, the downloaded file is extracted as a .tar.gz archive (forwarded as-is).</param>
        /// <param name="md5_hash">Legacy MD5 hash for integrity checking (forwarded as-is).</param>
        /// <param name="file_hash">Expected hash of the file after download (forwarded as-is).</param>
        /// <param name="cache_subdir">Subdirectory under the cache directory where the file is saved.</param>
        /// <param name="hash_algorithm">Algorithm used to verify <paramref name="file_hash"/> (forwarded as-is).</param>
        /// <param name="extract">If true, the downloaded file is extracted as a .gz archive (forwarded as-is).</param>
        /// <param name="archive_format">Archive format to try when extracting (forwarded as-is).</param>
        /// <param name="cache_dir">Base cache directory. NOTE(review): null is passed
        /// through unchanged — confirm the callee supplies a default rather than throwing.</param>
        /// <returns>Path of the directory the file was downloaded into.</returns>
        public string get_file(string fname, string origin,
            bool untar = false,
            string md5_hash = null,
            string file_hash = null,
            string cache_subdir = "datasets",
            string hash_algorithm = "auto",
            bool extract = false,
            string archive_format = "auto",
            string cache_dir = null)
            => data_utils.get_file(fname, origin,
                untar: untar,
                md5_hash: md5_hash,
                file_hash: file_hash,
                cache_subdir: cache_subdir,
                hash_algorithm: hash_algorithm,
                extract: extract,
                archive_format: archive_format,
                cache_dir: cache_dir);
    }
}
| @@ -1,39 +0,0 @@ | |||||
namespace System.Runtime.CompilerServices
{
    internal static class RuntimeHelpers
    {
        /// <summary>
        /// Returns a new array holding the elements of <paramref name="array"/>
        /// selected by <paramref name="range"/>. Polyfill backing the C# 8
        /// range-slicing syntax (e.g. <c>array[1..^1]</c>) on older frameworks.
        /// </summary>
        public static T[] GetSubArray<T>(T[] array, Range range)
        {
            if (array is null)
                throw new ArgumentNullException(nameof(array));

            var (start, count) = range.GetOffsetAndLength(array.Length);

            // Fast path: the runtime type is exactly T[] — always true for value
            // types, and for reference types when no array covariance is in play.
            if (default(T) != null || typeof(T[]) == array.GetType())
            {
                if (count == 0)
                    return Array.Empty<T>();

                var result = new T[count];
                Array.Copy(array, start, result, 0, count);
                return result;
            }

            // Covariant case: array is really a U[] with U : T. Allocate with the
            // actual runtime element type so the result round-trips correctly.
            var typed = (T[])Array.CreateInstance(array.GetType().GetElementType(), count);
            Array.Copy(array, start, typed, 0, count);
            return typed;
        }
    }
}
| @@ -0,0 +1,37 @@ | |||||
| using System; | |||||
| using System.Linq; | |||||
| using System.Collections.Generic; | |||||
| using System.IO; | |||||
| using System.Text; | |||||
namespace Tensorflow.Keras.Utils
{
    public class data_utils
    {
        /// <summary>
        /// Downloads a file from a URL into a local cache directory, optionally
        /// extracting it afterwards.
        /// </summary>
        /// <param name="fname">Name the downloaded file is saved under.</param>
        /// <param name="origin">URL to download from.</param>
        /// <param name="untar">If true, extract the downloaded file as a .tar.gz archive.</param>
        /// <param name="md5_hash">Accepted for API compatibility; not currently verified.</param>
        /// <param name="file_hash">Accepted for API compatibility; not currently verified.</param>
        /// <param name="cache_subdir">Subdirectory under <paramref name="cache_dir"/> to store the file in.</param>
        /// <param name="hash_algorithm">Accepted for API compatibility; not currently used.</param>
        /// <param name="extract">If true, extract the downloaded file as a .gz archive.</param>
        /// <param name="archive_format">Accepted for API compatibility; not currently used.</param>
        /// <param name="cache_dir">Base cache directory; when null, defaults to
        /// "&lt;user profile&gt;/.keras" (mirrors Keras' default cache location).</param>
        /// <returns>Path of the directory containing the downloaded file.</returns>
        public static string get_file(string fname, string origin,
            bool untar = false,
            string md5_hash = null,
            string file_hash = null,
            string cache_subdir = "datasets",
            string hash_algorithm = "auto",
            bool extract = false,
            string archive_format = "auto",
            string cache_dir = null)
        {
            // BUGFIX: a null cache_dir previously flowed straight into
            // Directory.CreateDirectory, which throws ArgumentNullException.
            var datadir_base = cache_dir ?? Path.Combine(
                Environment.GetFolderPath(Environment.SpecialFolder.UserProfile), ".keras");
            Directory.CreateDirectory(datadir_base);

            var datadir = Path.Combine(datadir_base, cache_subdir);
            Directory.CreateDirectory(datadir);

            Web.Download(origin, datadir, fname);

            // BUGFIX: the download is placed under `datadir`, but extraction
            // previously looked for the archive in `datadir_base` — use the
            // directory the file actually lives in for both source and target.
            // NOTE(review): assumes Web.Download saves to datadir/fname — confirm.
            var archive = Path.Combine(datadir, fname);
            if (untar)
                Compress.ExtractTGZ(archive, datadir);
            else if (extract)
                Compress.ExtractGZip(archive, datadir);

            return datadir;
        }
    }
}
| @@ -67,7 +67,7 @@ namespace Tensorflow.Keras.Utils | |||||
| line_length = 65; | line_length = 65; | ||||
| if (positions == null) | if (positions == null) | ||||
| positions = new[] { 0.45f, 0.85f, 1.0f }; | positions = new[] { 0.45f, 0.85f, 1.0f }; | ||||
| if (positions[^1] <= 1) | |||||
| if (positions.Last() <= 1) | |||||
| positions = positions.Select(p => line_length * p).ToArray(); | positions = positions.Select(p => line_length * p).ToArray(); | ||||
| to_display = new[] { "Layer (type)", "Output Shape", "Param #" }; | to_display = new[] { "Layer (type)", "Output Shape", "Param #" }; | ||||
| } | } | ||||
| @@ -77,7 +77,7 @@ namespace Tensorflow.Keras.Utils | |||||
| line_length = 98; | line_length = 98; | ||||
| if (positions == null) | if (positions == null) | ||||
| positions = new[] { 0.33f, 0.55f, 0.67f, 1.0f }; | positions = new[] { 0.33f, 0.55f, 0.67f, 1.0f }; | ||||
| if (positions[^1] <= 1) | |||||
| if (positions.Last() <= 1) | |||||
| positions = positions.Select(p => line_length * p).ToArray(); | positions = positions.Select(p => line_length * p).ToArray(); | ||||
| to_display = new[] { "Layer (type)", "Output Shape", "Param #", "Connected to" }; | to_display = new[] { "Layer (type)", "Output Shape", "Param #", "Connected to" }; | ||||
| @@ -118,7 +118,7 @@ namespace Tensorflow.Keras.Utils | |||||
| foreach (var i in range(fields.Length)) | foreach (var i in range(fields.Length)) | ||||
| { | { | ||||
| if (i > 0) | if (i > 0) | ||||
| line = line[0..^1] + " "; | |||||
| line = line + " "; | |||||
| line += fields[i]; | line += fields[i]; | ||||
| line = string.Join("", line.Take(positions[i])); | line = string.Join("", line.Take(positions[i])); | ||||
| line += string.Join("", range(positions[i] - len(line)).Select(x => " ")); | line += string.Join("", range(positions[i] - len(line)).Select(x => " ")); | ||||
| @@ -1,7 +1,7 @@ | |||||
| <Project Sdk="Microsoft.NET.Sdk"> | <Project Sdk="Microsoft.NET.Sdk"> | ||||
| <PropertyGroup> | <PropertyGroup> | ||||
| <TargetFramework>netcoreapp3.1</TargetFramework> | |||||
| <TargetFramework>net5.0</TargetFramework> | |||||
| <IsPackable>false</IsPackable> | <IsPackable>false</IsPackable> | ||||
| @@ -1,7 +1,7 @@ | |||||
| <Project Sdk="Microsoft.NET.Sdk"> | <Project Sdk="Microsoft.NET.Sdk"> | ||||
| <PropertyGroup> | <PropertyGroup> | ||||
| <TargetFramework>netcoreapp3.1</TargetFramework> | |||||
| <TargetFramework>net5.0</TargetFramework> | |||||
| <IsPackable>false</IsPackable> | <IsPackable>false</IsPackable> | ||||
| @@ -119,7 +119,7 @@ namespace TensorFlowNET.UnitTest.Dataset | |||||
| long value = 0; | long value = 0; | ||||
| var dataset = tf.data.Dataset.range(0, 2); | var dataset = tf.data.Dataset.range(0, 2); | ||||
| dataset = dataset.map(x => x + 10); | |||||
| dataset = dataset.map(x => x[0] + 10); | |||||
| foreach (var item in dataset) | foreach (var item in dataset) | ||||
| { | { | ||||
| @@ -147,7 +147,7 @@ namespace TensorFlowNET.UnitTest.Dataset | |||||
// Verifies that dataset_cardinality reports the element count of a range
// dataset and that a map transform does not change the cardinality.
public void Cardinality()
{
    // 10-element dataset: 0..9.
    var dataset = tf.data.Dataset.range(10);
    // map's lambda receives a Tensors; x[0] selects its single component tensor.
    dataset = dataset.map(x => x[0] + 1);
    // Cardinality should remain 10 after the element-wise map.
    var cardinality = dataset.dataset_cardinality();
    Assert.AreEqual(new long[] { 10 }, cardinality.numpy());
}
| @@ -1,7 +1,7 @@ | |||||
| <Project Sdk="Microsoft.NET.Sdk"> | <Project Sdk="Microsoft.NET.Sdk"> | ||||
| <PropertyGroup> | <PropertyGroup> | ||||
| <TargetFramework>netcoreapp3.1</TargetFramework> | |||||
| <TargetFramework>net5.0</TargetFramework> | |||||
| <IsPackable>false</IsPackable> | <IsPackable>false</IsPackable> | ||||