Simple example

import tensorflow as tf

# Model parameters
W = tf.Variable([.3], dtype=tf.float32)
b = tf.Variable([-.3], dtype=tf.float32)

# Model input and output
x = tf.placeholder(tf.float32)
linear_model = W * x + b
y = tf.placeholder(tf.float32)

# loss
loss = tf.reduce_sum(tf.square(linear_model - y))  # sum of the squares

# optimizer
optimizer = tf.train.GradientDescentOptimizer(0.01)
train = optimizer.minimize(loss)
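This only defines the model, the loss, and the optimizer; nothing is computed until the graph is run in a session. A minimal sketch of the training loop, continuing the snippet above (the x_train/y_train sample data is illustrative, not part of the original example):

# Illustrative sample data (assumed here, not from the original example).
x_train = [1., 2., 3., 4.]
y_train = [0., -1., -2., -3.]

init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)  # initialize W and b
    for _ in range(1000):
        sess.run(train, {x: x_train, y: y_train})
    # Inspect the fitted parameters and the final loss.
    print(sess.run([W, b, loss], {x: x_train, y: y_train}))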
Tensor

3                                 # a rank 0 tensor; this is a scalar with shape []
[1., 2., 3.]                      # a rank 1 tensor; this is a vector with shape [3]
[[1., 2., 3.], [4., 5., 6.]]      # a rank 2 tensor; a matrix with shape [2, 3]
[[[1., 2., 3.]], [[7., 8., 9.]]]  # a rank 3 tensor with shape [2, 1, 3]
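Wrapping the same literals in tf.constant makes the rank and shape visible in code; a small sketch for reference:

import tensorflow as tf

scalar = tf.constant(3.)                                # rank 0, shape ()
vector = tf.constant([1., 2., 3.])                      # rank 1, shape (3,)
matrix = tf.constant([[1., 2., 3.], [4., 5., 6.]])      # rank 2, shape (2, 3)
cube   = tf.constant([[[1., 2., 3.]], [[7., 8., 9.]]])  # rank 3, shape (2, 1, 3)

print(matrix.shape)   # (2, 3)
print(tf.rank(cube))  # an op that evaluates to 3 when run in a session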
Cloud Vision API

data class CloudVisionRequest(val requests: List<AnnotateImageRequest>)
data class AnnotateImageRequest(val image: Image, val features: List<Feature>, val imageContext: ImageContext)
data class Image(val content: String?, val source: ImageSource?)
data class ImageSource(val gcsImageUri: String?, val imageUri: String?)
data class Feature(...)
data class ImageContext(...)
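A sketch of assembling a label-detection request from these classes. The Feature and ImageContext definitions are elided above, so the Feature(type, maxResults) constructor and a default-constructible ImageContext are assumptions that mirror the Cloud Vision API's Feature message:

// Build a request for label detection on a base64-encoded image.
fun labelDetectionRequest(base64Image: String): CloudVisionRequest {
    val image = Image(content = base64Image, source = null)
    // Assumed fields: the definition of Feature is elided above; the API's
    // Feature message carries a type and a maxResults.
    val feature = Feature(type = "LABEL_DETECTION", maxResults = 10)
    val request = AnnotateImageRequest(
        image = image,
        features = listOf(feature),
        imageContext = ImageContext()  // assumed default-constructible; definition elided above
    )
    return CloudVisionRequest(requests = listOf(request))
}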
Cloud Vision API

data class CloudVisionResponse(val responses: List<AnnotateImageResponse>)
data class AnnotateImageResponse(
    val faceAnnotations: List,
    val landmarkAnnotations: List,
    val logoAnnotations: List,
    val labelAnnotations: List,
    val textAnnotations: List,
    val fullTextAnnotation: FullTextAnnotation,
    val safeSearchAnnotation: SafeSearchAnnotation,
    val imagePropertiesAnnotation: ImagePropertiesAnnotation,
    val cropHintsAnnotation: CropHintsAnnotation,
    val webDetection: WebDetection,
    val error: Status
)
// Load the model into a TensorFlowInferenceInterface.
c.inferenceInterface = new TensorFlowInferenceInterface(assetManager, modelFilename);

// Get the TensorFlow node.
final Operation operation = c.inferenceInterface.graphOperation(outputName);

// Inspect its shape.
final int numClasses = (int) operation.output(0).shape().size(1);

// Build the output array with the correct size.
c.outputs = new float[numClasses];
inferenceInterface.feed(
    inputName,     // The name of the node to feed.
    floatValues,   // The array to feed.
    1, inputSize, inputSize, 3);  // The shape of the array.

inferenceInterface.run(
    outputNames,   // Names of all the nodes to calculate.
    logStats);     // Boolean; enables stat logging.

inferenceInterface.fetch(
    outputName,    // Fetch this output.
    outputs);      // Into the prepared array.
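After fetch(), outputs holds one confidence value per class. A short sketch of turning that array into a best label; the labels list of class names is an assumption (loaded alongside the model, not shown above):

// Pick the class with the highest confidence from the fetched outputs.
// Assumes `labels` is a List<String> of class names loaded with the model.
int best = 0;
for (int i = 1; i < outputs.length; i++) {
    if (outputs[i] > outputs[best]) {
        best = i;
    }
}
String result = labels.get(best) + " (" + outputs[best] + ")";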