Tensorflow classifier.export_savedmodel (Beginner)

再見小時候 2020-12-05 01:55

I know about the "Serving a Tensorflow Model" page

https://www.tensorflow.org/serving/serving_basic

but those functions assume you're using tf.Session() …

3 Answers
  •  被撕碎了的回忆
    2020-12-05 02:10

    If you try to use a predictor with TensorFlow > 1.6 you can get this error:

    signature_def_key "serving_default". Available signatures are ['predict']. Original error:
    No SignatureDef with key 'serving_default' found in MetaGraphDef.
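
    This happens because tf.contrib.predictor.from_saved_model looks up the "serving_default" signature by default. A minimal workaround sketch, assuming exported_path points at your export directory, is to select the available signature explicitly:

    # Select the 'predict' signature instead of the default 'serving_default' key.
    predictor = tf.contrib.predictor.from_saved_model(
        exported_path, signature_def_key='predict')
    # predictor.feed_tensors shows which input key this signature expects.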
    

    Here is a working example tested on 1.7.0:

    SAVING:

    First you need to define the feature spec (the feature names and their shapes/dtypes) as a dict, like this:

    feature_spec = {'x': tf.FixedLenFeature([4],tf.float32)}
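
    If you already build a feature_columns list for the estimator (as in the full example below), you can instead derive the same parsing spec from the columns; a small sketch:

    # Sketch: derive the parsing spec from the feature columns
    # (same numeric_column("x", shape=[4]) as in the full example below).
    feature_columns = [tf.feature_column.numeric_column("x", shape=[4])]
    feature_spec = tf.feature_column.make_parse_example_spec(feature_columns)
    # -> {'x': FixedLenFeature(shape=(4,), dtype=tf.float32, default_value=None)}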
    

    Then you have to build a function that creates a placeholder for the serialized examples, parses them with the feature spec, and returns a tf.estimator.export.ServingInputReceiver:

    def serving_input_receiver_fn():
        # Placeholder for a batch of serialized tf.train.Example protos.
        serialized_tf_example = tf.placeholder(dtype=tf.string,
                                               shape=[None],
                                               name='input_tensors')
        receiver_tensors = {'inputs': serialized_tf_example}

        # Parse the serialized protos into the tensors described by feature_spec.
        features = tf.parse_example(serialized_tf_example, feature_spec)
        return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
    
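    tf.estimator also ships a helper that builds an equivalent parsing receiver function from the feature spec; a sketch is below. Note that the helper names its receiver tensor 'examples' and its placeholder 'input_example_tensor' rather than 'inputs'/'input_tensors', so the restore code later in this answer sticks with the hand-written version above:

    # Sketch: built-in equivalent of the hand-written function above.
    serving_input_receiver_fn = (
        tf.estimator.export.build_parsing_serving_input_receiver_fn(feature_spec))
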

    Then just save with export_savedmodel:

    classifier.export_savedmodel(dir_path, serving_input_receiver_fn)
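
    export_savedmodel returns the path of the timestamped directory it creates (as a bytes string in TF 1.x), which is handy for the restore step below:

    # The export lands in a new timestamped subdirectory, e.g. b'1536315752'.
    export_dir = classifier.export_savedmodel(dir_path, serving_input_receiver_fn)
    print(export_dir)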
    

    Full example code:

    import os
    from six.moves.urllib.request import urlopen
    
    import numpy as np
    import tensorflow as tf
    
    
    dir_path = os.path.dirname('.')  # '' resolves to the current directory

    # The iris CSV files are expected to already exist at these paths.
    IRIS_TRAINING = os.path.join(dir_path, "iris_training.csv")
    IRIS_TEST = os.path.join(dir_path, "iris_test.csv")
    
    feature_spec = {'x': tf.FixedLenFeature([4],tf.float32)}
    
    def serving_input_receiver_fn():
        serialized_tf_example = tf.placeholder(dtype=tf.string,
                                             shape=[None],
                                             name='input_tensors')
        receiver_tensors = {'inputs': serialized_tf_example}
    
        features = tf.parse_example(serialized_tf_example, feature_spec)
        return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
    
    
    
    
    def main():
        training_set = tf.contrib.learn.datasets.base.load_csv_with_header(
            filename=IRIS_TRAINING,
            target_dtype=np.int,
            features_dtype=np.float32)
        test_set = tf.contrib.learn.datasets.base.load_csv_with_header(
            filename=IRIS_TEST,
            target_dtype=np.int,
            features_dtype=np.float32)
    
        feature_columns = [tf.feature_column.numeric_column("x", shape=[4])]
    
    
        classifier = tf.estimator.DNNClassifier(feature_columns=feature_columns,
                                              hidden_units=[10, 20, 10],
                                              n_classes=3,
                                              model_dir=dir_path)
        # Define the training inputs
        train_input_fn = tf.estimator.inputs.numpy_input_fn(
          x={"x": np.array(training_set.data)},
          y=np.array(training_set.target),
          num_epochs=None,
          shuffle=True)
    
        # Train model.
        classifier.train(input_fn=train_input_fn, steps=200)
    
    
        classifier.export_savedmodel(dir_path, serving_input_receiver_fn)
    
    
    if __name__ == "__main__":
        main()
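
    Each call to export_savedmodel creates a new subdirectory named after the current Unix timestamp; that is where the "1536315752" folder used in the restore step comes from. If you don't want to hard-code it, a small sketch for picking the most recent export (assuming the timestamped exports are the only all-digit directory names under dir_path):

    import glob

    # Pick the newest all-digit (timestamped) export directory under dir_path.
    exported_path = max(
        (d for d in glob.glob(os.path.join(dir_path, "*"))
         if os.path.isdir(d) and os.path.basename(d).isdigit()),
        key=os.path.getmtime)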
    

    Restoring

    Now let's restore the model:

    import tensorflow as tf 
    import os
    
    dir_path = os.path.dirname('.')  # current directory
    exported_path = os.path.join(dir_path, "1536315752")  # timestamped export dir
    
    def main():
        with tf.Session() as sess:

            # Loading the SavedModel into this session is not strictly required
            # by the predictor below, which builds its own graph and session.
            tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], exported_path)

            # Build a tf.train.Example that matches the 'x' feature spec (4 floats).
            model_input = tf.train.Example(features=tf.train.Features(feature={
                    'x': tf.train.Feature(float_list=tf.train.FloatList(value=[6.4, 3.2, 4.5, 1.5]))
                    }))

            # The predictor wraps the exported serving signature.
            predictor = tf.contrib.predictor.from_saved_model(exported_path)

            # Handle to the export's input placeholder (not used below).
            input_tensor = tf.get_default_graph().get_tensor_by_name("input_tensors:0")

            # Feed the serialized Example to the 'inputs' receiver tensor.
            model_input = model_input.SerializeToString()

            output_dict = predictor({"inputs": [model_input]})

            print(" prediction is ", output_dict['scores'])
    
    
    if __name__ == "__main__":
        main()
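
    The predictor also accepts a batch: serialize several tf.train.Example protos and feed them in a single call. A small sketch, reusing the predictor from above (for a DNNClassifier the classification signature returns 'scores' with per-class probabilities and 'classes' with the class labels):

    # Sketch: batch prediction with the predictor created above.
    examples = []
    for x in [[6.4, 3.2, 4.5, 1.5], [5.1, 3.5, 1.4, 0.2]]:
        ex = tf.train.Example(features=tf.train.Features(feature={
            'x': tf.train.Feature(float_list=tf.train.FloatList(value=x))}))
        examples.append(ex.SerializeToString())

    output_dict = predictor({"inputs": examples})
    print(output_dict['scores'])   # shape (2, 3): per-class probabilities
    print(output_dict['classes'])  # predicted class labels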
    

    Here is an IPython notebook demo example with data and explanation:
