当我导出keras mod时,tensorflow服务产生错误答案

2024-04-19 19:45:47 发布

您现在位置:Python中文网/ 问答频道 /正文

我尝试将我的keras模型导出到tensorflow服务,一切都很好。我要做的是从客户端接受一个b64编码的输入图像字符串并输出一个真/假值。我的keras模型输出3个值,第一个值表示模型预测的度数,我将它与另一个固定值进行比较,并使用restfulapi将从获取图像字符串到输出真/假值的整个算法导出到tensorflow服务。但是,我没有从我的客户端程序得到正确的输出。长话短说,让我看看代码

我的导出保存模型的程序:

import tensorflow as tf
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import tag_constants, signature_constants, signature_def_utils_impl
from keras.models import load_model
from keras.layers import Input
import os

tf.app.flags.DEFINE_string('model_dir', './keras_models',
                           '''Directory which contains keras models''')
tf.app.flags.DEFINE_string('output_dir', './model_output',
                           '''Directory where to export the model''')
tf.app.flags.DEFINE_string('model_version', '1',
                           '''version number of the model''')
tf.app.flags.DEFINE_string('model_file', 'pointer_model.json',
                           '''json file which contains model architecture''')
tf.app.flags.DEFINE_string('weights_file', 'pointer_model.h5',
                           '''h5 file that contains model weights''')

FLAGS = tf.app.flags.FLAGS


def preprocess_image(image_buffer):
    '''
    Preprocess JPEG encoded bytes to 3D floate tensor

    :param image_buffer:
    :return: 4D image tensor (1, width, height, channels)
    '''

    image = tf.image.decode_jpeg(image_buffer, channels=3)
    image = tf.image.convert_image_dtype(image, dtype=tf.float32)

    return image


def main(_):
    with tf.Graph().as_default():
        serialized_tf_example = tf.placeholder(tf.string, name='input_image')
        feature_configs = {
            'image/encoded': tf.FixedLenFeature(
                shape=[], dtype=tf.string),
        }
        tf_example = tf.parse_example(serialized_tf_example, feature_configs)
        jpegs = tf_example['image/encoded']
        images = tf.map_fn(preprocess_image, jpegs, dtype=tf.float32)

        images = tf.squeeze(images, [0])
        images = tf.expand_dims(images, axis=0)
        # now the image shape is [1, ?, ?, 3]
        images = tf.image.resize_images(images, tf.constant([224, 224]))

        model = load_model('./keras_models/my_model.h5')

        x = Input(tensor=images)
        y = model(x)

        model.summary()
        compare_value = tf.Variable(100.0)
        bool_out = tf.math.greater(y, compare_value)

        bool_out = bool_out[:,0]

        bool_out = tf.cast(bool_out, tf.float32)
        bool_out = tf.expand_dims(bool_out, axis=0)
        final_out = tf.concat([tf.transpose(y), bool_out], axis=0)
        init = tf.global_variables_initializer()
        with tf.Session() as sess:
            sess.run(init)

            # predict_tensor_input_info = tf.saved_model.utils.build_tensor_info(jpegs)
            # predict_tensor_score_info = tf.saved_model.utils.build_tensor_info(bool_out)
            prediction_signature = \
                (tf.saved_model.signature_def_utils.predict_signature_def(
                    inputs={'images': jpegs},
                    outputs={'scores': final_out}
                )
            )

            export_path = os.path.join(
                tf.compat.as_bytes(FLAGS.output_dir),
                tf.compat.as_bytes(FLAGS.model_version)
            )

            builder = saved_model_builder.SavedModelBuilder(export_path)

            legacy_init_op = tf.group(tf.tables_initializer(),
                                      name = 'legacy_init_op')

            builder.add_meta_graph_and_variables(
                sess, [tag_constants.SERVING],
                signature_def_map={
                    signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:prediction_signature,
                },
                legacy_init_op = legacy_init_op
            )

            builder.save()

if __name__ =="__main__":
    tf.app.run()

这是我的客户程序:

^{pr2}$

json的输出_响应.text就像:

{"instances": [{"b64": "/9j/4A...Y//9k="}]}
{
    "predictions": [[-0.00015692], [-0.000967527], [0.000567942], [0.0]
    ]
}

{"instances": [{"b64": "/9j/4A...if/9k="}]}
{
    "predictions": [[-0.000157582], [-0.000998327], [0.000598866], [0.0]
    ]
}

。。。。。。在

预测关键字的前3个值应该是度,而图像中的x、y坐标应该是百值。。。最后一个值是转换为float32的真/假值,与100.0进行比较

好吧。。最后,我还用模型.预测,给出正确答案。。。在

现在我完全糊涂了。有人能告诉我我的代码哪里出问题了吗?在


Tags: imageimportappstringmodeltfasbuilder
1条回答
网友
1楼 · 发布于 2024-04-19 19:45:47

使用我的脚本以tensorflow服务格式导出

import sys
from keras.models import load_model
import tensorflow as tf
from keras import backend as K
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import graph_io
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants


K.set_learning_phase(0)
K.set_image_data_format('channels_last')

INPUT_MODEL = sys.argv[1]
NUMBER_OF_OUTPUTS = 1
OUTPUT_NODE_PREFIX = 'output_node'
OUTPUT_FOLDER= 'frozen'
OUTPUT_GRAPH = 'frozen_model.pb'
OUTPUT_SERVABLE_FOLDER = sys.argv[2]
INPUT_TENSOR = sys.argv[3]


try:
    model = load_model(INPUT_MODEL)
except ValueError as err:
    print('Please check the input saved model file')
    raise err

output = [None]*NUMBER_OF_OUTPUTS
output_node_names = [None]*NUMBER_OF_OUTPUTS
for i in range(NUMBER_OF_OUTPUTS):
    output_node_names[i] = OUTPUT_NODE_PREFIX+str(i)
    output[i] = tf.identity(model.outputs[i], name=output_node_names[i])
print('Output Tensor names: ', output_node_names)


sess = K.get_session()
try:
    frozen_graph = graph_util.convert_variables_to_constants(sess,             sess.graph.as_graph_def(), output_node_names)    
    graph_io.write_graph(frozen_graph, OUTPUT_FOLDER, OUTPUT_GRAPH, as_text=False)
    print(f'Frozen graph ready for inference/serving at     {OUTPUT_FOLDER}/{OUTPUT_GRAPH}')
except:
    print('Error Occured')



builder = tf.saved_model.builder.SavedModelBuilder(OUTPUT_SERVABLE_FOLDER)

with tf.gfile.GFile(f'{OUTPUT_FOLDER}/{OUTPUT_GRAPH}', "rb") as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())

sigs = {}
OUTPUT_TENSOR = output_node_names
with tf.Session(graph=tf.Graph()) as sess:
    tf.import_graph_def(graph_def, name="")
    g = tf.get_default_graph()
    inp = g.get_tensor_by_name(INPUT_TENSOR)
    out = g.get_tensor_by_name(OUTPUT_TENSOR[0] + ':0')

    sigs[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = \
        tf.saved_model.signature_def_utils.predict_signature_def(
            {"input": inp}, {"outout": out})

    builder.add_meta_graph_and_variables(sess,
                                         [tag_constants.SERVING],
                                         signature_def_map=sigs)
    try:
        builder.save()
        print(f'Model ready for deployment at     {OUTPUT_SERVABLE_FOLDER}/saved_model.pb')
        print('Prediction signature : ')
        print(sigs['serving_default'])
    except:
        print('Error Occured, please checked frozen graph')

相关问题 更多 >