-
Notifications
You must be signed in to change notification settings - Fork 4.3k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[Python] Added Tensorflow Model Handler (#25368)
* go lints * added tf model handler and tests * lint and formatting changes * correct lints * more lints and formats * auto formatted with yapf * rm spare lines * add readme file * test requirement file * add test to gradle * add test tasks for tf * unit test * lints * updated inferenceFn type * add tox info for py38 * pylint * lints * using tfhub * added tf model handler and tests * lint and formatting changes * correct lints * more lints and formats * auto formatted with yapf * rm spare lines * merge master * test requirement file * add test to gradle * add test tasks for tf * unit test * lints * updated inferenceFn type * add tox info for py38 * pylint * lints * using tfhub * tfhub example * update doc * sort imports * resolve pydoc,precommit * fix import * fix lint * address comments * fix optional inference args * change to ml bucket * fix doc
- Loading branch information
1 parent
26735eb
commit 8bf324d
Showing
11 changed files
with
938 additions
and
2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
128 changes: 128 additions & 0 deletions
128
sdks/python/apache_beam/examples/inference/tensorflow_imagenet_segmentation.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,128 @@ | ||
# | ||
# Licensed to the Apache Software Foundation (ASF) under one or more | ||
# contributor license agreements. See the NOTICE file distributed with | ||
# this work for additional information regarding copyright ownership. | ||
# The ASF licenses this file to You under the Apache License, Version 2.0 | ||
# (the "License"); you may not use this file except in compliance with | ||
# the License. You may obtain a copy of the License at | ||
# | ||
# http://www.apache.org/licenses/LICENSE-2.0 | ||
# | ||
# Unless required by applicable law or agreed to in writing, software | ||
# distributed under the License is distributed on an "AS IS" BASIS, | ||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
# See the License for the specific language governing permissions and | ||
# limitations under the License. | ||
# | ||
|
||
import argparse | ||
import logging | ||
from typing import Iterable | ||
from typing import Iterator | ||
|
||
import numpy | ||
|
||
import apache_beam as beam | ||
import tensorflow as tf | ||
from apache_beam.ml.inference.base import PredictionResult | ||
from apache_beam.ml.inference.base import RunInference | ||
from apache_beam.ml.inference.tensorflow_inference import TFModelHandlerTensor | ||
from apache_beam.options.pipeline_options import PipelineOptions | ||
from apache_beam.options.pipeline_options import SetupOptions | ||
from apache_beam.runners.runner import PipelineResult | ||
from PIL import Image | ||
|
||
|
||
class PostProcessor(beam.DoFn):
  """Maps a PredictionResult to the ImageNet label of its top-scoring class.

  The ImageNet label vocabulary is downloaded once per DoFn instance in
  setup() instead of once per element.
  """
  def setup(self):
    # Fetch the label vocabulary a single time; process() runs per element
    # and must not re-download it.
    labels_path = tf.keras.utils.get_file(
        'ImageNetLabels.txt',
        'https://storage.googleapis.com/download.tensorflow.org/data/ImageNetLabels.txt' # pylint: disable=line-too-long
    )
    # Use a context manager so the file handle is closed deterministically.
    with open(labels_path) as labels_file:
      self._imagenet_labels = numpy.array(labels_file.read().splitlines())

  def process(self, element: PredictionResult) -> Iterable[str]:
    predicted_class = numpy.argmax(element.inference[0], axis=-1)
    predicted_class_name = self._imagenet_labels[predicted_class]
    # Yield (not return) the single label string: returning a bare string
    # from process() makes Beam iterate it character by character.
    yield predicted_class_name.title()
|
||
|
||
def parse_known_args(argv):
  """Parses the example-specific command-line flags.

  Args:
    argv: Raw command-line argument list.

  Returns:
    A (known_args, pipeline_args) pair; flags not declared here are passed
    through to the Beam pipeline options.
  """
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument(
      '--input',
      dest='input',
      required=True,
      help='Path to the text file containing image names.')
  arg_parser.add_argument(
      '--output',
      dest='output',
      required=True,
      help='Path to save output predictions.')
  arg_parser.add_argument(
      '--model_path',
      dest='model_path',
      required=True,
      help='Path to load the Tensorflow model for Inference.')
  arg_parser.add_argument(
      '--image_dir', help='Path to the directory where images are stored.')
  return arg_parser.parse_known_args(argv)
|
||
|
||
def filter_empty_lines(text: str) -> Iterator[str]:
  """Yields *text* unchanged, unless it is empty or whitespace-only."""
  stripped = text.strip()
  if stripped:
    yield text
|
||
|
||
def read_image(image_name, image_dir):
  """Fetches an image and returns it as a float32 tensor scaled to [0, 1].

  Args:
    image_name: File name of the image; also used as the cache key for
      tf.keras.utils.get_file.
    image_dir: URL/path prefix that image_name is appended to.

  Returns:
    A 224x224 float32 tf.Tensor (channels as in the source image).
  """
  local_path = tf.keras.utils.get_file(image_name, image_dir + image_name)
  resized = Image.open(local_path).resize((224, 224))
  scaled = numpy.array(resized) / 255.0
  return tf.cast(tf.convert_to_tensor(scaled), dtype=tf.float32)
|
||
|
||
def run(
    argv=None, save_main_session=True, test_pipeline=None) -> PipelineResult:
  """Builds and runs the image-classification inference pipeline.

  Args:
    argv: Command line arguments defined for this example.
    save_main_session: Used for internal testing.
    test_pipeline: Used for internal testing.
  """
  known_args, pipeline_args = parse_known_args(argv)
  options = PipelineOptions(pipeline_args)
  options.view_as(SetupOptions).save_main_session = save_main_session

  # In this example we will use the TensorflowHub model URL.
  model_handler = TFModelHandlerTensor(model_uri=known_args.model_path)

  # Reuse the injected pipeline when testing; otherwise build a real one.
  pipeline = test_pipeline
  if not test_pipeline:
    pipeline = beam.Pipeline(options=options)

  preprocessed = (
      pipeline
      | 'ReadImageNames' >> beam.io.ReadFromText(known_args.input)
      | 'FilterEmptyLines' >> beam.ParDo(filter_empty_lines)
      | "PreProcessInputs" >>
      beam.Map(lambda image_name: read_image(image_name, known_args.image_dir)))

  labeled = (
      preprocessed
      | "RunInference" >> RunInference(model_handler)
      | "PostProcessOutputs" >> beam.ParDo(PostProcessor()))

  _ = labeled | "WriteOutput" >> beam.io.WriteToText(
      known_args.output, shard_name_template='', append_trailing_newlines=False)

  result = pipeline.run()
  result.wait_until_finish()
  return result
|
||
|
||
# Script entry point: surface pipeline progress at INFO level, then run.
if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  run()
118 changes: 118 additions & 0 deletions
118
sdks/python/apache_beam/examples/inference/tensorflow_mnist_classification.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,118 @@ | ||
# | ||
# Licensed to the Apache Software Foundation (ASF) under one or more | ||
# contributor license agreements. See the NOTICE file distributed with | ||
# this work for additional information regarding copyright ownership. | ||
# The ASF licenses this file to You under the Apache License, Version 2.0 | ||
# (the "License"); you may not use this file except in compliance with | ||
# the License. You may obtain a copy of the License at | ||
# | ||
# http://www.apache.org/licenses/LICENSE-2.0 | ||
# | ||
# Unless required by applicable law or agreed to in writing, software | ||
# distributed under the License is distributed on an "AS IS" BASIS, | ||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
# See the License for the specific language governing permissions and | ||
# limitations under the License. | ||
# | ||
|
||
import argparse | ||
import logging | ||
from typing import Iterable | ||
from typing import Tuple | ||
|
||
import numpy | ||
|
||
import apache_beam as beam | ||
from apache_beam.ml.inference.base import KeyedModelHandler | ||
from apache_beam.ml.inference.base import PredictionResult | ||
from apache_beam.ml.inference.base import RunInference | ||
from apache_beam.ml.inference.tensorflow_inference import ModelType | ||
from apache_beam.ml.inference.tensorflow_inference import TFModelHandlerNumpy | ||
from apache_beam.options.pipeline_options import PipelineOptions | ||
from apache_beam.options.pipeline_options import SetupOptions | ||
from apache_beam.runners.runner import PipelineResult | ||
|
||
|
||
def process_input(row: str) -> Tuple[int, numpy.ndarray]:
  """Parses one CSV row of "label,p0,...,p783" into a keyed image array.

  Args:
    row: Comma-separated integers; the first is the true label, the rest
      are pixel values.

  Returns:
    A (label, pixels) tuple where pixels has shape (28, 28, 1), the input
    shape the trained model accepts.
  """
  fields = row.split(',')
  true_label = int(fields[0])
  pixel_values = [int(value) for value in fields[1:]]
  image = numpy.array(pixel_values).reshape((28, 28, 1))
  return true_label, image
|
||
|
||
class PostProcessor(beam.DoFn):
  """Formats a keyed PredictionResult as "true_label,predicted_label".

  The predicted label is the argmax over the model's per-class scores.
  """
  def process(self, element: Tuple[int, PredictionResult]) -> Iterable[str]:
    true_label, result = element
    predicted_label = numpy.argmax(result.inference, axis=0)
    yield '{},{}'.format(true_label, predicted_label)
|
||
|
||
def parse_known_args(argv):
  """Parses the example-specific command-line flags.

  Args:
    argv: Raw command-line argument list.

  Returns:
    A (known_args, pipeline_args) pair; flags not declared here are passed
    through to the Beam pipeline options.
  """
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument(
      '--input',
      dest='input',
      required=True,
      help='text file with comma separated int values.')
  arg_parser.add_argument(
      '--output',
      dest='output',
      required=True,
      help='Path to save output predictions.')
  arg_parser.add_argument(
      '--model_path',
      dest='model_path',
      required=True,
      help='Path to load the Tensorflow model for Inference.')
  return arg_parser.parse_known_args(argv)
|
||
|
||
def run(
    argv=None, save_main_session=True, test_pipeline=None) -> PipelineResult:
  """Builds and runs the MNIST-classification inference pipeline.

  Args:
    argv: Command line arguments defined for this example.
    save_main_session: Used for internal testing.
    test_pipeline: Used for internal testing.
  """
  known_args, pipeline_args = parse_known_args(argv)
  options = PipelineOptions(pipeline_args)
  options.view_as(SetupOptions).save_main_session = save_main_session

  # In this example we pass keyed inputs to RunInference transform.
  # Therefore, we use KeyedModelHandler wrapper over TFModelHandlerNumpy.
  model_handler = KeyedModelHandler(
      TFModelHandlerNumpy(
          model_uri=known_args.model_path, model_type=ModelType.SAVED_MODEL))

  # Reuse the injected pipeline when testing; otherwise build a real one.
  pipeline = test_pipeline
  if not test_pipeline:
    pipeline = beam.Pipeline(options=options)

  keyed_images = (
      pipeline
      | "ReadFromInput" >> beam.io.ReadFromText(known_args.input)
      | "PreProcessInputs" >> beam.Map(process_input))

  labeled = (
      keyed_images
      | "RunInference" >> RunInference(model_handler)
      | "PostProcessOutputs" >> beam.ParDo(PostProcessor()))

  _ = labeled | "WriteOutput" >> beam.io.WriteToText(
      known_args.output, shard_name_template='', append_trailing_newlines=True)

  result = pipeline.run()
  result.wait_until_finish()
  return result
|
||
|
||
# Script entry point: surface pipeline progress at INFO level, then run.
if __name__ == '__main__':
  logging.getLogger().setLevel(logging.INFO)
  run()
Oops, something went wrong.