TFX Pipelines - Titanic Dataset
import tempfile
import tensorflow as tf
import urllib.request
import os
import pandas as pd
import shutil
import tensorflow_data_validation as tfdv
import tensorflow_model_analysis as tfma
from absl import logging
from pathlib import Path
from tensorflow_metadata.proto.v0 import schema_pb2
from tfx import v1 as tfx
print(f"TensorFlow version: {tf.__version__}")
print(f"TFX version: {tfx.__version__}")
print(f"TensorFlow Data Validation version: {tfdv.__version__}")
logging.set_verbosity(logging.INFO)
TensorFlow version: 2.5.0
TFX version: 1.0.0
TensorFlow Data Validation version: 1.0.0
DATA_DIRECTORY = "titanic"
DATA_SOURCE_TRAIN_PATH = Path(DATA_DIRECTORY) / "titanic-train.csv"
DATA_SOURCE_TEST_PATH = Path(DATA_DIRECTORY) / "titanic-test.csv"
DATA_TRAIN_FILENAME = "train.csv"
DATA_EVAL_FILENAME = "eval.csv"
DATA_TEST_FILENAME = "test.csv"
PIPELINE_NAME = "titanic-pipeline"
PIPELINE_DIRECTORY = Path("pipelines") / PIPELINE_NAME
METADATA_PATH = Path("metadata") / PIPELINE_NAME / "metadata.db"
SCHEMA_DIRECTORY = PIPELINE_DIRECTORY / "schema"
SCHEMA_FILENAME = str(Path(SCHEMA_DIRECTORY) / "schema.pbtxt")
MODEL_DIRECTORY = Path("model")
train_df = pd.read_csv(DATA_SOURCE_TRAIN_PATH)
test_df = pd.read_csv(DATA_SOURCE_TEST_PATH)
datasets = [train_df, test_df]
for dataset in datasets:
dataset.drop(["PassengerId", "Name", "Ticket"], axis=1, inplace=True)
dataset.Fare = dataset.Fare.fillna(train_df.Fare.median())
dataset.Age = dataset.Age.fillna(train_df.Age.median()).astype(int)
dataset.Embarked = dataset.Embarked.fillna("S")
# Let's save the modified data back to disk. Notice that we are saving the
# training data twice (as both train and eval).
train_df.to_csv(Path(DATA_DIRECTORY) / DATA_TRAIN_FILENAME, index=False)
train_df.to_csv(Path(DATA_DIRECTORY) / DATA_EVAL_FILENAME, index=False)
test_df.to_csv(Path(DATA_DIRECTORY) / DATA_TEST_FILENAME, index=False)
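As a quick sanity check (not part of the pipeline), let's confirm that the columns we just imputed no longer contain missing values:
# Quick sanity check: the imputed columns should report zero missing values.
print(train_df[["Age", "Fare", "Embarked"]].isnull().sum())
print(test_df[["Age", "Fare", "Embarked"]].isnull().sum())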
Common functions
def _examples(df):
"""
Converts a DataFrame into a list of serialized examples in the format the model expects.
"""
examples = []
for index, row in df.iterrows():
features = {
"Pclass": tf.train.Feature(int64_list=tf.train.Int64List(value=[row.Pclass])),
"Sex": tf.train.Feature(bytes_list=tf.train.BytesList(value=[bytes(str(row.Sex), encoding="raw_unicode_escape")])),
"Age": tf.train.Feature(int64_list=tf.train.Int64List(value=[row.Age])),
"SibSp": tf.train.Feature(int64_list=tf.train.Int64List(value=[row.SibSp])),
"Parch": tf.train.Feature(int64_list=tf.train.Int64List(value=[row.Parch])),
"Fare": tf.train.Feature(float_list=tf.train.FloatList(value=[row.Fare])),
"Cabin": tf.train.Feature(bytes_list=tf.train.BytesList(value=[bytes(str(row.Cabin), encoding="raw_unicode_escape")])),
"Embarked": tf.train.Feature(bytes_list=tf.train.BytesList(value=[bytes(str(row.Embarked), encoding="raw_unicode_escape")]))
}
example_proto = tf.train.Example(features=tf.train.Features(feature=features))
examples.append(example_proto.SerializeToString())
return examples
def get_inference_fn(model_directory):
"""
Returns the inference function of the latest published model.
"""
model_directories = (d for d in os.scandir(model_directory) if d.is_dir())
model_path = max(model_directories, key=lambda i: int(i.name)).path
loaded_model = tf.keras.models.load_model(model_path)
return loaded_model.signatures["serving_default"]
Running each component interactively
from tfx.orchestration.experimental.interactive.interactive_context import (
InteractiveContext
)
context = InteractiveContext()
WARNING:absl:InteractiveContext pipeline_root argument not provided: using temporary directory /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl as root for pipeline outputs.
WARNING:absl:InteractiveContext metadata_connection_config not provided: using SQLite ML Metadata database at /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/metadata.sqlite.
Loading the data
from tfx.proto import example_gen_pb2
# Both train.csv and eval.csv contain the same data.
input_config = tfx.proto.Input(splits=[
example_gen_pb2.Input.Split(name='train', pattern=DATA_TRAIN_FILENAME),
example_gen_pb2.Input.Split(name='eval', pattern=DATA_EVAL_FILENAME)
])
example_gen = tfx.components.CsvExampleGen(
input_base=DATA_DIRECTORY,
input_config=input_config
)
context.run(example_gen)
INFO:absl:Running driver for CsvExampleGen
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:select span and version = (0, None)
INFO:absl:latest span and version = (0, None)
INFO:absl:Running executor for CsvExampleGen
INFO:absl:Generating examples.
WARNING:apache_beam.runners.interactive.interactive_environment:Dependencies required for Interactive Beam PCollection visualization are not available, please use: `pip install apache-beam[interactive]` to install necessary dependencies to enable all data visualization features.
INFO:absl:Processing input csv data titanic/train.csv to TFExample.
INFO:absl:Processing input csv data titanic/eval.csv to TFExample.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:apache_beam.io.tfrecordio:Couldn't find python-snappy so the implementation of _TFRecordUtil._masked_crc32c is not as fast as it could be.
INFO:absl:Examples generated.
INFO:absl:Running publisher for CsvExampleGen
INFO:absl:MetadataStore with DB connection initialized
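As an aside, instead of writing the training data twice we could let CsvExampleGen derive the splits from a single file. A minimal sketch of that alternative (not used in this notebook):
# Alternative sketch (not used here): derive train/eval splits from one file
# using hash buckets instead of duplicating the CSV on disk.
output_config = example_gen_pb2.Output(
    split_config=example_gen_pb2.SplitConfig(splits=[
        example_gen_pb2.SplitConfig.Split(name="train", hash_buckets=4),
        example_gen_pb2.SplitConfig.Split(name="eval", hash_buckets=1),
    ])
)
# example_gen = tfx.components.CsvExampleGen(
#     input_base=DATA_DIRECTORY,
#     output_config=output_config
# )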
Computing statistics about the data
statistics_gen = tfx.components.StatisticsGen(
examples=example_gen.outputs["examples"]
)
context.run(statistics_gen)
context.show(statistics_gen.outputs["statistics"])
INFO:absl:Excluding no splits because exclude_splits is not set.
INFO:absl:Running driver for StatisticsGen
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for StatisticsGen
INFO:absl:Generating statistics for split train.
INFO:absl:Statistics for split train written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/StatisticsGen/statistics/2/Split-train.
INFO:absl:Generating statistics for split eval.
INFO:absl:Statistics for split eval written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/StatisticsGen/statistics/2/Split-eval.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
INFO:absl:Running publisher for StatisticsGen
INFO:absl:MetadataStore with DB connection initialized
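If we want the same kind of statistics outside the pipeline, TFDV can compute them straight from the DataFrame. This is only illustrative; the pipeline keeps using StatisticsGen:
# Illustrative only: compute and visualize statistics directly from pandas.
train_stats = tfdv.generate_statistics_from_dataframe(train_df)
tfdv.visualize_statistics(train_stats)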
Inferring the schema from the data
schema_gen = tfx.components.SchemaGen(
statistics=statistics_gen.outputs["statistics"],
infer_feature_shape=True
)
context.run(schema_gen)
context.show(schema_gen.outputs["schema"])
INFO:absl:Excluding no splits because exclude_splits is not set.
INFO:absl:Running driver for SchemaGen
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for SchemaGen
INFO:absl:Processing schema from statistics for split train.
INFO:absl:Processing schema from statistics for split eval.
INFO:absl:Schema written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/SchemaGen/schema/3/schema.pbtxt.
INFO:absl:Running publisher for SchemaGen
INFO:absl:MetadataStore with DB connection initialized
/root/venv/lib/python3.6/site-packages/tensorflow_data_validation/utils/display_util.py:180: FutureWarning: Passing a negative integer is deprecated in version 1.0 and will not be supported in future version. Instead, use None to not limit the column width.
pd.set_option('max_colwidth', -1)
Adjusting the schema
schema = tfdv.load_schema_text(
os.path.join(schema_gen.outputs['schema']._artifacts[0].uri, "schema.pbtxt")
)
# Let's add the two environments we need.
schema.default_environment.append("TRAINING")
schema.default_environment.append("SERVING")
# We don't have the `Survived` column in the SERVING environment
tfdv.get_feature(schema, "Survived").not_in_environment.append("SERVING")
# Let's adjust the `Cabin` feature to not require a specific percentage
# of values.
tfdv.get_feature(schema, "Cabin").presence.min_fraction = 0.0
# We can now display the updated schema
tfdv.display_schema(schema=schema)
/root/venv/lib/python3.6/site-packages/tensorflow_data_validation/utils/display_util.py:180: FutureWarning: Passing a negative integer is deprecated in version 1.0 and will not be supported in future version. Instead, use None to not limit the column width.
pd.set_option('max_colwidth', -1)
!mkdir -p {SCHEMA_DIRECTORY}
tfdv.write_schema_text(schema, SCHEMA_FILENAME)
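With the adjusted schema saved, we can also validate serving-style data against it using the SERVING environment, so the missing `Survived` column is not flagged. This is a sketch outside the pipeline and assumes the test CSV approximates serving traffic:
# Sketch (outside the pipeline): validate serving-like data against the
# adjusted schema. The SERVING environment excludes `Survived`.
serving_stats = tfdv.generate_statistics_from_csv(
    str(Path(DATA_DIRECTORY) / DATA_TEST_FILENAME)
)
serving_anomalies = tfdv.validate_statistics(serving_stats, schema, environment="SERVING")
tfdv.display_anomalies(serving_anomalies)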
# Let's load the updated schema and display it
schema_importer = tfx.dsl.Importer(
source_uri=str(SCHEMA_DIRECTORY),
artifact_type=tfx.types.standard_artifacts.Schema
).with_id("schema_importer")
context.run(schema_importer)
context.show(schema_importer.outputs["result"])
INFO:absl:Running driver for schema_importer
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Processing source uri: pipelines/titanic-pipeline/schema, properties: {}, custom_properties: {}
INFO:absl:Running executor for schema_importer
INFO:absl:Running publisher for schema_importer
INFO:absl:MetadataStore with DB connection initialized
/root/venv/lib/python3.6/site-packages/tensorflow_data_validation/utils/display_util.py:180: FutureWarning: Passing a negative integer is deprecated in version 1.0 and will not be supported in future version. Instead, use None to not limit the column width.
pd.set_option('max_colwidth', -1)
Validating the data using the updated schema
example_validator = tfx.components.ExampleValidator(
statistics=statistics_gen.outputs["statistics"],
schema=schema_importer.outputs["result"]
)
context.run(example_validator)
context.show(example_validator.outputs["anomalies"])
INFO:absl:Excluding no splits because exclude_splits is not set.
INFO:absl:Running driver for ExampleValidator
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for ExampleValidator
INFO:absl:Validating schema against the computed statistics for split train.
INFO:absl:Validation complete for split train. Anomalies written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/ExampleValidator/anomalies/5/Split-train.
INFO:absl:Validating schema against the computed statistics for split eval.
INFO:absl:Validation complete for split eval. Anomalies written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/ExampleValidator/anomalies/5/Split-eval.
INFO:absl:Running publisher for ExampleValidator
INFO:absl:MetadataStore with DB connection initialized
/root/venv/lib/python3.6/site-packages/tensorflow_data_validation/utils/display_util.py:217: FutureWarning: Passing a negative integer is deprecated in version 1.0 and will not be supported in future version. Instead, use None to not limit the column width.
pd.set_option('max_colwidth', -1)
Transforming the data
CONSTANTS_MODULE_PATH = "titanic_constants.py"
%%writefile {CONSTANTS_MODULE_PATH}
LABEL = "Survived"
Overwriting titanic_constants.py
TRANSFORM_MODULE_PATH = "titanic_transform.py"
%%writefile {TRANSFORM_MODULE_PATH}
import tensorflow as tf
import tensorflow_transform as tft
from tfx import v1 as tfx
import titanic_constants
LABEL = titanic_constants.LABEL
EMBARKED = ["S", "C", "Q"]
def preprocessing_fn(inputs):
"""
This is the Transform's callback function that preprocesses the
input data.
Args:
inputs: The map from feature keys to raw values that need to be
transformed.
Returns:
A map from feature keys to the transformed values.
"""
outputs = dict()
# Let's categorical-encode `Embarked`.
embarked_input = _fillna(inputs["Embarked"], "S")
embarked_initializer = tf.lookup.KeyValueTensorInitializer(
keys=EMBARKED,
values=tf.cast(tf.range(len(EMBARKED)), tf.int64),
key_dtype=tf.string,
value_dtype=tf.int64,
)
embarked_table = tf.lookup.StaticHashTable(embarked_initializer, default_value=-1)
outputs["embarked"] = embarked_table.lookup(embarked_input)
# We are going to create a new feature `has_a_cabin` that indicates
# whether the passenger had a cabin on the ship.
cabin = _fillna(inputs["Cabin"], "")
outputs["has_a_cabin"] = tf.cast(tf.greater(tf.strings.length(cabin), 0), tf.int64)
# Let's now create another new feature `is_traveling_alone` that
# indicates whether the passenger was traveling alone.
sibsp = _fillna(inputs["SibSp"])
parch = _fillna(inputs["Parch"])
family_size = tf.math.add(sibsp, parch)
outputs["is_traveling_alone"] = tf.cast(tf.equal(family_size, 0), tf.int64)
# Let's categorical-encode `Sex`.
outputs["sex"] = tf.cast(tf.equal(inputs["Sex"], "male"), tf.int64)
# Let's bucketize `Fare` into 4 different buckets.
fare = _fillna(inputs["Fare"])
outputs["fare"] = tf.cast(tf.where(
tf.less_equal(fare, 7.91), 0,
tf.where(tf.math.logical_and(tf.greater(fare, 7.91), tf.less_equal(fare, 14.454)), 1,
tf.where(tf.math.logical_and(tf.greater(fare, 14.454), tf.less_equal(fare, 31)), 2, 3))), tf.int64)
# We are going to keep `Pclass` as is.
outputs["pclass"] = inputs["Pclass"]
outputs[LABEL] = inputs[LABEL]
return outputs
def _fillna(t, value=0):
"""
Replaces missing values in a SparseTensor with the supplied value.
Args:
t: A `SparseTensor` of rank 2. Its dense shape should have size at most 1
in the second dimension.
value: The value used to fill in the missing entries (defaults to 0).
Returns:
A rank 1 tensor where missing values have been filled in.
"""
if not isinstance(t, tf.sparse.SparseTensor):
return t
return tf.squeeze(tf.sparse.to_dense(
tf.SparseTensor(
t.indices,
t.values,
[t.dense_shape[0], 1]
), value),
axis=1)
Overwriting titanic_transform.py
transform = tfx.components.Transform(
examples=example_gen.outputs["examples"],
schema=schema_importer.outputs["result"],
module_file=os.path.abspath(TRANSFORM_MODULE_PATH),
)
context.run(transform, enable_cache=False)
INFO:absl:Generating ephemeral wheel package for '/work/titanic_transform.py' (including modules: ['titanic_trainer', 'titanic_constants', 'titanic_transform']).
INFO:absl:User module package has hash fingerprint version 94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448.
INFO:absl:Executing: ['/usr/local/bin/python', '/tmp/tmp_y_kisrl/_tfx_generated_setup.py', 'bdist_wheel', '--bdist-dir', '/tmp/tmpigje4ql2', '--dist-dir', '/tmp/tmp_0sd2vdx']
INFO:absl:Successfully built user code wheel distribution at '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'; target user module is 'titanic_transform'.
INFO:absl:Full user module path is 'titanic_transform@/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'
INFO:absl:Running driver for Transform
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for Transform
INFO:absl:Analyze the 'train' split and transform all splits when splits_config is not set.
INFO:absl:udf_utils.get_fn {'module_file': None, 'module_path': 'titanic_transform@/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl', 'preprocessing_fn': None} 'preprocessing_fn'
INFO:absl:Installing '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/local/bin/python', '-m', 'pip', 'install', '--target', '/tmp/tmph_2fx5qw', '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl']
INFO:absl:Successfully installed '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'.
INFO:absl:udf_utils.get_fn {'module_file': None, 'module_path': 'titanic_transform@/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl', 'stats_options_updater_fn': None} 'stats_options_updater_fn'
INFO:absl:Installing '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/local/bin/python', '-m', 'pip', 'install', '--target', '/tmp/tmpqdv_e5wh', '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl']
INFO:absl:Successfully installed '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'.
INFO:absl:Feature Age has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Cabin has no shape. Setting to VarLenSparseTensor.
INFO:absl:Feature Embarked has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Fare has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Parch has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Pclass has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Sex has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature SibSp has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Survived has a shape dim {
size: 1
}
. Setting to DenseTensor.
WARNING:tensorflow:From /root/venv/lib/python3.6/site-packages/tensorflow_transform/tf_utils.py:266: Tensor.experimental_ref (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use ref() instead.
WARNING:tensorflow:Tables initialized inside a tf.function will be re-initialized on every invocation of the function. This re-initialization can have significant impact on performance. Consider lifting them out of the graph context using `tf.init_scope`.
INFO:absl:Installing '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/local/bin/python', '-m', 'pip', 'install', '--target', '/tmp/tmp0vrl6pyj', '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl']
INFO:absl:Successfully installed '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Transform-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'.
WARNING:root:This output type hint will be ignored and not used for type-checking purposes. Typically, output type hints for a PTransform are single (or nested) types wrapped by a PCollection, PDone, or None. Got: Tuple[Dict[str, Union[NoneType, _Dataset]], Union[Dict[str, Dict[str, PCollection]], NoneType]] instead.
WARNING:apache_beam.typehints.typehints:Ignoring send_type hint: <class 'NoneType'>
WARNING:apache_beam.typehints.typehints:Ignoring return_type hint: <class 'NoneType'>
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
INFO:tensorflow:Assets written to: /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/Transform/transform_graph/6/.temp_path/tftransform_tmp/81b987f2b56d4aee96da5156d3b252cb/assets
INFO:absl:Running publisher for Transform
INFO:absl:MetadataStore with DB connection initialized
train_uri = os.path.join(
transform.outputs['transformed_examples'].get()[0].uri,
'Split-train'
)
tfrecord_filenames = [
os.path.join(train_uri, name)
for name in os.listdir(train_uri)
]
dataset = tf.data.TFRecordDataset(tfrecord_filenames, compression_type="GZIP")
for tfrecord in dataset.take(3):
serialized_example = tfrecord.numpy()
example = tf.train.Example()
example.ParseFromString(serialized_example)
print(example)
features {
feature {
key: "Survived"
value {
int64_list {
value: 0
}
}
}
feature {
key: "embarked"
value {
int64_list {
value: 0
}
}
}
feature {
key: "fare"
value {
int64_list {
value: 0
}
}
}
feature {
key: "has_a_cabin"
value {
int64_list {
value: 0
}
}
}
feature {
key: "is_traveling_alone"
value {
int64_list {
value: 0
}
}
}
feature {
key: "pclass"
value {
int64_list {
value: 3
}
}
}
feature {
key: "sex"
value {
int64_list {
value: 1
}
}
}
}
features {
feature {
key: "Survived"
value {
int64_list {
value: 1
}
}
}
feature {
key: "embarked"
value {
int64_list {
value: 1
}
}
}
feature {
key: "fare"
value {
int64_list {
value: 3
}
}
}
feature {
key: "has_a_cabin"
value {
int64_list {
value: 1
}
}
}
feature {
key: "is_traveling_alone"
value {
int64_list {
value: 0
}
}
}
feature {
key: "pclass"
value {
int64_list {
value: 1
}
}
}
feature {
key: "sex"
value {
int64_list {
value: 0
}
}
}
}
features {
feature {
key: "Survived"
value {
int64_list {
value: 1
}
}
}
feature {
key: "embarked"
value {
int64_list {
value: 0
}
}
}
feature {
key: "fare"
value {
int64_list {
value: 1
}
}
}
feature {
key: "has_a_cabin"
value {
int64_list {
value: 0
}
}
}
feature {
key: "is_traveling_alone"
value {
int64_list {
value: 1
}
}
}
feature {
key: "pclass"
value {
int64_list {
value: 3
}
}
}
feature {
key: "sex"
value {
int64_list {
value: 0
}
}
}
}
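As a quick sanity check, we can also count how many transformed training records the component produced (illustrative only):
# Count the records in the transformed training split.
record_count = sum(1 for _ in tf.data.TFRecordDataset(tfrecord_filenames, compression_type="GZIP"))
print(f"Transformed training examples: {record_count}")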
Training a model
TRAINER_MODULE_PATH = "titanic_trainer.py"
%%writefile {TRAINER_MODULE_PATH}
import tensorflow as tf
import tensorflow_decision_forests as tfdf
import tensorflow_transform as tft
from absl import logging
from tensorflow.keras import layers, Model, optimizers, losses, metrics
from tfx import v1 as tfx
from tfx_bsl.public import tfxio
from typing import List, Text
import titanic_constants
LABEL = titanic_constants.LABEL
BATCH_SIZE = 32
EPOCHS = 10
def _input_fn(
file_pattern: List[Text],
data_accessor: tfx.components.DataAccessor,
tf_transform_output: tft.TFTransformOutput,
batch_size: int,
) -> tf.data.Dataset:
"""
Generates a dataset of features that can be used to train
and evaluate the model.
Args:
file_pattern: List of paths or patterns of input data files.
data_accessor: An instance of DataAccessor that we can use to
convert the input to a RecordBatch.
tf_transform_output: The transformation output.
batch_size: The number of consecutive elements that we should
combine in a single batch.
Returns:
A dataset that contains a tuple of (features, indices) where
features is a dictionary of Tensors, and indices is a single
Tensor of label indices.
"""
dataset = data_accessor.tf_dataset_factory(
file_pattern,
tfxio.TensorFlowDatasetOptions(batch_size=batch_size),
schema=tf_transform_output.raw_metadata.schema,
)
tft_layer = tf_transform_output.transform_features_layer()
def apply_transform(raw_features):
transformed_features = tft_layer(raw_features)
transformed_label = transformed_features.pop(LABEL)
return transformed_features, transformed_label
return dataset.map(apply_transform).repeat()
def _get_serve_tf_examples_fn(model, tf_transform_output):
"""
Returns a function that parses a serialized tf.Example and applies
the transformations during inference.
Args:
model: The model that we are serving.
tf_transform_output: The transformation output that we want to
include with the model.
"""
# Let's make sure we set up the model's tft_layer.
model.tft_layer = tf_transform_output.transform_features_layer()
@tf.function(input_signature=[
tf.TensorSpec(shape=[None], dtype=tf.string, name="examples")
])
def serve_tf_examples_fn(serialized_tf_examples):
feature_spec = tf_transform_output.raw_feature_spec()
# We need to make sure we don't include the target column
# as part of the required list of features. This is important
# because the target column will not be provided when the model
# is deployed.
required_feature_spec = {
k: v for k, v in feature_spec.items() if k != LABEL
}
parsed_features = tf.io.parse_example(
serialized_tf_examples,
required_feature_spec
)
transformed_features = model.tft_layer(parsed_features)
# if LABEL in parsed_features:
# transformed_features.pop(LABEL)
# Run inference with the model on the transformed features.
return model(transformed_features)
return serve_tf_examples_fn
def _model() -> tf.keras.Model:
"""
Creates the Keras model.
"""
inputs = [
layers.Input(shape=(1,), name="embarked"),
layers.Input(shape=(1,), name="fare"),
layers.Input(shape=(1,), name="has_a_cabin"),
layers.Input(shape=(1,), name="is_traveling_alone"),
layers.Input(shape=(1,), name="pclass"),
layers.Input(shape=(1,), name="sex"),
]
x = layers.concatenate(inputs)
x = layers.Dense(8, activation="relu")(x)
x = layers.Dense(8, activation="relu")(x)
outputs = layers.Dense(1, activation="sigmoid")(x)
model = Model(inputs=inputs, outputs=outputs)
model.compile(
optimizer=optimizers.Adam(1e-2),
loss="binary_crossentropy",
metrics=[metrics.BinaryAccuracy()],
)
model.summary(print_fn=logging.info)
return model
def run_fn(fn_args: tfx.components.FnArgs):
"""
The callback function that will be called by the Trainer component
to train the model using the supplied arguments.
Args:
fn_args: A collection of name/value pairs representing the
arguments to train the model.
"""
tf_transform_output = tft.TFTransformOutput(fn_args.transform_output)
train_dataset = _input_fn(
fn_args.train_files,
fn_args.data_accessor,
tf_transform_output,
batch_size=BATCH_SIZE,
)
eval_dataset = _input_fn(
fn_args.eval_files,
fn_args.data_accessor,
tf_transform_output,
batch_size=BATCH_SIZE,
)
model = _model()
model.fit(
train_dataset,
steps_per_epoch=fn_args.train_steps,
validation_data=eval_dataset,
validation_steps=fn_args.eval_steps,
epochs=EPOCHS
)
# We need to modify the default signature to include the transform layer in
# the computational graph.
signatures = {
"serving_default": _get_serve_tf_examples_fn(model, tf_transform_output),
}
model.save(fn_args.serving_model_dir, save_format="tf", signatures=signatures)
Overwriting titanic_trainer.py
trainer = tfx.components.Trainer(
examples=example_gen.outputs["examples"],
transform_graph=transform.outputs["transform_graph"],
train_args=tfx.proto.TrainArgs(num_steps=100),
eval_args=tfx.proto.EvalArgs(num_steps=5),
module_file=os.path.abspath(TRAINER_MODULE_PATH),
)
context.run(trainer, enable_cache=False)
INFO:absl:Generating ephemeral wheel package for '/work/titanic_trainer.py' (including modules: ['titanic_trainer', 'titanic_constants', 'titanic_transform']).
INFO:absl:User module package has hash fingerprint version 94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448.
INFO:absl:Executing: ['/usr/local/bin/python', '/tmp/tmpbr20w18b/_tfx_generated_setup.py', 'bdist_wheel', '--bdist-dir', '/tmp/tmpnxua9ic5', '--dist-dir', '/tmp/tmphac4r3wt']
INFO:absl:Successfully built user code wheel distribution at '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Trainer-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'; target user module is 'titanic_trainer'.
INFO:absl:Full user module path is 'titanic_trainer@/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Trainer-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'
INFO:absl:Running driver for Trainer
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for Trainer
INFO:absl:Train on the 'train' split when train_args.splits is not set.
INFO:absl:Evaluate on the 'eval' split when eval_args.splits is not set.
INFO:absl:udf_utils.get_fn {'train_args': '{\n "num_steps": 100\n}', 'eval_args': '{\n "num_steps": 5\n}', 'module_file': None, 'run_fn': None, 'trainer_fn': None, 'custom_config': 'null', 'module_path': 'titanic_trainer@/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Trainer-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'} 'run_fn'
INFO:absl:Installing '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Trainer-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/local/bin/python', '-m', 'pip', 'install', '--target', '/tmp/tmp7rdp57un', '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Trainer-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl']
INFO:absl:Successfully installed '/tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/_wheels/tfx_user_code_Trainer-0.0+94ba6ce5e60a78ae656bc0463b81eb62effab1b32471c3718e1993abf1842448-py3-none-any.whl'.
INFO:absl:Training model.
INFO:absl:Feature Age has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Cabin has no shape. Setting to VarLenSparseTensor.
INFO:absl:Feature Embarked has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Fare has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Parch has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Pclass has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Sex has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature SibSp has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Survived has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Model: "model"
INFO:absl:__________________________________________________________________________________________________
INFO:absl:Layer (type) Output Shape Param # Connected to
INFO:absl:==================================================================================================
INFO:absl:embarked (InputLayer) [(None, 1)] 0
INFO:absl:__________________________________________________________________________________________________
INFO:absl:fare (InputLayer) [(None, 1)] 0
INFO:absl:__________________________________________________________________________________________________
INFO:absl:has_a_cabin (InputLayer) [(None, 1)] 0
INFO:absl:__________________________________________________________________________________________________
INFO:absl:is_traveling_alone (InputLayer) [(None, 1)] 0
INFO:absl:__________________________________________________________________________________________________
INFO:absl:pclass (InputLayer) [(None, 1)] 0
INFO:absl:__________________________________________________________________________________________________
INFO:absl:sex (InputLayer) [(None, 1)] 0
INFO:absl:__________________________________________________________________________________________________
INFO:absl:concatenate (Concatenate) (None, 6) 0 embarked[0][0]
INFO:absl: fare[0][0]
INFO:absl: has_a_cabin[0][0]
INFO:absl: is_traveling_alone[0][0]
INFO:absl: pclass[0][0]
INFO:absl: sex[0][0]
INFO:absl:__________________________________________________________________________________________________
INFO:absl:dense (Dense) (None, 8) 56 concatenate[0][0]
INFO:absl:__________________________________________________________________________________________________
INFO:absl:dense_1 (Dense) (None, 8) 72 dense[0][0]
INFO:absl:__________________________________________________________________________________________________
INFO:absl:dense_2 (Dense) (None, 1) 9 dense_1[0][0]
INFO:absl:==================================================================================================
INFO:absl:Total params: 137
INFO:absl:Trainable params: 137
INFO:absl:Non-trainable params: 0
INFO:absl:__________________________________________________________________________________________________
Epoch 1/10
100/100 [==============================] - 1s 7ms/step - loss: 0.5434 - binary_accuracy: 0.7116 - val_loss: 0.4985 - val_binary_accuracy: 0.8062
Epoch 2/10
100/100 [==============================] - 1s 6ms/step - loss: 0.4697 - binary_accuracy: 0.7828 - val_loss: 0.4798 - val_binary_accuracy: 0.7750
Epoch 3/10
100/100 [==============================] - 1s 6ms/step - loss: 0.4571 - binary_accuracy: 0.7922 - val_loss: 0.4724 - val_binary_accuracy: 0.7875
Epoch 4/10
100/100 [==============================] - 1s 6ms/step - loss: 0.4484 - binary_accuracy: 0.7956 - val_loss: 0.4480 - val_binary_accuracy: 0.8062
Epoch 5/10
100/100 [==============================] - 1s 6ms/step - loss: 0.4565 - binary_accuracy: 0.7922 - val_loss: 0.4392 - val_binary_accuracy: 0.8062
Epoch 6/10
100/100 [==============================] - 1s 5ms/step - loss: 0.4476 - binary_accuracy: 0.7975 - val_loss: 0.4665 - val_binary_accuracy: 0.7875
Epoch 7/10
100/100 [==============================] - 1s 6ms/step - loss: 0.4378 - binary_accuracy: 0.8025 - val_loss: 0.4436 - val_binary_accuracy: 0.8188
Epoch 8/10
100/100 [==============================] - 1s 6ms/step - loss: 0.4438 - binary_accuracy: 0.8044 - val_loss: 0.4192 - val_binary_accuracy: 0.8125
Epoch 9/10
100/100 [==============================] - 1s 5ms/step - loss: 0.4393 - binary_accuracy: 0.8016 - val_loss: 0.4329 - val_binary_accuracy: 0.8062
Epoch 10/10
100/100 [==============================] - 1s 6ms/step - loss: 0.4416 - binary_accuracy: 0.8003 - val_loss: 0.4624 - val_binary_accuracy: 0.7563
INFO:tensorflow:Assets written to: /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/Trainer/model/7/Format-Serving/assets
INFO:absl:Training complete. Model written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/Trainer/model/7/Format-Serving. ModelRun written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/Trainer/model_run/7
INFO:absl:Running publisher for Trainer
INFO:absl:MetadataStore with DB connection initialized
Evaluating the model
eval_config = tfma.EvalConfig(
model_specs=[
tfma.ModelSpec(
signature_name="serving_default",
preprocessing_function_names=["tft_layer"],
label_key="Survived",
)
],
metrics_specs=[
tfma.MetricsSpec(
per_slice_thresholds={
"binary_accuracy": tfma.config.PerSliceMetricThresholds(
thresholds=[
tfma.PerSliceMetricThreshold(
slicing_specs=[tfma.SlicingSpec()],
threshold=tfma.MetricThreshold(
value_threshold=tfma.GenericValueThreshold(
lower_bound={"value": 0.7}
),
change_threshold=tfma.GenericChangeThreshold(
direction=tfma.MetricDirection.HIGHER_IS_BETTER,
absolute={"value": -1e-10},
),
),
)
]
),
}
)
],
slicing_specs=[
tfma.SlicingSpec(),
tfma.SlicingSpec(feature_keys=["sex"])
],
)
model_resolver = tfx.dsl.Resolver(
strategy_class=tfx.dsl.experimental.LatestBlessedModelStrategy,
model=tfx.dsl.Channel(type=tfx.types.standard_artifacts.Model),
model_blessing=tfx.dsl.Channel(type=tfx.types.standard_artifacts.ModelBlessing),
).with_id("latest_blessed_model_resolver")
context.run(model_resolver)
INFO:absl:Running driver for latest_blessed_model_resolver
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running publisher for latest_blessed_model_resolver
INFO:absl:MetadataStore with DB connection initialized
evaluator = tfx.components.Evaluator(
examples=example_gen.outputs["examples"],
model=trainer.outputs["model"],
eval_config=eval_config,
baseline_model=model_resolver.outputs["model"],
)
context.run(evaluator, enable_cache=False)
INFO:absl:Running driver for Evaluator
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for Evaluator
INFO:absl:Nonempty beam arg extra_packages already includes dependency
INFO:absl:udf_utils.get_fn {'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Survived",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "sex"\n ]\n }\n ]\n}', 'feature_slicing_spec': None, 'fairness_indicator_thresholds': None, 'example_splits': 'null', 'module_file': None, 'module_path': None} 'custom_eval_shared_model'
ERROR:absl:There are change thresholds, but the baseline is missing. This is allowed only when rubber stamping (first run).
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
signature_name: "serving_default"
label_key: "Survived"
preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
feature_keys: "sex"
}
metrics_specs {
per_slice_thresholds {
key: "binary_accuracy"
value {
thresholds {
slicing_specs {
}
threshold {
value_threshold {
lower_bound {
value: 0.7
}
}
}
}
}
}
}
INFO:absl:Using /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/Trainer/model/7/Format-Serving as model.
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fa734cb8780> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fa736e87550>).
INFO:absl:The 'example_splits' parameter is not set, using 'eval' split.
INFO:absl:Evaluating model.
INFO:absl:udf_utils.get_fn {'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Survived",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "sex"\n ]\n }\n ]\n}', 'feature_slicing_spec': None, 'fairness_indicator_thresholds': None, 'example_splits': 'null', 'module_file': None, 'module_path': None} 'custom_extractors'
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
signature_name: "serving_default"
label_key: "Survived"
preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
feature_keys: "sex"
}
metrics_specs {
model_names: ""
per_slice_thresholds {
key: "binary_accuracy"
value {
thresholds {
slicing_specs {
}
threshold {
value_threshold {
lower_bound {
value: 0.7
}
}
}
}
}
}
}
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fa7370e9ef0> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fa734af76a0>).
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
INFO:absl:Evaluation complete. Results written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/Evaluator/evaluation/10.
INFO:absl:Checking validation results.
INFO:absl:Blessing result True written to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/Evaluator/blessing/10.
INFO:absl:Running publisher for Evaluator
INFO:absl:MetadataStore with DB connection initialized
Pushing the model
pusher = tfx.components.Pusher(
model=trainer.outputs["model"],
model_blessing=evaluator.outputs["blessing"],
push_destination=tfx.proto.PushDestination(
filesystem=tfx.proto.PushDestination.Filesystem(
base_directory=str(MODEL_DIRECTORY)
)
),
)
context.run(pusher)
INFO:absl:Running driver for Pusher
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for Pusher
INFO:absl:Model version: 1629219803
INFO:absl:Model written to serving path model/1629219803.
INFO:absl:Model pushed to /tmp/tfx-interactive-2021-08-17T16_33_32.748347-oxcimgsl/Pusher/pushed_model/11.
INFO:absl:Running publisher for Pusher
INFO:absl:MetadataStore with DB connection initialized
Running inference
inference_fn = get_inference_fn(MODEL_DIRECTORY)
result = inference_fn(examples=tf.constant(_examples(test_df)))
print(result["output_0"].numpy())
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fa736bb87b8> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fa736b38c88>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fa736bb87b8> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fa736b38c88>).
[[0.38541308]
[0.564719 ]
[0.5069299 ]
[0.34107125]
[0.56801045]
[0.34107125]
[0.9181547 ]
[0.47035894]
[0.8916781 ]
[0.39140537]
[0.30566648]
[0.46022284]
[0.97395056]
[0.47035894]
[0.97395056]
[0.9278288 ]
[0.5069299 ]
[0.34444535]
[0.56801045]
[0.8916781 ]
[0.48101017]
[0.34257215]
[0.9805504 ]
[0.48101017]
[0.9802976 ]
[0.39140537]
[0.9802976 ]
[0.34444535]
[0.46022284]
[0.4233736 ]
[0.47035894]
[0.4613565 ]
[0.67500657]
[0.67500657]
[0.48101017]
[0.34444535]
[0.8152502 ]
[0.8152502 ]
[0.34107125]
[0.4171081 ]
[0.4212707 ]
[0.46022284]
[0.30566648]
[0.9319868 ]
[0.97395056]
[0.34107125]
[0.4738568 ]
[0.38541308]
[0.9853724 ]
[0.67500657]
[0.46703267]
[0.49896684]
[0.9057603 ]
[0.97395056]
[0.49896684]
[0.4540072 ]
[0.30566648]
[0.30566648]
[0.39140537]
[0.9853724 ]
[0.30566648]
[0.42335728]
[0.30566648]
[0.9181547 ]
[0.48101017]
[0.9273216 ]
[0.9181547 ]
[0.43834305]
[0.4738568 ]
[0.97395056]
[0.9181547 ]
[0.30566648]
[0.8152502 ]
[0.4738568 ]
[0.9853724 ]
[0.45338663]
[0.34107125]
[0.9594097 ]
[0.42335728]
[0.9181547 ]
[0.4233736 ]
[0.46703267]
[0.46022284]
[0.30566648]
[0.5069299 ]
[0.4233736 ]
[0.9181547 ]
[0.8152502 ]
[0.9181547 ]
[0.47035894]
[0.56801045]
[0.30566648]
[0.97395056]
[0.34107125]
[0.4738568 ]
[0.30566648]
[0.97395056]
[0.34107125]
[0.8312258 ]
[0.34107125]
[0.9802976 ]
[0.47035894]
[0.38541308]
[0.30566648]
[0.69994307]
[0.3783423 ]
[0.38541308]
[0.38541308]
[0.34107125]
[0.42335728]
[0.49896684]
[0.9181547 ]
[0.9853724 ]
[0.9181547 ]
[0.97395056]
[0.4233736 ]
[0.34444535]
[0.67500657]
[0.45338663]
[0.9057603 ]
[0.9273216 ]
[0.4201071 ]
[0.9802976 ]
[0.30566648]
[0.38541308]
[0.67500657]
[0.30566648]
[0.8737593 ]
[0.42335728]
[0.34107125]
[0.34107125]
[0.4738568 ]
[0.67500657]
[0.38057855]
[0.30566648]
[0.30566648]
[0.34444535]
[0.42335728]
[0.8152502 ]
[0.3913797 ]
[0.7892182 ]
[0.9805504 ]
[0.48101017]
[0.46159923]
[0.46022284]
[0.39140537]
[0.43834305]
[0.34107125]
[0.46022284]
[0.47035894]
[0.9802976 ]
[0.34444535]
[0.3783423 ]
[0.56801045]
[0.3913797 ]
[0.30566648]
[0.9805504 ]
[0.8312258 ]
[0.46022284]
[0.56801045]
[0.9181547 ]
[0.4233736 ]
[0.9319868 ]
[0.30566648]
[0.42335728]
[0.67500657]
[0.45338663]
[0.3913797 ]
[0.9770571 ]
[0.8152502 ]
[0.30566648]
[0.34444535]
[0.38213724]
[0.34444535]
[0.3913797 ]
[0.93834466]
[0.95353526]
[0.48101017]
[0.93834466]
[0.9802976 ]
[0.42335728]
[0.48101017]
[0.97395056]
[0.38541308]
[0.9802976 ]
[0.46159923]
[0.9057603 ]
[0.38213724]
[0.7892182 ]
[0.42335728]
[0.47035894]
[0.46022284]
[0.39140537]
[0.5069299 ]
[0.4613565 ]
[0.30566648]
[0.48101017]
[0.8312258 ]
[0.42335728]
[0.8152502 ]
[0.9018489 ]
[0.38213724]
[0.48101017]
[0.9057603 ]
[0.42335728]
[0.4738568 ]
[0.9181547 ]
[0.42335728]
[0.9770571 ]
[0.30566648]
[0.3783423 ]
[0.30566648]
[0.4706237 ]
[0.9057603 ]
[0.564719 ]
[0.43834305]
[0.9181547 ]
[0.46703267]
[0.9802976 ]
[0.34107125]
[0.88841474]
[0.34107125]
[0.9319868 ]
[0.30566648]
[0.9770571 ]
[0.8078854 ]
[0.30566648]
[0.9181547 ]
[0.3783423 ]
[0.42335728]
[0.4706237 ]
[0.9695786 ]
[0.34257215]
[0.38541308]
[0.48101017]
[0.30566648]
[0.48101017]
[0.34444535]
[0.85431874]
[0.9802976 ]
[0.9770571 ]
[0.9057603 ]
[0.48101017]
[0.30566648]
[0.39140537]
[0.46703267]
[0.9273216 ]
[0.4613565 ]
[0.9057603 ]
[0.8078854 ]
[0.9057603 ]
[0.34107125]
[0.48101017]
[0.34107125]
[0.34107125]
[0.30566648]
[0.38541308]
[0.34107125]
[0.9319868 ]
[0.30566648]
[0.39140537]
[0.30566648]
[0.9057603 ]
[0.56801045]
[0.46844658]
[0.30566648]
[0.42322862]
[0.30566648]
[0.8152502 ]
[0.34107125]
[0.45338663]
[0.38541308]
[0.9802976 ]
[0.8737593 ]
[0.34444535]
[0.9057603 ]
[0.42335728]
[0.47035894]
[0.47035894]
[0.42335728]
[0.8152502 ]
[0.38213724]
[0.9181547 ]
[0.8078854 ]
[0.67500657]
[0.30566648]
[0.30566648]
[0.46703267]
[0.34444535]
[0.34107125]
[0.43834305]
[0.9181547 ]
[0.34444535]
[0.46703267]
[0.34107125]
[0.30566648]
[0.9531303 ]
[0.4233736 ]
[0.43834305]
[0.30566648]
[0.30566648]
[0.49896684]
[0.47035894]
[0.34107125]
[0.9181547 ]
[0.9594097 ]
[0.46703267]
[0.38213724]
[0.46703267]
[0.56801045]
[0.34107125]
[0.34444535]
[0.30566648]
[0.9181547 ]
[0.9853724 ]
[0.9181547 ]
[0.48101017]
[0.42335728]
[0.30566648]
[0.4613565 ]
[0.30566648]
[0.34444535]
[0.42335728]
[0.46022284]
[0.9805504 ]
[0.30566648]
[0.93834466]
[0.45338663]
[0.47035894]
[0.42335728]
[0.93834466]
[0.4738568 ]
[0.34444535]
[0.8078854 ]
[0.30566648]
[0.46022284]
[0.42335728]
[0.38057855]
[0.4706237 ]
[0.34444535]
[0.42335728]
[0.30566648]
[0.3913797 ]
[0.9802976 ]
[0.39140537]
[0.8312258 ]
[0.42335728]
[0.8916781 ]
[0.42335728]
[0.9273216 ]
[0.9802976 ]
[0.42335728]
[0.4706237 ]
[0.4613565 ]
[0.67500657]
[0.46022284]
[0.97395056]
[0.30566648]
[0.38541308]
[0.67500657]
[0.3913797 ]
[0.9531303 ]
[0.9273216 ]
[0.34107125]
[0.9802976 ]
[0.7892182 ]
[0.4233736 ]
[0.8491244 ]
[0.9802976 ]
[0.46844658]
[0.47038573]
[0.9853724 ]
[0.42322862]
[0.42335728]
[0.97395056]
[0.9853724 ]
[0.56801045]
[0.42335728]
[0.43834305]
[0.3913797 ]
[0.38541308]
[0.38541308]
[0.7981297 ]
[0.67500657]
[0.42335728]
[0.93834466]
[0.30566648]
[0.42335728]
[0.38541308]
[0.39140537]
[0.43834305]
[0.97395056]
[0.39140537]
[0.42335728]
[0.39140537]
[0.97395056]
[0.38541308]
[0.9802976 ]
[0.30566648]
[0.38541308]
[0.9805504 ]
[0.47035894]
[0.9802976 ]
[0.43834305]
[0.5042321 ]
[0.46844658]
[0.47038573]
[0.48101017]
[0.9181547 ]
[0.56801045]
[0.9181547 ]
[0.9840547 ]
[0.8312258 ]
[0.34107125]
[0.9853724 ]
[0.30566648]
[0.34107125]
[0.4233736 ]]
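The values above are the raw sigmoid outputs of the model, one survival probability per passenger in the test set. As an optional post-processing step, they can be thresholded into binary predictions; the sketch below assumes a 0.5 cutoff, which is a choice made here for illustration rather than anything the pipeline prescribes.
import numpy as np

# Turn the sigmoid outputs into binary Survived predictions.
# The 0.5 cutoff is an assumption made for this sketch.
probabilities = result["output_0"].numpy().reshape(-1)
predictions = (probabilities >= 0.5).astype(int)
print(f"Predicted survivors: {predictions.sum()} of {len(predictions)} passengers")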
Pipeline
Setting up the schema
!mkdir -p {SCHEMA_DIRECTORY}
%%writefile {SCHEMA_FILENAME}
feature {
name: "Embarked"
type: BYTES
domain: "Embarked"
presence {
min_fraction: 1.0
min_count: 1
}
shape {
dim {
size: 1
}
}
}
feature {
name: "Sex"
type: BYTES
domain: "Sex"
presence {
min_fraction: 1.0
min_count: 1
}
shape {
dim {
size: 1
}
}
}
feature {
name: "Cabin"
type: BYTES
presence {
min_fraction: 0.0
min_count: 1
}
}
feature {
name: "Age"
type: INT
presence {
min_fraction: 1.0
min_count: 1
}
shape {
dim {
size: 1
}
}
}
feature {
name: "Fare"
type: FLOAT
presence {
min_fraction: 1.0
min_count: 1
}
shape {
dim {
size: 1
}
}
}
feature {
name: "Parch"
type: INT
presence {
min_fraction: 1.0
min_count: 1
}
shape {
dim {
size: 1
}
}
}
feature {
name: "Pclass"
type: INT
presence {
min_fraction: 1.0
min_count: 1
}
shape {
dim {
size: 1
}
}
}
feature {
name: "SibSp"
type: INT
presence {
min_fraction: 1.0
min_count: 1
}
shape {
dim {
size: 1
}
}
}
feature {
name: "Survived"
type: INT
bool_domain {
}
presence {
min_fraction: 1.0
min_count: 1
}
not_in_environment: "SERVING"
shape {
dim {
size: 1
}
}
}
string_domain {
name: "Embarked"
value: "C"
value: "Q"
value: "S"
}
string_domain {
name: "Sex"
value: "female"
value: "male"
}
default_environment: "TRAINING"
default_environment: "SERVING"
Writing pipelines/titanic-pipeline/schema/schema.pbtxt
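Before wiring the hand-written schema into the pipeline, it can be worth sanity-checking that the file parses and looks as expected. A minimal, optional check using the TFDV helpers imported at the top of the notebook:
# Parse the schema file we just wrote and render it for inspection.
schema = tfdv.load_schema_text(SCHEMA_FILENAME)
tfdv.display_schema(schema)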
Running the pipeline
import tensorflow_model_analysis as tfma
def create_pipeline(
pipeline_name: str,
pipeline_directory: str,
data_directory: str,
schema_path: str,
model_directory: str,
metadata_path: str,
transform_module_path: str,
trainer_module_path: str,
) -> tfx.dsl.Pipeline:
input_config = tfx.proto.Input(splits=[
tfx.proto.Input.Split(name="train", pattern=DATA_TRAIN_FILENAME),
tfx.proto.Input.Split(name="eval", pattern=DATA_EVAL_FILENAME)
])
example_gen = tfx.components.CsvExampleGen(
input_base=data_directory,
input_config=input_config
)
statistics_gen = tfx.components.StatisticsGen(
examples=example_gen.outputs["examples"]
)
schema_importer = tfx.dsl.Importer(
source_uri=schema_path,
artifact_type=tfx.types.standard_artifacts.Schema
).with_id("schema_importer")
example_validator = tfx.components.ExampleValidator(
statistics=statistics_gen.outputs["statistics"],
schema=schema_importer.outputs["result"]
)
transform = tfx.components.Transform(
examples=example_gen.outputs["examples"],
schema=schema_importer.outputs["result"],
module_file=os.path.abspath(transform_module_path),
)
trainer = tfx.components.Trainer(
module_file=os.path.abspath(trainer_module_path),
examples=example_gen.outputs["examples"],
transform_graph=transform.outputs["transform_graph"],
train_args=tfx.proto.TrainArgs(num_steps=100),
eval_args=tfx.proto.EvalArgs(num_steps=5),
)
eval_config = tfma.EvalConfig(
model_specs=[
tfma.ModelSpec(
signature_name="serving_default",
preprocessing_function_names=["tft_layer"],
label_key="Survived",
)
],
metrics_specs=[
tfma.MetricsSpec(
per_slice_thresholds={
"binary_accuracy": tfma.config.PerSliceMetricThresholds(
thresholds=[
tfma.PerSliceMetricThreshold(
slicing_specs=[tfma.SlicingSpec()],
threshold=tfma.MetricThreshold(
value_threshold=tfma.GenericValueThreshold(
lower_bound={"value": 0.7}
),
change_threshold=tfma.GenericChangeThreshold(
direction=tfma.MetricDirection.HIGHER_IS_BETTER,
absolute={"value": -1e-10},
),
),
)
]
),
}
)
],
slicing_specs=[
tfma.SlicingSpec(),
tfma.SlicingSpec(feature_keys=["sex"])
],
)
model_resolver = tfx.dsl.Resolver(
strategy_class=tfx.dsl.experimental.LatestBlessedModelStrategy,
model=tfx.dsl.Channel(type=tfx.types.standard_artifacts.Model),
model_blessing=tfx.dsl.Channel(type=tfx.types.standard_artifacts.ModelBlessing),
).with_id("latest_blessed_model_resolver")
evaluator = tfx.components.Evaluator(
examples=example_gen.outputs["examples"],
model=trainer.outputs["model"],
eval_config=eval_config,
baseline_model=model_resolver.outputs["model"],
)
pusher = tfx.components.Pusher(
model=trainer.outputs["model"],
model_blessing=evaluator.outputs["blessing"],
push_destination=tfx.proto.PushDestination(
filesystem=tfx.proto.PushDestination.Filesystem(
base_directory=model_directory
)
),
)
components = [
example_gen,
statistics_gen,
schema_importer,
example_validator,
transform,
trainer,
model_resolver,
evaluator,
pusher,
]
return tfx.dsl.Pipeline(
pipeline_name=pipeline_name,
pipeline_root=pipeline_directory,
metadata_connection_config=tfx.orchestration.metadata.sqlite_metadata_connection_config(
metadata_path
),
components=components,
)
tfx.orchestration.LocalDagRunner().run(
create_pipeline(
pipeline_name=PIPELINE_NAME,
pipeline_directory=str(PIPELINE_DIRECTORY),
data_directory=DATA_DIRECTORY,
schema_path=str(SCHEMA_DIRECTORY),
model_directory=str(MODEL_DIRECTORY),
metadata_path=str(METADATA_PATH),
transform_module_path=TRANSFORM_MODULE_PATH,
trainer_module_path=TRAINER_MODULE_PATH
)
)
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 8
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=8, input_dict={'baseline_model': [], 'examples': [Artifact(artifact: id: 1
type_id: 6
uri: "pipelines/titanic-pipeline/CsvExampleGen/examples/1"
properties {
key: "split_names"
value {
string_value: "[\"train\", \"eval\"]"
}
}
custom_properties {
key: "input_fingerprint"
value {
string_value: "split:train,num_files:1,total_bytes:23732,xor_checksum:1629143312,sum_checksum:1629143312\nsplit:eval,num_files:1,total_bytes:23732,xor_checksum:1629143312,sum_checksum:1629143312"
}
}
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:CsvExampleGen:examples:0"
}
}
custom_properties {
key: "payload_format"
value {
string_value: "FORMAT_TF_EXAMPLE"
}
}
custom_properties {
key: "span"
value {
int_value: 0
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.0.0"
}
}
state: LIVE
create_time_since_epoch: 1629144924845
last_update_time_since_epoch: 1629144924845
, artifact_type: id: 6
name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)], 'model': [Artifact(artifact: id: 13
type_id: 18
uri: "pipelines/titanic-pipeline/Trainer/model/7"
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Trainer:model:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.0.0"
}
}
state: LIVE
create_time_since_epoch: 1629144955564
last_update_time_since_epoch: 1629144955564
, artifact_type: id: 18
name: "Model"
)]}, output_dict=defaultdict(<class 'list'>, {'blessing': [Artifact(artifact: uri: "pipelines/titanic-pipeline/Evaluator/blessing/8"
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Evaluator:blessing:0"
}
}
, artifact_type: name: "ModelBlessing"
)], 'evaluation': [Artifact(artifact: uri: "pipelines/titanic-pipeline/Evaluator/evaluation/8"
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Evaluator:evaluation:0"
}
}
, artifact_type: name: "ModelEvaluation"
)]}), exec_properties={'example_splits': 'null', 'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Survived",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "sex"\n ]\n }\n ]\n}'}, execution_output_uri='pipelines/titanic-pipeline/Evaluator/.system/executor_execution/8/executor_output.pb', stateful_working_dir='pipelines/titanic-pipeline/Evaluator/.system/stateful_working_dir/2021-08-16T20:15:22.938616', tmp_dir='pipelines/titanic-pipeline/Evaluator/.system/executor_execution/8/.temp/', pipeline_node=node_info {
type {
name: "tfx.components.evaluator.component.Evaluator"
}
id: "Evaluator"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.Evaluator"
}
}
}
}
inputs {
inputs {
key: "baseline_model"
value {
channels {
producer_node_query {
id: "latest_blessed_model_resolver"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.latest_blessed_model_resolver"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
inputs {
key: "examples"
value {
channels {
producer_node_query {
id: "CsvExampleGen"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.CsvExampleGen"
}
}
}
artifact_query {
type {
name: "Examples"
}
}
output_key: "examples"
}
}
}
inputs {
key: "model"
value {
channels {
producer_node_query {
id: "Trainer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.Trainer"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
}
outputs {
outputs {
key: "blessing"
value {
artifact_spec {
type {
name: "ModelBlessing"
}
}
}
}
outputs {
key: "evaluation"
value {
artifact_spec {
type {
name: "ModelEvaluation"
}
}
}
}
}
parameters {
parameters {
key: "eval_config"
value {
field_value {
string_value: "{\n \"metrics_specs\": [\n {\n \"per_slice_thresholds\": {\n \"binary_accuracy\": {\n \"thresholds\": [\n {\n \"slicing_specs\": [\n {}\n ],\n \"threshold\": {\n \"change_threshold\": {\n \"absolute\": -1e-10,\n \"direction\": \"HIGHER_IS_BETTER\"\n },\n \"value_threshold\": {\n \"lower_bound\": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n \"model_specs\": [\n {\n \"label_key\": \"Survived\",\n \"preprocessing_function_names\": [\n \"tft_layer\"\n ],\n \"signature_name\": \"serving_default\"\n }\n ],\n \"slicing_specs\": [\n {},\n {\n \"feature_keys\": [\n \"sex\"\n ]\n }\n ]\n}"
}
}
}
parameters {
key: "example_splits"
value {
field_value {
string_value: "null"
}
}
}
}
upstream_nodes: "CsvExampleGen"
upstream_nodes: "Trainer"
upstream_nodes: "latest_blessed_model_resolver"
downstream_nodes: "Pusher"
execution_options {
caching_options {
}
}
, pipeline_info=id: "titanic-pipeline"
, pipeline_run_id='2021-08-16T20:15:22.938616')
INFO:absl:udf_utils.get_fn {'example_splits': 'null', 'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Survived",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "sex"\n ]\n }\n ]\n}'} 'custom_eval_shared_model'
ERROR:absl:There are change thresholds, but the baseline is missing. This is allowed only when rubber stamping (first run).
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
signature_name: "serving_default"
label_key: "Survived"
preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
feature_keys: "sex"
}
metrics_specs {
per_slice_thresholds {
key: "binary_accuracy"
value {
thresholds {
slicing_specs {
}
threshold {
value_threshold {
lower_bound {
value: 0.7
}
}
}
}
}
}
}
INFO:absl:Using pipelines/titanic-pipeline/Trainer/model/7/Format-Serving as model.
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f0b9c50> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f28d320>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f0b9c50> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f28d320>).
INFO:absl:The 'example_splits' parameter is not set, using 'eval' split.
INFO:absl:Evaluating model.
INFO:absl:udf_utils.get_fn {'example_splits': 'null', 'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Survived",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "sex"\n ]\n }\n ]\n}'} 'custom_extractors'
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
signature_name: "serving_default"
label_key: "Survived"
preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
feature_keys: "sex"
}
metrics_specs {
model_names: ""
per_slice_thresholds {
key: "binary_accuracy"
value {
thresholds {
slicing_specs {
}
threshold {
value_threshold {
lower_bound {
value: 0.7
}
}
}
}
}
}
}
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
signature_name: "serving_default"
label_key: "Survived"
preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
feature_keys: "sex"
}
metrics_specs {
model_names: ""
per_slice_thresholds {
key: "binary_accuracy"
value {
thresholds {
slicing_specs {
}
threshold {
value_threshold {
lower_bound {
value: 0.7
}
}
}
}
}
}
}
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
signature_name: "serving_default"
label_key: "Survived"
preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
feature_keys: "sex"
}
metrics_specs {
model_names: ""
per_slice_thresholds {
key: "binary_accuracy"
value {
thresholds {
slicing_specs {
}
threshold {
value_threshold {
lower_bound {
value: 0.7
}
}
}
}
}
}
}
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f9efe48> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f9e60b8>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f9efe48> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f9e60b8>).
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f5c6710> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f5abe80>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f5c6710> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f5abe80>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f2e7be0> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f333320>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f2e7be0> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f333320>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0ee0f2e8> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0ee004e0>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0ee0f2e8> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0ee004e0>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0eb16630> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0eb6fa20>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0eb16630> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0eb6fa20>).
Exception ignored in: <bound method CapturableResource.__del__ of <tensorflow.python.saved_model.load._RestoredResource object at 0x7fce0f19e6a0>>
Traceback (most recent call last):
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/training/tracking/tracking.py", line 277, in __del__
self._destroy_resource()
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py", line 889, in __call__
result = self._call(*args, **kwds)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py", line 924, in _call
results = self._stateful_fn(*args, **kwds)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py", line 3022, in __call__
filtered_flat_args) = self._maybe_define_function(args, kwargs)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py", line 3444, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py", line 3289, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py", line 999, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py", line 672, in wrapped_fn
out = weak_wrapped_fn().__wrapped__(*args, **kwds)
AttributeError: 'NoneType' object has no attribute '__wrapped__'
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0e8c0a20> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0e90c780>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0e8c0a20> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0e90c780>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f325860> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f2771d0>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce0f325860> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce0f2771d0>).
INFO:absl:Evaluation complete. Results written to pipelines/titanic-pipeline/Evaluator/evaluation/8.
INFO:absl:Checking validation results.
INFO:absl:Blessing result True written to pipelines/titanic-pipeline/Evaluator/blessing/8.
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 8 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'blessing': [Artifact(artifact: uri: "pipelines/titanic-pipeline/Evaluator/blessing/8"
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Evaluator:blessing:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.0.0"
}
}
, artifact_type: name: "ModelBlessing"
)], 'evaluation': [Artifact(artifact: uri: "pipelines/titanic-pipeline/Evaluator/evaluation/8"
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Evaluator:evaluation:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.0.0"
}
}
, artifact_type: name: "ModelEvaluation"
)]}) for execution 8
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component Evaluator is finished.
INFO:absl:Component Pusher is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.components.pusher.component.Pusher"
}
id: "Pusher"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.Pusher"
}
}
}
}
inputs {
inputs {
key: "model"
value {
channels {
producer_node_query {
id: "Trainer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.Trainer"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
inputs {
key: "model_blessing"
value {
channels {
producer_node_query {
id: "Evaluator"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.Evaluator"
}
}
}
artifact_query {
type {
name: "ModelBlessing"
}
}
output_key: "blessing"
}
}
}
}
outputs {
outputs {
key: "pushed_model"
value {
artifact_spec {
type {
name: "PushedModel"
}
}
}
}
}
parameters {
parameters {
key: "custom_config"
value {
field_value {
string_value: "null"
}
}
}
parameters {
key: "push_destination"
value {
field_value {
string_value: "{\n \"filesystem\": {\n \"base_directory\": \"model\"\n }\n}"
}
}
}
}
upstream_nodes: "Evaluator"
upstream_nodes: "Trainer"
execution_options {
caching_options {
}
}
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 9
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=9, input_dict={'model': [Artifact(artifact: id: 13
type_id: 18
uri: "pipelines/titanic-pipeline/Trainer/model/7"
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Trainer:model:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.0.0"
}
}
state: LIVE
create_time_since_epoch: 1629144955564
last_update_time_since_epoch: 1629144955564
, artifact_type: id: 18
name: "Model"
)], 'model_blessing': [Artifact(artifact: id: 15
type_id: 21
uri: "pipelines/titanic-pipeline/Evaluator/blessing/8"
custom_properties {
key: "blessed"
value {
int_value: 1
}
}
custom_properties {
key: "current_model"
value {
string_value: "pipelines/titanic-pipeline/Trainer/model/7"
}
}
custom_properties {
key: "current_model_id"
value {
int_value: 13
}
}
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Evaluator:blessing:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.0.0"
}
}
state: LIVE
create_time_since_epoch: 1629144965441
last_update_time_since_epoch: 1629144965441
, artifact_type: id: 21
name: "ModelBlessing"
)]}, output_dict=defaultdict(<class 'list'>, {'pushed_model': [Artifact(artifact: uri: "pipelines/titanic-pipeline/Pusher/pushed_model/9"
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Pusher:pushed_model:0"
}
}
, artifact_type: name: "PushedModel"
)]}), exec_properties={'custom_config': 'null', 'push_destination': '{\n "filesystem": {\n "base_directory": "model"\n }\n}'}, execution_output_uri='pipelines/titanic-pipeline/Pusher/.system/executor_execution/9/executor_output.pb', stateful_working_dir='pipelines/titanic-pipeline/Pusher/.system/stateful_working_dir/2021-08-16T20:15:22.938616', tmp_dir='pipelines/titanic-pipeline/Pusher/.system/executor_execution/9/.temp/', pipeline_node=node_info {
type {
name: "tfx.components.pusher.component.Pusher"
}
id: "Pusher"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.Pusher"
}
}
}
}
inputs {
inputs {
key: "model"
value {
channels {
producer_node_query {
id: "Trainer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.Trainer"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
inputs {
key: "model_blessing"
value {
channels {
producer_node_query {
id: "Evaluator"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "titanic-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-08-16T20:15:22.938616"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "titanic-pipeline.Evaluator"
}
}
}
artifact_query {
type {
name: "ModelBlessing"
}
}
output_key: "blessing"
}
}
}
}
outputs {
outputs {
key: "pushed_model"
value {
artifact_spec {
type {
name: "PushedModel"
}
}
}
}
}
parameters {
parameters {
key: "custom_config"
value {
field_value {
string_value: "null"
}
}
}
parameters {
key: "push_destination"
value {
field_value {
string_value: "{\n \"filesystem\": {\n \"base_directory\": \"model\"\n }\n}"
}
}
}
}
upstream_nodes: "Evaluator"
upstream_nodes: "Trainer"
execution_options {
caching_options {
}
}
, pipeline_info=id: "titanic-pipeline"
, pipeline_run_id='2021-08-16T20:15:22.938616')
INFO:absl:Model version: 1629144965
INFO:absl:Model written to serving path model/1629144965.
INFO:absl:Model pushed to pipelines/titanic-pipeline/Pusher/pushed_model/9.
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 9 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'pushed_model': [Artifact(artifact: uri: "pipelines/titanic-pipeline/Pusher/pushed_model/9"
custom_properties {
key: "name"
value {
string_value: "titanic-pipeline:2021-08-16T20:15:22.938616:Pusher:pushed_model:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.0.0"
}
}
, artifact_type: name: "PushedModel"
)]}) for execution 9
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component Pusher is finished.
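With the pipeline finished, the Pusher has exported another timestamped version under MODEL_DIRECTORY. As an optional sanity check before running inference, we can list the serving versions now available; get_inference_fn will pick the highest (most recent) one.
# List the timestamped serving directories written by the Pusher so far.
versions = sorted(int(d.name) for d in os.scandir(MODEL_DIRECTORY) if d.is_dir())
print(versions)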
Running inference
inference_fn = get_inference_fn(MODEL_DIRECTORY)
result = inference_fn(examples=tf.constant(_examples(test_df)))
print(result["output_0"].numpy())
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce10505c18> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce1050b908>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<tensorflow.python.keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7fce10505c18> and <tensorflow.python.keras.engine.input_layer.InputLayer object at 0x7fce1050b908>).
[[0.15648982]
[0.6764219 ]
[0.25309995]
[0.3232775 ]
[0.6605456 ]
[0.3232775 ]
[0.81813407]
[0.5495473 ]
[0.78274167]
[0.36518538]
[0.208195 ]
[0.4664083 ]
[0.9580694 ]
[0.5495473 ]
[0.9580694 ]
[0.8674047 ]
[0.25309995]
[0.1809076 ]
[0.6605456 ]
[0.78274167]
[0.6706255 ]
[0.32690546]
[0.978984 ]
[0.6706255 ]
[0.97553676]
[0.36518538]
[0.97553676]
[0.1809076 ]
[0.4664083 ]
[0.5509802 ]
[0.5495473 ]
[0.5128103 ]
[0.70427066]
[0.70427066]
[0.6706255 ]
[0.1809076 ]
[0.734724 ]
[0.734724 ]
[0.3232775 ]
[0.3574509 ]
[0.40312576]
[0.4664083 ]
[0.208195 ]
[0.8490596 ]
[0.9580694 ]
[0.3232775 ]
[0.42425227]
[0.15648982]
[0.98744124]
[0.70427066]
[0.5617648 ]
[0.42294723]
[0.8332637 ]
[0.9580694 ]
[0.42294723]
[0.50756484]
[0.208195 ]
[0.208195 ]
[0.36518538]
[0.98744124]
[0.208195 ]
[0.3244478 ]
[0.208195 ]
[0.81813407]
[0.6706255 ]
[0.86446095]
[0.81813407]
[0.42466256]
[0.42425227]
[0.9580694 ]
[0.81813407]
[0.208195 ]
[0.734724 ]
[0.42425227]
[0.98744124]
[0.539673 ]
[0.3232775 ]
[0.92241 ]
[0.3244478 ]
[0.81813407]
[0.5509802 ]
[0.5617648 ]
[0.4664083 ]
[0.208195 ]
[0.25309995]
[0.5509802 ]
[0.81813407]
[0.734724 ]
[0.81813407]
[0.5495473 ]
[0.6605456 ]
[0.208195 ]
[0.9580694 ]
[0.3232775 ]
[0.42425227]
[0.208195 ]
[0.9580694 ]
[0.3232775 ]
[0.6935422 ]
[0.3232775 ]
[0.97553676]
[0.5495473 ]
[0.15648982]
[0.208195 ]
[0.7596326 ]
[0.37458476]
[0.15648982]
[0.15648982]
[0.3232775 ]
[0.3244478 ]
[0.42294723]
[0.81813407]
[0.98744124]
[0.81813407]
[0.9580694 ]
[0.5509802 ]
[0.1809076 ]
[0.70427066]
[0.539673 ]
[0.8332637 ]
[0.86446095]
[0.26874122]
[0.97553676]
[0.208195 ]
[0.15648982]
[0.70427066]
[0.208195 ]
[0.84150124]
[0.3244478 ]
[0.3232775 ]
[0.3232775 ]
[0.42425227]
[0.70427066]
[0.29700172]
[0.208195 ]
[0.208195 ]
[0.1809076 ]
[0.3244478 ]
[0.734724 ]
[0.28101504]
[0.65009475]
[0.978984 ]
[0.6706255 ]
[0.41980258]
[0.4664083 ]
[0.36518538]
[0.42466256]
[0.3232775 ]
[0.4664083 ]
[0.5495473 ]
[0.97553676]
[0.1809076 ]
[0.37458476]
[0.6605456 ]
[0.28101504]
[0.208195 ]
[0.978984 ]
[0.6935422 ]
[0.4664083 ]
[0.6605456 ]
[0.81813407]
[0.5509802 ]
[0.8490596 ]
[0.208195 ]
[0.3244478 ]
[0.70427066]
[0.539673 ]
[0.28101504]
[0.9490087 ]
[0.734724 ]
[0.208195 ]
[0.1809076 ]
[0.40358454]
[0.1809076 ]
[0.28101504]
[0.82704073]
[0.8449516 ]
[0.6706255 ]
[0.82704073]
[0.97553676]
[0.3244478 ]
[0.6706255 ]
[0.9580694 ]
[0.15648982]
[0.97553676]
[0.41980258]
[0.8332637 ]
[0.40358454]
[0.65009475]
[0.3244478 ]
[0.5495473 ]
[0.4664083 ]
[0.36518538]
[0.25309995]
[0.5128103 ]
[0.208195 ]
[0.6706255 ]
[0.6935422 ]
[0.3244478 ]
[0.734724 ]
[0.87497425]
[0.40358454]
[0.6706255 ]
[0.8332637 ]
[0.3244478 ]
[0.42425227]
[0.81813407]
[0.3244478 ]
[0.9490087 ]
[0.208195 ]
[0.37458476]
[0.208195 ]
[0.41917267]
[0.8332637 ]
[0.6764219 ]
[0.42466256]
[0.81813407]
[0.5617648 ]
[0.97553676]
[0.3232775 ]
[0.8481972 ]
[0.3232775 ]
[0.8490596 ]
[0.208195 ]
[0.9490087 ]
[0.79128706]
[0.208195 ]
[0.81813407]
[0.37458476]
[0.3244478 ]
[0.41917267]
[0.9604112 ]
[0.32690546]
[0.15648982]
[0.6706255 ]
[0.208195 ]
[0.6706255 ]
[0.1809076 ]
[0.8101928 ]
[0.97553676]
[0.9490087 ]
[0.8332637 ]
[0.6706255 ]
[0.208195 ]
[0.36518538]
[0.5617648 ]
[0.86446095]
[0.5128103 ]
[0.8332637 ]
[0.79128706]
[0.8332637 ]
[0.3232775 ]
[0.6706255 ]
[0.3232775 ]
[0.3232775 ]
[0.208195 ]
[0.15648982]
[0.3232775 ]
[0.8490596 ]
[0.208195 ]
[0.36518538]
[0.208195 ]
[0.8332637 ]
[0.6605456 ]
[0.28745514]
[0.208195 ]
[0.20996258]
[0.208195 ]
[0.734724 ]
[0.3232775 ]
[0.539673 ]
[0.15648982]
[0.97553676]
[0.84150124]
[0.1809076 ]
[0.8332637 ]
[0.3244478 ]
[0.5495473 ]
[0.5495473 ]
[0.3244478 ]
[0.734724 ]
[0.40358454]
[0.81813407]
[0.79128706]
[0.70427066]
[0.208195 ]
[0.208195 ]
[0.5617648 ]
[0.1809076 ]
[0.3232775 ]
[0.42466256]
[0.81813407]
[0.1809076 ]
[0.5617648 ]
[0.3232775 ]
[0.208195 ]
[0.8823198 ]
[0.5509802 ]
[0.42466256]
[0.208195 ]
[0.208195 ]
[0.42294723]
[0.5495473 ]
[0.3232775 ]
[0.81813407]
[0.92241 ]
[0.5617648 ]
[0.40358454]
[0.5617648 ]
[0.6605456 ]
[0.3232775 ]
[0.1809076 ]
[0.208195 ]
[0.81813407]
[0.98744124]
[0.81813407]
[0.6706255 ]
[0.3244478 ]
[0.208195 ]
[0.5128103 ]
[0.208195 ]
[0.1809076 ]
[0.3244478 ]
[0.4664083 ]
[0.978984 ]
[0.208195 ]
[0.82704073]
[0.539673 ]
[0.5495473 ]
[0.3244478 ]
[0.82704073]
[0.42425227]
[0.1809076 ]
[0.79128706]
[0.208195 ]
[0.4664083 ]
[0.3244478 ]
[0.29700172]
[0.41917267]
[0.1809076 ]
[0.3244478 ]
[0.208195 ]
[0.28101504]
[0.97553676]
[0.36518538]
[0.6935422 ]
[0.3244478 ]
[0.78274167]
[0.3244478 ]
[0.86446095]
[0.97553676]
[0.3244478 ]
[0.41917267]
[0.5128103 ]
[0.70427066]
[0.4664083 ]
[0.9580694 ]
[0.208195 ]
[0.15648982]
[0.70427066]
[0.28101504]
[0.8823198 ]
[0.86446095]
[0.3232775 ]
[0.97553676]
[0.65009475]
[0.5509802 ]
[0.7256061 ]
[0.97553676]
[0.28745514]
[0.44701397]
[0.98744124]
[0.20996258]
[0.3244478 ]
[0.9580694 ]
[0.98744124]
[0.6605456 ]
[0.3244478 ]
[0.42466256]
[0.28101504]
[0.15648982]
[0.15648982]
[0.7721895 ]
[0.70427066]
[0.3244478 ]
[0.82704073]
[0.208195 ]
[0.3244478 ]
[0.15648982]
[0.36518538]
[0.42466256]
[0.9580694 ]
[0.36518538]
[0.3244478 ]
[0.36518538]
[0.9580694 ]
[0.15648982]
[0.97553676]
[0.208195 ]
[0.15648982]
[0.978984 ]
[0.5495473 ]
[0.97553676]
[0.42466256]
[0.5536233 ]
[0.28745514]
[0.44701397]
[0.6706255 ]
[0.81813407]
[0.6605456 ]
[0.81813407]
[0.96847916]
[0.6935422 ]
[0.3232775 ]
[0.98744124]
[0.208195 ]
[0.3232775 ]
[0.5509802 ]]