# !pip install -q -U tfx==1.4.0
# !pip install tensorflow==2.7.0
# !pip install tensorflow_decision_forests==0.2.1
# creating required directories
!mkdir -p saved_data
!mkdir -p model
!mkdir -p data
import tempfile
import tensorflow as tf
import urllib.request
import os
import pandas as pd
import shutil
import tensorflow_data_validation as tfdv
import tensorflow_model_analysis as tfma
import tensorflow_decision_forests as tfdf
from absl import logging
from pathlib import Path
from tfx import v1 as tfx
from tensorflow_metadata.proto.v0 import schema_pb2
from tfx.proto import example_gen_pb2
print(f"Tensorflow Version: {tf.__version__}")
print(f"TFX Version: {tfx.__version__}")
print(f"TFDF Version: {tfdf.__version__}")
print(f"Tensorflow Data Validation Version: {tfdv.__version__}")
logging.set_verbosity(logging.INFO)
Tensorflow Version: 2.7.0
TFX Version: 1.4.0
TFDF Version: 0.2.1
Tensorflow Data Validation Version: 1.4.0
DATA_DIRECTORY = 'data'
DATA_SOURCE_PATH = Path(DATA_DIRECTORY) / 'Social_Network_Ads.csv'
SAVED_DATA = 'saved_data'
DATA_TRAIN_FILENAME = Path(SAVED_DATA) / 'train.csv'
DATA_TEST_FILENAME = Path(SAVED_DATA) / 'test.csv'
PIPELINE_NAME = 'sample-pipeline'
PIPELINE_DIRECTORY = os.path.join(Path('pipelines'), PIPELINE_NAME)
METADATA_PATH = Path("metadata") / PIPELINE_NAME / "metadata.db"
SCHEMA_DIRECTORY = os.path.join(PIPELINE_DIRECTORY, 'schema')
SCHEMA_FILE_NAME = str(os.path.join(SCHEMA_DIRECTORY, 'schema.pbtxt'))
MODEL_DIRECTORY = Path('model')
# Module Paths
CONSTANTS_MODULE_PATH = 'constants.py'
TRANSFORM_MODULE_PATH = 'transform.py'
TRAINER_MODULE_PATH = 'trainer.py'
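# The notebook assumes `data/Social_Network_Ads.csv` is already present (note
# that `urllib.request` is imported above for exactly this kind of step). If
# the file is missing, a fetch along these lines would work; the URL below is
# a placeholder for wherever the dataset is hosted, not a real endpoint.
# if not DATA_SOURCE_PATH.exists():
#     urllib.request.urlretrieve(
#         "https://example.com/Social_Network_Ads.csv",  # hypothetical URL
#         DATA_SOURCE_PATH,
#     )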
data_df = pd.read_csv(DATA_SOURCE_PATH)
# shuffling the rows, then splitting them 70/30 into training and test sets
data_df = data_df.sample(frac=1)
train_df = data_df[: int(len(data_df) * 0.7)]
test_df = data_df[int(len(data_df) * 0.7): ]
# removing the undesired columns from all the datasets
datasets = [train_df, test_df]
drop_columns = ['User ID']
for dataset in datasets:
    dataset.drop(drop_columns, axis=1, inplace=True)
train_df.to_csv(DATA_TRAIN_FILENAME, index=False)
test_df.to_csv(DATA_TEST_FILENAME, index=False)
/usr/local/lib/python3.7/dist-packages/pandas/core/frame.py:4174: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
errors=errors,
# peeking at the data
train_df.info()
<class 'pandas.core.frame.DataFrame'>
Int64Index: 280 entries, 154 to 96
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 Gender 280 non-null object
1 Age 280 non-null int64
2 EstimatedSalary 280 non-null int64
3 Purchased 280 non-null int64
dtypes: int64(3), object(1)
memory usage: 10.9+ KB
# helper functions that wrap raw Python values in tf.train.Feature protos
def _bytes_feature(value):
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=[bytes(str(value), encoding="raw_unicode_escape")]))

def _float_feature(value):
    return tf.train.Feature(float_list=tf.train.FloatList(value=[value]))

def _int64_feature(value):
    return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
# This function converts our test set into serialized tf.Examples that match
# the serving schema, so we can use it for inference later.
def _examples(df):
    examples = []
    for index, row in df.iterrows():
        features = {
            "Gender": _bytes_feature(row['Gender']),
            "Age": _int64_feature(row['Age']),
            "EstimatedSalary": _int64_feature(row['EstimatedSalary']),
        }
        example_proto = tf.train.Example(features=tf.train.Features(feature=features))
        examples.append(example_proto.SerializeToString())
    return examples
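# optional sanity check: parse one serialized example back with a feature spec
# that mirrors _examples(); just a sketch to verify the round trip
_spec = {
    "Gender": tf.io.FixedLenFeature([], tf.string),
    "Age": tf.io.FixedLenFeature([], tf.int64),
    "EstimatedSalary": tf.io.FixedLenFeature([], tf.int64),
}
print(tf.io.parse_single_example(_examples(test_df)[0], _spec))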
from tfx.orchestration.experimental.interactive.interactive_context import InteractiveContext
context = InteractiveContext()
WARNING:absl:InteractiveContext pipeline_root argument not provided: using temporary directory /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5 as root for pipeline outputs.
WARNING:absl:InteractiveContext metadata_connection_config not provided: using SQLite ML Metadata database at /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/metadata.sqlite.
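# The warnings above are expected: with no arguments, InteractiveContext keeps
# all artifacts in a throwaway temp directory. To persist them between
# sessions you could instead construct it along these lines (left commented
# out so the rest of this run keeps using the temp directory):
# context = InteractiveContext(
#     pipeline_root=str(PIPELINE_DIRECTORY),
#     metadata_connection_config=tfx.orchestration.metadata.sqlite_metadata_connection_config(str(METADATA_PATH)),
# )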
output = tfx.proto.Output(
    split_config=example_gen_pb2.SplitConfig(splits=[
        tfx.proto.SplitConfig.Split(name="train", hash_buckets=3),
        tfx.proto.SplitConfig.Split(name="eval", hash_buckets=1)
    ])
)
example_gen = tfx.components.CsvExampleGen(input_base=SAVED_DATA, output_config=output)
context.run(example_gen)
INFO:absl:Running driver for CsvExampleGen
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:select span and version = (0, None)
INFO:absl:latest span and version = (0, None)
INFO:absl:Running executor for CsvExampleGen
INFO:absl:Generating examples.
INFO:absl:Processing input csv data saved_data/* to TFExample.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
INFO:absl:Examples generated.
INFO:absl:Running publisher for CsvExampleGen
INFO:absl:MetadataStore with DB connection initialized
statistics_gen = tfx.components.StatisticsGen(
    examples=example_gen.outputs['examples']
)
context.run(statistics_gen)
context.show(statistics_gen.outputs['statistics'])
INFO:absl:Excluding no splits because exclude_splits is not set.
INFO:absl:Running driver for StatisticsGen
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for StatisticsGen
INFO:absl:Generating statistics for split train.
INFO:absl:Statistics for split train written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/StatisticsGen/statistics/2/Split-train.
INFO:absl:Generating statistics for split eval.
INFO:absl:Statistics for split eval written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/StatisticsGen/statistics/2/Split-eval.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
INFO:absl:Running publisher for StatisticsGen
INFO:absl:MetadataStore with DB connection initialized
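# Optional: the statistics written by StatisticsGen can also be loaded with
# TFDV to compare the two splits side by side. The `FeatureStats.pb` file name
# matches what this TFX release writes, but treat the exact path as an
# assumption.
stats_uri = statistics_gen.outputs['statistics'].get()[0].uri
train_stats = tfdv.load_stats_binary(os.path.join(stats_uri, 'Split-train', 'FeatureStats.pb'))
eval_stats = tfdv.load_stats_binary(os.path.join(stats_uri, 'Split-eval', 'FeatureStats.pb'))
tfdv.visualize_statistics(lhs_statistics=train_stats, rhs_statistics=eval_stats,
                          lhs_name='TRAIN', rhs_name='EVAL')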
schema_gen = tfx.components.SchemaGen(
    statistics=statistics_gen.outputs['statistics'],
    infer_feature_shape=True
)
context.run(schema_gen)
context.show(schema_gen.outputs['schema'])
INFO:absl:Excluding no splits because exclude_splits is not set.
INFO:absl:Running driver for SchemaGen
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for SchemaGen
INFO:absl:Processing schema from statistics for split train.
INFO:absl:Processing schema from statistics for split eval.
INFO:absl:Schema written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/SchemaGen/schema/3/schema.pbtxt.
INFO:absl:Running publisher for SchemaGen
INFO:absl:MetadataStore with DB connection initialized
schema = tfdv.load_schema_text(
    os.path.join(schema_gen.outputs['schema']._artifacts[0].uri, "schema.pbtxt")
)
# adding the needed environments
schema.default_environment.append("TRAINING")
schema.default_environment.append("SERVING")
# removing the `Purchased` column from the serving environment
tfdv.get_feature(schema, "Purchased").not_in_environment.append("SERVING")
tfdv.display_schema(schema=schema)
!mkdir -p {SCHEMA_DIRECTORY}
tfdv.write_schema_text(schema, SCHEMA_FILE_NAME)
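# With the environments declared, serving-time data (which carries no
# `Purchased` column) can be validated against the same schema; a small
# sketch using the held-out frame:
serving_stats = tfdv.generate_statistics_from_dataframe(test_df.drop(columns=['Purchased']))
serving_anomalies = tfdv.validate_statistics(serving_stats, schema, environment='SERVING')
tfdv.display_anomalies(serving_anomalies)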
# loading the updated schema using the importer node.
schema_importer = tfx.dsl.Importer(
    source_uri=str(SCHEMA_DIRECTORY),
    artifact_type=tfx.types.standard_artifacts.Schema
).with_id("schema_importer")
context.run(schema_importer)
context.show(schema_importer.outputs['result'])
INFO:absl:Running driver for schema_importer
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Processing source uri: pipelines/sample-pipeline/schema, properties: {}, custom_properties: {}
INFO:absl:Running executor for schema_importer
INFO:absl:Running publisher for schema_importer
INFO:absl:MetadataStore with DB connection initialized
example_validator = tfx.components.ExampleValidator(
    statistics=statistics_gen.outputs['statistics'],
    schema=schema_importer.outputs['result']
)
context.run(example_validator)
context.show(example_validator.outputs['anomalies'])
INFO:absl:Excluding no splits because exclude_splits is not set.
INFO:absl:Running driver for ExampleValidator
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for ExampleValidator
INFO:absl:Validating schema against the computed statistics for split train.
INFO:absl:Validation complete for split train. Anomalies written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/ExampleValidator/anomalies/5/Split-train.
INFO:absl:Validating schema against the computed statistics for split eval.
INFO:absl:Validation complete for split eval. Anomalies written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/ExampleValidator/anomalies/5/Split-eval.
INFO:absl:Running publisher for ExampleValidator
INFO:absl:MetadataStore with DB connection initialized
%%writefile {CONSTANTS_MODULE_PATH}
LABEL = 'Purchased'
Overwriting constants.py
%%writefile {TRANSFORM_MODULE_PATH}
import tensorflow as tf
import tensorflow_transform as tft
import constants
LABEL = constants.LABEL
def preprocessing_fn(inputs):
    outputs = dict()
    outputs['Age'] = inputs['Age']
    outputs['EstimatedSalary'] = inputs['EstimatedSalary']
    # label-encoding `Gender`; the raw values in this dataset are
    # "Male"/"Female", so the comparison must use the capitalized spelling
    outputs['Gender'] = tf.cast(tf.equal(inputs['Gender'], 'Male'), tf.int64)
    outputs[LABEL] = inputs[LABEL]
    return outputs
Overwriting transform.py
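# Because preprocessing_fn is plain TensorFlow, it can be smoke-tested eagerly
# before handing it to the Transform component; the probe dict below is made
# up for illustration.
import transform as _transform_module
_probe = {
    'Gender': tf.constant(['Male', 'Female']),
    'Age': tf.constant([30, 40], dtype=tf.int64),
    'EstimatedSalary': tf.constant([50000, 60000], dtype=tf.int64),
    'Purchased': tf.constant([0, 1], dtype=tf.int64),
}
print(_transform_module.preprocessing_fn(_probe))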
transform = tfx.components.Transform(
    examples=example_gen.outputs['examples'],
    schema=schema_importer.outputs['result'],
    module_file=os.path.abspath(TRANSFORM_MODULE_PATH),
)
context.run(transform, enable_cache=False)
INFO:absl:Generating ephemeral wheel package for '/content/transform.py' (including modules: ['trainer', 'constants', 'transform']).
INFO:absl:User module package has hash fingerprint version e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4.
INFO:absl:Executing: ['/usr/bin/python3', '/tmp/tmp5ldyz37q/_tfx_generated_setup.py', 'bdist_wheel', '--bdist-dir', '/tmp/tmp8_j5wbbs', '--dist-dir', '/tmp/tmp2fp39iek']
INFO:absl:Successfully built user code wheel distribution at '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'; target user module is 'transform'.
INFO:absl:Full user module path is 'transform@/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'
INFO:absl:Running driver for Transform
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for Transform
INFO:absl:Analyze the 'train' split and transform all splits when splits_config is not set.
INFO:absl:udf_utils.get_fn {'module_file': None, 'module_path': 'transform@/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl', 'preprocessing_fn': None} 'preprocessing_fn'
INFO:absl:Installing '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/bin/python3', '-m', 'pip', 'install', '--target', '/tmp/tmp5vh10pub', '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl']
INFO:absl:Successfully installed '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'.
INFO:absl:udf_utils.get_fn {'module_file': None, 'module_path': 'transform@/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl', 'stats_options_updater_fn': None} 'stats_options_updater_fn'
INFO:absl:Installing '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/bin/python3', '-m', 'pip', 'install', '--target', '/tmp/tmpbkarqsxa', '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl']
INFO:absl:Successfully installed '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'.
INFO:absl:Installing '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/bin/python3', '-m', 'pip', 'install', '--target', '/tmp/tmp3j7bp14d', '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl']
INFO:absl:Successfully installed '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'.
INFO:absl:Feature Gender has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Age has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature EstimatedSalary has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Purchased has a shape dim {
size: 1
}
. Setting to DenseTensor.
WARNING:root:This output type hint will be ignored and not used for type-checking purposes. Typically, output type hints for a PTransform are single (or nested) types wrapped by a PCollection, PDone, or None. Got: Tuple[Dict[str, Union[NoneType, _Dataset]], Union[Dict[str, Dict[str, PCollection]], NoneType], int] instead.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
INFO:tensorflow:Assets written to: /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/Transform/transform_graph/6/.temp_path/tftransform_tmp/e74cd2d11c8e4573bc37ec74c517a6c1/assets
INFO:tensorflow:tensorflow_text is not available.
INFO:tensorflow:struct2tensor is not available.
INFO:absl:Running publisher for Transform
INFO:absl:MetadataStore with DB connection initialized
train_uri = os.path.join(
    transform.outputs['transformed_examples'].get()[0].uri,
    'Split-train'
)
tfrecord_filenames = [
    os.path.join(train_uri, name) for name in os.listdir(train_uri)
]
dataset = tf.data.TFRecordDataset(tfrecord_filenames, compression_type='GZIP')
for tfrecord in dataset.take(3):
    serialized_example = tfrecord.numpy()
    example = tf.train.Example()
    example.ParseFromString(serialized_example)
    print(example)
features {
  feature {
    key: "Age"
    value {
      int64_list {
        value: 42
      }
    }
  }
  feature {
    key: "EstimatedSalary"
    value {
      int64_list {
        value: 54000
      }
    }
  }
  feature {
    key: "Gender"
    value {
      int64_list {
        value: 0
      }
    }
  }
  feature {
    key: "Purchased"
    value {
      int64_list {
        value: 0
      }
    }
  }
}
features {
  feature {
    key: "Age"
    value {
      int64_list {
        value: 35
      }
    }
  }
  feature {
    key: "EstimatedSalary"
    value {
      int64_list {
        value: 108000
      }
    }
  }
  feature {
    key: "Gender"
    value {
      int64_list {
        value: 0
      }
    }
  }
  feature {
    key: "Purchased"
    value {
      int64_list {
        value: 0
      }
    }
  }
}
features {
  feature {
    key: "Age"
    value {
      int64_list {
        value: 34
      }
    }
  }
  feature {
    key: "EstimatedSalary"
    value {
      int64_list {
        value: 112000
      }
    }
  }
  feature {
    key: "Gender"
    value {
      int64_list {
        value: 0
      }
    }
  }
  feature {
    key: "Purchased"
    value {
      int64_list {
        value: 1
      }
    }
  }
}
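# The transformed records can also be decoded into typed tensors using the
# feature spec recorded by Transform; a minimal sketch:
import tensorflow_transform as tft
tft_output = tft.TFTransformOutput(transform.outputs['transform_graph'].get()[0].uri)
for batch in dataset.batch(2).take(1):
    print(tf.io.parse_example(batch, tft_output.transformed_feature_spec()))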
%%writefile {TRAINER_MODULE_PATH}
import tensorflow as tf
import tensorflow_decision_forests as tfdf
import tensorflow_transform as tft
from absl import logging
from tensorflow.keras import layers, Model, optimizers, losses, metrics
from tfx import v1 as tfx
from tfx_bsl.public import tfxio
from typing import List, Text
import constants
LABEL = constants.LABEL
BATCH_SIZE = 32
EPOCHS = 50
def _input_fn(
    file_pattern: List[Text],
    data_accessor: tfx.components.DataAccessor,
    tf_transform_output: tft.TFTransformOutput,
    batch_size: int,
) -> tf.data.Dataset:
    """
    Generates a dataset of features that can be used to train
    and evaluate the model.

    Args:
        file_pattern: List of paths or patterns of input data files.
        data_accessor: An instance of DataAccessor that we can use to
            convert the input to a RecordBatch.
        tf_transform_output: The transformation output.
        batch_size: The number of consecutive elements that we should
            combine in a single batch.

    Returns:
        A dataset that contains a tuple of (features, indices) where
        features is a dictionary of Tensors, and indices is a single
        Tensor of label indices.
    """
    dataset = data_accessor.tf_dataset_factory(
        file_pattern,
        tfxio.TensorFlowDatasetOptions(batch_size=batch_size),
        schema=tf_transform_output.raw_metadata.schema,
    )
    tft_layer = tf_transform_output.transform_features_layer()

    def apply_transform(raw_features):
        transformed_features = tft_layer(raw_features)
        transformed_label = transformed_features.pop(LABEL)
        return transformed_features, transformed_label

    return dataset.map(apply_transform).repeat()
def _get_serve_tf_examples_fn(model, tf_transform_output):
    """
    Returns a function that parses a serialized tf.Example and applies
    the transformations during inference.

    Args:
        model: The model that we are serving.
        tf_transform_output: The transformation output that we want to
            include with the model.
    """
    model.tft_layer = tf_transform_output.transform_features_layer()

    @tf.function(input_signature=[tf.TensorSpec(shape=[None], dtype=tf.string, name="examples")])
    def serve_tf_examples_fn(serialized_tf_examples):
        feature_spec = tf_transform_output.raw_feature_spec()
        required_feature_spec = {
            k: v for k, v in feature_spec.items() if k != LABEL
        }
        parsed_features = tf.io.parse_example(
            serialized_tf_examples,
            required_feature_spec
        )
        transformed_features = model.tft_layer(parsed_features)
        return model(transformed_features)

    return serve_tf_examples_fn
def _model() -> tf.keras.Model:
    inputs = [
        layers.Input(shape=(1,), name="Age"),
        layers.Input(shape=(1,), name="EstimatedSalary"),
        layers.Input(shape=(1,), name="Gender")
    ]
    x = layers.concatenate(inputs)
    x = layers.Dense(8, activation="relu")(x)
    x = layers.Dense(8, activation="relu")(x)
    outputs = layers.Dense(1, activation="sigmoid")(x)
    model = Model(inputs=inputs, outputs=outputs)
    model.compile(
        optimizer=optimizers.Adam(1e-2),
        loss="binary_crossentropy",
        metrics=[metrics.BinaryAccuracy()],
    )
    model.summary(print_fn=logging.info)
    return model
def run_fn(fn_args: tfx.components.FnArgs):
    """
    The callback function that will be called by the Trainer component
    to train the model using the supplied arguments.

    Args:
        fn_args: A collection of name/value pairs representing the
            arguments to train the model.
    """
    tf_transform_output = tft.TFTransformOutput(fn_args.transform_output)
    train_dataset = _input_fn(
        fn_args.train_files,
        fn_args.data_accessor,
        tf_transform_output,
        batch_size=BATCH_SIZE,
    )
    eval_dataset = _input_fn(
        fn_args.eval_files,
        fn_args.data_accessor,
        tf_transform_output,
        batch_size=BATCH_SIZE,
    )
    model = _model()
    model.fit(
        train_dataset,
        steps_per_epoch=fn_args.train_steps,
        validation_data=eval_dataset,
        validation_steps=fn_args.eval_steps,
        epochs=EPOCHS
    )
    # We need to modify the default signature to include the transform layer
    # in the computational graph.
    signatures = {
        "serving_default": _get_serve_tf_examples_fn(model, tf_transform_output),
    }
    model.save(fn_args.serving_model_dir, save_format="tf", signatures=signatures)
Overwriting trainer.py
trainer = tfx.components.Trainer(
    examples=example_gen.outputs["examples"],
    transform_graph=transform.outputs["transform_graph"],
    train_args=tfx.proto.TrainArgs(num_steps=100),
    eval_args=tfx.proto.EvalArgs(num_steps=5),
    module_file=os.path.abspath(TRAINER_MODULE_PATH),
)
context.run(trainer, enable_cache=False)
INFO:absl:Generating ephemeral wheel package for '/content/trainer.py' (including modules: ['trainer', 'constants', 'transform']).
INFO:absl:User module package has hash fingerprint version e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4.
INFO:absl:Executing: ['/usr/bin/python3', '/tmp/tmprhfjfj2x/_tfx_generated_setup.py', 'bdist_wheel', '--bdist-dir', '/tmp/tmplt0gdefy', '--dist-dir', '/tmp/tmpxdlcub3_']
INFO:absl:Successfully built user code wheel distribution at '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'; target user module is 'trainer'.
INFO:absl:Full user module path is 'trainer@/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'
INFO:absl:Running driver for Trainer
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for Trainer
INFO:absl:Train on the 'train' split when train_args.splits is not set.
INFO:absl:Evaluate on the 'eval' split when eval_args.splits is not set.
INFO:absl:udf_utils.get_fn {'train_args': '{\n "num_steps": 100\n}', 'eval_args': '{\n "num_steps": 5\n}', 'module_file': None, 'run_fn': None, 'trainer_fn': None, 'custom_config': 'null', 'module_path': 'trainer@/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'} 'run_fn'
INFO:absl:Installing '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/bin/python3', '-m', 'pip', 'install', '--target', '/tmp/tmpzv402fan', '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl']
INFO:absl:Successfully installed '/tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'.
INFO:absl:Training model.
INFO:absl:Feature Gender has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Age has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature EstimatedSalary has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Purchased has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:tensorflow:tensorflow_text is not available.
INFO:tensorflow:struct2tensor is not available.
INFO:absl:Model: "model_2"
INFO:absl:__________________________________________________________________________________________________
INFO:absl: Layer (type) Output Shape Param # Connected to
INFO:absl:==================================================================================================
INFO:absl: Age (InputLayer) [(None, 1)] 0 []
INFO:absl:
INFO:absl: EstimatedSalary (InputLayer) [(None, 1)] 0 []
INFO:absl:
INFO:absl: Gender (InputLayer) [(None, 1)] 0 []
INFO:absl:
INFO:absl: concatenate_2 (Concatenate) (None, 3) 0 ['Age[0][0]',
INFO:absl: 'EstimatedSalary[0][0]',
INFO:absl: 'Gender[0][0]']
INFO:absl:
INFO:absl: dense_6 (Dense) (None, 8) 32 ['concatenate_2[0][0]']
INFO:absl:
INFO:absl: dense_7 (Dense) (None, 8) 72 ['dense_6[0][0]']
INFO:absl:
INFO:absl: dense_8 (Dense) (None, 1) 9 ['dense_7[0][0]']
INFO:absl:
INFO:absl:==================================================================================================
INFO:absl:Total params: 113
INFO:absl:Trainable params: 113
INFO:absl:Non-trainable params: 0
INFO:absl:__________________________________________________________________________________________________
Epoch 1/50
100/100 [==============================] - 1s 6ms/step - loss: 267.8823 - binary_accuracy: 0.5066 - val_loss: 106.0862 - val_binary_accuracy: 0.7000
Epoch 2/50
100/100 [==============================] - 0s 4ms/step - loss: 83.4841 - binary_accuracy: 0.5178 - val_loss: 73.6792 - val_binary_accuracy: 0.3000
Epoch 3/50
100/100 [==============================] - 0s 5ms/step - loss: 119.2248 - binary_accuracy: 0.4991 - val_loss: 143.3806 - val_binary_accuracy: 0.7250
Epoch 4/50
100/100 [==============================] - 0s 5ms/step - loss: 110.0067 - binary_accuracy: 0.5119 - val_loss: 570.8691 - val_binary_accuracy: 0.3000
Epoch 5/50
100/100 [==============================] - 0s 5ms/step - loss: 152.6463 - binary_accuracy: 0.4953 - val_loss: 71.6391 - val_binary_accuracy: 0.2875
Epoch 6/50
100/100 [==============================] - 0s 5ms/step - loss: 93.2976 - binary_accuracy: 0.5069 - val_loss: 49.7022 - val_binary_accuracy: 0.7000
Epoch 7/50
100/100 [==============================] - 0s 5ms/step - loss: 85.2265 - binary_accuracy: 0.4956 - val_loss: 181.0211 - val_binary_accuracy: 0.7125
Epoch 8/50
100/100 [==============================] - 0s 5ms/step - loss: 105.3722 - binary_accuracy: 0.5056 - val_loss: 163.2860 - val_binary_accuracy: 0.7000
Epoch 9/50
100/100 [==============================] - 0s 5ms/step - loss: 85.2704 - binary_accuracy: 0.5069 - val_loss: 84.9767 - val_binary_accuracy: 0.6812
Epoch 10/50
100/100 [==============================] - 0s 4ms/step - loss: 55.2350 - binary_accuracy: 0.5141 - val_loss: 96.7759 - val_binary_accuracy: 0.3125
Epoch 11/50
100/100 [==============================] - 1s 6ms/step - loss: 61.3633 - binary_accuracy: 0.5059 - val_loss: 2.9306 - val_binary_accuracy: 0.6938
Epoch 12/50
100/100 [==============================] - 0s 4ms/step - loss: 66.3227 - binary_accuracy: 0.4978 - val_loss: 24.4477 - val_binary_accuracy: 0.7125
Epoch 13/50
100/100 [==============================] - 0s 5ms/step - loss: 70.9514 - binary_accuracy: 0.5066 - val_loss: 67.2346 - val_binary_accuracy: 0.7000
Epoch 14/50
100/100 [==============================] - 0s 5ms/step - loss: 85.1895 - binary_accuracy: 0.5075 - val_loss: 13.2118 - val_binary_accuracy: 0.2875
Epoch 15/50
100/100 [==============================] - 0s 5ms/step - loss: 65.0425 - binary_accuracy: 0.5166 - val_loss: 66.6009 - val_binary_accuracy: 0.6938
Epoch 16/50
100/100 [==============================] - 1s 5ms/step - loss: 46.1735 - binary_accuracy: 0.5063 - val_loss: 23.2638 - val_binary_accuracy: 0.3000
Epoch 17/50
100/100 [==============================] - 1s 5ms/step - loss: 52.2928 - binary_accuracy: 0.5078 - val_loss: 13.3263 - val_binary_accuracy: 0.6875
Epoch 18/50
100/100 [==============================] - 0s 5ms/step - loss: 54.0791 - binary_accuracy: 0.4981 - val_loss: 1.1034 - val_binary_accuracy: 0.6875
Epoch 19/50
100/100 [==============================] - 0s 5ms/step - loss: 30.9694 - binary_accuracy: 0.5116 - val_loss: 41.8447 - val_binary_accuracy: 0.3000
Epoch 20/50
100/100 [==============================] - 0s 5ms/step - loss: 33.5929 - binary_accuracy: 0.5031 - val_loss: 34.4179 - val_binary_accuracy: 0.3125
Epoch 21/50
100/100 [==============================] - 0s 4ms/step - loss: 36.5009 - binary_accuracy: 0.5022 - val_loss: 38.2492 - val_binary_accuracy: 0.6938
Epoch 22/50
100/100 [==============================] - 0s 4ms/step - loss: 32.6677 - binary_accuracy: 0.5175 - val_loss: 56.9790 - val_binary_accuracy: 0.3125
Epoch 23/50
100/100 [==============================] - 0s 5ms/step - loss: 19.0544 - binary_accuracy: 0.5259 - val_loss: 13.7187 - val_binary_accuracy: 0.7188
Epoch 24/50
100/100 [==============================] - 0s 5ms/step - loss: 37.1442 - binary_accuracy: 0.5013 - val_loss: 26.5512 - val_binary_accuracy: 0.6875
Epoch 25/50
100/100 [==============================] - 0s 4ms/step - loss: 18.1173 - binary_accuracy: 0.5269 - val_loss: 5.0738 - val_binary_accuracy: 0.6875
Epoch 26/50
100/100 [==============================] - 0s 5ms/step - loss: 16.7445 - binary_accuracy: 0.5153 - val_loss: 6.9730 - val_binary_accuracy: 0.7250
Epoch 27/50
100/100 [==============================] - 0s 5ms/step - loss: 13.5267 - binary_accuracy: 0.5181 - val_loss: 19.1609 - val_binary_accuracy: 0.7063
Epoch 28/50
100/100 [==============================] - 0s 5ms/step - loss: 21.3662 - binary_accuracy: 0.5297 - val_loss: 6.4211 - val_binary_accuracy: 0.7000
Epoch 29/50
100/100 [==============================] - 1s 5ms/step - loss: 10.2222 - binary_accuracy: 0.5469 - val_loss: 15.4098 - val_binary_accuracy: 0.3125
Epoch 30/50
100/100 [==============================] - 1s 5ms/step - loss: 8.7514 - binary_accuracy: 0.5263 - val_loss: 60.3655 - val_binary_accuracy: 0.3000
Epoch 31/50
100/100 [==============================] - 1s 6ms/step - loss: 23.4979 - binary_accuracy: 0.4919 - val_loss: 9.6138 - val_binary_accuracy: 0.6875
Epoch 32/50
100/100 [==============================] - 0s 4ms/step - loss: 19.6520 - binary_accuracy: 0.4922 - val_loss: 6.4057 - val_binary_accuracy: 0.4000
Epoch 33/50
100/100 [==============================] - 1s 5ms/step - loss: 9.0125 - binary_accuracy: 0.5341 - val_loss: 7.4587 - val_binary_accuracy: 0.3688
Epoch 34/50
100/100 [==============================] - 0s 5ms/step - loss: 8.9847 - binary_accuracy: 0.5222 - val_loss: 0.8087 - val_binary_accuracy: 0.7875
Epoch 35/50
100/100 [==============================] - 0s 5ms/step - loss: 9.5232 - binary_accuracy: 0.5150 - val_loss: 7.6546 - val_binary_accuracy: 0.6938
Epoch 36/50
100/100 [==============================] - 0s 5ms/step - loss: 8.6998 - binary_accuracy: 0.5312 - val_loss: 6.2835 - val_binary_accuracy: 0.6812
Epoch 37/50
100/100 [==============================] - 0s 5ms/step - loss: 3.5878 - binary_accuracy: 0.5622 - val_loss: 2.7051 - val_binary_accuracy: 0.6875
Epoch 38/50
100/100 [==============================] - 0s 5ms/step - loss: 7.6853 - binary_accuracy: 0.5466 - val_loss: 2.3886 - val_binary_accuracy: 0.6875
Epoch 39/50
100/100 [==============================] - 0s 5ms/step - loss: 7.1228 - binary_accuracy: 0.5409 - val_loss: 11.8322 - val_binary_accuracy: 0.3375
Epoch 40/50
100/100 [==============================] - 0s 5ms/step - loss: 4.2872 - binary_accuracy: 0.5406 - val_loss: 1.0799 - val_binary_accuracy: 0.6938
Epoch 41/50
100/100 [==============================] - 0s 4ms/step - loss: 2.9106 - binary_accuracy: 0.5756 - val_loss: 4.5029 - val_binary_accuracy: 0.7000
Epoch 42/50
100/100 [==============================] - 1s 5ms/step - loss: 4.5573 - binary_accuracy: 0.5541 - val_loss: 3.3100 - val_binary_accuracy: 0.7000
Epoch 43/50
100/100 [==============================] - 0s 5ms/step - loss: 2.7604 - binary_accuracy: 0.5603 - val_loss: 2.1735 - val_binary_accuracy: 0.6938
Epoch 44/50
100/100 [==============================] - 0s 5ms/step - loss: 3.8381 - binary_accuracy: 0.5866 - val_loss: 10.3195 - val_binary_accuracy: 0.3375
Epoch 45/50
100/100 [==============================] - 0s 4ms/step - loss: 2.7016 - binary_accuracy: 0.5797 - val_loss: 2.6194 - val_binary_accuracy: 0.3625
Epoch 46/50
100/100 [==============================] - 0s 5ms/step - loss: 2.4798 - binary_accuracy: 0.5925 - val_loss: 0.9044 - val_binary_accuracy: 0.7250
Epoch 47/50
100/100 [==============================] - 0s 5ms/step - loss: 2.5936 - binary_accuracy: 0.5697 - val_loss: 1.7987 - val_binary_accuracy: 0.7000
Epoch 48/50
100/100 [==============================] - 0s 5ms/step - loss: 3.1322 - binary_accuracy: 0.5597 - val_loss: 0.6785 - val_binary_accuracy: 0.7563
Epoch 49/50
100/100 [==============================] - 0s 5ms/step - loss: 1.2377 - binary_accuracy: 0.6534 - val_loss: 1.9626 - val_binary_accuracy: 0.6625
Epoch 50/50
100/100 [==============================] - 0s 4ms/step - loss: 1.2679 - binary_accuracy: 0.6428 - val_loss: 0.7345 - val_binary_accuracy: 0.7688
INFO:tensorflow:Assets written to: /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/Trainer/model/7/Format-Serving/assets
INFO:absl:Training complete. Model written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/Trainer/model/7/Format-Serving. ModelRun written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/Trainer/model_run/7
INFO:absl:Running publisher for Trainer
INFO:absl:MetadataStore with DB connection initialized
eval_config = tfma.EvalConfig(
    model_specs=[
        tfma.ModelSpec(
            signature_name="serving_default",
            preprocessing_function_names=['tft_layer'],
            label_key="Purchased",
        )
    ],
    metrics_specs=[
        tfma.MetricsSpec(
            per_slice_thresholds={
                "binary_accuracy": tfma.PerSliceMetricThresholds(
                    thresholds=[
                        tfma.PerSliceMetricThreshold(
                            slicing_specs=[tfma.SlicingSpec()],
                            threshold=tfma.MetricThreshold(
                                value_threshold=tfma.GenericValueThreshold(
                                    lower_bound={"value": 0.7}
                                ),
                                change_threshold=tfma.GenericChangeThreshold(
                                    direction=tfma.MetricDirection.HIGHER_IS_BETTER,
                                    absolute={"value": -1e-10},
                                ),
                            ),
                        )
                    ]
                ),
            }
        )
    ],
    slicing_specs=[
        tfma.SlicingSpec(),
        tfma.SlicingSpec(feature_keys=["Gender"])
    ]
)
model_resolver = tfx.dsl.Resolver(
    strategy_class=tfx.dsl.experimental.LatestBlessedModelStrategy,
    model=tfx.dsl.Channel(type=tfx.types.standard_artifacts.Model),
    model_blessings=tfx.dsl.Channel(type=tfx.types.standard_artifacts.ModelBlessing),
).with_id("latest_blessed_model_resolver")
context.run(model_resolver)
INFO:absl:Running driver for latest_blessed_model_resolver
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running publisher for latest_blessed_model_resolver
INFO:absl:MetadataStore with DB connection initialized
evaluator = tfx.components.Evaluator(
    examples=example_gen.outputs["examples"],
    model=trainer.outputs["model"],
    eval_config=eval_config,
    baseline_model=model_resolver.outputs["model"],
)
context.run(evaluator, enable_cache=False)
INFO:absl:Running driver for Evaluator
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for Evaluator
INFO:absl:Nonempty beam arg extra_packages already includes dependency
INFO:absl:udf_utils.get_fn {'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Purchased",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "Gender"\n ]\n }\n ]\n}', 'feature_slicing_spec': None, 'fairness_indicator_thresholds': 'null', 'example_splits': 'null', 'module_file': None, 'module_path': None} 'custom_eval_shared_model'
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
  signature_name: "serving_default"
  label_key: "Purchased"
  preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
  feature_keys: "Gender"
}
metrics_specs {
  per_slice_thresholds {
    key: "binary_accuracy"
    value {
      thresholds {
        slicing_specs {
        }
        threshold {
          value_threshold {
            lower_bound {
              value: 0.7
            }
          }
        }
      }
    }
  }
}
INFO:absl:Using /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/Trainer/model/7/Format-Serving as model.
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d5a653150> and <keras.engine.input_layer.InputLayer object at 0x7f3d5ce51410>).
INFO:absl:The 'example_splits' parameter is not set, using 'eval' split.
INFO:absl:Evaluating model.
INFO:absl:udf_utils.get_fn {'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Purchased",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "Gender"\n ]\n }\n ]\n}', 'feature_slicing_spec': None, 'fairness_indicator_thresholds': 'null', 'example_splits': 'null', 'module_file': None, 'module_path': None} 'custom_extractors'
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
  signature_name: "serving_default"
  label_key: "Purchased"
  preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
  feature_keys: "Gender"
}
metrics_specs {
  model_names: ""
  per_slice_thresholds {
    key: "binary_accuracy"
    value {
      thresholds {
        slicing_specs {
        }
        threshold {
          value_threshold {
            lower_bound {
              value: 0.7
            }
          }
        }
      }
    }
  }
}
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d5cc6dc90> and <keras.engine.input_layer.InputLayer object at 0x7f3d5cc5f650>).
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
INFO:absl:Evaluation complete. Results written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/Evaluator/evaluation/9.
INFO:absl:Checking validation results.
INFO:absl:Blessing result True written to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/Evaluator/blessing/9.
INFO:absl:Running publisher for Evaluator
INFO:absl:MetadataStore with DB connection initialized
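# The evaluation artifact can also be inspected directly with TFMA (rendering
# the view requires the TFMA notebook extension to be installed); a sketch:
eval_result = tfma.load_eval_result(evaluator.outputs['evaluation'].get()[0].uri)
tfma.view.render_slicing_metrics(eval_result, slicing_column='Gender')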
pusher = tfx.components.Pusher(
    model=trainer.outputs["model"],
    model_blessing=evaluator.outputs["blessing"],
    push_destination=tfx.proto.PushDestination(
        filesystem=tfx.proto.PushDestination.Filesystem(
            base_directory=str(MODEL_DIRECTORY)
        )
    ),
)
context.run(pusher)
INFO:absl:Running driver for Pusher
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Running executor for Pusher
INFO:absl:Model version: 1638251875
INFO:absl:Model written to serving path model/1638251875.
INFO:absl:Model pushed to /tmp/tfx-interactive-2021-11-30T05_56_03.974969-yzfsa_v5/Pusher/pushed_model/10.
INFO:absl:Running publisher for Pusher
INFO:absl:MetadataStore with DB connection initialized
def get_inference_fn(model_directory):
    model_directories = (d for d in os.scandir(model_directory) if d.is_dir())
    model_path = max(model_directories, key=lambda i: int(i.name)).path
    loaded_model = tf.keras.models.load_model(model_path)
    return loaded_model.signatures["serving_default"]
inference_fn = get_inference_fn(MODEL_DIRECTORY)
result = inference_fn(examples=tf.constant(_examples(test_df)))
print(result["output_0"].numpy())
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d589a8790> and <keras.engine.input_layer.InputLayer object at 0x7f3dd0451110>).
[[0.03220685]
[0.46029472]
[0.5184892 ]
[0.17297481]
[0.03745157]
[0.67159355]
[0.05825204]
[0.08253836]
[0.03109209]
[0.14211378]
[0.01026638]
[0.54865664]
[0.00307088]
[0.11654628]
[0.0051414 ]
[0.04682363]
[0.11298493]
[0.6830877 ]
[0.02954738]
[0.19606875]
[0.04218721]
[0.00264895]
[0.26473102]
[0.05261479]
[0.00801955]
[0.84577835]
[0.09096617]
[0.00268115]
[0.00590247]
[0.14293896]
[0.00727324]
[0.00475701]
[0.14112698]
[0.19416368]
[0.11682624]
[0.0055347 ]
[0.08142502]
[0.02018276]
[0.04202535]
[0.04229586]
[0.4267416 ]
[0.07690777]
[0.01121056]
[0.02657227]
[0.00833632]
[0.03555671]
[0.23300353]
[0.00363223]
[0.03032882]
[0.1238111 ]
[0.01567248]
[0.01212719]
[0.16708386]
[0.01242075]
[0.20534608]
[0.04540218]
[0.23566048]
[0.08876477]
[0.14211378]
[0.13409239]
[0.46330968]
[0.00233451]
[0.53326845]
[0.04616746]
[0.08898594]
[0.02742235]
[0.00609537]
[0.20777936]
[0.08233638]
[0.02595322]
[0.10678811]
[0.0370178 ]
[0.11696012]
[0.18243128]
[0.04616746]
[0.00647432]
[0.6141573 ]
[0.06803803]
[0.05607099]
[0.9046775 ]
[0.06005136]
[0.11475087]
[0.01169908]
[0.0147258 ]
[0.01368825]
[0.09752264]
[0.04100535]
[0.0265025 ]
[0.03745157]
[0.11097376]
[0.10873419]
[0.02320905]
[0.6167141 ]
[0.5526473 ]
[0.08975026]
[0.06924812]
[0.08481439]
[0.0838787 ]
[0.47606975]
[0.10064986]
[0.24427153]
[0.01598805]
[0.1486468 ]
[0.11991532]
[0.04063642]
[0.02639965]
[0.04143259]
[0.01411222]
[0.01552854]
[0.10614578]
[0.00433616]
[0.0082146 ]
[0.89032626]
[0.15595527]
[0.05614655]
[0.02269477]
[0.18180625]
[0.04373924]
[0.34885147]
[0.00666774]]
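# thresholding the returned probabilities at 0.5 gives class predictions,
# which we can compare with the held-out labels as a rough sanity check
# (the rows of `result` line up with `test_df` because _examples() preserved
# row order)
predictions = (result["output_0"].numpy() >= 0.5).astype(int).flatten()
print("held-out accuracy:", (predictions == test_df["Purchased"].to_numpy()).mean())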
%%writefile {SCHEMA_FILE_NAME}
feature {
  name: "Gender"
  type: BYTES
  domain: "Gender"
  presence {
    min_fraction: 1.0
    min_count: 1
  }
  shape {
    dim {
      size: 1
    }
  }
}
feature {
  name: "Age"
  type: INT
  presence {
    min_fraction: 1.0
    min_count: 1
  }
  shape {
    dim {
      size: 1
    }
  }
}
feature {
  name: "EstimatedSalary"
  type: INT
  presence {
    min_fraction: 1.0
    min_count: 1
  }
  shape {
    dim {
      size: 1
    }
  }
}
feature {
  name: "Purchased"
  type: INT
  bool_domain {
  }
  presence {
    min_fraction: 1.0
    min_count: 1
  }
  not_in_environment: "SERVING"
  shape {
    dim {
      size: 1
    }
  }
}
string_domain {
  name: "Gender"
  value: "Female"
  value: "Male"
}
default_environment: "TRAINING"
default_environment: "SERVING"
Overwriting pipelines/sample-pipeline/schema/schema.pbtxt
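# optional: reload the hand-edited schema to confirm it still parses and that
# the environments survived the rewrite
print(tfdv.load_schema_text(SCHEMA_FILE_NAME).default_environment)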
import tensorflow_model_analysis as tfma
def create_pipeline(
    pipeline_name: str,
    pipeline_directory: str,
    data_directory: str,
    schema_path: str,
    model_directory: str,
    metadata_path: str,
    transform_module_path: str,
    trainer_module_path: str,
) -> tfx.dsl.Pipeline:
    output = tfx.proto.Output(
        split_config=example_gen_pb2.SplitConfig(splits=[
            tfx.proto.SplitConfig.Split(name="train", hash_buckets=3),
            tfx.proto.SplitConfig.Split(name="eval", hash_buckets=1)
        ])
    )
    # the components below use the function arguments rather than the
    # notebook-level constants, so the pipeline definition is self-contained
    example_gen = tfx.components.CsvExampleGen(input_base=data_directory, output_config=output)
    statistics_gen = tfx.components.StatisticsGen(
        examples=example_gen.outputs['examples']
    )
    schema_importer = tfx.dsl.Importer(
        source_uri=schema_path,
        artifact_type=tfx.types.standard_artifacts.Schema
    ).with_id("schema_importer")
    example_validator = tfx.components.ExampleValidator(
        statistics=statistics_gen.outputs['statistics'],
        schema=schema_importer.outputs['result']
    )
    transform = tfx.components.Transform(
        examples=example_gen.outputs['examples'],
        schema=schema_importer.outputs['result'],
        module_file=os.path.abspath(transform_module_path),
    )
    trainer = tfx.components.Trainer(
        examples=example_gen.outputs["examples"],
        transform_graph=transform.outputs["transform_graph"],
        train_args=tfx.proto.TrainArgs(num_steps=100),
        eval_args=tfx.proto.EvalArgs(num_steps=5),
        module_file=os.path.abspath(trainer_module_path),
    )
    eval_config = tfma.EvalConfig(
        model_specs=[
            tfma.ModelSpec(
                signature_name="serving_default",
                preprocessing_function_names=['tft_layer'],
                label_key="Purchased",
            )
        ],
        metrics_specs=[
            tfma.MetricsSpec(
                per_slice_thresholds={
                    "binary_accuracy": tfma.PerSliceMetricThresholds(
                        thresholds=[
                            tfma.PerSliceMetricThreshold(
                                slicing_specs=[tfma.SlicingSpec()],
                                threshold=tfma.MetricThreshold(
                                    value_threshold=tfma.GenericValueThreshold(
                                        lower_bound={"value": 0.7}
                                    ),
                                    change_threshold=tfma.GenericChangeThreshold(
                                        direction=tfma.MetricDirection.HIGHER_IS_BETTER,
                                        absolute={"value": -1e-10},
                                    ),
                                ),
                            )
                        ]
                    ),
                }
            )
        ],
        slicing_specs=[
            tfma.SlicingSpec(),
            tfma.SlicingSpec(feature_keys=["Gender"])
        ]
    )
    model_resolver = tfx.dsl.Resolver(
        strategy_class=tfx.dsl.experimental.LatestBlessedModelStrategy,
        model=tfx.dsl.Channel(type=tfx.types.standard_artifacts.Model),
        model_blessings=tfx.dsl.Channel(type=tfx.types.standard_artifacts.ModelBlessing),
    ).with_id("latest_blessed_model_resolver")
    evaluator = tfx.components.Evaluator(
        examples=example_gen.outputs["examples"],
        model=trainer.outputs["model"],
        eval_config=eval_config,
        baseline_model=model_resolver.outputs["model"],
    )
    pusher = tfx.components.Pusher(
        model=trainer.outputs["model"],
        model_blessing=evaluator.outputs["blessing"],
        push_destination=tfx.proto.PushDestination(
            filesystem=tfx.proto.PushDestination.Filesystem(
                base_directory=model_directory
            )
        ),
    )
    components = [
        example_gen,
        statistics_gen,
        schema_importer,
        example_validator,
        transform,
        trainer,
        model_resolver,
        evaluator,
        pusher
    ]
    return tfx.dsl.Pipeline(
        pipeline_name=pipeline_name,
        pipeline_root=pipeline_directory,
        metadata_connection_config=tfx.orchestration.metadata.sqlite_metadata_connection_config(
            metadata_path
        ),
        components=components,
    )
tfx.orchestration.LocalDagRunner().run(
create_pipeline(
pipeline_name=PIPELINE_NAME,
pipeline_directory=str(PIPELINE_DIRECTORY),
data_directory=DATA_DIRECTORY,
schema_path=str(SCHEMA_DIRECTORY),
model_directory=str(MODEL_DIRECTORY),
metadata_path=str(METADATA_PATH),
transform_module_path=TRANSFORM_MODULE_PATH,
trainer_module_path=TRAINER_MODULE_PATH
)
)
INFO:absl:Excluding no splits because exclude_splits is not set.
INFO:absl:Generating ephemeral wheel package for '/content/transform.py' (including modules: ['trainer', 'constants', 'transform']).
INFO:absl:User module package has hash fingerprint version e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4.
INFO:absl:Executing: ['/usr/bin/python3', '/tmp/tmpq1110qux/_tfx_generated_setup.py', 'bdist_wheel', '--bdist-dir', '/tmp/tmpz2s0jkly', '--dist-dir', '/tmp/tmpliy4sev0']
INFO:absl:Successfully built user code wheel distribution at 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'; target user module is 'transform'.
INFO:absl:Full user module path is 'transform@pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'
INFO:absl:Generating ephemeral wheel package for '/content/trainer.py' (including modules: ['trainer', 'constants', 'transform']).
INFO:absl:User module package has hash fingerprint version e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4.
INFO:absl:Executing: ['/usr/bin/python3', '/tmp/tmp7vw_jsp4/_tfx_generated_setup.py', 'bdist_wheel', '--bdist-dir', '/tmp/tmpxxavxwzn', '--dist-dir', '/tmp/tmpaa8_06we']
INFO:absl:Successfully built user code wheel distribution at 'pipelines/sample-pipeline/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'; target user module is 'trainer'.
INFO:absl:Full user module path is 'trainer@pipelines/sample-pipeline/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'
INFO:absl:Using deployment config:
executor_specs {
key: "CsvExampleGen"
value {
beam_executable_spec {
python_executor_spec {
class_path: "tfx.components.example_gen.csv_example_gen.executor.Executor"
}
}
}
}
executor_specs {
key: "Evaluator"
value {
beam_executable_spec {
python_executor_spec {
class_path: "tfx.components.evaluator.executor.Executor"
}
}
}
}
executor_specs {
key: "ExampleValidator"
value {
python_class_executable_spec {
class_path: "tfx.components.example_validator.executor.Executor"
}
}
}
executor_specs {
key: "Pusher"
value {
python_class_executable_spec {
class_path: "tfx.components.pusher.executor.Executor"
}
}
}
executor_specs {
key: "StatisticsGen"
value {
beam_executable_spec {
python_executor_spec {
class_path: "tfx.components.statistics_gen.executor.Executor"
}
}
}
}
executor_specs {
key: "Trainer"
value {
python_class_executable_spec {
class_path: "tfx.components.trainer.executor.GenericExecutor"
}
}
}
executor_specs {
key: "Transform"
value {
beam_executable_spec {
python_executor_spec {
class_path: "tfx.components.transform.executor.Executor"
}
}
}
}
custom_driver_specs {
key: "CsvExampleGen"
value {
python_class_executable_spec {
class_path: "tfx.components.example_gen.driver.FileBasedDriver"
}
}
}
metadata_connection_config {
sqlite {
filename_uri: "metadata/sample-pipeline/metadata.db"
connection_mode: READWRITE_OPENCREATE
}
}
INFO:absl:Using connection config:
sqlite {
filename_uri: "metadata/sample-pipeline/metadata.db"
connection_mode: READWRITE_OPENCREATE
}
INFO:absl:Component CsvExampleGen is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.components.example_gen.csv_example_gen.component.CsvExampleGen"
}
id: "CsvExampleGen"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.CsvExampleGen"
}
}
}
}
outputs {
outputs {
key: "examples"
value {
artifact_spec {
type {
name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
}
}
}
}
}
parameters {
parameters {
key: "input_base"
value {
field_value {
string_value: "saved_data"
}
}
}
parameters {
key: "input_config"
value {
field_value {
string_value: "{\n \"splits\": [\n {\n \"name\": \"single_split\",\n \"pattern\": \"*\"\n }\n ]\n}"
}
}
}
parameters {
key: "output_config"
value {
field_value {
string_value: "{\n \"split_config\": {\n \"splits\": [\n {\n \"hash_buckets\": 3,\n \"name\": \"train\"\n },\n {\n \"hash_buckets\": 1,\n \"name\": \"eval\"\n }\n ]\n }\n}"
}
}
}
parameters {
key: "output_data_format"
value {
field_value {
int_value: 6
}
}
}
parameters {
key: "output_file_format"
value {
field_value {
int_value: 5
}
}
}
}
downstream_nodes: "Evaluator"
downstream_nodes: "StatisticsGen"
downstream_nodes: "Trainer"
downstream_nodes: "Transform"
execution_options {
caching_options {
}
}
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:select span and version = (0, None)
INFO:absl:latest span and version = (0, None)
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 1
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=1, input_dict={}, output_dict=defaultdict(<class 'list'>, {'examples': [Artifact(artifact: uri: "pipelines/sample-pipeline/CsvExampleGen/examples/1"
custom_properties {
key: "input_fingerprint"
value {
string_value: "split:single_split,num_files:2,total_bytes:6956,xor_checksum:0,sum_checksum:3276503522"
}
}
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:CsvExampleGen:examples:0"
}
}
custom_properties {
key: "span"
value {
int_value: 0
}
}
, artifact_type: name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)]}), exec_properties={'output_config': '{\n "split_config": {\n "splits": [\n {\n "hash_buckets": 3,\n "name": "train"\n },\n {\n "hash_buckets": 1,\n "name": "eval"\n }\n ]\n }\n}', 'output_file_format': 5, 'output_data_format': 6, 'input_base': 'saved_data', 'input_config': '{\n "splits": [\n {\n "name": "single_split",\n "pattern": "*"\n }\n ]\n}', 'span': 0, 'version': None, 'input_fingerprint': 'split:single_split,num_files:2,total_bytes:6956,xor_checksum:0,sum_checksum:3276503522'}, execution_output_uri='pipelines/sample-pipeline/CsvExampleGen/.system/executor_execution/1/executor_output.pb', stateful_working_dir='pipelines/sample-pipeline/CsvExampleGen/.system/stateful_working_dir/2021-11-30T06:15:53.934297', tmp_dir='pipelines/sample-pipeline/CsvExampleGen/.system/executor_execution/1/.temp/', pipeline_node=node_info {
[... pipeline_node proto omitted; identical to the "Running launcher for node_info" block for CsvExampleGen above ...]
, pipeline_info=id: "sample-pipeline"
, pipeline_run_id='2021-11-30T06:15:53.934297')
INFO:absl:Generating examples.
INFO:absl:Processing input csv data saved_data/* to TFExample.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
INFO:absl:Examples generated.
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 1 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'examples': [Artifact(artifact: uri: "pipelines/sample-pipeline/CsvExampleGen/examples/1"
custom_properties {
key: "input_fingerprint"
value {
string_value: "split:single_split,num_files:2,total_bytes:6956,xor_checksum:0,sum_checksum:3276503522"
}
}
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:CsvExampleGen:examples:0"
}
}
custom_properties {
key: "span"
value {
int_value: 0
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)]}) for execution 1
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component CsvExampleGen is finished.
INFO:absl:Component latest_blessed_model_resolver is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.dsl.components.common.resolver.Resolver"
}
id: "latest_blessed_model_resolver"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.latest_blessed_model_resolver"
}
}
}
}
inputs {
inputs {
key: "model"
value {
channels {
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
artifact_query {
type {
name: "Model"
}
}
}
}
}
inputs {
key: "model_blessings"
value {
channels {
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
artifact_query {
type {
name: "ModelBlessing"
}
}
}
}
}
resolver_config {
resolver_steps {
class_path: "tfx.dsl.input_resolution.strategies.latest_blessed_model_strategy.LatestBlessedModelStrategy"
config_json: "{}"
input_keys: "model"
input_keys: "model_blessings"
}
}
}
downstream_nodes: "Evaluator"
execution_options {
caching_options {
}
}
INFO:absl:Running as an resolver node.
INFO:absl:MetadataStore with DB connection initialized
WARNING:absl:Artifact type Model is not found in MLMD.
WARNING:absl:Artifact type ModelBlessing is not found in MLMD.
INFO:absl:Component latest_blessed_model_resolver is finished.
INFO:absl:Component schema_importer is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.dsl.components.common.importer.Importer"
}
id: "schema_importer"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.schema_importer"
}
}
}
}
outputs {
outputs {
key: "result"
value {
artifact_spec {
type {
name: "Schema"
}
}
}
}
}
parameters {
parameters {
key: "artifact_uri"
value {
field_value {
string_value: "pipelines/sample-pipeline/schema"
}
}
}
parameters {
key: "reimport"
value {
field_value {
int_value: 0
}
}
}
}
downstream_nodes: "ExampleValidator"
downstream_nodes: "Transform"
execution_options {
caching_options {
}
}
INFO:absl:Running as an importer node.
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Processing source uri: pipelines/sample-pipeline/schema, properties: {}, custom_properties: {}
INFO:absl:Component schema_importer is finished.
INFO:absl:Component StatisticsGen is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.components.statistics_gen.component.StatisticsGen"
}
id: "StatisticsGen"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.StatisticsGen"
}
}
}
}
inputs {
inputs {
key: "examples"
value {
channels {
producer_node_query {
id: "CsvExampleGen"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.CsvExampleGen"
}
}
}
artifact_query {
type {
name: "Examples"
}
}
output_key: "examples"
}
min_count: 1
}
}
}
outputs {
outputs {
key: "statistics"
value {
artifact_spec {
type {
name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
}
}
}
}
}
parameters {
parameters {
key: "exclude_splits"
value {
field_value {
string_value: "[]"
}
}
}
}
upstream_nodes: "CsvExampleGen"
downstream_nodes: "ExampleValidator"
execution_options {
caching_options {
}
}
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 4
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=4, input_dict={'examples': [Artifact(artifact: id: 1
type_id: 15
uri: "pipelines/sample-pipeline/CsvExampleGen/examples/1"
properties {
key: "split_names"
value {
string_value: "[\"train\", \"eval\"]"
}
}
custom_properties {
key: "file_format"
value {
string_value: "tfrecords_gzip"
}
}
custom_properties {
key: "input_fingerprint"
value {
string_value: "split:single_split,num_files:2,total_bytes:6956,xor_checksum:0,sum_checksum:3276503522"
}
}
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:CsvExampleGen:examples:0"
}
}
custom_properties {
key: "payload_format"
value {
string_value: "FORMAT_TF_EXAMPLE"
}
}
custom_properties {
key: "span"
value {
int_value: 0
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638252955751
last_update_time_since_epoch: 1638252955751
, artifact_type: id: 15
name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)]}, output_dict=defaultdict(<class 'list'>, {'statistics': [Artifact(artifact: uri: "pipelines/sample-pipeline/StatisticsGen/statistics/4"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:StatisticsGen:statistics:0"
}
}
, artifact_type: name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)]}), exec_properties={'exclude_splits': '[]'}, execution_output_uri='pipelines/sample-pipeline/StatisticsGen/.system/executor_execution/4/executor_output.pb', stateful_working_dir='pipelines/sample-pipeline/StatisticsGen/.system/stateful_working_dir/2021-11-30T06:15:53.934297', tmp_dir='pipelines/sample-pipeline/StatisticsGen/.system/executor_execution/4/.temp/', pipeline_node=node_info {
[... pipeline_node proto omitted; identical to the "Running launcher for node_info" block for StatisticsGen above ...]
, pipeline_info=id: "sample-pipeline"
, pipeline_run_id='2021-11-30T06:15:53.934297')
INFO:absl:Generating statistics for split train.
INFO:absl:Statistics for split train written to pipelines/sample-pipeline/StatisticsGen/statistics/4/Split-train.
INFO:absl:Generating statistics for split eval.
INFO:absl:Statistics for split eval written to pipelines/sample-pipeline/StatisticsGen/statistics/4/Split-eval.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 4 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'statistics': [Artifact(artifact: uri: "pipelines/sample-pipeline/StatisticsGen/statistics/4"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:StatisticsGen:statistics:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)]}) for execution 4
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component StatisticsGen is finished.
INFO:absl:Component Transform is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.components.transform.component.Transform"
}
id: "Transform"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Transform"
}
}
}
}
inputs {
inputs {
key: "examples"
value {
channels {
producer_node_query {
id: "CsvExampleGen"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.CsvExampleGen"
}
}
}
artifact_query {
type {
name: "Examples"
}
}
output_key: "examples"
}
min_count: 1
}
}
inputs {
key: "schema"
value {
channels {
producer_node_query {
id: "schema_importer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.schema_importer"
}
}
}
artifact_query {
type {
name: "Schema"
}
}
output_key: "result"
}
min_count: 1
}
}
}
outputs {
outputs {
key: "post_transform_anomalies"
value {
artifact_spec {
type {
name: "ExampleAnomalies"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
}
}
}
}
outputs {
key: "post_transform_schema"
value {
artifact_spec {
type {
name: "Schema"
}
}
}
}
outputs {
key: "post_transform_stats"
value {
artifact_spec {
type {
name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
}
}
}
}
outputs {
key: "pre_transform_schema"
value {
artifact_spec {
type {
name: "Schema"
}
}
}
}
outputs {
key: "pre_transform_stats"
value {
artifact_spec {
type {
name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
}
}
}
}
outputs {
key: "transform_graph"
value {
artifact_spec {
type {
name: "TransformGraph"
}
}
}
}
outputs {
key: "transformed_examples"
value {
artifact_spec {
type {
name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
}
}
}
}
outputs {
key: "updated_analyzer_cache"
value {
artifact_spec {
type {
name: "TransformCache"
}
}
}
}
}
parameters {
parameters {
key: "custom_config"
value {
field_value {
string_value: "null"
}
}
}
parameters {
key: "disable_statistics"
value {
field_value {
int_value: 0
}
}
}
parameters {
key: "force_tf_compat_v1"
value {
field_value {
int_value: 0
}
}
}
parameters {
key: "module_path"
value {
field_value {
string_value: "transform@pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl"
}
}
}
}
upstream_nodes: "CsvExampleGen"
upstream_nodes: "schema_importer"
downstream_nodes: "Trainer"
execution_options {
caching_options {
}
}
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 5
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=5, input_dict={'schema': [Artifact(artifact: id: 2
type_id: 18
uri: "pipelines/sample-pipeline/schema"
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638252955888
last_update_time_since_epoch: 1638252955888
, artifact_type: id: 18
name: "Schema"
)], 'examples': [Artifact(artifact: id: 1
type_id: 15
uri: "pipelines/sample-pipeline/CsvExampleGen/examples/1"
properties {
key: "split_names"
value {
string_value: "[\"train\", \"eval\"]"
}
}
custom_properties {
key: "file_format"
value {
string_value: "tfrecords_gzip"
}
}
custom_properties {
key: "input_fingerprint"
value {
string_value: "split:single_split,num_files:2,total_bytes:6956,xor_checksum:0,sum_checksum:3276503522"
}
}
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:CsvExampleGen:examples:0"
}
}
custom_properties {
key: "payload_format"
value {
string_value: "FORMAT_TF_EXAMPLE"
}
}
custom_properties {
key: "span"
value {
int_value: 0
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638252955751
last_update_time_since_epoch: 1638252955751
, artifact_type: id: 15
name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)]}, output_dict=defaultdict(<class 'list'>, {'transformed_examples': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/transformed_examples/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:transformed_examples:0"
}
}
, artifact_type: name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)], 'updated_analyzer_cache': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/updated_analyzer_cache/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:updated_analyzer_cache:0"
}
}
, artifact_type: name: "TransformCache"
)], 'transform_graph': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/transform_graph/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:transform_graph:0"
}
}
, artifact_type: name: "TransformGraph"
)], 'post_transform_anomalies': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/post_transform_anomalies/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:post_transform_anomalies:0"
}
}
, artifact_type: name: "ExampleAnomalies"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)], 'pre_transform_schema': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/pre_transform_schema/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:pre_transform_schema:0"
}
}
, artifact_type: name: "Schema"
)], 'post_transform_schema': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/post_transform_schema/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:post_transform_schema:0"
}
}
, artifact_type: name: "Schema"
)], 'pre_transform_stats': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/pre_transform_stats/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:pre_transform_stats:0"
}
}
, artifact_type: name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)], 'post_transform_stats': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/post_transform_stats/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:post_transform_stats:0"
}
}
, artifact_type: name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)]}), exec_properties={'disable_statistics': 0, 'force_tf_compat_v1': 0, 'custom_config': 'null', 'module_path': 'transform@pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'}, execution_output_uri='pipelines/sample-pipeline/Transform/.system/executor_execution/5/executor_output.pb', stateful_working_dir='pipelines/sample-pipeline/Transform/.system/stateful_working_dir/2021-11-30T06:15:53.934297', tmp_dir='pipelines/sample-pipeline/Transform/.system/executor_execution/5/.temp/', pipeline_node=node_info {
[... pipeline_node proto omitted; identical to the "Running launcher for node_info" block for Transform above ...]
, pipeline_info=id: "sample-pipeline"
, pipeline_run_id='2021-11-30T06:15:53.934297')
INFO:absl:Analyze the 'train' split and transform all splits when splits_config is not set.
INFO:absl:udf_utils.get_fn {'module_file': None, 'module_path': 'transform@pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl', 'preprocessing_fn': None} 'preprocessing_fn'
INFO:absl:Installing 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/bin/python3', '-m', 'pip', 'install', '--target', '/tmp/tmpzt10plmn', 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl']
INFO:absl:Successfully installed 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'.
INFO:absl:udf_utils.get_fn {'module_file': None, 'module_path': 'transform@pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl', 'stats_options_updater_fn': None} 'stats_options_updater_fn'
INFO:absl:Installing 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/bin/python3', '-m', 'pip', 'install', '--target', '/tmp/tmpye1t0qvk', 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl']
INFO:absl:Successfully installed 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'.
INFO:absl:Installing 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/bin/python3', '-m', 'pip', 'install', '--target', '/tmp/tmpa8aj57dm', 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl']
INFO:absl:Successfully installed 'pipelines/sample-pipeline/_wheels/tfx_user_code_Transform-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'.
INFO:absl:Feature Gender has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Age has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature EstimatedSalary has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Purchased has a shape dim {
size: 1
}
. Setting to DenseTensor.
WARNING:root:This output type hint will be ignored and not used for type-checking purposes. Typically, output type hints for a PTransform are single (or nested) types wrapped by a PCollection, PDone, or None. Got: Tuple[Dict[str, Union[NoneType, _Dataset]], Union[Dict[str, Dict[str, PCollection]], NoneType], int] instead.
INFO:absl:Feature Gender has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Age has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature EstimatedSalary has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Purchased has a shape dim {
size: 1
}
. Setting to DenseTensor.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
INFO:tensorflow:Assets written to: pipelines/sample-pipeline/Transform/transform_graph/5/.temp_path/tftransform_tmp/449ca0cbece04589a94073520e23c8a8/assets
INFO:tensorflow:tensorflow_text is not available.
INFO:tensorflow:struct2tensor is not available.
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 5 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'transformed_examples': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/transformed_examples/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:transformed_examples:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)], 'updated_analyzer_cache': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/updated_analyzer_cache/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:updated_analyzer_cache:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "TransformCache"
)], 'transform_graph': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/transform_graph/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:transform_graph:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "TransformGraph"
)], 'post_transform_anomalies': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/post_transform_anomalies/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:post_transform_anomalies:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "ExampleAnomalies"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)], 'pre_transform_schema': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/pre_transform_schema/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:pre_transform_schema:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "Schema"
)], 'post_transform_schema': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/post_transform_schema/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:post_transform_schema:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "Schema"
)], 'pre_transform_stats': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/pre_transform_stats/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:pre_transform_stats:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)], 'post_transform_stats': [Artifact(artifact: uri: "pipelines/sample-pipeline/Transform/post_transform_stats/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:post_transform_stats:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)]}) for execution 5
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component Transform is finished.
INFO:absl:Component ExampleValidator is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.components.example_validator.component.ExampleValidator"
}
id: "ExampleValidator"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.ExampleValidator"
}
}
}
}
inputs {
inputs {
key: "schema"
value {
channels {
producer_node_query {
id: "schema_importer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.schema_importer"
}
}
}
artifact_query {
type {
name: "Schema"
}
}
output_key: "result"
}
min_count: 1
}
}
inputs {
key: "statistics"
value {
channels {
producer_node_query {
id: "StatisticsGen"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.StatisticsGen"
}
}
}
artifact_query {
type {
name: "ExampleStatistics"
}
}
output_key: "statistics"
}
min_count: 1
}
}
}
outputs {
outputs {
key: "anomalies"
value {
artifact_spec {
type {
name: "ExampleAnomalies"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
}
}
}
}
}
parameters {
parameters {
key: "exclude_splits"
value {
field_value {
string_value: "[]"
}
}
}
}
upstream_nodes: "StatisticsGen"
upstream_nodes: "schema_importer"
execution_options {
caching_options {
}
}
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 6
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=6, input_dict={'schema': [Artifact(artifact: id: 2
type_id: 18
uri: "pipelines/sample-pipeline/schema"
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638252955888
last_update_time_since_epoch: 1638252955888
, artifact_type: id: 18
name: "Schema"
)], 'statistics': [Artifact(artifact: id: 3
type_id: 20
uri: "pipelines/sample-pipeline/StatisticsGen/statistics/4"
properties {
key: "split_names"
value {
string_value: "[\"train\", \"eval\"]"
}
}
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:StatisticsGen:statistics:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638252959610
last_update_time_since_epoch: 1638252959610
, artifact_type: id: 20
name: "ExampleStatistics"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)]}, output_dict=defaultdict(<class 'list'>, {'anomalies': [Artifact(artifact: uri: "pipelines/sample-pipeline/ExampleValidator/anomalies/6"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:ExampleValidator:anomalies:0"
}
}
, artifact_type: name: "ExampleAnomalies"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)]}), exec_properties={'exclude_splits': '[]'}, execution_output_uri='pipelines/sample-pipeline/ExampleValidator/.system/executor_execution/6/executor_output.pb', stateful_working_dir='pipelines/sample-pipeline/ExampleValidator/.system/stateful_working_dir/2021-11-30T06:15:53.934297', tmp_dir='pipelines/sample-pipeline/ExampleValidator/.system/executor_execution/6/.temp/', pipeline_node=node_info {
[... pipeline_node proto omitted; identical to the "Running launcher for node_info" block for ExampleValidator above ...]
, pipeline_info=id: "sample-pipeline"
, pipeline_run_id='2021-11-30T06:15:53.934297')
INFO:absl:Validating schema against the computed statistics for split train.
INFO:absl:Validation complete for split train. Anomalies written to pipelines/sample-pipeline/ExampleValidator/anomalies/6/Split-train.
INFO:absl:Validating schema against the computed statistics for split eval.
INFO:absl:Validation complete for split eval. Anomalies written to pipelines/sample-pipeline/ExampleValidator/anomalies/6/Split-eval.
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 6 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'anomalies': [Artifact(artifact: uri: "pipelines/sample-pipeline/ExampleValidator/anomalies/6"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:ExampleValidator:anomalies:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "ExampleAnomalies"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
)]}) for execution 6
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component ExampleValidator is finished.
INFO:absl:Component Trainer is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.components.trainer.component.Trainer"
}
id: "Trainer"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Trainer"
}
}
}
}
inputs {
inputs {
key: "examples"
value {
channels {
producer_node_query {
id: "CsvExampleGen"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.CsvExampleGen"
}
}
}
artifact_query {
type {
name: "Examples"
}
}
output_key: "examples"
}
min_count: 1
}
}
inputs {
key: "transform_graph"
value {
channels {
producer_node_query {
id: "Transform"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Transform"
}
}
}
artifact_query {
type {
name: "TransformGraph"
}
}
output_key: "transform_graph"
}
}
}
}
outputs {
outputs {
key: "model"
value {
artifact_spec {
type {
name: "Model"
}
}
}
}
outputs {
key: "model_run"
value {
artifact_spec {
type {
name: "ModelRun"
}
}
}
}
}
parameters {
parameters {
key: "custom_config"
value {
field_value {
string_value: "null"
}
}
}
parameters {
key: "eval_args"
value {
field_value {
string_value: "{\n \"num_steps\": 5\n}"
}
}
}
parameters {
key: "module_path"
value {
field_value {
string_value: "trainer@pipelines/sample-pipeline/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl"
}
}
}
parameters {
key: "train_args"
value {
field_value {
string_value: "{\n \"num_steps\": 100\n}"
}
}
}
}
upstream_nodes: "CsvExampleGen"
upstream_nodes: "Transform"
downstream_nodes: "Evaluator"
downstream_nodes: "Pusher"
execution_options {
caching_options {
}
}
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 7
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=7, input_dict={'examples': [Artifact(artifact: id: 1
type_id: 15
uri: "pipelines/sample-pipeline/CsvExampleGen/examples/1"
properties {
key: "split_names"
value {
string_value: "[\"train\", \"eval\"]"
}
}
custom_properties {
key: "file_format"
value {
string_value: "tfrecords_gzip"
}
}
custom_properties {
key: "input_fingerprint"
value {
string_value: "split:single_split,num_files:2,total_bytes:6956,xor_checksum:0,sum_checksum:3276503522"
}
}
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:CsvExampleGen:examples:0"
}
}
custom_properties {
key: "payload_format"
value {
string_value: "FORMAT_TF_EXAMPLE"
}
}
custom_properties {
key: "span"
value {
int_value: 0
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638252955751
last_update_time_since_epoch: 1638252955751
, artifact_type: id: 15
name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)], 'transform_graph': [Artifact(artifact: id: 6
type_id: 23
uri: "pipelines/sample-pipeline/Transform/transform_graph/5"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Transform:transform_graph:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638252978582
last_update_time_since_epoch: 1638252978582
, artifact_type: id: 23
name: "TransformGraph"
)]}, output_dict=defaultdict(<class 'list'>, {'model_run': [Artifact(artifact: uri: "pipelines/sample-pipeline/Trainer/model_run/7"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Trainer:model_run:0"
}
}
, artifact_type: name: "ModelRun"
)], 'model': [Artifact(artifact: uri: "pipelines/sample-pipeline/Trainer/model/7"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Trainer:model:0"
}
}
, artifact_type: name: "Model"
)]}), exec_properties={'custom_config': 'null', 'train_args': '{\n "num_steps": 100\n}', 'module_path': 'trainer@pipelines/sample-pipeline/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl', 'eval_args': '{\n "num_steps": 5\n}'}, execution_output_uri='pipelines/sample-pipeline/Trainer/.system/executor_execution/7/executor_output.pb', stateful_working_dir='pipelines/sample-pipeline/Trainer/.system/stateful_working_dir/2021-11-30T06:15:53.934297', tmp_dir='pipelines/sample-pipeline/Trainer/.system/executor_execution/7/.temp/', pipeline_node=node_info {
[... pipeline_node proto omitted; identical to the "Running launcher for node_info" block for Trainer above ...]
, pipeline_info=id: "sample-pipeline"
, pipeline_run_id='2021-11-30T06:15:53.934297')
INFO:absl:Train on the 'train' split when train_args.splits is not set.
INFO:absl:Evaluate on the 'eval' split when eval_args.splits is not set.
INFO:absl:udf_utils.get_fn {'custom_config': 'null', 'train_args': '{\n "num_steps": 100\n}', 'module_path': 'trainer@pipelines/sample-pipeline/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl', 'eval_args': '{\n "num_steps": 5\n}'} 'run_fn'
INFO:absl:Installing 'pipelines/sample-pipeline/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl' to a temporary directory.
INFO:absl:Executing: ['/usr/bin/python3', '-m', 'pip', 'install', '--target', '/tmp/tmpedvn2r86', 'pipelines/sample-pipeline/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl']
INFO:absl:Successfully installed 'pipelines/sample-pipeline/_wheels/tfx_user_code_Trainer-0.0+e743a58cd10fd86e3c3a1181d29fc5e40f1ecc0d04cf50e7103fce5e259a0af4-py3-none-any.whl'.
INFO:absl:Training model.
INFO:absl:Feature Gender has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Age has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature EstimatedSalary has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:absl:Feature Purchased has a shape dim {
size: 1
}
. Setting to DenseTensor.
INFO:tensorflow:tensorflow_text is not available.
INFO:tensorflow:struct2tensor is not available.
INFO:absl:Model: "model_3"
INFO:absl:__________________________________________________________________________________________________
INFO:absl: Layer (type) Output Shape Param # Connected to
INFO:absl:==================================================================================================
INFO:absl: Age (InputLayer) [(None, 1)] 0 []
INFO:absl:
INFO:absl: EstimatedSalary (InputLayer) [(None, 1)] 0 []
INFO:absl:
INFO:absl: Gender (InputLayer) [(None, 1)] 0 []
INFO:absl:
INFO:absl: concatenate_3 (Concatenate) (None, 3) 0 ['Age[0][0]',
INFO:absl: 'EstimatedSalary[0][0]',
INFO:absl: 'Gender[0][0]']
INFO:absl:
INFO:absl: dense_9 (Dense) (None, 8) 32 ['concatenate_3[0][0]']
INFO:absl:
INFO:absl: dense_10 (Dense) (None, 8) 72 ['dense_9[0][0]']
INFO:absl:
INFO:absl: dense_11 (Dense) (None, 1) 9 ['dense_10[0][0]']
INFO:absl:
INFO:absl:==================================================================================================
INFO:absl:Total params: 113
INFO:absl:Trainable params: 113
INFO:absl:Non-trainable params: 0
INFO:absl:__________________________________________________________________________________________________
Epoch 1/50
100/100 [==============================] - 1s 6ms/step - loss: 611.6609 - binary_accuracy: 0.5163 - val_loss: 234.0648 - val_binary_accuracy: 0.3000
Epoch 2/50
100/100 [==============================] - 0s 5ms/step - loss: 532.1701 - binary_accuracy: 0.5069 - val_loss: 378.9553 - val_binary_accuracy: 0.2812
Epoch 3/50
100/100 [==============================] - 0s 5ms/step - loss: 233.1967 - binary_accuracy: 0.5103 - val_loss: 159.2652 - val_binary_accuracy: 0.6875
Epoch 4/50
100/100 [==============================] - 0s 5ms/step - loss: 173.1308 - binary_accuracy: 0.5159 - val_loss: 491.4152 - val_binary_accuracy: 0.6938
Epoch 5/50
100/100 [==============================] - 0s 5ms/step - loss: 278.7457 - binary_accuracy: 0.5144 - val_loss: 213.2489 - val_binary_accuracy: 0.6875
Epoch 6/50
100/100 [==============================] - 1s 5ms/step - loss: 248.0846 - binary_accuracy: 0.5091 - val_loss: 31.9954 - val_binary_accuracy: 0.7000
Epoch 7/50
100/100 [==============================] - 1s 5ms/step - loss: 118.4426 - binary_accuracy: 0.5125 - val_loss: 57.3558 - val_binary_accuracy: 0.7250
Epoch 8/50
100/100 [==============================] - 1s 6ms/step - loss: 125.5891 - binary_accuracy: 0.5041 - val_loss: 44.1153 - val_binary_accuracy: 0.6562
Epoch 9/50
100/100 [==============================] - 1s 5ms/step - loss: 172.0170 - binary_accuracy: 0.4997 - val_loss: 521.1081 - val_binary_accuracy: 0.7000
Epoch 10/50
100/100 [==============================] - 1s 5ms/step - loss: 178.5008 - binary_accuracy: 0.5128 - val_loss: 168.4674 - val_binary_accuracy: 0.2937
Epoch 11/50
100/100 [==============================] - 1s 6ms/step - loss: 118.9339 - binary_accuracy: 0.5206 - val_loss: 125.9589 - val_binary_accuracy: 0.3000
Epoch 12/50
100/100 [==============================] - 1s 6ms/step - loss: 142.0052 - binary_accuracy: 0.4863 - val_loss: 344.8056 - val_binary_accuracy: 0.3187
Epoch 13/50
100/100 [==============================] - 1s 6ms/step - loss: 153.8785 - binary_accuracy: 0.5075 - val_loss: 24.1027 - val_binary_accuracy: 0.6875
Epoch 14/50
100/100 [==============================] - 1s 5ms/step - loss: 99.7250 - binary_accuracy: 0.5097 - val_loss: 41.9368 - val_binary_accuracy: 0.7000
Epoch 15/50
100/100 [==============================] - 1s 6ms/step - loss: 153.3181 - binary_accuracy: 0.5013 - val_loss: 24.9935 - val_binary_accuracy: 0.7000
Epoch 16/50
100/100 [==============================] - 1s 5ms/step - loss: 127.3726 - binary_accuracy: 0.5075 - val_loss: 82.0845 - val_binary_accuracy: 0.7000
Epoch 17/50
100/100 [==============================] - 1s 5ms/step - loss: 162.5916 - binary_accuracy: 0.5116 - val_loss: 14.4302 - val_binary_accuracy: 0.6875
Epoch 18/50
100/100 [==============================] - 1s 6ms/step - loss: 67.6229 - binary_accuracy: 0.5125 - val_loss: 115.1465 - val_binary_accuracy: 0.2688
Epoch 19/50
100/100 [==============================] - 1s 5ms/step - loss: 88.8526 - binary_accuracy: 0.5031 - val_loss: 50.1166 - val_binary_accuracy: 0.6938
Epoch 20/50
100/100 [==============================] - 0s 5ms/step - loss: 64.0974 - binary_accuracy: 0.5206 - val_loss: 4.4420 - val_binary_accuracy: 0.7000
Epoch 21/50
100/100 [==============================] - 0s 5ms/step - loss: 64.9050 - binary_accuracy: 0.5150 - val_loss: 30.2708 - val_binary_accuracy: 0.7125
Epoch 22/50
100/100 [==============================] - 0s 5ms/step - loss: 71.9325 - binary_accuracy: 0.5053 - val_loss: 97.7169 - val_binary_accuracy: 0.3187
Epoch 23/50
100/100 [==============================] - 0s 5ms/step - loss: 92.2252 - binary_accuracy: 0.5166 - val_loss: 228.5210 - val_binary_accuracy: 0.2875
Epoch 24/50
100/100 [==============================] - 0s 4ms/step - loss: 76.8550 - binary_accuracy: 0.5125 - val_loss: 0.9654 - val_binary_accuracy: 0.7437
Epoch 25/50
100/100 [==============================] - 1s 5ms/step - loss: 36.4056 - binary_accuracy: 0.5025 - val_loss: 26.5662 - val_binary_accuracy: 0.3063
Epoch 26/50
100/100 [==============================] - 0s 5ms/step - loss: 45.4737 - binary_accuracy: 0.5131 - val_loss: 36.3379 - val_binary_accuracy: 0.2937
Epoch 27/50
100/100 [==============================] - 0s 5ms/step - loss: 28.1423 - binary_accuracy: 0.5231 - val_loss: 24.1019 - val_binary_accuracy: 0.2875
Epoch 28/50
100/100 [==============================] - 1s 6ms/step - loss: 51.8974 - binary_accuracy: 0.4916 - val_loss: 23.5530 - val_binary_accuracy: 0.6750
Epoch 29/50
100/100 [==============================] - 1s 5ms/step - loss: 46.3152 - binary_accuracy: 0.5184 - val_loss: 51.3735 - val_binary_accuracy: 0.2937
Epoch 30/50
100/100 [==============================] - 1s 6ms/step - loss: 28.4707 - binary_accuracy: 0.5172 - val_loss: 27.8268 - val_binary_accuracy: 0.7063
Epoch 31/50
100/100 [==============================] - 0s 5ms/step - loss: 35.5179 - binary_accuracy: 0.5191 - val_loss: 8.3424 - val_binary_accuracy: 0.3688
Epoch 32/50
100/100 [==============================] - 0s 5ms/step - loss: 35.3719 - binary_accuracy: 0.5144 - val_loss: 5.7474 - val_binary_accuracy: 0.7125
Epoch 33/50
100/100 [==============================] - 1s 6ms/step - loss: 20.2015 - binary_accuracy: 0.5169 - val_loss: 16.7679 - val_binary_accuracy: 0.2937
Epoch 34/50
100/100 [==============================] - 1s 6ms/step - loss: 22.1189 - binary_accuracy: 0.5275 - val_loss: 18.3843 - val_binary_accuracy: 0.6938
Epoch 35/50
100/100 [==============================] - 0s 5ms/step - loss: 18.8791 - binary_accuracy: 0.5159 - val_loss: 10.9875 - val_binary_accuracy: 0.3187
Epoch 36/50
100/100 [==============================] - 1s 5ms/step - loss: 20.6867 - binary_accuracy: 0.5141 - val_loss: 4.2972 - val_binary_accuracy: 0.3625
Epoch 37/50
100/100 [==============================] - 0s 4ms/step - loss: 23.0144 - binary_accuracy: 0.5303 - val_loss: 18.8749 - val_binary_accuracy: 0.7125
Epoch 38/50
100/100 [==============================] - 1s 5ms/step - loss: 23.6652 - binary_accuracy: 0.5009 - val_loss: 41.8353 - val_binary_accuracy: 0.7000
Epoch 39/50
100/100 [==============================] - 1s 5ms/step - loss: 17.1090 - binary_accuracy: 0.5094 - val_loss: 1.3267 - val_binary_accuracy: 0.5250
Epoch 40/50
100/100 [==============================] - 1s 5ms/step - loss: 13.8181 - binary_accuracy: 0.5509 - val_loss: 0.8573 - val_binary_accuracy: 0.7125
Epoch 41/50
100/100 [==============================] - 0s 5ms/step - loss: 12.8407 - binary_accuracy: 0.5337 - val_loss: 8.0226 - val_binary_accuracy: 0.7250
Epoch 42/50
100/100 [==============================] - 0s 5ms/step - loss: 17.2855 - binary_accuracy: 0.5125 - val_loss: 9.4283 - val_binary_accuracy: 0.3688
Epoch 43/50
100/100 [==============================] - 0s 5ms/step - loss: 7.8038 - binary_accuracy: 0.5522 - val_loss: 16.8740 - val_binary_accuracy: 0.3063
Epoch 44/50
100/100 [==============================] - 1s 5ms/step - loss: 19.7426 - binary_accuracy: 0.5356 - val_loss: 37.1366 - val_binary_accuracy: 0.2688
Epoch 45/50
100/100 [==============================] - 1s 5ms/step - loss: 17.5712 - binary_accuracy: 0.5394 - val_loss: 8.8076 - val_binary_accuracy: 0.3750
Epoch 46/50
100/100 [==============================] - 1s 6ms/step - loss: 14.7252 - binary_accuracy: 0.5188 - val_loss: 0.7154 - val_binary_accuracy: 0.8188
Epoch 47/50
100/100 [==============================] - 0s 5ms/step - loss: 8.1625 - binary_accuracy: 0.5450 - val_loss: 10.7563 - val_binary_accuracy: 0.3375
Epoch 48/50
100/100 [==============================] - 1s 5ms/step - loss: 6.6818 - binary_accuracy: 0.5550 - val_loss: 1.1332 - val_binary_accuracy: 0.6938
Epoch 49/50
100/100 [==============================] - 0s 5ms/step - loss: 14.1747 - binary_accuracy: 0.5131 - val_loss: 9.7920 - val_binary_accuracy: 0.6812
Epoch 50/50
100/100 [==============================] - 1s 5ms/step - loss: 6.3901 - binary_accuracy: 0.5481 - val_loss: 4.6488 - val_binary_accuracy: 0.7063
INFO:tensorflow:Assets written to: pipelines/sample-pipeline/Trainer/model/7/Format-Serving/assets
INFO:absl:Training complete. Model written to pipelines/sample-pipeline/Trainer/model/7/Format-Serving. ModelRun written to pipelines/sample-pipeline/Trainer/model_run/7
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 7 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'model_run': [Artifact(artifact: uri: "pipelines/sample-pipeline/Trainer/model_run/7"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Trainer:model_run:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "ModelRun"
)], 'model': [Artifact(artifact: uri: "pipelines/sample-pipeline/Trainer/model/7"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Trainer:model:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "Model"
)]}) for execution 7
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component Trainer is finished.
INFO:absl:Component Evaluator is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.components.evaluator.component.Evaluator"
}
id: "Evaluator"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Evaluator"
}
}
}
}
inputs {
inputs {
key: "baseline_model"
value {
channels {
producer_node_query {
id: "latest_blessed_model_resolver"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.latest_blessed_model_resolver"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
inputs {
key: "examples"
value {
channels {
producer_node_query {
id: "CsvExampleGen"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.CsvExampleGen"
}
}
}
artifact_query {
type {
name: "Examples"
}
}
output_key: "examples"
}
min_count: 1
}
}
inputs {
key: "model"
value {
channels {
producer_node_query {
id: "Trainer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Trainer"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
}
outputs {
outputs {
key: "blessing"
value {
artifact_spec {
type {
name: "ModelBlessing"
}
}
}
}
outputs {
key: "evaluation"
value {
artifact_spec {
type {
name: "ModelEvaluation"
}
}
}
}
}
parameters {
parameters {
key: "eval_config"
value {
field_value {
string_value: "{\n \"metrics_specs\": [\n {\n \"per_slice_thresholds\": {\n \"binary_accuracy\": {\n \"thresholds\": [\n {\n \"slicing_specs\": [\n {}\n ],\n \"threshold\": {\n \"change_threshold\": {\n \"absolute\": -1e-10,\n \"direction\": \"HIGHER_IS_BETTER\"\n },\n \"value_threshold\": {\n \"lower_bound\": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n \"model_specs\": [\n {\n \"label_key\": \"Purchased\",\n \"preprocessing_function_names\": [\n \"tft_layer\"\n ],\n \"signature_name\": \"serving_default\"\n }\n ],\n \"slicing_specs\": [\n {},\n {\n \"feature_keys\": [\n \"Gender\"\n ]\n }\n ]\n}"
}
}
}
parameters {
key: "example_splits"
value {
field_value {
string_value: "null"
}
}
}
parameters {
key: "fairness_indicator_thresholds"
value {
field_value {
string_value: "null"
}
}
}
}
upstream_nodes: "CsvExampleGen"
upstream_nodes: "Trainer"
upstream_nodes: "latest_blessed_model_resolver"
downstream_nodes: "Pusher"
execution_options {
caching_options {
}
}
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 8
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=8, input_dict={'baseline_model': [], 'model': [Artifact(artifact: id: 14
type_id: 28
uri: "pipelines/sample-pipeline/Trainer/model/7"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Trainer:model:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638253025772
last_update_time_since_epoch: 1638253025772
, artifact_type: id: 28
name: "Model"
)], 'examples': [Artifact(artifact: id: 1
type_id: 15
uri: "pipelines/sample-pipeline/CsvExampleGen/examples/1"
properties {
key: "split_names"
value {
string_value: "[\"train\", \"eval\"]"
}
}
custom_properties {
key: "file_format"
value {
string_value: "tfrecords_gzip"
}
}
custom_properties {
key: "input_fingerprint"
value {
string_value: "split:single_split,num_files:2,total_bytes:6956,xor_checksum:0,sum_checksum:3276503522"
}
}
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:CsvExampleGen:examples:0"
}
}
custom_properties {
key: "payload_format"
value {
string_value: "FORMAT_TF_EXAMPLE"
}
}
custom_properties {
key: "span"
value {
int_value: 0
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638252955751
last_update_time_since_epoch: 1638252955751
, artifact_type: id: 15
name: "Examples"
properties {
key: "span"
value: INT
}
properties {
key: "split_names"
value: STRING
}
properties {
key: "version"
value: INT
}
)]}, output_dict=defaultdict(<class 'list'>, {'blessing': [Artifact(artifact: uri: "pipelines/sample-pipeline/Evaluator/blessing/8"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Evaluator:blessing:0"
}
}
, artifact_type: name: "ModelBlessing"
)], 'evaluation': [Artifact(artifact: uri: "pipelines/sample-pipeline/Evaluator/evaluation/8"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Evaluator:evaluation:0"
}
}
, artifact_type: name: "ModelEvaluation"
)]}), exec_properties={'fairness_indicator_thresholds': 'null', 'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Purchased",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "Gender"\n ]\n }\n ]\n}', 'example_splits': 'null'}, execution_output_uri='pipelines/sample-pipeline/Evaluator/.system/executor_execution/8/executor_output.pb', stateful_working_dir='pipelines/sample-pipeline/Evaluator/.system/stateful_working_dir/2021-11-30T06:15:53.934297', tmp_dir='pipelines/sample-pipeline/Evaluator/.system/executor_execution/8/.temp/', pipeline_node=node_info {
type {
name: "tfx.components.evaluator.component.Evaluator"
}
id: "Evaluator"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Evaluator"
}
}
}
}
inputs {
inputs {
key: "baseline_model"
value {
channels {
producer_node_query {
id: "latest_blessed_model_resolver"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.latest_blessed_model_resolver"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
inputs {
key: "examples"
value {
channels {
producer_node_query {
id: "CsvExampleGen"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.CsvExampleGen"
}
}
}
artifact_query {
type {
name: "Examples"
}
}
output_key: "examples"
}
min_count: 1
}
}
inputs {
key: "model"
value {
channels {
producer_node_query {
id: "Trainer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Trainer"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
}
outputs {
outputs {
key: "blessing"
value {
artifact_spec {
type {
name: "ModelBlessing"
}
}
}
}
outputs {
key: "evaluation"
value {
artifact_spec {
type {
name: "ModelEvaluation"
}
}
}
}
}
parameters {
parameters {
key: "eval_config"
value {
field_value {
string_value: "{\n \"metrics_specs\": [\n {\n \"per_slice_thresholds\": {\n \"binary_accuracy\": {\n \"thresholds\": [\n {\n \"slicing_specs\": [\n {}\n ],\n \"threshold\": {\n \"change_threshold\": {\n \"absolute\": -1e-10,\n \"direction\": \"HIGHER_IS_BETTER\"\n },\n \"value_threshold\": {\n \"lower_bound\": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n \"model_specs\": [\n {\n \"label_key\": \"Purchased\",\n \"preprocessing_function_names\": [\n \"tft_layer\"\n ],\n \"signature_name\": \"serving_default\"\n }\n ],\n \"slicing_specs\": [\n {},\n {\n \"feature_keys\": [\n \"Gender\"\n ]\n }\n ]\n}"
}
}
}
parameters {
key: "example_splits"
value {
field_value {
string_value: "null"
}
}
}
parameters {
key: "fairness_indicator_thresholds"
value {
field_value {
string_value: "null"
}
}
}
}
upstream_nodes: "CsvExampleGen"
upstream_nodes: "Trainer"
upstream_nodes: "latest_blessed_model_resolver"
downstream_nodes: "Pusher"
execution_options {
caching_options {
}
}
, pipeline_info=id: "sample-pipeline"
, pipeline_run_id='2021-11-30T06:15:53.934297')
INFO:absl:udf_utils.get_fn {'fairness_indicator_thresholds': 'null', 'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Purchased",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "Gender"\n ]\n }\n ]\n}', 'example_splits': 'null'} 'custom_eval_shared_model'
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
signature_name: "serving_default"
label_key: "Purchased"
preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
feature_keys: "Gender"
}
metrics_specs {
per_slice_thresholds {
key: "binary_accuracy"
value {
thresholds {
slicing_specs {
}
threshold {
value_threshold {
lower_bound {
value: 0.7
}
}
}
}
}
}
}
INFO:absl:Using pipelines/sample-pipeline/Trainer/model/7/Format-Serving as model.
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d58ea9050> and <keras.engine.input_layer.InputLayer object at 0x7f3d5870b850>).
INFO:absl:The 'example_splits' parameter is not set, using 'eval' split.
INFO:absl:Evaluating model.
INFO:absl:udf_utils.get_fn {'fairness_indicator_thresholds': 'null', 'eval_config': '{\n "metrics_specs": [\n {\n "per_slice_thresholds": {\n "binary_accuracy": {\n "thresholds": [\n {\n "slicing_specs": [\n {}\n ],\n "threshold": {\n "change_threshold": {\n "absolute": -1e-10,\n "direction": "HIGHER_IS_BETTER"\n },\n "value_threshold": {\n "lower_bound": 0.7\n }\n }\n }\n ]\n }\n }\n }\n ],\n "model_specs": [\n {\n "label_key": "Purchased",\n "preprocessing_function_names": [\n "tft_layer"\n ],\n "signature_name": "serving_default"\n }\n ],\n "slicing_specs": [\n {},\n {\n "feature_keys": [\n "Gender"\n ]\n }\n ]\n}', 'example_splits': 'null'} 'custom_extractors'
INFO:absl:Request was made to ignore the baseline ModelSpec and any change thresholds. This is likely because a baseline model was not provided: updated_config=
model_specs {
signature_name: "serving_default"
label_key: "Purchased"
preprocessing_function_names: "tft_layer"
}
slicing_specs {
}
slicing_specs {
feature_keys: "Gender"
}
metrics_specs {
model_names: ""
per_slice_thresholds {
key: "binary_accuracy"
value {
thresholds {
slicing_specs {
}
threshold {
value_threshold {
lower_bound {
value: 0.7
}
}
}
}
}
}
}
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d5a419f90> and <keras.engine.input_layer.InputLayer object at 0x7f3d5d0b42d0>).
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.7 interpreter.
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d58bbaf50> and <keras.engine.input_layer.InputLayer object at 0x7f3d591779d0>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d57ac76d0> and <keras.engine.input_layer.InputLayer object at 0x7f3d57adced0>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d578869d0> and <keras.engine.input_layer.InputLayer object at 0x7f3d57890e10>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d57673b90> and <keras.engine.input_layer.InputLayer object at 0x7f3d5766f250>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d573e0f90> and <keras.engine.input_layer.InputLayer object at 0x7f3d57426590>).
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d56192250> and <keras.engine.input_layer.InputLayer object at 0x7f3d561922d0>).
INFO:absl:Evaluation complete. Results written to pipelines/sample-pipeline/Evaluator/evaluation/8.
INFO:absl:Checking validation results.
INFO:absl:Blessing result False written to pipelines/sample-pipeline/Evaluator/blessing/8.
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 8 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'blessing': [Artifact(artifact: uri: "pipelines/sample-pipeline/Evaluator/blessing/8"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Evaluator:blessing:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "ModelBlessing"
)], 'evaluation': [Artifact(artifact: uri: "pipelines/sample-pipeline/Evaluator/evaluation/8"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Evaluator:evaluation:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "ModelEvaluation"
)]}) for execution 8
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component Evaluator is finished.
INFO:absl:Component Pusher is running.
INFO:absl:Running launcher for node_info {
type {
name: "tfx.components.pusher.component.Pusher"
}
id: "Pusher"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Pusher"
}
}
}
}
inputs {
inputs {
key: "model"
value {
channels {
producer_node_query {
id: "Trainer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Trainer"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
inputs {
key: "model_blessing"
value {
channels {
producer_node_query {
id: "Evaluator"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Evaluator"
}
}
}
artifact_query {
type {
name: "ModelBlessing"
}
}
output_key: "blessing"
}
}
}
}
outputs {
outputs {
key: "pushed_model"
value {
artifact_spec {
type {
name: "PushedModel"
}
}
}
}
}
parameters {
parameters {
key: "custom_config"
value {
field_value {
string_value: "null"
}
}
}
parameters {
key: "push_destination"
value {
field_value {
string_value: "{\n \"filesystem\": {\n \"base_directory\": \"model\"\n }\n}"
}
}
}
}
upstream_nodes: "Evaluator"
upstream_nodes: "Trainer"
execution_options {
caching_options {
}
}
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Going to run a new execution 9
INFO:absl:Going to run a new execution: ExecutionInfo(execution_id=9, input_dict={'model': [Artifact(artifact: id: 14
type_id: 28
uri: "pipelines/sample-pipeline/Trainer/model/7"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Trainer:model:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638253025772
last_update_time_since_epoch: 1638253025772
, artifact_type: id: 28
name: "Model"
)], 'model_blessing': [Artifact(artifact: id: 15
type_id: 30
uri: "pipelines/sample-pipeline/Evaluator/blessing/8"
custom_properties {
key: "blessed"
value {
int_value: 0
}
}
custom_properties {
key: "current_model"
value {
string_value: "pipelines/sample-pipeline/Trainer/model/7"
}
}
custom_properties {
key: "current_model_id"
value {
int_value: 14
}
}
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Evaluator:blessing:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
state: LIVE
create_time_since_epoch: 1638253034153
last_update_time_since_epoch: 1638253034153
, artifact_type: id: 30
name: "ModelBlessing"
)]}, output_dict=defaultdict(<class 'list'>, {'pushed_model': [Artifact(artifact: uri: "pipelines/sample-pipeline/Pusher/pushed_model/9"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Pusher:pushed_model:0"
}
}
, artifact_type: name: "PushedModel"
)]}), exec_properties={'custom_config': 'null', 'push_destination': '{\n "filesystem": {\n "base_directory": "model"\n }\n}'}, execution_output_uri='pipelines/sample-pipeline/Pusher/.system/executor_execution/9/executor_output.pb', stateful_working_dir='pipelines/sample-pipeline/Pusher/.system/stateful_working_dir/2021-11-30T06:15:53.934297', tmp_dir='pipelines/sample-pipeline/Pusher/.system/executor_execution/9/.temp/', pipeline_node=node_info {
type {
name: "tfx.components.pusher.component.Pusher"
}
id: "Pusher"
}
contexts {
contexts {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
contexts {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
contexts {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Pusher"
}
}
}
}
inputs {
inputs {
key: "model"
value {
channels {
producer_node_query {
id: "Trainer"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Trainer"
}
}
}
artifact_query {
type {
name: "Model"
}
}
output_key: "model"
}
}
}
inputs {
key: "model_blessing"
value {
channels {
producer_node_query {
id: "Evaluator"
}
context_queries {
type {
name: "pipeline"
}
name {
field_value {
string_value: "sample-pipeline"
}
}
}
context_queries {
type {
name: "pipeline_run"
}
name {
field_value {
string_value: "2021-11-30T06:15:53.934297"
}
}
}
context_queries {
type {
name: "node"
}
name {
field_value {
string_value: "sample-pipeline.Evaluator"
}
}
}
artifact_query {
type {
name: "ModelBlessing"
}
}
output_key: "blessing"
}
}
}
}
outputs {
outputs {
key: "pushed_model"
value {
artifact_spec {
type {
name: "PushedModel"
}
}
}
}
}
parameters {
parameters {
key: "custom_config"
value {
field_value {
string_value: "null"
}
}
}
parameters {
key: "push_destination"
value {
field_value {
string_value: "{\n \"filesystem\": {\n \"base_directory\": \"model\"\n }\n}"
}
}
}
}
upstream_nodes: "Evaluator"
upstream_nodes: "Trainer"
execution_options {
caching_options {
}
}
, pipeline_info=id: "sample-pipeline"
, pipeline_run_id='2021-11-30T06:15:53.934297')
INFO:absl:Model on pipelines/sample-pipeline/Evaluator/blessing/8 was not blessed by model validation
INFO:absl:Cleaning up stateless execution info.
INFO:absl:Execution 9 succeeded.
INFO:absl:Cleaning up stateful execution info.
INFO:absl:Publishing output artifacts defaultdict(<class 'list'>, {'pushed_model': [Artifact(artifact: uri: "pipelines/sample-pipeline/Pusher/pushed_model/9"
custom_properties {
key: "name"
value {
string_value: "sample-pipeline:2021-11-30T06:15:53.934297:Pusher:pushed_model:0"
}
}
custom_properties {
key: "tfx_version"
value {
string_value: "1.4.0"
}
}
, artifact_type: name: "PushedModel"
)]}) for execution 9
INFO:absl:MetadataStore with DB connection initialized
INFO:absl:Component Pusher is finished.
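This run's model was not blessed: its binary_accuracy failed to clear the 0.7 lower bound set in the eval_config, so the Evaluator recorded a Blessing result of False and the Pusher skipped copying the model into the model directory configured as the push destination. The inference step below therefore serves whatever model was last pushed to MODEL_DIRECTORY by an earlier, blessed run. As a quick programmatic check of the verdict, the sketch below assumes the TFX Evaluator's internal convention of writing an empty BLESSED or NOT_BLESSED marker file inside the blessing artifact directory (an implementation detail, not a public API):
import os
# hypothetical check, assuming the Evaluator drops empty BLESSED / NOT_BLESSED
# marker files into the blessing artifact's URI
blessing_uri = "pipelines/sample-pipeline/Evaluator/blessing/8"
print("blessed" if os.path.exists(os.path.join(blessing_uri, "BLESSED")) else "not blessed")
# expected for this run: not blessed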
# running inference on the test set with the pushed model
inference_fn = get_inference_fn(MODEL_DIRECTORY)
# _examples() serializes the test rows into tf.train.Example protos
result = inference_fn(examples=tf.constant(_examples(test_df)))
# each row is the model's score that the user purchases
print(result["output_0"].numpy())
WARNING:tensorflow:Inconsistent references when loading the checkpoint into this object graph. Either the Trackable object references in the Python program have changed in an incompatible way, or the checkpoint was generated in an incompatible program.
Two checkpoint references resolved to different objects (<keras.saving.saved_model.load.TensorFlowTransform>TransformFeaturesLayer object at 0x7f3d556c1690> and <keras.engine.input_layer.InputLayer object at 0x7f3d556bb3d0>).
[[0.03220685]
[0.46029472]
[0.5184892 ]
[0.17297481]
[0.03745157]
[0.67159355]
[0.05825204]
[0.08253836]
[0.03109209]
[0.14211378]
[0.01026638]
[0.54865664]
[0.00307088]
[0.11654628]
[0.0051414 ]
[0.04682363]
[0.11298493]
[0.6830877 ]
[0.02954738]
[0.19606875]
[0.04218721]
[0.00264895]
[0.26473102]
[0.05261479]
[0.00801955]
[0.84577835]
[0.09096617]
[0.00268115]
[0.00590247]
[0.14293896]
[0.00727324]
[0.00475701]
[0.14112698]
[0.19416368]
[0.11682624]
[0.0055347 ]
[0.08142502]
[0.02018276]
[0.04202535]
[0.04229586]
[0.4267416 ]
[0.07690777]
[0.01121056]
[0.02657227]
[0.00833632]
[0.03555671]
[0.23300353]
[0.00363223]
[0.03032882]
[0.1238111 ]
[0.01567248]
[0.01212719]
[0.16708386]
[0.01242075]
[0.20534608]
[0.04540218]
[0.23566048]
[0.08876477]
[0.14211378]
[0.13409239]
[0.46330968]
[0.00233451]
[0.53326845]
[0.04616746]
[0.08898594]
[0.02742235]
[0.00609537]
[0.20777936]
[0.08233638]
[0.02595322]
[0.10678811]
[0.0370178 ]
[0.11696012]
[0.18243128]
[0.04616746]
[0.00647432]
[0.6141573 ]
[0.06803803]
[0.05607099]
[0.9046775 ]
[0.06005136]
[0.11475087]
[0.01169908]
[0.0147258 ]
[0.01368825]
[0.09752264]
[0.04100535]
[0.0265025 ]
[0.03745157]
[0.11097376]
[0.10873419]
[0.02320905]
[0.6167141 ]
[0.5526473 ]
[0.08975026]
[0.06924812]
[0.08481439]
[0.0838787 ]
[0.47606975]
[0.10064986]
[0.24427153]
[0.01598805]
[0.1486468 ]
[0.11991532]
[0.04063642]
[0.02639965]
[0.04143259]
[0.01411222]
[0.01552854]
[0.10614578]
[0.00433616]
[0.0082146 ]
[0.89032626]
[0.15595527]
[0.05614655]
[0.02269477]
[0.18180625]
[0.04373924]
[0.34885147]
[0.00666774]]
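Each row above is the model's score, in the 0-1 range, that the corresponding test example results in a purchase. To turn the scores into hard labels and sanity-check them against the held-out ground truth, a minimal sketch follows; the 0.5 cutoff is an assumed convention, not something the pipeline fixes:
# thresholding the scores at an assumed 0.5 cutoff to get class labels
predicted_labels = (result["output_0"].numpy() > 0.5).astype("int32").ravel()
print(predicted_labels[:10])
# comparing against the Purchased column still present in test_df
accuracy = (predicted_labels == test_df["Purchased"].to_numpy()).mean()
print(f"Test accuracy at the 0.5 cutoff: {accuracy:.3f}")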