def run_census(flags_obj):
  """Wire up the Census input/estimator functions and drive the run loop.

  Args:
    flags_obj: Object containing user specified flags.
  """
  if flags_obj.download_if_missing:
    census_dataset.download(flags_obj.data_dir)

  training_path = os.path.join(flags_obj.data_dir, census_dataset.TRAINING_FILE)
  eval_path = os.path.join(flags_obj.data_dir, census_dataset.EVAL_FILE)

  # The loop re-evaluates every `flags.epochs_between_evals` epochs, so the
  # training input function feeds that many epochs per call.
  def _train_input():
    return census_dataset.input_fn(
        training_path, flags_obj.epochs_between_evals, True,
        flags_obj.batch_size)

  # Evaluation always runs a single pass, unshuffled.
  def _eval_input():
    return census_dataset.input_fn(eval_path, 1, False, flags_obj.batch_size)

  # `{loss_prefix}` is filled in by the run loop with the head's name scope.
  logged_tensors = {
      'average_loss': '{loss_prefix}head/truediv',
      'loss': '{loss_prefix}head/weighted_loss/Sum',
  }

  wide_deep_run_loop.run_loop(
      name="Census Income",
      train_input_fn=_train_input,
      eval_input_fn=_eval_input,
      model_column_fn=census_dataset.build_model_columns,
      build_estimator_fn=build_estimator,
      flags_obj=flags_obj,
      tensors_to_log=logged_tensors,
      early_stop=True)
# NOTE(review): this definition is a byte-identical duplicate of the
# `run_census` defined immediately above it; Python will silently rebind the
# name to this second copy. One of the two copies should be deleted.
def run_census(flags_obj):
  """Construct all necessary functions and call run_loop.

  Args:
    flags_obj: Object containing user specified flags.
  """
  if flags_obj.download_if_missing:
    census_dataset.download(flags_obj.data_dir)
  train_file = os.path.join(flags_obj.data_dir, census_dataset.TRAINING_FILE)
  test_file = os.path.join(flags_obj.data_dir, census_dataset.EVAL_FILE)
  # Train and evaluate the model every `flags.epochs_between_evals` epochs.
  def train_input_fn():
    return census_dataset.input_fn(
        train_file, flags_obj.epochs_between_evals, True, flags_obj.batch_size)
  def eval_input_fn():
    return census_dataset.input_fn(test_file, 1, False, flags_obj.batch_size)
  tensors_to_log = {
      'average_loss': '{loss_prefix}head/truediv',
      'loss': '{loss_prefix}head/weighted_loss/Sum'
  }
  wide_deep_run_loop.run_loop(
      name="Census Income",
      train_input_fn=train_input_fn,
      eval_input_fn=eval_input_fn,
      model_column_fn=census_dataset.build_model_columns,
      build_estimator_fn=build_estimator,
      flags_obj=flags_obj,
      tensors_to_log=tensors_to_log,
      early_stop=True)
def main(unused_argvs):
  """Put the tensorflow/models checkout on sys.path and download census data.

  Args:
    unused_argvs: Command-line arguments; unused.

  Returns:
    0 on success.
  """
  # Machine-specific location of the tensorflow/models checkout; the
  # `official` package lives directly under it. TODO: make this configurable.
  TENSORFLOW_PATH = '/media/haoweiliu/Data/tensorflow/models'
  # Bug fix: the original unconditionally joined 'models' onto a path that
  # already ends in 'models', producing '.../models/models', which does not
  # contain the `official` package imported below. Only append the segment
  # when it is actually missing.
  if os.path.basename(os.path.normpath(TENSORFLOW_PATH)) == 'models':
    models_path = TENSORFLOW_PATH
  else:
    models_path = os.path.join(TENSORFLOW_PATH, 'models')
  # Avoid polluting sys.path with duplicates if main() runs more than once.
  if models_path not in sys.path:
    sys.path.append(models_path)
  # Deferred import: only resolvable once models_path is on sys.path.
  # (The original also imported `official.wide_deep.census_main`, which was
  # never used here and has been dropped.)
  from official.wide_deep import census_dataset
  census_dataset.download("./dataset/")
  return 0
import tensorflow as tf
import tensorflow.feature_column as fc
import os
import sys
import matplotlib.pyplot as plt

# TF 1.x eager mode: execute ops immediately instead of building a graph.
tf.enable_eager_execution()

# Make the tensorflow/models checkout (expected under the CWD) importable.
models_path = os.path.join(os.getcwd(), 'models')
sys.path.append(models_path)

from official.wide_deep import census_dataset
from official.wide_deep import census_main

# Fetch the census CSV files into /tmp/census_data/.
census_dataset.download("/tmp/census_data/")

# export PYTHONPATH=${PYTHONPATH}:"$(pwd)/models"
# running from python you need to set the `os.environ` or the subprocess will not see the directory.
if "PYTHONPATH" in os.environ:
  os.environ['PYTHONPATH'] += os.pathsep + models_path
else:
  os.environ['PYTHONPATH'] = models_path

import pandas

# NOTE(review): `train_file` and `test_file` are not defined anywhere in this
# chunk — presumably paths to adult.data/adult.test under the download
# directory; confirm against the surrounding file.
train_df = pandas.read_csv(train_file, header=None, names=census_dataset._CSV_COLUMNS)
# NOTE(review): the statement below is truncated in the source (it ends on a
# trailing comma); the `names=` argument and closing parenthesis are missing.
test_df = pandas.read_csv(test_file, header=None,
import pandas
import functools
import numpy as np

# TF 1.x eager mode: execute ops immediately instead of building a graph.
tf.enable_eager_execution()

# Make the tensorflow/models checkout (expected under the CWD) importable.
models_path = os.path.join(os.getcwd(), 'models')
sys.path.append(models_path)

from official.wide_deep import census_dataset
from official.wide_deep import census_main

# Fetch the census CSV files into ./dataset/.
census_dataset.download("./dataset/")

train_file = "./dataset/adult.data"
test_file = "./dataset/adult.test"

# Both census files share the same column schema and carry no header row,
# so bind those arguments once.
_read_census = functools.partial(
    pandas.read_csv, header=None, names=census_dataset._CSV_COLUMNS)

# Read the U.S. Census data.
train_df = _read_census(train_file)
test_df = _read_census(test_file)
train_df.head()