In [1]:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os

from urllib.request import urlopen
import numpy as np
import tensorflow as tf

In [2]:
# Data sets
IRIS_TRAINING = "iris_training.csv"
IRIS_TRAINING_URL = "http://download.tensorflow.org/data/iris_training.csv"

IRIS_TEST = "iris_test.csv"
IRIS_TEST_URL = "http://download.tensorflow.org/data/iris_test.csv"

In [9]:
# If the training and test sets aren't stored locally, download them. 
if not os.path.exists(IRIS_TRAINING):
    raw = urlopen(IRIS_TRAINING_URL).read()
    with open(IRIS_TRAINING, "wb") as f:
        f.write(raw)

if not os.path.exists(IRIS_TEST):
    raw = urlopen(IRIS_TEST_URL).read()
    with open(IRIS_TEST, "wb") as f:
        f.write(raw)

# Load datasets. load_csv_with_header requires both target_dtype and
# features_dtype; omitting features_dtype raises a TypeError.
training_set = tf.contrib.learn.datasets.base.load_csv_with_header(
    filename=IRIS_TRAINING,
    target_dtype=np.int,
    features_dtype=np.float32)
test_set = tf.contrib.learn.datasets.base.load_csv_with_header(
    filename=IRIS_TEST,
    target_dtype=np.int,
    features_dtype=np.float32)

# Specify that all features have real-value data
feature_columns = [tf.feature_column.numeric_column("x", shape=[4])]

# Build a 3-layer DNN with 10, 20, and 10 hidden units respectively.
classifier = tf.estimator.DNNClassifier(feature_columns=feature_columns,
                                        hidden_units=[10, 20, 10],
                                        n_classes=3,
                                        model_dir="/tmp/iris_model")
# Define the training inputs. num_epochs=None repeats the data
# indefinitely; the steps argument to train() below bounds the run.
train_input_fn = tf.estimator.inputs.numpy_input_fn(
    x={"x": np.array(training_set.data)},
    y=np.array(training_set.target),
    num_epochs=None,
    shuffle=True)

# Train model.
classifier.train(input_fn=train_input_fn, steps=2000)

# Define the test inputs: a single, unshuffled pass over the data,
# so the evaluation is deterministic.
test_input_fn = tf.estimator.inputs.numpy_input_fn(
    x={"x": np.array(test_set.data)},
    y=np.array(test_set.target),
    num_epochs=1,
    shuffle=False)

# Evaluate accuracy.
accuracy_score = classifier.evaluate(input_fn=test_input_fn)["accuracy"]

print("\nTest Accuracy: {0:f}\n".format(accuracy_score))

# Classify two new flower samples.
new_samples = np.array(
    [[6.4, 3.2, 4.5, 1.5],
     [5.8, 3.1, 5.0, 1.7]], dtype=np.float32)
predict_input_fn = tf.estimator.inputs.numpy_input_fn(
    x={"x": new_samples},
    num_epochs=1,
    shuffle=False)

predictions = list(classifier.predict(input_fn=predict_input_fn))
# Each prediction dict carries a "classes" array of byte-string class ids.
predicted_classes = [p["classes"] for p in predictions]

print(
    "New Samples, Class Predictions:    {}\n"
    .format(predicted_classes))


In [47]:
# Quick sanity check of numpy_input_fn on dummy data: 5 samples with 50
# random features each. Note this rebinds test_set from the Iris cell above.
test_set = np.random.rand(5, 50)
test_target = [0, 1, 2, 1, 2]

test_input_fn = tf.estimator.inputs.numpy_input_fn(
    x={"x": np.array(test_set)},
    y=np.array(test_target),
    num_epochs=1,
    shuffle=False)

In [48]:
test_input_fn()


Out[48]:
({'x': <tf.Tensor 'fifo_queue_DequeueUpTo_8:1' shape=(?, 50) dtype=float64>},
 <tf.Tensor 'fifo_queue_DequeueUpTo_8:2' shape=(?,) dtype=int32>)
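
In [ ]:
# The tuple above is the (features dict, labels) pair an Estimator consumes.
# A hedged sketch of pulling one batch out by hand (TF 1.x graph mode):
# numpy_input_fn feeds a FIFO queue from a background thread, so the queue
# runners must be started before these tensors yield values.
features, labels = test_input_fn()
with tf.Session() as sess:
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    batch_x, batch_y = sess.run([features["x"], labels])
    print(batch_x.shape, batch_y)  # expect (5, 50) and the 5 dummy labels
    coord.request_stop()
    coord.join(threads)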

In [ ]:
# Will have to change this over to tf.data (rather than tf.contrib.data) in
# the next day or two; a rough sketch below.
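
# A minimal sketch of the tf.data version, assuming TF >= 1.4 (where the
# Dataset API lives in core tf.data rather than tf.contrib.data). The helper
# name iris_train_input_fn is mine, not from the tutorial.
def iris_train_input_fn(features, labels, batch_size=32):
    # Build a Dataset from in-memory arrays; the dict key must match the
    # "x" feature column defined earlier.
    dataset = tf.data.Dataset.from_tensor_slices(({"x": features}, labels))
    # Mirror numpy_input_fn(num_epochs=None, shuffle=True): shuffle,
    # repeat indefinitely, and batch.
    dataset = dataset.shuffle(1000).repeat().batch(batch_size)
    return dataset.make_one_shot_iterator().get_next()

# Usage sketch:
# classifier.train(
#     input_fn=lambda: iris_train_input_fn(training_set.data, training_set.target),
#     steps=2000)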
