In [1]:
from pyspark.sql import Row

import tensorflow as tf
import tensorframes as tfs

In [2]:
data = [Row(x=float(x)) for x in range(10)]
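# sqlContext is assumed to be predefined, as in the PySpark shell or a
# Databricks notebook; on Spark 2.x, spark.createDataFrame works as well.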
df = sqlContext.createDataFrame(data)
with tf.Graph().as_default() as g:
    # The TensorFlow placeholder that corresponds to column 'x'.
    # The shape of the placeholder is automatically inferred from the DataFrame.
    x = tfs.block(df, "x")
    # The output tensor that adds 3 to each element of x.
    z = tf.add(x, 3, name='z')
    # The resulting DataFrame, with the new column 'z' appended to df.
    df2 = tfs.map_blocks(z, df)

# The transform is lazy, as with most DataFrame operations. This will trigger it:
df2.collect()


Out[2]:
[Row(z=3.0, x=0.0),
 Row(z=4.0, x=1.0),
 Row(z=5.0, x=2.0),
 Row(z=6.0, x=3.0),
 Row(z=7.0, x=4.0),
 Row(z=8.0, x=5.0),
 Row(z=9.0, x=6.0),
 Row(z=10.0, x=7.0),
 Row(z=11.0, x=8.0),
 Row(z=12.0, x=9.0)]
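
Reductions follow the same block-wise pattern. Below is a minimal sketch of summing a column, modeled on the reduction example in the TensorFrames README; it assumes the `tfs.reduce_blocks` API, where the placeholder's `tf_name` must end in `_input` and the output tensor must be named after the column, so that partially reduced blocks can be fed back through the graph:

In [ ]:
data = [Row(x=float(x)) for x in range(5)]
df_sum = sqlContext.createDataFrame(data)
with tf.Graph().as_default() as g:
    # Placeholder for column 'x'; the '_input' suffix lets TensorFrames
    # feed partially reduced blocks back into the graph.
    x_input = tfs.block(df_sum, "x", tf_name="x_input")
    # The output tensor is named after the column being reduced.
    x = tf.reduce_sum(x_input, [0], name='x')
    # Fold the per-block sums into a single value (the sum of column 'x').
    res = tfs.reduce_blocks(x, df_sum)

For row-at-a-time processing, `tfs.map_rows` is the row-by-row counterpart of `tfs.map_blocks`.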
