In [1]:
import importlib
import logging
# Reload logging first: basicConfig is a no-op if the notebook kernel has
# already installed a handler on the root logger, and reloading resets that.
importlib.reload(logging)
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s',
                    level=logging.DEBUG, datefmt='%I:%M:%S')
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)

In [5]:
def f(x): return x
f.__qualname__


Out[5]:
'f'

In [2]:
import grpc

from karps.proto import interface_pb2_grpc
from karps.proto import interface_pb2
from karps.proto.computation_pb2 import SessionId
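
These are the low-level gRPC bindings that karps.session wraps. As context for the CreateSession call that shows up in the traceback further down, a hand-rolled session request might look roughly like this; the address, the stub class name, and the id field on SessionId are assumptions, not confirmed API:

In [ ]:
channel = grpc.insecure_channel("localhost:8081")  # placeholder address
stub = interface_pb2_grpc.KarpsMainStub(channel)   # stub class name is a guess
# Mirrors karps/session.py lines 91-92 in the traceback below.
stub.CreateSession(interface_pb2.CreateSessionRequest(
    requested_session=SessionId(id="test")))       # 'id' field is a guess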

In [3]:
import karps as ks
import karps.functions as f
from karps.display import show_phase

In [4]:
df = ks.dataframe([(1,)], schema="ee")
print(df)
ct = f.collect(df)
print(ct)

print(df.ee)


/distributed_literal_0@org.spark.DistributedLiteral:{ee:int}
/collect_list_1!org.spark.StructuredReduce:[{ee:int}]
ee:int<-/distributed_literal_0@org.spark.DistributedLiteral:{ee:int}
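
Reading these reprs: each node prints as a path (/distributed_literal_0), a locality marker, the op name, and the node's type. The @ on the literal appears to mark a distributed dataset while the ! on collect_list_1 marks a local observable, which matches collect turning a dataframe of {ee:int} rows into a single local value of type [{ee:int}]; the column ee prints with an arrow back to its parent node.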

In [5]:
f.max(df.ee)


Out[5]:
/max_3!org.spark.StructuredReduce:int

In [6]:
ct = f.collect(df)
ct


Out[6]:
/collect_list_4!org.spark.StructuredReduce:[{ee:int}]

In [7]:
s = ks.session("test")


---------------------------------------------------------------------------
_Rendezvous                               Traceback (most recent call last)
<ipython-input-7-8fa71d614496> in <module>()
----> 1 s = ks.session("test")

~/work/karps-python/karps/session.py in session(name, port, address)
     90   # Make sure that the session exists before returning it.
     91   z = stub.CreateSession(interface_pb2.CreateSessionRequest(
---> 92     requested_session=sessionId))
     93   return Session(name, stub)
     94 

~/.local/share/virtualenvs/karps-python-NPcrF1gX/lib/python3.5/site-packages/grpc/_channel.py in __call__(self, request, timeout, metadata, credentials)
    490         state, call, deadline = self._blocking(request, timeout, metadata,
    491                                                credentials)
--> 492         return _end_unary_response_blocking(state, call, False, deadline)
    493 
    494     def with_call(self, request, timeout=None, metadata=None, credentials=None):

~/.local/share/virtualenvs/karps-python-NPcrF1gX/lib/python3.5/site-packages/grpc/_channel.py in _end_unary_response_blocking(state, call, with_call, deadline)
    438             return state.response
    439     else:
--> 440         raise _Rendezvous(state, None, None, deadline)
    441 
    442 

_Rendezvous: <_Rendezvous of RPC that terminated with (StatusCode.UNAVAILABLE, Connect Failed)>
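
StatusCode.UNAVAILABLE simply means nothing answered at the address the channel dialed: the karps server was not running when CreateSession was sent. A quick way to probe for the server before opening a session (the address is a placeholder, not karps' confirmed default):

In [ ]:
channel = grpc.insecure_channel("localhost:8081")  # placeholder address
try:
    # Block until the channel connects, or fail fast like CreateSession did.
    grpc.channel_ready_future(channel).result(timeout=5)
    print("karps server reachable")
except grpc.FutureTimeoutError:
    print("no server listening; start the karps server and retry ks.session")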

In [ ]:
comp = s.compute(ct)

In [8]:
df.ee + df.ee


---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-8-2294970ce757> in <module>()
      1 
----> 2 df.ee + df.ee

~/work/karps-python/karps/column.py in __add__(self, other)
     61   def __add__(self, other):
     62     from .functions import plus
---> 63     return plus(self, other)
     64 
     65   def __sub__(self, other):

~/work/karps-python/karps/functions_std/utils.py in function(df, name)
     54   def function(df, name=None):
     55     if isinstance(df, (DataFrame, Column)):
---> 56       return function_karps(df, name)
     57     # TODO: check for Spark
     58     # Assume this is a python object, pass it to python:

~/work/karps-python/karps/functions_std/utils.py in function_karps(df, name)
     39   def function_karps(df, name):
     40     df = check_df(df)
---> 41     type_out = typefun(df.type)
     42     # the proto that defines the aggregation.
     43     p = std_pb2.StructuredReduce(agg_op=st_pb2.Aggregation(

TypeError: _check_same_2() missing 1 required positional argument: 'dt2'
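
The failure happens during graph construction, before any server call: inside plus, typefun(df.type) ends up invoking _check_same_2 with a single type. A sketch of the mismatch as the traceback suggests it, with assumed signatures (karps internals, not verified):

In [ ]:
# Assumed shape of the helper, inferred only from the traceback.
def _check_same_2(dt1, dt2):
    if dt1 != dt2:
        raise TypeError("operand types differ: %s vs %s" % (dt1, dt2))
    return dt1

# The call site in functions_std/utils.py passes a single type:
#     type_out = typefun(df.type)
# so a binary op like plus would need the wrapper to supply both operand
# types, e.g. typefun(left.type, right.type), or to partially apply one.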

In [ ]:
show_phase(comp, "initial")

In [ ]:
show_phase(comp, "final")

In [ ]:
comp.values()

In [ ]:
f.inv(df.ee)

In [ ]:
id(1)
