Web: https://www.meetup.com/Tel-Aviv-Deep-Learning-Bootcamp/events/241762893/
Notebooks: On GitHub
Shlomo Kashani
In [1]:
# !pip install pycuda
%reset -f
# imports
import numpy as np               # numeric python lib
import matplotlib.image as mpimg # reading images to numpy arrays
import matplotlib.pyplot as plt # to plot any graph
import matplotlib.patches as mpatches # to draw a circle at the mean contour
import scipy.ndimage as ndi # to determine shape centrality
# matplotlib setup
%matplotlib inline
from pylab import rcParams
rcParams['figure.figsize'] = (6, 6) # setting default size of plots
import tensorflow as tf
print("tensorflow:" + tf.__version__)
!set "KERAS_BACKEND=tensorflow"
import torch
import sys
print('__Python VERSION:', sys.version)
print('__pyTorch VERSION:', torch.__version__)
print('__CUDA VERSION')
# Note: subprocess.call() output goes to the server console, not the notebook, so use the shell escape instead
! nvcc --version
print('__CUDNN VERSION:', torch.backends.cudnn.version())
print('__Number CUDA Devices:', torch.cuda.device_count())
print('__Devices')
call(["nvidia-smi", "--format=csv", "--query-gpu=index,name,driver_version,memory.total,memory.used,memory.free"])
print('Active CUDA Device: GPU', torch.cuda.current_device())
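The checks above only report which devices are visible. A minimal follow-up sketch (not part of the original notebook, and assuming PyTorch 0.4 or later for torch.device) that selects the GPU when available and falls back to the CPU otherwise:
In [ ]:
# Sketch: pick the GPU when CUDA is available, otherwise fall back to the CPU
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
x = torch.randn(3, 3, device=device)  # small test tensor allocated on the chosen device
print('Tensor lives on:', x.device)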
Autograd is a Python package for automatic differentiation: github.com/HIPS/autograd
pip install autograd
There are a couple of things to note in the example below. autograd.numpy is a thinly wrapped NumPy, and grad() returns a new function that computes the gradient of your original function. This gradient function accepts the same parameters as the original function.
In [3]:
# Thinly wrapped numpy
import autograd.numpy as np
# Basically everything you need
from autograd import grad
# Define a function like normal with Python and Numpy
def tanh(x):
    y = np.exp(-x)
    return (1.0 - y) / (1.0 + y)
# Create a function to compute the gradient
grad_tanh = grad(tanh)
# Evaluate the gradient at x = 1.0
print(grad_tanh(1.0))
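As a quick sanity check (a small sketch added here, not from the original notebook), the value returned by grad_tanh can be compared with a central finite difference of the same function; eps and numeric below are just illustrative local names:
In [ ]:
# Sanity check: autograd's gradient vs. a central finite-difference approximation
eps = 1e-6
numeric = (tanh(1.0 + eps) - tanh(1.0 - eps)) / (2 * eps)
print(grad_tanh(1.0), numeric)  # the two values should agree to several decimal places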
In [4]:
f = lambda x, y: 3*x*y + 2*y - x**3
grad_f = grad(f, argnum=0)  # gradient with respect to the first variable
print(grad_f(.25, .5))
grad_f = grad(f, argnum=1)  # gradient with respect to the second variable
print(grad_f(.25, .5))
# Finding the gradient with respect to multiple variables can be done using multigrad()
# by specifying them in the argnums parameter.
from autograd import multigrad
grad_fun = multigrad(f, argnums=[0, 1])  # note: multigrad takes the original function f
print(grad_fun(.25, .5))
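For a hand check of those numbers: ∂f/∂x = 3y − 3x², which at (0.25, 0.5) is 1.3125, and ∂f/∂y = 3x + 2, which is 2.75. Note that multigrad() is present in the autograd release used here; if it is missing from your installation, newer autograd versions let grad() itself take a tuple of argument indices for the same purpose.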
In [ ]: