In [2]:
-- Load the nngraph package (this also pulls in the nn and torch packages it builds on).
require 'nngraph';
In [8]:
-- Build a 1-D tensor with elements {1, 2, 3} and display it.
-- NOTE: `a` is a global; later cells reuse it.
a = torch.Tensor{1,2,3}
print(a)
Out[8]:
An nn.Identity node is the first step of every nngraph: it passes its input through unchanged, and serves as the placeholder that marks a graph's input.
In [6]:
-- nn.Identity is a module whose forward pass returns its input unchanged.
module1 = nn.Identity()
In [9]:
-- Forward `a` through the Identity module: the output equals `a`.
module1:forward(a)
Out[9]:
Notice the extra parentheses: per the nngraph documentation, "the extra () contain properties of this module when embedded into a graph" — the call turns the module into a graph node.
In [10]:
-- The second () calls the module, which in nngraph wraps it in a graph node.
x1 = nn.Identity()()
-- gModule(inputs, outputs) assembles a graph module; here the single
-- input node is also the output node, so the graph is a pass-through.
m = nn.gModule({x1},{x1})
In [11]:
-- The identity graph simply echoes `a`.
m:forward(a)
Out[11]:
In [14]:
-- CAddTable sums a table of tensors element-wise.
add = nn.CAddTable()
t1 = torch.Tensor{3,4,10}
-- x = a + t1, i.e. {1,2,3} + {3,4,10} = {4,6,13}
x=add:forward({a,t1})
In [16]:
-- Display the sum computed in the previous cell.
x
Out[16]:
Creating multi-input graphs: first z = x1 + x2, then z = (x1 + x2) * x1
In [10]:
-- Declare some tensors to feed the graph
t1 = torch.Tensor{1,2,3}
t2 = torch.Tensor{3,4,5}
-- Two Identity nodes mark the graph's two inputs.
x1 = nn.Identity()()
x2 = nn.Identity()()
-- a = x1 + x2 (element-wise sum of the two input nodes)
a = nn.CAddTable()({x1,x2})
-- Graph with inputs {x1, x2} and single output `a`.
m = nn.gModule({x1,x2},{a})
-- Expect t1 + t2 = {4, 6, 8}
print(m:forward({t1,t2}))
Out[10]:
In [9]:
-- A graph where one input feeds two nodes: mul = (x1 + x2) * x1.
x1 = nn.Identity()()
x2 = nn.Identity()()
-- add = x1 + x2
add = nn.CAddTable()({x1,x2})
-- mul = add * x1 (element-wise product; note x1 is reused here)
mul = nn.CMulTable()({add,x1})
m = nn.gModule({x1,x2},{mul})
-- With t1={1,2,3}, t2={3,4,5}: (t1+t2)*t1 = {4, 12, 24}
print(m:forward({t1,t2}))
In [36]:
-- The same graph works on tensors of a different length (4 instead of 3):
-- the element-wise modules impose no fixed size.
a = torch.Tensor{1,2,3,1}
b = torch.Tensor{3,4,5,0}
-- (a+b)*a = {4, 12, 24, 1}
print(m:forward({a,b}))
Out[36]:
In [4]:
-- Inputs for the larger graph below: all-ones vectors of sizes 4, 5 and 2.
x = torch.ones(4)
y = torch.ones(5)
z = torch.ones(2)
In [7]:
-- Graph computing: output = tanh(W1*x + b1)^2 .* sigmoid(W2*y + b2)^2 + z
-- (all operations element-wise on size-2 vectors after the Linear layers).
--inputs
ix = nn.Identity()()
iy = nn.Identity()()
iz = nn.Identity()()
--Wx + b
h1 = nn.Linear(4,2)({ix})
h2 = nn.Linear(5,2)({iy})
-- tanh, sigmoid
tanh = nn.Tanh()({h1})
sigmoid = nn.Sigmoid()({h2})
-- square
tsq = nn.Square()({tanh})
ssq = nn.Square()({sigmoid})
-- cmul
cmul = nn.CMulTable()({tsq,ssq})
a = nn.CAddTable()({cmul,iz})
-- final graph
output = nn.gModule({ix,iy,iz},{a})
In [8]:
-- Overwrite the randomly initialized Linear parameters with ones so the
-- forward pass below is deterministic. In nngraph, a node's underlying
-- nn module is reached through node.data.module.
h1.data.module.weight = torch.ones(2,4)
h1.data.module.bias = torch.ones(2)
h2.data.module.weight = torch.ones(2,5)
h2.data.module.bias = torch.ones(2)
In [12]:
-- Run the graph on the ones-vectors: tanh(5)^2 * sigmoid(6)^2 + 1 per element.
print(output:forward({x,y,z}))
Out[12]:
In [ ]:
-- Render the forward graph (output.fg) via graphviz; writes files using
-- the base name 'outputBaseName'. Requires graphviz to be installed.
graph.dot(output.fg, 'output','outputBaseName')
In [111]:
-- Gradient w.r.t. the graph's size-2 output, presumably for a later
-- backward() call not shown in this chunk.
gradOutput = torch.ones(2)
In [112]:
-- A Lua table with explicit keys 0 and 1. Lua sequences are 1-based,
-- so the [0] entry lives in the hash part and is ignored by ipairs/#.
foo = {[0]=1,[1]=5}
In [113]:
-- Display the table.
foo
Out[113]:
In [122]:
-- Rebind foo to a 4x4 tensor of ones (16 elements).
foo = torch.ones(4,4)
In [123]:
-- Display the tensor.
foo
Out[123]:
In [125]:
-- NOTE(review): foo has 16 elements but Reshape(2,2) describes only 4;
-- nn.Reshape then treats the leading dimension as a batch, so the expected
-- output is 4x2x2 — confirm against the nn.Reshape docs.
print(nn.Reshape(2,2):forward(foo))
Out[125]:
In [ ]:
-- Deserialize a previously saved object from disk; presumably a
-- vocabulary lookup table — the file is not part of this notebook.
vm = torch.load('vocab_map.tab')
In [ ]: