In [1]:
# Imports and notebook statements
%load_ext autoreload
%autoreload 2
%matplotlib notebook
%load_ext line_profiler

import torch
from torch.nn.functional import conv2d, relu_
if torch.cuda.is_available():
    device = torch.device('cuda')
else:
    device = torch.device('cpu')

import numpy as np
from scipy import stats
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import seaborn as sns
from sklearn.linear_model import LinearRegression

from utils import *

import warnings
warnings.filterwarnings('ignore')

In [26]:
# STDP kernel time constant in seconds.  Used for the default kernel.
STDP_TIME_CONSTANT = 0.012

def w_0(x):
    """
    @param x (numpy array)
    A distance
    """
    a = 1.00
    lambda_net = 13.0
    beta = 3.0 / lambda_net ** 2
    gamma = 1.05 * beta

    return a * np.exp(-gamma * x) - np.exp(-beta * x)

def w_1(x):
    """
    @param x (numpy array)
    A distance
    """
    lambda_net = 13.0
    beta = 3.15 / lambda_net ** 2

    return - np.exp(-beta * x)   

# Random walk builder
def buildTrajectory(length, stepSize, width=1., directionStability=0.95, wrap=False):
    trajectory = np.zeros((int(length), 2))
    x = np.random.rand()
    y = np.random.rand()
    direction = np.random.rand() * 2 * np.pi
    twopi = 2*np.pi
    for i in range(int(length)):
        while True:
            # Random heading change in (-pi, +pi) radians, scaled down by directionStability.
            dirChange = (((np.random.rand() * twopi) - np.pi) *
                         (1.0 - directionStability))
            direction = (direction + dirChange) % twopi
            rotation = np.asarray([np.cos(direction), np.sin(direction)])
            movement = stepSize*rotation
            # Accept the step if it stays inside the unit square (or if wrapping is enabled).
            if (0 < (movement[0] + x) < 1 and 0 < (movement[1] + y) < 1) or wrap:
                x += movement[0]
                y += movement[1]
                trajectory[i] = (x, y)
                break

    return trajectory

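A quick sanity check (not from the original run): the sketch below plots the two lateral-weight profiles against squared distance and a short sample path from buildTrajectory. The distance range, trajectory length, and step size are illustrative assumptions.

In [ ]:
# Sketch: visualize the lateral-weight profiles and a sample random walk.
# The squared-distance range and trajectory parameters are illustrative assumptions.
d2 = np.linspace(0., 200., 400)          # squared grid distances
traj = buildTrajectory(2000, 0.01, wrap=True)

fig, (axA, axB) = plt.subplots(1, 2, figsize=(8, 4))
axA.plot(d2, w_0(d2), label='w_0')
axA.plot(d2, w_1(d2), label='w_1')
axA.set_xlabel('squared distance')
axA.set_ylabel('weight')
axA.legend()
axB.plot(np.mod(traj[:, 0], 1), np.mod(traj[:, 1], 1), lw=0.5)
axB.set_title('sample trajectory (wrapped)')
plt.show()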
In [212]:
class GCN2D(object):
    
    def __init__(self,
                 numX,
                 numY,
                 inhibitionWindow,
                 inhibitionRadius,                 
                 inhibitionStrength,
                 boostEffect=10,
                 boostDecay=3.,
                 dt=0.001,
                 numPlaces=200,
                 globalTonic=20,
                 decayConstant=0.03,
                 envelopeWidth=0.25,
                 envelopeFactor=10,
                 stdpWindow=10,
                 sigmaLoc=0.05,
                 learningRate=0.015,
                 negativeLearnFactor=.9,
                 initialWeightFactor=.2,
                 weightDecay=60,
                 boostGradientX=1,
                 wideningFactor=0,
                ):
        self.activity = torch.zeros([1, 1, numX, numY], device=device, dtype=torch.float)
        self.filter = torch.zeros([1, 1, 1+2*inhibitionWindow, 1+2*inhibitionWindow], dtype=torch.float,
                                 device=device)
        self.numX = numX
        self.numY = numY
        self.numPlaces=numPlaces
        
        for i in range(1+2*inhibitionWindow):
            for j in range(1+2*inhibitionWindow):
                xComp = np.abs(i - (inhibitionWindow))
                yComp = np.abs(j - (inhibitionWindow))
                dist = np.asarray((xComp, yComp))
                dist = dist[0] ** 2 + dist[1] ** 2
                
                dist = max(dist - wideningFactor, 0)
                if dist <= 0:
                    weight = 0.
                else:
                    weight = w_1(dist/inhibitionRadius)*inhibitionStrength
                
                self.filter[0, 0, i, j] = weight
                
        self.activationHistory = torch.zeros([1, 1, numX, numY], device=device, dtype=torch.float)
        self.instantaneous = torch.zeros([1, 1, numX, numY], device=device, dtype=torch.float)
        self.boostEffect = torch.tensor(np.repeat(
            np.linspace(1, boostGradientX, self.numX)[:, np.newaxis], self.numY, axis=-1)*boostEffect,
                                       device=device, dtype=torch.float)
        self.boostDecay = boostDecay
        self.dt = dt
        self.globalTonic = torch.tensor([globalTonic], device=device, dtype=torch.float)
        self.decay = decayConstant
        self.inhibitionWindow = inhibitionWindow
        self.envelopeWidth = envelopeWidth
        self.envelopeFactor = envelopeFactor
        self.sigmaLoc = 0.01  # NOTE: hard-coded; this overrides the sigmaLoc constructor argument
        self.learningRate = learningRate
        self.negativeLearnFactor = negativeLearnFactor
        self.weightDecay = weightDecay
        
        self.zero = torch.zeros([1], device=device, dtype=torch.float)
        
        self.places = torch.tensor(np.random.rand(numPlaces, 2), device=device, dtype=torch.float)
        
        self.placeWeights = torch.tensor(np.random.rand(numX, numY, numPlaces)*initialWeightFactor,
                                         device=device, dtype=torch.float)
        
        self.placeActivity = torch.zeros([numPlaces,], device=device, dtype=torch.float)
        
        self.envelope = torch.tensor(self.computeEnvelope(), device=device, dtype=torch.float)
        
        self.stdpWindow = stdpWindow

    
    def computeEnvelope(self):
        """
        Compute an envelope for use in suppressing border cells.
        :return: A numpy array that can be elementwise-multiplied with activations
                 for the given cell population to apply the envelope.
        """
#       envelope = np.zeros((self.numX, self.numY))
#         for i, ip in enumerate(np.linspace(-1, 1, self.numX)):
#             for j, jp in enumerate( np.linspace(-1, 1, self.numY)):
#                 dist = np.sqrt(ip**2 + jp**2)
#                 if dist < 1 - self.envelopeWidth:
#                     envelope[i, j] = 1.
#                 else:
#                     envelope[i, j] = np.exp(-1.*self.envelopeFactor *
#                           ((dist - 1 + self.envelopeWidth)/self.envelopeWidth)**2)
                
        envelopeX = [1 if self.numX/2. - np.abs(p) > self.envelopeWidth else
                          np.exp(-1.*self.envelopeFactor *
                          ((-self.numX/2. + np.abs(p) + self.envelopeWidth)/self.envelopeWidth)**2)
                          for p in np.arange(self.numX) - self.numX/2.]

        envelopeY = [1 if self.numY/2. - np.abs(p) > self.envelopeWidth else
                          np.exp(-1.*self.envelopeFactor *
                          ((-self.numY/2. + np.abs(p) + self.envelopeWidth)/self.envelopeWidth)**2)
                          for p in np.arange(self.numY) - self.numY/2.]

        return np.outer(envelopeX, envelopeY)
    
    def randomLesions(self, numLesions, lesionRadius, lesionInnerCutoff):
        lesions = []
        for i in range(numLesions):
            x = int(np.random.rand()*self.numX)
            y = int(np.random.rand()*self.numY)
            lesions.append((x, y))
            
        radii = [lesionRadius] * numLesions
        cutoffs = [lesionInnerCutoff] * numLesions
        
        self.addLesions(lesions, radii, cutoffs)
    
    def addLesions(self, lesionCenters, lesionRadii, lesionInnerCutoffs):
        for center, radius, cutoff in zip(lesionCenters, lesionRadii, lesionInnerCutoffs):
            for x in range(self.numX):
                for y in range(self.numY):
                    distance = np.sqrt((x - center[0])**2 + (y - center[1])**2)
                    if distance < cutoff:
                        self.envelope[x, y] = 0.
                    elif distance < radius:
                        value = (distance - cutoff)/(radius - cutoff)
                        self.envelope[x, y] = min(value, self.envelope[x, y])


        
    def step(self, speed=1, place=True): 
        if place:
            self.instantaneous = torch.matmul(self.placeWeights, self.placeActivity).view(1, 1, self.numX, self.numY)
        else:
            self.instantaneous.fill_(0.)
        self.instantaneous += conv2d(self.activity, self.filter, padding=self.inhibitionWindow)
        self.instantaneous *= self.envelope
        self.instantaneous += self.activationHistory * self.boostEffect
        self.instantaneous *= min(speed, 1)
        relu_(self.instantaneous)
        self.activity += (self.instantaneous - self.activity/self.decay)*self.dt
        #torch.min(self.activity, self.zero + 1., out=self.activity)
        self.activationHistory += (self.globalTonic - self.activity)*self.dt*self.envelope #torch.sum(self.activity)/torch.sum(self.envelope) - 
        self.activationHistory -= self.dt*self.activationHistory/self.boostDecay
        
        
    def simulate(self, time, logFreq = 10, startFrom = 0):
        self.activity = torch.tensor(np.random.rand(1, 1, self.numX, self.numY)*0.1, device=device,
                                                 dtype=torch.float)
        
        self.activationHistory.fill_(self.globalTonic[0])
        
        numSteps = int(time/self.dt)
        numLogs = int(((time - startFrom)/self.dt)/logFreq)
        output = torch.zeros([numLogs, self.numX, self.numY], device=device, dtype=torch.float)
        
        s = 0
        for t in range(numSteps):
            self.step(place=False)
            if t % logFreq == 0 and t*self.dt >= startFrom:
                print("At {}".format(t*self.dt))
                output[s].copy_(self.activity.view(self.numX, self.numY))
                s += 1
                
        return output.cpu().numpy()
    
    def decayWeights(self):
        """
        Only decay place weights
        """
        self.placeWeights -= self.dt*self.placeWeights/self.weightDecay
    
    def learn(self, time, plotting=True, plotInterval=100, runLength=10, oneD=False):
        if plotting:
                fig, (ax1, ax2, ax3) = plt.subplots(3,1)
                ax1.scatter(self.places[:, 0].cpu().numpy(),
                    self.places[:, 1].cpu().numpy(),
                    c = self.placeActivity.cpu().numpy(),
                    cmap = plt.get_cmap("coolwarm"))
                ax2.matshow(self.activity.view((self.numX, self.numY)).cpu().numpy())
                im = ax3.scatter(self.places[:, 0].cpu().numpy(),
                            self.places[:, 1].cpu().numpy(),
                            c = self.placeWeights[self.numX//2, self.numY//2, :].cpu().numpy(),
                            cmap = plt.get_cmap("coolwarm"))
                fig.colorbar(im, ax=ax3)
                plt.show()
                
        self.activityBuffer = torch.zeros([self.stdpWindow, self.numX*self.numY],
                                          device=device, dtype=torch.float)
        self.placeBuffer = torch.zeros([self.stdpWindow, self.numPlaces],
                                       device=device, dtype=torch.float)
        
        self.stdpValues = torch.tensor(np.exp(-self.dt*np.arange(0, self.stdpWindow)/STDP_TIME_CONSTANT),
                                       device=device, dtype=torch.float)*self.learningRate*self.dt
        self.bufferIndex = 0
        
        times = np.arange(0, time, self.dt)
        
        self.activity = torch.tensor(np.random.rand(1, 1, self.numX, self.numY)*0.1, device=device,
                                                 dtype=torch.float)
        self.activationHistory.fill_(self.globalTonic[0])
        self.activationHistory *= self.envelope
        #trajectory = np.zeros((len(times), 2))
        
#         trajectories = []
#         times = []
#         oldPosition=np.asarray([0.5, 0.5])
#         for t in np.arange(0, time, runLength):
#             currentTimes = np.arange(0, runLength, self.dt)
#             newDest = np.random.sample((2,))
#             movement = np.sin(currentTimes*np.pi/(runLength*2))
#             trajectory = np.outer(movement, newDest) + np.outer(1 - movement, oldPosition)
            
#             trajectories.append(trajectory)
#             times.append(currentTimes + t)
#             oldPosition = newDest
            
#         trajectory = np.concatenate(trajectories, axis=0)
#         times = np.concatenate(times)
            
        #trajectory[:, 0] = (np.sin((times * np.pi / (10*1.34754)) + offsets[0]) + 1)/2
        #trajectory[:, 1] = (np.sin((times * np.pi / (10*1.6383478)) + offsets[1]) + 1)/2
        
        trajectory = buildTrajectory(len(times), 1*self.dt, wrap=True, directionStability=0.95)
        
        if oneD:
            trajectory[:, 1] = 0.
        velocity = np.diff(trajectory, axis=0)/self.dt
        
        trajectory = np.mod(trajectory, 1)
        trajectory = torch.tensor(trajectory, device=device, dtype=torch.float)
        velocity = torch.tensor(velocity, device=device, dtype=torch.float)
        speed = torch.norm(velocity, 2, dim=-1)
        distances = torch.zeros((self.numPlaces, 2), device=device, dtype=torch.float)
        
        for i, t in enumerate(times[:-1]):
            pos = trajectory[i, :]
            s = min(speed[i]/torch.mean(speed), 1.)
            distances[:,0] = torch.min(torch.abs(self.places[:,0] - pos[0]), 1 - torch.abs(self.places[:,0] - pos[0]))
            if not oneD:
                distances[:,1] = torch.min(torch.abs(self.places[:,1] - pos[1]), 1 - torch.abs(self.places[:,1] - pos[1]))
            else:
                distances[:,1] = 0.
            torch.exp(-1.*torch.norm(distances, 2, dim=-1)/(2*(self.sigmaLoc)), out=self.placeActivity)
            self.placeActivity *= s
            self.step(speed=s)
            self.stdpUpdate(i)
            torch.max(self.placeWeights, self.zero, out=self.placeWeights)
            torch.min(self.placeWeights, self.zero + 2., out=self.placeWeights)
           #self.placeWeights *= .6667/torch.mean(self.placeWeights, -1, keepdim=True)
            #self.decayWeights()
            if i % plotInterval == 0:
                if plotting:
                    ax1.scatter(self.places[:, 0].cpu().numpy(),
                        self.places[:, 1].cpu().numpy(),
                        c = self.placeActivity.cpu().numpy(),
                        cmap = plt.get_cmap("coolwarm"))
                    ax2.matshow(self.activity.view((self.numX, self.numY)).cpu().numpy())
                    im = ax3.scatter(self.places[:, 0].cpu().numpy(),
                                self.places[:, 1].cpu().numpy(),
                                c = self.placeWeights[self.numX//2, self.numY//2, :].cpu().numpy(),
                                cmap = plt.get_cmap("coolwarm"))
                    ax1.set_title(str(t))
                    fig.canvas.draw()
                    
                
    def stdpUpdate(self, time, clearBuffer=False):
        if time < self.activityBuffer.shape[0]:
            self.activityBuffer[self.bufferIndex].copy_(self.activity.view(self.numX*self.numY,))
            self.placeBuffer[self.bufferIndex].copy_(self.placeActivity)
            self.bufferIndex += 1
            
        else:
            for t in range(self.stdpWindow):
                i = (self.bufferIndex - t) % self.stdpWindow
                self.placeWeights += torch.ger(self.activity.view(self.numX*self.numY), self.placeBuffer[i]* \
                                        self.stdpValues[t]).view(self.numX, self.numY, self.numPlaces)
                self.placeWeights -= (torch.ger(self.activityBuffer[i], self.placeActivity) *\
                                        self.stdpValues[t]).view(self.numX, self.numY, self.numPlaces) *\
                                      self.negativeLearnFactor
                
            self.bufferIndex = (self.bufferIndex + 1) % self.stdpWindow
            self.activityBuffer[self.bufferIndex].copy_(self.activity.view(self.numX*self.numY,))
            self.placeBuffer[self.bufferIndex].copy_(self.placeActivity)
            
            
        if clearBuffer:
            # Buffer clearing is not currently implemented.
            pass

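The weight update inside stdpUpdate is easier to read outside the ring-buffer bookkeeping. The sketch below restates the rule in plain NumPy for a single (past, present) pair of time steps; the array sizes, learning rate, time step, and the 0.9 negative-learning factor are illustrative assumptions rather than the values used in the runs below.

In [ ]:
# Sketch of the STDP rule in stdpUpdate for one (past, present) pair of time steps.
# Sizes and constants here are illustrative assumptions.
numGrid, numPlace = 16, 50
rate, tau, dt_ = 0.015, STDP_TIME_CONSTANT, 0.001

gridNow   = np.random.rand(numGrid)    # current grid-cell activity (flattened)
gridPast  = np.random.rand(numGrid)    # grid activity one step earlier
placeNow  = np.random.rand(numPlace)   # current place-cell activity
placePast = np.random.rand(numPlace)   # place activity one step earlier

kernel = np.exp(-dt_ / tau) * rate * dt_   # corresponds to stdpValues[1] in learn()
W = np.zeros((numGrid, numPlace))
# Potentiation: place input that preceded grid activity strengthens the connection.
W += kernel * np.outer(gridNow, placePast)
# Depression (scaled by negativeLearnFactor): grid activity that preceded place input.
W -= kernel * np.outer(gridPast, placeNow) * 0.9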
In [218]:
plt.rcParams['figure.figsize'] = [5, 5]

GCN = GCN2D(32,
            32,
            9,
            .3,
            25.,
            globalTonic=.25,
            stdpWindow=1,
            dt=0.01,
            boostEffect=50,
            boostDecay=100.,
            numPlaces=1000,
            learningRate=1.,
            initialWeightFactor=.1,
            boostGradientX=1,
            weightDecay=500,
            wideningFactor=2,
            negativeLearnFactor=1.,
            envelopeWidth=12,
            envelopeFactor=1.2,
            sigmaLoc=.0005)

#GCN.randomLesions(10, 10, 5)

plt.matshow(GCN.filter[0, 0].cpu().numpy())
plt.show()

plt.matshow(GCN.envelope.cpu().numpy())
plt.show()


# plt.matshow(GCN.boostEffect)
# plt.show()

# plt.figure()
# plt.scatter(GCN.places[:, 0].cpu().numpy(),
#              GCN.places[:, 1].cpu().numpy())
# plt.show()



In [214]:
plt.rcParams['figure.figsize'] = [5, 5]
results = GCN.simulate(100, logFreq=10, startFrom = 0)
plt.matshow(results[-2])
plt.show()


At 0.0
At 0.1
At 0.2
...
At 99.8
At 99.9

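The logged frames in results can be replayed as a short animation to watch the grid pattern settle. A minimal sketch using the already-imported animation module; the frame interval is an arbitrary choice.

In [ ]:
# Sketch: animate the activity frames returned by simulate().
fig, ax = plt.subplots()
im = ax.matshow(results[0])

def updateFrame(frame):
    im.set_data(results[frame])
    return (im,)

anim = animation.FuncAnimation(fig, updateFrame, frames=len(results), interval=50, blit=True)
plt.show()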
In [219]:
plt.rcParams['figure.figsize'] = [5, 15]
for i in range(1000):
    print(i, GCN.learningRate)
    GCN.learningRate /= 1.01
    GCN.learn(25, plotting=False, plotInterval=1000, oneD=True)
    weights = GCN.placeWeights.cpu().numpy()
    if i % 50 == 0:
        with open("PlaceWeights{}.npz".format(i), "wb") as f:
            np.savez(f, weights)


0 1.0
1 0.9900990099009901
2 0.9802960494069208
...
998 4.867085380861289e-05
999 4.8188964166943455e-05

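The checkpoints written above (PlaceWeights0.npz, PlaceWeights50.npz, ...) can be reloaded to inspect how the place-to-grid weights evolved; np.savez stores the unnamed array under the key 'arr_0'. A minimal sketch, assuming the files are in the working directory:

In [ ]:
# Sketch: reload a saved checkpoint and plot one grid cell's incoming place weights.
ckpt = np.load("PlaceWeights0.npz")
w = ckpt["arr_0"]                      # shape (numX, numY, numPlaces)
plt.figure(figsize=(5, 5))
plt.scatter(GCN.places[:, 0].cpu().numpy(),
            GCN.places[:, 1].cpu().numpy(),
            c=w[w.shape[0]//2, w.shape[1]//2, :],
            cmap=plt.get_cmap("coolwarm"))
plt.colorbar()
plt.show()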
In [ ]:
print(torch.max(GCN.activity))
print(torch.max(GCN.placeWeights, dim=-1)[0])
print(torch.min(GCN.placeWeights, dim=-1)[0])
print(torch.mean(GCN.placeWeights, dim=-1))

In [222]:
plt.rcParams['figure.figsize'] = [5, 15]
GCN.learn(5000, plotting=True, plotInterval=10, oneD=True)


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-222-82599c4f137c> in <module>()
      1 plt.rcParams['figure.figsize'] = [5, 15]
----> 2 GCN.learn(5000, plotting=True, plotInterval=10, oneD=True)

<ipython-input-212-fd085fba287a> in learn(self, time, plotting, plotInterval, runLength, oneD)
    263                                 cmap = plt.get_cmap("coolwarm"))
    264                     ax1.set_title(str(t))
--> 265                     fig.canvas.draw()
    266 
    267 

...

KeyboardInterrupt: 

In [220]:
plt.rcParams['figure.figsize'] = [15, 15]
plt.figure()
start = 12
end = 20
for i in range(start, end):
    for j in range(start, end):
        index = (i - start)*(end - start) + (j - start) + 1
        plt.subplot((end - start), (end - start), index)
        plt.scatter(GCN.places[:, 0].cpu().numpy(),
                    GCN.places[:, 1].cpu().numpy(),
                    c = GCN.placeWeights[i, j, :].cpu().numpy(),
                    cmap = plt.get_cmap("coolwarm"))
        plt.draw()