In [1]:
%pylab inline
# %pylab injects numpy (as np) and the matplotlib plotting functions into the namespace
import pysal as ps
In [2]:
ntw = ps.Network(ps.examples.get_path('geodanet/streets.shp'))
# Snap the crime and school point patterns to the network
ntw.snapobservations(ps.examples.get_path('geodanet/crimes.shp'), 'crimes', attribute=True)
ntw.snapobservations(ps.examples.get_path('geodanet/schools.shp'), 'schools', attribute=False)
In [3]:
ntw.pointpatterns
Out[3]:
In [4]:
dir(ntw.pointpatterns['crimes'])
Out[4]:
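The snapped pattern carries its observations and bookkeeping as plain attributes; a minimal check, reusing only names that appear elsewhere in this notebook (npoints, obs_to_edge):

crimes = ntw.pointpatterns['crimes']
print crimes.npoints            # number of snapped crime points
print type(crimes.obs_to_edge)  # observation-to-edge lookup used below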
In [5]:
counts = ntw.count_per_edge(ntw.pointpatterns['crimes'].obs_to_edge,
                            graph=False)
# Mean number of crimes per network edge
sum(counts.values()) / float(len(counts))
Out[5]:
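Because counts is an ordinary dict keyed by edge, summaries beyond the mean fall out with plain Python; for example, the edge carrying the most crimes:

busiest = max(counts, key=counts.get)
print busiest, counts[busiest]  # edge (as a node-id pair) and its crime count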
In [6]:
# Segment the network edges into pieces of roughly 200 distance units
n200 = ntw.segment_edges(200.0)
In [7]:
counts = n200.count_per_edge(n200.pointpatterns['crimes'].obs_to_edge, graph=False)
# Mean number of crimes per edge on the segmented network
sum(counts.values()) / float(len(counts))
Out[7]:
In [8]:
import networkx as nx
figsize(10, 10)
# Draw the original network with large, semi-transparent nodes
g = nx.Graph()
for e in ntw.edges:
    g.add_edge(*e)
for n, p in ntw.node_coords.iteritems():
    g.node[n] = p
nx.draw(g, ntw.node_coords, node_size=300, alpha=0.5)
# Overlay the segmented network with small nodes
g = nx.Graph()
for e in n200.edges:
    g.add_edge(*e)
for n, p in n200.node_coords.iteritems():
    g.node[n] = p
nx.draw(g, n200.node_coords, node_size=25, alpha=1.0)
In [9]:
# Binary adjacency weights among network edges
w = ntw.contiguityweights(graph=False)
# Recompute crime counts on the original (unsegmented) network edges
counts = ntw.count_per_edge(ntw.pointpatterns['crimes'].obs_to_edge, graph=False)
# Build the y vector of crime counts per edge
edges = w.neighbors.keys()
y = np.zeros(len(edges))
for i, e in enumerate(edges):
    if e in counts:
        y[i] = counts[e]
# Moran's I
res = ps.esda.moran.Moran(y, w, permutations=99)
print dir(res)
In [10]:
counts = ntw.count_per_edge(ntw.pointpatterns['crimes'].obs_to_edge, graph=True)
# Binary adjacency weights on the graph representation
w = ntw.contiguityweights(graph=True)
# Build the y vector of crime counts per edge
edges = w.neighbors.keys()
y = np.zeros(len(edges))
for i, e in enumerate(edges):
    if e in counts:
        y[i] = counts[e]
# Moran's I
res = ps.esda.moran.Moran(y, w, permutations=99)
print dir(res)
In [11]:
# Binary adjacency weights on the segmented network
w = n200.contiguityweights(graph=False)
# Compute the counts
counts = n200.count_per_edge(n200.pointpatterns['crimes'].obs_to_edge, graph=False)
# Build the y vector, converting raw counts to intensities (counts per unit length)
edges = w.neighbors.keys()
y = np.zeros(len(edges))
for i, e in enumerate(edges):
    if e in counts:
        length = n200.edge_lengths[e]
        y[i] = counts[e] / length
# Moran's I
res = ps.esda.moran.Moran(y, w, permutations=99)
print dir(res)
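Rather than scanning dir(res), the statistic can be read from the result object directly; a minimal sketch, assuming the standard attributes of ps.esda.moran.Moran (I, EI, p_sim):

print res.I      # observed Moran's I for the intensity vector
print res.EI     # expected value under the null
print res.p_sim  # pseudo p-value from the 99 permutations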
In [12]:
import time
# First call: the node-to-node shortest path distance matrix must be built
t1 = time.time()
n0 = ntw.allneighbordistances(ntw.pointpatterns['crimes'])
print time.time() - t1
In [13]:
import time
# First call on the segmented network: its shortest path matrix must also be built
t1 = time.time()
n1 = n200.allneighbordistances(n200.pointpatterns['crimes'])
print time.time() - t1
Note that the first time these methods are called, the underlying node-to-node shortest path distance matrix has to be calculated. Subsequent calls will not require this, and will be much faster:
In [14]:
import time
# Second call: reuses the cached shortest path distance matrix
t1 = time.time()
n0 = ntw.allneighbordistances(ntw.pointpatterns['crimes'])
print time.time() - t1
In [15]:
import time
# Second call on the segmented network: also reuses the cached matrix
t1 = time.time()
n1 = n200.allneighbordistances(n200.pointpatterns['crimes'])
print time.time() - t1
In [16]:
# Simulate a pattern with the same number of points as the crime data
npts = ntw.pointpatterns['crimes'].npoints
sim = ntw.simulate_observations(npts)
sim
Out[16]:
In [18]:
# Network-constrained G function with a 99-permutation simulation envelope
gres = ps.NetworkG(ntw,
                   ntw.pointpatterns['crimes'],
                   permutations=99)
In [19]:
figsize(5,5)
plot(gres.xaxis, gres.observed, 'b-', linewidth=1.5, label='Observed')
plot(gres.xaxis, gres.upperenvelope, 'r--', label='Upper')
plot(gres.xaxis, gres.lowerenvelope, 'k--', label='Lower')
legend(loc='best')
Out[19]:
In [20]:
# Network-constrained K function with a 99-permutation simulation envelope
kres = ps.NetworkK(ntw,
                   ntw.pointpatterns['crimes'],
                   permutations=99)
In [21]:
figsize(5,5)
plot(kres.xaxis, kres.observed, 'b-', linewidth=1.5, label='Observed')
plot(kres.xaxis, kres.upperenvelope, 'r--', label='Upper')
plot(kres.xaxis, kres.lowerenvelope, 'k--', label='Lower')
legend(loc='best')
Out[21]: