In [ ]:
import IPython.display as IPd

# Local path on user's machine
path = '/Users/jgaboardi/AAG_16/Data/'

In [ ]:
import pysal as ps
import geopandas as gpd
import numpy as np
import networkx as nx
import shapefile as shp
from shapely.geometry import Point
import shapely
from collections import OrderedDict
import pandas as pd
import qgrid
import gurobipy as gbp
import time
import bokeh
from bokeh.plotting import figure, ColumnDataSource
from bokeh.io import output_notebook, output_file, show
from bokeh.models import (HoverTool, BoxAnnotation, GeoJSONDataSource,
                          GMapPlot, GMapOptions, Circle,
                          DataRange1d, PanTool, WheelZoomTool, BoxSelectTool,
                          ResetTool, MultiLine)
import utm
from cylp.cy import CyCbcModel, CyClpSimplex
import matplotlib.pyplot as plt
import matplotlib as mpl
%matplotlib inline
#%matplotlib notebook


mpl.rcParams['figure.figsize']=11,11
output_notebook()

In [ ]:
def c_s_matrix():  # Client-to-service cost matrix function
    global All_Dist_MILES
    # Network distances are returned in map units (meters for the projected street network)
    All_Neigh_Dist = ntw.allneighbordistances(
                        sourcepattern=ntw.pointpatterns['Rand_Points_CLIENT'],
                        destpattern=ntw.pointpatterns['Rand_Points_SERVICE'])
    All_Dist_MILES = All_Neigh_Dist * 0.000621371 # meters to miles
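
allneighbordistances returns a dense origin-by-destination cost matrix in the network's map units (meters once the streets are reprojected below). The sketch that follows uses a hypothetical 2-client by 3-site array to show the unit conversion and the row/column convention assumed by the models later on.

In [ ]:
# Minimal sketch with hypothetical values: rows are clients (i), columns are candidate sites (j)
demo_meters = np.array([[1200.,  800., 1500.],
                        [ 600.,  950., 2100.]])
demo_miles = demo_meters * 0.000621371  # meters to miles, same factor as c_s_matrix()
print demo_miles.shape  # (2 clients, 3 candidate sites)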

In [ ]:
def Gurobi_PMCP(sites, Ai, AiSum, All_Dist_MILES):
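    """Solve the p-median, p-center, and p-centdian models for each p in
    1..sites, write the selected facility locations to shapefiles and LP
    files, and append results to the global DataFrames/lists declared below.

    sites          -- number of candidate service facilities
    Ai             -- client demand weights (clients x 1 array)
    AiSum          -- total demand, sum of Ai
    All_Dist_MILES -- client-by-service network cost matrix in miles
    """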
#**************************************************    
    # Define Global Variables
    global pydf_M           #--------- median globals
    global selected_M
    global NEW_Records_PMP
    global VAL_PMP
    global AVG_PMP
    #
    global pydf_C           #--------- center globals
    global selected_C
    global NEW_Records_PCP
    global VAL_PCP
    #
    global pydf_CentDian    #--------- centdian globals
    global selected_CentDian
    global NEW_Records_Pcentdian
    global VAL_CentDian
    #
    global pydf_MC          #--------- pmcp globals
    global VAL_PMCP
    global p_dens
    #***********************************************
    # Initiate Solutions   
    for p in range(1, sites+1): #--------- for all [p] in p = length(service facilities)

        # DATA
        # [p]        --> sites
        # Demand     --> Ai
        # Demand Sum --> AiSum
        # Travel Costs
        Cij = All_Dist_MILES
        # Weighted Costs
        Sij = Ai * Cij
        # Total Client and Service nodes
        client_nodes = range(len(Sij))
        service_nodes = range(len(Sij[0]))

        #*************************************************************
        # PMP
        t1_PMP = time.time()
        
        #     1. Create Model, 2. Add Variables, & Update Model
        # Instantiate Model
        mPMP = gbp.Model(' -- p-Median -- ')
        # Turn off Gurobi's output
        mPMP.setParam('OutputFlag',False)

        # Add Client Decision Variables (iXj)
        client_var = []
        for orig in client_nodes:
            client_var.append([])
            for dest in service_nodes:
                client_var[orig].append(mPMP.addVar(vtype=gbp.GRB.BINARY,
                                                lb=0,
                                                ub=1,
                                                obj=Sij[orig][dest], 
                                                name='x'+str(orig+1)+'_'+str(dest+1)))

        # Add Service Decision Variables (j)
        serv_var = []
        for dest in service_nodes:
            serv_var.append([])
            serv_var[dest].append(mPMP.addVar(vtype=gbp.GRB.BINARY,
                                          lb=0,
                                          ub=1,
                                          name='y'+str(dest+1)))

        # Update the model
        mPMP.update()

        #     3. Set Objective Function
        mPMP.setObjective(gbp.quicksum(Sij[orig][dest]*client_var[orig][dest] 
                            for orig in client_nodes for dest in service_nodes), 
                            gbp.GRB.MINIMIZE)


        #     4. Add Constraints
        # Assignment Constraints
        for orig in client_nodes:
            mPMP.addConstr(gbp.quicksum(client_var[orig][dest] 
                            for dest in service_nodes) == 1)
        # Opening Constraints
        for orig in service_nodes:
            for dest in client_nodes:
                mPMP.addConstr((serv_var[orig][0] - client_var[dest][orig] >= 0))

        # Facility Constraint
        mPMP.addConstr(gbp.quicksum(serv_var[dest][0] for dest in service_nodes) == p)

        #     5. Optimize and Print Results
        # Solve
        mPMP.optimize()

        # Write LP
        mPMP.write(path+'LP_Files/PMP'+str(p)+'.lp')
        t2_PMP = time.time()-t1_PMP

        # Record and Display Results
        print '\n*************************************************************************'
        selected_M = OrderedDict()
        dbf1 = ps.open(path+'Snapped/SERVICE_Snapped.dbf')
        NEW_Records_PMP = []
        for v in mPMP.getVars():
            if 'x' in v.VarName:
                pass
            elif v.x > 0:
                var = '%s' % v.VarName
                selected_M[var]=(u"\u2588")
                for i in range(dbf1.n_records):
                    if var in dbf1.read_record(i):
                        x = dbf1.read_record(i)
                        NEW_Records_PMP.append(x)
                    else:
                        pass
                print '    |                                            ', var
            
        pydf_M = pydf_M.append(selected_M, ignore_index=True)
        
        # Instantiate Shapefile
        SHP_Median = shp.Writer(shp.POINT)
        # Add Points
        for idy,idx,x,y in NEW_Records_PMP:
            SHP_Median.point(float(x), float(y))
        # Add Fields
        SHP_Median.field('y_ID')
        SHP_Median.field('x_ID')
        SHP_Median.field('LAT')
        SHP_Median.field('LON')
        # Add Records
        for idy,idx,x,y in NEW_Records_PMP:
            SHP_Median.record(idy,idx,x,y)
        # Save Shapefile
        SHP_Median.save(path+'Results/Selected_Locations_Pmedian'+str(p)+'.shp')   

        print '    | Selected Facility Locations --------------  ^^^^ '
        print '    | Candidate Facilities [p] ----------------- ', len(selected_M)
        val_m = mPMP.objVal
        VAL_PMP.append(round(val_m, 3))
        print '    | Objective Value (miles) ------------------ ', val_m
        avg_m = float(mPMP.objVal)/float(AiSum)
        AVG_PMP.append(round(avg_m, 3))
        print '    | Avg. Value / Client (miles) -------------- ', avg_m
        print '    | Real Time to Optimize (sec.) ------------- ', t2_PMP
        print '*************************************************************************'
        print ' -- The p-Median Problem -- '
        print ' [p] = ', str(p), '\n\n'
        
        
        #******************************************************************************
        # PCP
        t1_PCP = time.time()
        
        # Instantiate p-Center Model
        mPCP = gbp.Model(' -- p-Center -- ')
        # Turn off Gurobi's output
        mPCP.setParam('OutputFlag', False)
        
        # Add Client Decision Variables (iXj)
        client_var_PCP = []
        for orig in client_nodes:
            client_var_PCP.append([])
            for dest in service_nodes:
                client_var_PCP[orig].append(mPCP.addVar(vtype=gbp.GRB.BINARY,
                                                lb=0,
                                                ub=1,
                                                obj=Cij[orig][dest], 
                                                name='x'+str(orig+1)+'_'+str(dest+1)))

        # Add Service Decision Variables (j)
        serv_var_PCP = []
        for dest in service_nodes:
            serv_var_PCP.append([])
            serv_var_PCP[dest].append(mPCP.addVar(vtype=gbp.GRB.BINARY,
                                          lb=0,
                                          ub=1,
                                          name='y'+str(dest+1)))

        # Add the Maximum travel cost variable
        W = mPCP.addVar(vtype=gbp.GRB.CONTINUOUS,
                    lb=0.,
                    name='W') 
        
        # Update the model
        mPCP.update()

        #     3. Set Objective Function
        mPCP.setObjective(W, gbp.GRB.MINIMIZE)

        #     4. Add Constraints
        # Assignment Constraints
        for orig in client_nodes:
            mPCP.addConstr(gbp.quicksum(client_var_PCP[orig][dest] 
                            for dest in service_nodes) == 1)
        # Opening Constraints
        for orig in service_nodes:
            for dest in client_nodes:
                mPCP.addConstr((serv_var_PCP[orig][0] - client_var_PCP[dest][orig] >= 0))

        # Add Maximum travel cost constraints
        for orig in client_nodes:
            mPCP.addConstr(gbp.quicksum(Cij[orig][dest]*client_var_PCP[orig][dest]
                                for dest in service_nodes) - W <= 0)
        
        # Facility Constraint
        mPCP.addConstr(gbp.quicksum(serv_var_PCP[dest][0] for dest in service_nodes) == p)

        #     5. Optimize and Print Results
        # Solve
        mPCP.optimize()

        # Write LP
        mPCP.write(path+'LP_Files/PCP'+str(p)+'.lp')
        t2_PCP = time.time()-t1_PCP

        # Record and Display Results
        print '\n*************************************************************************'
        selected_C = OrderedDict()
        dbf1 = ps.open(path+'Snapped/SERVICE_Snapped.dbf')
        NEW_Records_PCP = []
        for v in mPCP.getVars():
            if 'x' in v.VarName:
                pass
            elif 'W' in v.VarName:
                pass
            elif v.x > 0:
                var = '%s' % v.VarName
                selected_C[var]=(u"\u2588")
                for i in range(dbf1.n_records):
                    if var in dbf1.read_record(i):
                        x = dbf1.read_record(i)
                        NEW_Records_PCP.append(x)
                    else:
                        pass
                print '    |                                            ', var,  '         '
        pydf_C = pydf_C.append(selected_C, ignore_index=True)
        
        # Instantiate Shapefile
        SHP_Center = shp.Writer(shp.POINT)
        # Add Points
        for idy,idx,x,y in NEW_Records_PCP:
            SHP_Center.point(float(x), float(y))
        # Add Fields
        SHP_Center.field('y_ID')
        SHP_Center.field('x_ID')
        SHP_Center.field('LAT')
        SHP_Center.field('LON')
        # Add Records
        for idy,idx,x,y in NEW_Records_PCP:
            SHP_Center.record(idy,idx,x,y)
        # Save Shapefile
        SHP_Center.save(path+'Results/Selected_Locations_Pcenter'+str(p)+'.shp')   

        print '    | Selected Facility Locations --------------  ^^^^ '
        print '    | Candidate Facilities [p] ----------------- ', len(selected_C)
        val_c = mPCP.objVal
        VAL_PCP.append(round(val_c, 3))
        print '    | Objective Value (miles) ------------------ ', val_c
        print '    | Real Time to Optimize (sec.) ------------- ', t2_PCP
        print '*************************************************************************'
        print ' -- The p-Center Problem -- '
        print ' [p] = ', str(p), '\n\n'

        #******************************************************************************
        # p-CentDian
        
        t1_centdian = time.time()
        
        # Instantiate p-CentDian Model
        mPcentdian = gbp.Model(' -- p-CentDian -- ')
        # Turn off Gurobi's output
        mPcentdian.setParam('OutputFlag', False)
        
        # Add Client Decision Variables (iXj)
        client_var_CentDian = []
        for orig in client_nodes:
            client_var_CentDian.append([])
            for dest in service_nodes:
                client_var_CentDian[orig].append(mPcentdian.addVar(vtype=gbp.GRB.BINARY,
                                                lb=0,
                                                ub=1,
                                                obj=Cij[orig][dest], 
                                                name='x'+str(orig+1)+'_'+str(dest+1)))

        # Add Service Decision Variables (j)
        serv_var_CentDian = []
        for dest in service_nodes:
            serv_var_CentDian.append([])
            serv_var_CentDian[dest].append(mPcentdian.addVar(vtype=gbp.GRB.BINARY,
                                          lb=0,
                                          ub=1,
                                          name='y'+str(dest+1)))

        # Add the Maximum travel cost variable
        W_CD = mPcentdian.addVar(vtype=gbp.GRB.CONTINUOUS,
                    lb=0.,
                    name='W') 
        
        # Update the model
        mPcentdian.update()

        #     3. Set Objective Function
        # The median term: total demand-weighted assignment cost
        M = gbp.quicksum(Sij[orig][dest]*client_var_CentDian[orig][dest] 
                    for orig in client_nodes for dest in service_nodes)
        
        # Normalize by total demand (matches the 'Avg. Value / Client' reported above)
        Zt = M / AiSum
        
        # Equal-weight combination of the worst-case cost (W_CD) and the
        # normalized median cost (Zt)
        mPcentdian.setObjective((W_CD + Zt) / 2, gbp.GRB.MINIMIZE)

        #     4. Add Constraints
        # Assignment Constraints
        for orig in client_nodes:
            mPcentdian.addConstr(gbp.quicksum(client_var_CentDian[orig][dest] 
                            for dest in service_nodes) == 1)
        # Opening Constraints
        for orig in service_nodes:
            for dest in client_nodes:
                mPcentdian.addConstr((serv_var_CentDian[orig][0] - 
                                      client_var_CentDian[dest][orig] 
                                      >= 0))

        # Add Maximum travel cost constraints
        for orig in client_nodes:
            mPcentdian.addConstr(gbp.quicksum(Cij[orig][dest]*client_var_CentDian[orig][dest]
                                for dest in service_nodes) - W_CD <= 0)
        
        # Facility Constraint
        mPcentdian.addConstr(gbp.quicksum(serv_var_CentDian[dest][0] 
                                          for dest in service_nodes) 
                                          == p)

        #     5. Optimize and Print Results
        # Solve
        mPcentdian.optimize()

        # Write LP
        mPcentdian.write(path+'LP_Files/CentDian'+str(p)+'.lp')
        t2_centdian = time.time()-t1_centdian

        # Record and Display Results
        print '\n*************************************************************************'
        selected_CentDian = OrderedDict()
        dbf1 = ps.open(path+'Snapped/SERVICE_Snapped.dbf')
        NEW_Records_Pcentdian = []
        for v in mPcentdian.getVars():
            if 'x' in v.VarName:
                pass
            elif 'W' in v.VarName:
                pass
            elif v.x > 0:
                var = '%s' % v.VarName
                selected_CentDian[var]=(u"\u2588")
                for i in range(dbf1.n_records):
                    if var in dbf1.read_record(i):
                        x = dbf1.read_record(i)
                        NEW_Records_Pcentdian.append(x)
                    else:
                        pass
                print '    |                                            ', var,  '         '
        pydf_CentDian = pydf_CentDian.append(selected_CentDian, ignore_index=True)
        
        # Instantiate Shapefile
        SHP_CentDian = shp.Writer(shp.POINT)
        # Add Points
        for idy,idx,x,y in NEW_Records_Pcentdian:
            SHP_CentDian.point(float(x), float(y))
        # Add Fields
        SHP_CentDian.field('y_ID')
        SHP_CentDian.field('x_ID')
        SHP_CentDian.field('LAT')
        SHP_CentDian.field('LON')
        # Add Records
        for idy,idx,x,y in NEW_Records_Pcentdian:
            SHP_CentDian.record(idy,idx,x,y)
        # Save Shapefile
        SHP_CentDian.save(path+'Results/Selected_Locations_CentDian'+str(p)+'.shp')   

        print '    | Selected Facility Locations --------------  ^^^^ '
        print '    | Candidate Facilities [p] ----------------- ', len(selected_CentDian)
        val_cd = mPcentdian.objVal
        VAL_CentDian.append(round(val_cd, 3))
        print '    | Objective Value (miles) ------------------ ', val_cd
        print '    | Real Time to Optimize (sec.) ------------- ', t2_centdian
        print '*************************************************************************'
        print ' -- The p-CentDian Problem -- '
        print ' [p] = ', str(p), '\n\n'
        
        #******************************************************************************
        # p-Median + p-Center Method
        
        # Record solutions where all three models select identical facilities
        if selected_M.keys() == selected_C.keys() == selected_CentDian.keys():
            
            pydf_MC = pydf_MC.append(selected_C, ignore_index=True) # append PMCP dataframe
            p_dens.append('p='+str(p)) # record this facility density [p]
            VAL_PMCP.append([round(val_m,3), round(avg_m,3), 
                             round(val_c,3), round(val_cd,3)]) # append PMCP objective values
            
            # Instantiate Shapefile
            SHP_PMCP = shp.Writer(shp.POINT)
            # Add Points
            for idy,idx,x,y in NEW_Records_PCP:
                SHP_PMCP.point(float(x), float(y))
            # Add Fields
            SHP_PMCP.field('y_ID')
            SHP_PMCP.field('x_ID')
            SHP_PMCP.field('LAT')
            SHP_PMCP.field('LON')
            # Add Records
            for idy,idx,x,y in NEW_Records_PCP:
                SHP_PMCP.record(idy,idx,x,y)
            # Save Shapefile
            SHP_PMCP.save(path+'Results/Selected_Locations_PMCP'+str(p)+'.shp')
        else:
            pass
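
The cell above builds three closely related integer programs for every p. As a compact illustration of the minimax linearization shared by the p-center and p-centdian models, the sketch below solves a hypothetical 3-client / 2-site instance with p = 1 using the same gurobipy calls; it assumes a working Gurobi license, and the *_toy names are made up for the example. The p-median keeps the same assignment and opening constraints but minimizes the demand-weighted assignment cost instead of W.

In [ ]:
# Toy p-center sketch (hypothetical data): 3 clients, 2 candidate sites, p = 1
toy_Cij = [[2., 5.],
           [4., 1.],
           [3., 3.]]
toy_p = 1
toy = gbp.Model('toy p-center')
toy.setParam('OutputFlag', False)
# x_toy[i][j] = 1 if client i is assigned to site j; y_toy[j] = 1 if site j is opened
x_toy = [[toy.addVar(vtype=gbp.GRB.BINARY, name='x%i_%i' % (i, j)) for j in range(2)]
         for i in range(3)]
y_toy = [toy.addVar(vtype=gbp.GRB.BINARY, name='y%i' % j) for j in range(2)]
W_toy = toy.addVar(vtype=gbp.GRB.CONTINUOUS, lb=0., name='W')  # worst-case assignment cost
toy.update()
toy.setObjective(W_toy, gbp.GRB.MINIMIZE)
for i in range(3):
    # each client is assigned to exactly one site
    toy.addConstr(gbp.quicksum(x_toy[i][j] for j in range(2)) == 1)
    # W bounds every client's assignment cost from above
    toy.addConstr(gbp.quicksum(toy_Cij[i][j]*x_toy[i][j] for j in range(2)) <= W_toy)
    for j in range(2):
        # opening constraints in the equivalent x <= y form
        toy.addConstr(x_toy[i][j] <= y_toy[j])
toy.addConstr(gbp.quicksum(y_toy[j] for j in range(2)) == toy_p)  # open exactly p sites
toy.optimize()
print 'optimal worst-case cost:', toy.objVal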

In [ ]:
# Waverly Hills streets
STREETS_Orig = gpd.read_file(path+'Waverly_Trim/Waverly.shp')  # copy kept in the original CRS
STREETS = gpd.read_file(path+'Waverly_Trim/Waverly.shp')
STREETS.to_crs(epsg=2779, inplace=True)  # reproject to NAD83(HARN) / Florida North (meters)
STREETS.to_file(path+'WAVERLY/WAVERLY.shp')
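
An optional sanity check on the reprojection, using standard GeoPandas attributes:

In [ ]:
# Confirm the new CRS and the projected extent (EPSG:2779 units are meters)
print STREETS.crs
print STREETS.total_bounds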

In [ ]:
ntw = ps.Network(path+'WAVERLY/WAVERLY.shp')
shp_W = ps.open(path+'WAVERLY/WAVERLY.shp')

In [ ]:
buff = STREETS.buffer(200)  # 200-meter buffer around each street segment

In [ ]:
buffU = buff.unary_union  # dissolve the individual buffers into a single geometry
buff1 = gpd.GeoSeries(buffU)
buff1.crs = STREETS.crs
Buff = gpd.GeoDataFrame(buff1, crs=STREETS.crs)
Buff.columns = ['geometry']

In [ ]:
np.random.seed(352)
x = np.random.uniform(shp_W.bbox[0], shp_W.bbox[2], 1000)
np.random.seed(850)
y = np.random.uniform(shp_W.bbox[1], shp_W.bbox[3], 1000)  
coords0 = zip(x, y)
coords = [shapely.geometry.Point(i) for i in coords0]
Rand = gpd.GeoDataFrame(coords)
Rand.crs = STREETS.crs
Rand.columns = ['geometry']

In [ ]:
Inter = [Buff['geometry'].intersection(p) for p in Rand['geometry']]
INTER = gpd.GeoDataFrame(Inter, crs=STREETS.crs)
INTER.columns = ['geometry']

In [ ]:
# Keep only the intersection results that are Points,
# i.e., the random points that fall within the buffer
# (a vectorized alternative is sketched below)
point_in = []
for p in INTER['geometry']:
    if isinstance(p, shapely.geometry.Point):
        point_in.append(p)
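
A roughly equivalent vectorized filter using shapely's within predicate (not used below; results may differ only for points lying exactly on the buffer boundary):

In [ ]:
# Alternative check: random points strictly within the dissolved buffer
point_in_alt = [pt for pt in Rand['geometry'] if pt.within(buffU)]
print len(point_in_alt), len(point_in)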

In [ ]:
CLIENT = gpd.GeoDataFrame(point_in[:100], crs=STREETS.crs)
CLIENT.columns = ['geometry']
SERVICE = gpd.GeoDataFrame(point_in[-15:], crs=STREETS.crs)
SERVICE.columns = ['geometry']
CLIENT.to_file(path+'CLIENT')
SERVICE.to_file(path+'SERVICE')

In [ ]:
g = nx.Graph() # Roads & Nodes
g1 = nx.MultiGraph() # Edges and Vertices
GRAPH_client = nx.Graph() # Clients 
g_client = nx.Graph() # Snapped Clients
GRAPH_service = nx.Graph() # Service
g_service = nx.Graph() # Snapped Service

In [ ]:
points_client = {} 
points_service = {}

CLI = ps.open(path+'CLIENT/CLIENT.shp')
for idx, coords in enumerate(CLI):
    GRAPH_client.add_node(idx)
    points_client[idx] = coords
    GRAPH_client.node[idx] = coords
    
SER = ps.open(path+'SERVICE/SERVICE.shp')
for idx, coords in enumerate(SER):
    GRAPH_service.add_node(idx)
    points_service[idx] = coords
    GRAPH_service.node[idx] = coords

In [ ]:
# Client weights for demand: random integers in [1, 4]
np.random.seed(850)
Ai = np.random.randint(1, 5, len(CLI))
Ai = Ai.reshape(len(Ai),1)
AiSum = np.sum(Ai) # Sum of Weights (Total Demand)

In [ ]:
client = shp.Writer(shp.POINT) # Client Shapefile
# Add Random Points
for i,j in CLI:
    client.point(i,j)
# Add Fields
client.field('client_ID')
client.field('Weight')
counter = 0
for i in range(len(CLI)):
    counter = counter + 1
    client.record('client_' + str(counter), Ai[i][0])  # scalar weight, not a 1-element array
client.save(path+'Simulated/RandomPoints_CLIENT') # Save Shapefile

In [ ]:
service = shp.Writer(shp.POINT) #Service Shapefile
# Add Random Points
for i,j in SER:
    service.point(i,j)
# Add Fields
service.field('y_ID')
service.field('x_ID')
counter = 0
for i in range(len(SER)):
    counter = counter + 1
    service.record('y' + str(counter), 'x' + str(counter))
service.save(path+'Simulated/RandomPoints_SERVICE') # Save Shapefile

In [ ]:
# Snap
Snap_C = ntw.snapobservations(path+'Simulated/RandomPoints_CLIENT.shp', 
                     'Rand_Points_CLIENT', attribute=True)
Snap_S = ntw.snapobservations(path+'Simulated/RandomPoints_SERVICE.shp', 
                     'Rand_Points_SERVICE', attribute=True)
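
A quick check that both point patterns were snapped onto the network; this uses only the snapped_coordinates attribute already relied on below.

In [ ]:
print 'snapped client points: ', len(ntw.pointpatterns['Rand_Points_CLIENT'].snapped_coordinates)
print 'snapped service points:', len(ntw.pointpatterns['Rand_Points_SERVICE'].snapped_coordinates)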

In [ ]:
# Coordinate lists of the snapped service locations (written to the LAT/LON fields below)
y_snapped = []
x_snapped = []
for i,j in ntw.pointpatterns['Rand_Points_SERVICE'].snapped_coordinates.iteritems():
    y_snapped.append(j[0]) 
    x_snapped.append(j[1])

In [ ]:
service_SNAP = shp.Writer(shp.POINT) # Snapped Service Shapefile
# Add Points
for i,j in ntw.pointpatterns['Rand_Points_SERVICE'].snapped_coordinates.iteritems():
    service_SNAP.point(j[0],j[1])
# Add Fields
service_SNAP.field('y_ID')
service_SNAP.field('x_ID')
service_SNAP.field('LAT')
service_SNAP.field('LON')
counter = 0
for i in range(len(ntw.pointpatterns['Rand_Points_SERVICE'].snapped_coordinates)):
    counter = counter + 1
    service_SNAP.record('y' + str(counter), 'x' + str(counter), y_snapped[i], x_snapped[i])
service_SNAP.save(path+'Snapped/SERVICE_Snapped') # Save Shapefile

In [ ]:
# Call Client to Service Matrix Function
c_s_matrix()

In [ ]:
# Row (p) and column (y) labels for the pandas DataFrames of results
p_list = ['p='+str(i) for i in range(1, len(SER)+1)]
y_list = ['y'+str(i) for i in range(1, len(SER)+1)]

In [ ]:
pydf_M = pd.DataFrame(index=p_list,columns=y_list)
pydf_C = pd.DataFrame(index=p_list,columns=y_list)
pydf_CentDian = pd.DataFrame(index=p_list,columns=y_list)
pydf_MC = pd.DataFrame(index=p_list,columns=y_list)

In [ ]:
# p-Median
P_Med_Graphs = OrderedDict()
for x in range(1, len(SER)+1):
    P_Med_Graphs["{0}".format(x)] = nx.Graph()
    
# p-Center
P_Cent_Graphs = OrderedDict()
for x in range(1, len(SER)+1):
    P_Cent_Graphs["{0}".format(x)] = nx.Graph()
    
# p-CentDian
P_CentDian_Graphs = OrderedDict()
for x in range(1, len(SER)+1):
    P_CentDian_Graphs["{0}".format(x)] = nx.Graph()

In [ ]:
# PMP
VAL_PMP = []
AVG_PMP = []

# PCP
VAL_PCP = []

# CentDian
VAL_CentDian = []

# PMCP
VAL_PMCP = []
p_dens = [] # [p] values where the p-median, p-center, and p-centdian select the same facilities

In [ ]:
Gurobi_PMCP(len(SER), Ai, AiSum, All_Dist_MILES)

In [ ]:
# PMP Total
PMP_Tot_Diff = []
for i in range(len(VAL_PMP)):
    if i == 0:
        PMP_Tot_Diff.append('0%')
    else:
        n1 = VAL_PMP[i-1]
        n2 = VAL_PMP[i]
        diff = n2 - n1
        perc_change = (diff/n1)*100.
        PMP_Tot_Diff.append(str(round(perc_change, 2))+'%')

# PMP Average
PMP_Avg_Diff = []
for i in range(len(AVG_PMP)):
    if i == 0:
        PMP_Avg_Diff.append('0%')
    else:
        n1 = AVG_PMP[i-1]
        n2 = AVG_PMP[i]
        diff = n2 - n1
        perc_change = (diff/n1)*100.
        PMP_Avg_Diff.append(str(round(perc_change, 2))+'%')
        
# PCP
PCP_Diff = []
for i in range(len(VAL_PCP)):
    if i == 0:
        PCP_Diff.append('0%')
    else:
        n1 = VAL_PCP[i-1]
        n2 = VAL_PCP[i]
        diff = n2 - n1
        perc_change = (diff/n1)*100.
        PCP_Diff.append(str(round(perc_change, 2))+'%')

# p-CentDian
CentDian_Diff = []
for i in range(len(VAL_CentDian)):
    if i == 0:
        CentDian_Diff.append('0%')
    else:
        n1 = VAL_CentDian[i-1]
        n2 = VAL_CentDian[i]
        diff = n2 - n1
        perc_change = (diff/n1)*100.
        CentDian_Diff.append(str(round(perc_change, 2))+'%')

# PMCP       
PMCP_Diff = []
for i in range(len(VAL_PMCP)):
    PMCP_Diff.append([])
    for j in range(len(VAL_PMCP[0])):
        if i == 0:
            PMCP_Diff[i].append('0%')
        else:
            n1 = VAL_PMCP[i-1][j]
            n2 = VAL_PMCP[i][j]
            diff = n2 - n1
            perc_change = (diff/n1*100.)
            PMCP_Diff[i].append(str(round(perc_change, 2))+'%')
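
The four loops above repeat the same percent-change calculation. A hypothetical helper that reproduces those strings is sketched below; the loops above remain the ones actually used.

In [ ]:
def pct_change(values):
    # '0%' for the first entry, then percent change relative to the previous entry
    out = ['0%']
    for prev, curr in zip(values[:-1], values[1:]):
        out.append(str(round((curr - prev) / prev * 100., 2)) + '%')
    return out

# e.g., this should reproduce PMP_Tot_Diff
print pct_change(VAL_PMP) == PMP_Tot_Diff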

In [ ]:
# PMP
pydf_M = pydf_M[len(SER):]  # drop the empty placeholder rows
pydf_M.index = p_list
pydf_M.columns.name = 'Decision\nVariables'
pydf_M.index.name = 'Facility\nDensity'
pydf_M['Tot. Obj. Value'] = VAL_PMP
pydf_M['Tot. % Change'] = PMP_Tot_Diff
pydf_M['Avg. Obj. Value'] = AVG_PMP
pydf_M['Avg. % Change'] = PMP_Avg_Diff
pydf_M = pydf_M.fillna('')
#pydf_M.to_csv(path+'CSV')  <-- need to change squares to alphanumeric to use

# PCP
pydf_C = pydf_C[len(SER):]
pydf_C.index = p_list
pydf_C.columns.name = 'Decision\nVariables'
pydf_C.index.name = 'Facility\nDensity'
pydf_C['Worst Case Obj. Value'] = VAL_PCP
pydf_C['Worst Case % Change'] = PCP_Diff
pydf_C = pydf_C.fillna('')
#pydf_C.to_csv(path+'CSV')  <-- need to change squares to alphanumeric to use

pydf_CentDian = pydf_CentDian[len(SER):]
pydf_CentDian.index = p_list
pydf_CentDian.columns.name = 'Decision\nVariables'
pydf_CentDian.index.name = 'Facility\nDensity'
pydf_CentDian['CentDian Obj. Value'] = VAL_CentDian
pydf_CentDian['CentDian % Change'] = CentDian_Diff
pydf_CentDian = pydf_CentDian.fillna('')
#pydf_CentDian.to_csv(path+'CSV')  <-- need to change squares to alphanumeric to use

# PMCP
pydf_MC = pydf_MC[len(SER):]
pydf_MC.index = p_dens
pydf_MC.columns.name = 'D.V.'
pydf_MC.index.name = 'F.D.'
pydf_MC['Min.\nTotal'] = [VAL_PMCP[x][0] for x in range(len(VAL_PMCP))]
pydf_MC['Min.\nTotal\n%\nChange'] = [PMCP_Diff[x][0] for x in range(len(PMCP_Diff))]
pydf_MC['Avg.\nTotal'] = [VAL_PMCP[x][1] for x in range(len(VAL_PMCP))]
pydf_MC['Avg.\nTotal\n%\nChange'] = [PMCP_Diff[x][1] for x in range(len(PMCP_Diff))]
pydf_MC['Worst\nCase'] = [VAL_PMCP[x][2] for x in range(len(VAL_PMCP))]
pydf_MC['Worst\nCase\n%\nChange'] = [PMCP_Diff[x][2] for x in range(len(PMCP_Diff))]
pydf_MC['Center\nMedian'] = [VAL_PMCP[x][3] for x in range(len(VAL_PMCP))]
pydf_MC['Center\nMedian\n%\nChange'] = [PMCP_Diff[x][3] for x in range(len(PMCP_Diff))]
pydf_MC = pydf_MC.fillna('')
#pydf_MC.to_csv(path+'CSV')  <-- need to change squares to alphanumeric to use
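
qgrid is imported at the top of the notebook but not used in this excerpt; if desired, one of the result tables could be browsed interactively with something like the sketch below (this assumes the qgrid.show_grid API of the qgrid version current at the time).

In [ ]:
# Optional interactive view of the p-median results table
qgrid.show_grid(pydf_M)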

In [ ]:
# Create Graphs of the PMCP results
PMCP_Graphs = OrderedDict()
for x in pydf_MC.index:
    PMCP_Graphs[x[2:]] = nx.Graph()  # strip the 'p=' prefix; key by facility count

In [ ]:
plt.figure(1)

plt.subplot(221)
# Draw Network Actual Roads and Nodes
for e in ntw.edges:
    g.add_edge(*e)
nx.draw(g, ntw.node_coords, node_size=2, alpha=0.1, edge_color='r', width=2)
# PMP
size = 700
for i,j in P_Med_Graphs.iteritems():
    size = max(size - 50, 10)  # shrink marker size with p, keep it positive
    # p-Median
    P_Med = ps.open(path+'Results/Selected_Locations_Pmedian'+str(i)+'.shp')
    points_median = {}
    for idx, coords in enumerate(P_Med):
        P_Med_Graphs[i].add_node(idx)
        points_median[idx] = coords
        P_Med_Graphs[i].node[idx] = coords
    nx.draw(P_Med_Graphs[i], 
                points_median, 
                node_size=size, 
                alpha=.1, 
                node_color='k')
# Title
plt.title('PMP', family='Times New Roman', 
      size=30, color='k', backgroundcolor='w', weight='bold')
# North Arrow and 'N' --> Must be changed for different spatial resolutions, etc.
plt.arrow(624000, 164050, 0.0, 500, width=50, head_width=125, 
          head_length=75, fc='k', ec='k',alpha=0.75,)
plt.annotate('N', xy=(623900, 164700), fontstyle='italic', fontsize='xx-large',
            fontweight='heavy', alpha=0.75)


################################################################################

plt.subplot(222)
# Draw Network Actual Roads and Nodes
for e in ntw.edges:
    g.add_edge(*e)
nx.draw(g, ntw.node_coords, node_size=2, alpha=0.1, edge_color='r', width=2)
# PCP
size = 700
for i,j in P_Cent_Graphs.iteritems():
    size = max(size - 50, 10)  # shrink marker size with p, keep it positive
    # p-Center
    P_Cent = ps.open(path+'Results/Selected_Locations_Pcenter'+str(i)+'.shp')
    points_center = {}
    for idx, coords in enumerate(P_Cent):
        P_Cent_Graphs[i].add_node(idx)
        points_center[idx] = coords
        P_Cent_Graphs[i].node[idx] = coords
    nx.draw(P_Cent_Graphs[i], 
                points_center, 
                node_size=size, 
                alpha=.1, 
                node_color='k')
# Title
plt.title('PCP', family='Times New Roman', 
      size=30, color='k', backgroundcolor='w', weight='bold')
# North Arrow and 'N' --> Must be changed for different spatial resolutions, etc.
plt.arrow(624000, 164050, 0.0, 500, width=50, head_width=125, 
          head_length=75, fc='k', ec='k',alpha=0.75,)
plt.annotate('N', xy=(623900, 164700), fontstyle='italic', fontsize='xx-large',
            fontweight='heavy', alpha=0.75)


###############################################################################

plt.subplot(223)
# Draw Network Actual Roads and Nodes
for e in ntw.edges:
    g.add_edge(*e)
nx.draw(g, ntw.node_coords, node_size=2, alpha=0.1, edge_color='r', width=2)
# CentDian
size = 700
for i,j in P_CentDian_Graphs.iteritems():
    size = max(size - 50, 10)  # shrink marker size with p, keep it positive
    P_CentDian = ps.open(path+'Results/Selected_Locations_CentDian'+str(i)+'.shp')
    points_centdian = {}
    for idx, coords in enumerate(P_CentDian):
        P_CentDian_Graphs[i].add_node(idx)
        points_centdian[idx] = coords
        P_CentDian_Graphs[i].node[idx] = coords
    nx.draw(P_CentDian_Graphs[i], 
                points_centdian, 
                node_size=size, 
                alpha=.1, 
                node_color='k')
# Title
plt.title('CentDian', family='Times New Roman', 
      size=30, color='k', backgroundcolor='w', weight='bold')
# North Arrow and 'N' --> Must be changed for different spatial resolutions, etc.
plt.arrow(624000, 164050, 0.0, 500, width=50, head_width=125, 
          head_length=75, fc='k', ec='k',alpha=0.75,)
plt.annotate('N', xy=(623900, 164700), fontstyle='italic', fontsize='xx-large',
            fontweight='heavy', alpha=0.75)


###################################################################################

plt.subplot(224)
# Draw Network Actual Roads and Nodes
# PM+CP 
size = 700
for i,j in PMCP_Graphs.iteritems():
    size = max(size - 300, 50)  # shrink markers with p, keep sizes positive
    if int(i) <= len(SER)-1:
        pmcp = ps.open(path+'Results/Selected_Locations_PMCP'+str(i)+'.shp')
        points_pmcp = {}
        for idx, coords in enumerate(pmcp):
            PMCP_Graphs[i].add_node(idx)
            points_pmcp[idx] = coords
            PMCP_Graphs[i].node[idx] = coords
        nx.draw(PMCP_Graphs[i], 
                    points_pmcp, 
                    node_size=size,
                    alpha=.75, 
                    node_color='k')
    else:
        pass
for e in ntw.edges:
    g.add_edge(*e)
nx.draw(g, ntw.node_coords, node_size=2, alpha=0.1, edge_color='r', width=2)
# Legend (Ordered Dictionary)
LEGEND = OrderedDict()
for i in PMCP_Graphs:
    if int(i) <= len(SER)-1:
        LEGEND['PMP/PCP == '+str(i)]=PMCP_Graphs[i]
plt.legend(LEGEND, 
       loc='lower right', 
       frameon=False,
       scatterpoints=1)    
# Title
plt.title('PM+CP', family='Times New Roman', 
      size=30, color='k', backgroundcolor='w', weight='bold')
# North Arrow and 'N' --> Must be changed for different spatial resolutions, etc.
plt.arrow(624000, 164050, 0.0, 500, width=50, head_width=125, 
          head_length=75, fc='k', ec='k',alpha=0.75,)
plt.annotate('N', xy=(623900, 164700), fontstyle='italic', fontsize='xx-large',
            fontweight='heavy', alpha=1)

plt.show()

In [ ]:
PMCP_Graphs

In [ ]:
for i,j in PMCP_Graphs.iteritems():
    print i, j
