In [1]:
import numpy as np
import ipyparallel as ipp
c = ipp.Client(profile='mpi')
print(c.ids)
view = c[:]
view.activate()
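The client above assumes an MPI-capable IPython cluster is already running under the `mpi` profile. As a hedged sketch (not part of this notebook), one way to start such a cluster from Python with the ipyparallel `Cluster` API; the engine count of 4 and the `engines="mpi"` launcher are illustrative assumptions:

import ipyparallel as ipp
# Assumes ipyparallel >= 7 with a working MPI installation; with older
# versions one would instead run `ipcluster start -n 4 --profile=mpi`
# after configuring MPI launchers in that profile.
cluster = ipp.Cluster(engines="mpi", n=4)   # 4 engines: illustrative choice
rc = cluster.start_and_connect_sync()       # blocks until the engines register
print(rc.ids)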
In [2]:
%%px --block
from mpi4py import MPI
import numpy as np
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
if rank == 0:
    a = 12.
else:
    a = 0.0
if rank > 0:
    print(a)   # value before the broadcast
a = comm.bcast(a, root=0)
if rank > 0:
    print(a)   # value after the broadcast
In [3]:
%%px --block
from mpi4py import MPI
import numpy as np
comm = MPI.COMM_WORLD
rank = comm.rank
if rank == 0:
    a = np.array([12.], dtype=np.float32)
else:
    a = np.zeros(1, dtype=np.float32)
if rank > 0:
    print(a)   # before the broadcast
comm.Bcast(a, root=0)   # uppercase Bcast fills the existing buffer in place
if rank > 0:
    print(a)   # after the broadcast
In [4]:
%%px --block
from mpi4py import MPI
import numpy as np
comm = MPI.COMM_WORLD
rank = comm.rank
N_local = 5
N = N_local*comm.size
if rank == 0:
    A = np.arange(N, dtype=np.float64)
else:
    A = None
A_local = np.empty(N_local)
comm.Scatter(A, A_local, root=0)   # rank 0 distributes N_local entries to each rank
A_local = A_local + 100.0
comm.Gather(A_local, A, root=0)    # reassemble the modified pieces on rank 0
if rank == 0:
    print(A)
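The Scatter/Gather cell above only works when N is an exact multiple of comm.size, since every rank receives exactly N_local entries. As a hedged sketch (not part of the original notebook), the same pattern with Scatterv/Gatherv, which take per-rank counts and displacements, handles an uneven split; N = 13 and the helper arrays are illustrative:

%%px --block
from mpi4py import MPI
import numpy as np
comm = MPI.COMM_WORLD
rank = comm.rank
size = comm.size

N = 13                                   # deliberately not divisible by size
# the first N % size ranks get one extra element
counts = np.array([N // size + (1 if r < N % size else 0) for r in range(size)])
displs = np.concatenate(([0], np.cumsum(counts)[:-1]))

if rank == 0:
    A = np.arange(N, dtype=np.float64)
else:
    A = None
A_local = np.empty(counts[rank], dtype=np.float64)

# only the root needs the full buffer description; other ranks pass None
sendspec = [A, counts, displs, MPI.DOUBLE] if rank == 0 else None
comm.Scatterv(sendspec, A_local, root=0)
A_local = A_local + 100.0
recvspec = [A, counts, displs, MPI.DOUBLE] if rank == 0 else None
comm.Gatherv(A_local, recvspec, root=0)
if rank == 0:
    print(A)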
In [5]:
view['A_local']
Out[5]:
In [17]:
%%px --block
from mpi4py import MPI
import numpy as np
comm = MPI.COMM_WORLD
rank = comm.rank
A = rank * np.ones(1, dtype=np.float64)   # np.float is deprecated/removed; use np.float64
suma = np.zeros(1)
comm.Reduce(A, suma, op=MPI.SUM)   # sum over all ranks, delivered to the root (rank 0)
if rank == 0:
    print(suma)
In [18]:
0+1+2+3
Out[18]:
In [19]:
view['suma']
Out[19]:
In [20]:
%%px --block
import numpy
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
rankF = numpy.array(rank, dtype=numpy.float64)   # this cell imports numpy (no np alias); np.float is also deprecated
total = numpy.zeros(1)
comm.Reduce(rankF, total, op=MPI.SUM)
if rank == 0:
    print(total)
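Both Reduce cells deliver the sum only to the root (rank 0 by default); the other ranks keep their zero-initialized result buffer. As a hedged sketch (not from the original notebook), Allreduce gives every rank the total:

%%px --block
from mpi4py import MPI
import numpy as np
comm = MPI.COMM_WORLD
rank = comm.rank

A = rank * np.ones(1, dtype=np.float64)
total = np.zeros(1)
comm.Allreduce(A, total, op=MPI.SUM)   # every rank receives the sum of all ranks
print(total)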