<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xml:lang="en">
	<id>https://www.campisano.org/wiki/Mpi4py/history?feed=atom</id>
	<title>Mpi4py - Revision history</title>
	<link rel="self" type="application/atom+xml" href="https://www.campisano.org/wiki/Mpi4py/history?feed=atom"/>
	<link rel="alternate" type="text/html" href="https://www.campisano.org/wiki/Mpi4py/history"/>
	<updated>2026-05-11T05:02:42Z</updated>
	<subtitle>Revision history for this page on the wiki</subtitle>
	<generator>MediaWiki 1.43.5</generator>
	<entry>
		<id>https://www.campisano.org/mediawiki/index.php?title=Mpi4py&amp;diff=1425&amp;oldid=prev</id>
		<title>imported&gt;T1t0: Created page with &quot;= Simple multi-process hello world =  * helloworld.py:  &lt;pre&gt; #!/usr/bin/env python  # chmod +x hellompi.py # mpiexec -n 10 hellompi.py  from mpi4py import MPI  size = MPI.COM...&quot;</title>
		<link rel="alternate" type="text/html" href="https://www.campisano.org/mediawiki/index.php?title=Mpi4py&amp;diff=1425&amp;oldid=prev"/>
		<updated>2014-12-24T02:16:05Z</updated>

		<summary type="html">&lt;p&gt;Created page with &amp;quot;= Simple multi-process hello world =  * helloworld.py:  &amp;lt;pre&amp;gt; #!/usr/bin/env python  # chmod +x hellompi.py # mpiexec -n 10 hellompi.py  from mpi4py import MPI  size = MPI.COM...&amp;quot;&lt;/p&gt;
&lt;p&gt;&lt;b&gt;New page&lt;/b&gt;&lt;/p&gt;&lt;div&gt;= Simple multi-process hello world =&lt;br /&gt;
&lt;br /&gt;
* helloworld.py:&lt;br /&gt;
&lt;br /&gt;
&amp;lt;pre&amp;gt;&lt;br /&gt;
#!/usr/bin/env python&lt;br /&gt;
&lt;br /&gt;
# chmod +x hellompi.py&lt;br /&gt;
# mpiexec -n 10 hellompi.py&lt;br /&gt;
&lt;br /&gt;
from mpi4py import MPI&lt;br /&gt;
&lt;br /&gt;
size = MPI.COMM_WORLD.Get_size()&lt;br /&gt;
rank = MPI.COMM_WORLD.Get_rank()&lt;br /&gt;
name = MPI.Get_processor_name()&lt;br /&gt;
&lt;br /&gt;
print &amp;quot;Hello from process &amp;quot; + str(rank + 1) + &amp;quot; of &amp;quot; + str(size) + &amp;quot; on &amp;quot; + str(name)&lt;br /&gt;
&amp;lt;/pre&amp;gt;&lt;br /&gt;
&lt;br /&gt;
run with &amp;lt;pre&amp;gt;mpiexec -n 10 hellompi.py&amp;lt;/pre&amp;gt;&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
= Better spawned and reduced sum example =&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
* master.py:&lt;br /&gt;
&lt;br /&gt;
&amp;lt;pre&amp;gt;&lt;br /&gt;
#!/usr/bin/env python&lt;br /&gt;
&lt;br /&gt;
from mpi4py import MPI&lt;br /&gt;
from array import array&lt;br /&gt;
&lt;br /&gt;
def run(debug = False):&lt;br /&gt;
    worker = MPI.COMM_SELF.Spawn(&amp;quot;worker.py&amp;quot;, None, 3)&lt;br /&gt;
&lt;br /&gt;
    data_size = 1000000&lt;br /&gt;
&lt;br /&gt;
    try:&lt;br /&gt;
        data = array(&amp;#039;i&amp;#039;)&lt;br /&gt;
        for i in range(0, data_size):&lt;br /&gt;
            data.append(1)&lt;br /&gt;
&lt;br /&gt;
        print &amp;quot;Broadcasting input data len: &amp;quot;, len(data), &amp;quot; content: &amp;quot;, data[0]&lt;br /&gt;
        worker.Bcast([data, MPI.INT], root = MPI.ROOT)&lt;br /&gt;
&lt;br /&gt;
        result = array(&amp;#039;i&amp;#039;, [0])&lt;br /&gt;
        worker.Reduce(sendbuf = None, recvbuf = [result, MPI.INT], op = MPI.SUM, root = MPI.ROOT)&lt;br /&gt;
        result = result[0] # strange thing (Reduce logic)&lt;br /&gt;
&lt;br /&gt;
        print &amp;quot;Calculated value of sum is&amp;quot;, result&lt;br /&gt;
&lt;br /&gt;
    except Exception as ex:&lt;br /&gt;
        print &amp;quot;Unexpected error:&amp;quot;&lt;br /&gt;
        print ex&lt;br /&gt;
&lt;br /&gt;
    worker.Disconnect()&lt;br /&gt;
&lt;br /&gt;
def __main__():&lt;br /&gt;
    run()&lt;br /&gt;
&lt;br /&gt;
__main__()&lt;br /&gt;
&amp;lt;/pre&amp;gt;&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
* worker.py:&lt;br /&gt;
&lt;br /&gt;
&amp;lt;pre&amp;gt;&lt;br /&gt;
#!/usr/bin/env python&lt;br /&gt;
&lt;br /&gt;
from mpi4py import MPI&lt;br /&gt;
from array import array&lt;br /&gt;
&lt;br /&gt;
def run(debug = False):&lt;br /&gt;
    master = MPI.Comm.Get_parent()&lt;br /&gt;
    my_rank = master.Get_rank()&lt;br /&gt;
    n_procs = master.Get_size()&lt;br /&gt;
&lt;br /&gt;
    data_size = 10241024&lt;br /&gt;
&lt;br /&gt;
    try:&lt;br /&gt;
        data = array(&amp;#039;i&amp;#039;)&lt;br /&gt;
        for i in range(0, data_size):&lt;br /&gt;
            data.append(0)&lt;br /&gt;
&lt;br /&gt;
        master.Bcast([data, MPI.INT], root = 0)&lt;br /&gt;
        #print &amp;quot;# Broadcast finished and data on rank &amp;quot;, my_rank, &amp;quot; is len &amp;quot;, len(data), &amp;quot; data: &amp;quot;, data&lt;br /&gt;
&lt;br /&gt;
        work_size = int(data_size / n_procs)&lt;br /&gt;
&lt;br /&gt;
        work_start = my_rank * work_size&lt;br /&gt;
        work_limit = int(0)&lt;br /&gt;
&lt;br /&gt;
        # if is the last proc, work_limit is the end of data&lt;br /&gt;
        if my_rank == (n_procs - 1):&lt;br /&gt;
            work_limit = data_size&lt;br /&gt;
        else:&lt;br /&gt;
            work_limit = work_start + work_size&lt;br /&gt;
&lt;br /&gt;
        result = int(0)&lt;br /&gt;
&lt;br /&gt;
        for i in range(work_start, work_limit):&lt;br /&gt;
            result += data[i]&lt;br /&gt;
&lt;br /&gt;
        print &amp;quot;my_rank: &amp;quot;, my_rank, &amp;quot; work_start: &amp;quot;, work_start, &amp;quot; work_limit: &amp;quot;, work_limit, &amp;quot;result = &amp;quot;, result&lt;br /&gt;
&lt;br /&gt;
        result = array(&amp;#039;i&amp;#039;, [result]) # strange thing (Reduce logic)&lt;br /&gt;
        master.Reduce(sendbuf = [result, MPI.INT], recvbuf = None, op = MPI.SUM, root = 0)&lt;br /&gt;
&lt;br /&gt;
    except Exception as ex:&lt;br /&gt;
        print &amp;quot;# Unexpected error:&amp;quot;&lt;br /&gt;
        print ex&lt;br /&gt;
&lt;br /&gt;
    #print &amp;quot;# Disconnecting from rank %d&amp;quot; % my_rank&lt;br /&gt;
    master.Disconnect()&lt;br /&gt;
&lt;br /&gt;
def __main__():&lt;br /&gt;
    run()&lt;br /&gt;
&lt;br /&gt;
__main__()&lt;br /&gt;
&amp;lt;/pre&amp;gt;&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
run the code with &amp;lt;pre&amp;gt;python master.py&amp;lt;/pre&amp;gt;&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
= Depends =&lt;br /&gt;
&lt;br /&gt;
&amp;lt;pre&amp;gt;apt-get install libopenmpi-dev python-mpi4py&amp;lt;/pre&amp;gt;&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
= References =&lt;br /&gt;
&lt;br /&gt;
* http://mpi4py.readthedocs.org/en/latest/tutorial.html#dynamic-process-management&lt;br /&gt;
* https://portal.tacc.utexas.edu/c/document_library/get_file?uuid=be16db01-57d9-4422-b5d5-17625445f351&amp;amp;groupId=13601&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
&lt;br /&gt;
[[Category:develop]]&lt;br /&gt;
[[Category:python]]&lt;/div&gt;</summary>
		<author><name>imported&gt;T1t0</name></author>
	</entry>
</feed>