Sun MPI 4.0 Programming and Reference Guide

Header Files

Include syntax must be placed at the top of any program that calls Sun MPI routines.

For C, use:

#include <mpi.h>

For C++, use:

#include <mpi.h>

For Fortran, use:

INCLUDE 'mpif.h'

These lines give the program access to the Sun MPI version of the MPI header file, which contains the definitions, macros, and function prototypes required when compiling the program. Ensure that you are referencing the Sun MPI include file rather than a header supplied by another MPI implementation.

The include files are usually found in /opt/SUNWhpc/include/ or /opt/SUNWhpc/include/v9/. If the compiler cannot find them, check that they exist and are accessible from the machine on which you are compiling your code. The location of the include file is specified by a compiler option (see "Compiling and Linking").
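As a quick illustration of the placement, here is a minimal sketch only (hello.c is a hypothetical name; the examples in the next section show complete programs):

/*
 * hello.c - minimal sketch: the MPI header is included at the top,
 * before any Sun MPI routine is called.
 */

#include <stdio.h>

#include <mpi.h>

int
main(int argc, char **argv)
{
    int rank;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    printf("hello from rank %d\n", rank);
    MPI_Finalize();
    return 0;
}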

Sample Code


Example 3-1 Simple Sun MPI Program in C: connectivity.c

/*
 * Test the connectivity between all processes.
 */

#pragma ident "@(#)connectivity.c 1.1 99/02/02"

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <netdb.h>
#include <unistd.h>

#include <mpi.h>

int
main(int argc, char **argv)
{
    MPI_Status  status;
    int         verbose = 0;
    int         rank;
    int         np;              /* number of processes in job */
    int         peer;
    int         i;
    int         j;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &np);

    if (argc>1 && strcmp(argv[1], "-v")==0)
        verbose = 1;

    for (i=0; i<np; i++) {
        if (rank==i) {
            /* rank i sends to and receives from each higher rank */
            for(j=i+1; j<np; j++) {
                if (verbose)
                  printf("checking connection %4d <->
%-4d\n", i, j);
                MPI_Send(&rank, 1, MPI_INT, j, rank, MPI_COMM_WORLD);
               MPI_Recv(&peer, 1, MPI_INT, j, j, MPI_COMM_WORLD, &status);
            }
        } else if (rank>i) {
            /* receive from and reply to rank i */
            MPI_Recv(&peer, 1, MPI_INT, i, i, MPI_COMM_WORLD, &status);
            MPI_Send(&rank, 1, MPI_INT, i, rank, MPI_COMM_WORLD);
        }
    }

    MPI_Barrier(MPI_COMM_WORLD);
    if (rank==0)
        printf("Connectivity test on %d processes PASSED.\n", np);

    MPI_Finalize();
    return 0;
}
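The loop above pairs each MPI_Send with a matching MPI_Recv so that, for every pair of ranks, the lower rank sends first while the higher rank receives first, which avoids deadlock. The same pairwise exchange could also be expressed with the standard MPI_Sendrecv call, which issues the send and the matching receive together. The following is a minimal sketch of that variant (sendrecv.c is a hypothetical name, not one of the Sun MPI examples):

/*
 * sendrecv.c - sketch: pairwise exchange using MPI_Sendrecv.
 */

#include <stdio.h>

#include <mpi.h>

int
main(int argc, char **argv)
{
    MPI_Status  status;
    int         rank;
    int         np;
    int         peer;
    int         i;
    int         j;

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &np);

    for (i = 0; i < np; i++) {
        for (j = i + 1; j < np; j++) {
            if (rank == i) {
                /* exchange ranks with the higher-ranked peer j */
                MPI_Sendrecv(&rank, 1, MPI_INT, j, rank,
                             &peer, 1, MPI_INT, j, j,
                             MPI_COMM_WORLD, &status);
            } else if (rank == j) {
                /* exchange ranks with the lower-ranked peer i */
                MPI_Sendrecv(&rank, 1, MPI_INT, i, rank,
                             &peer, 1, MPI_INT, i, i,
                             MPI_COMM_WORLD, &status);
            }
        }
    }

    MPI_Barrier(MPI_COMM_WORLD);
    if (rank == 0)
        printf("Sendrecv connectivity sketch on %d processes done.\n", np);

    MPI_Finalize();
    return 0;
}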


Example 3-2 Simple Sun MPI Program in Fortran: monte.f

!
! Estimate pi via Monte-Carlo method.
!
! Each process sums how many of samplesize random points generated
! in the square (-1,-1),(-1,1),(1,1),(1,-1) fall in the circle of
! radius 1 and center (0,0), and then estimates pi from the formula
! pi = (4 * sum) / samplesize.
! The final estimate of pi is calculated at rank 0 as the average of
! all the estimates.
!
        program monte

        include 'mpif.h'

        double precision drand
        external drand

        double precision x, y, pi, pisum
        integer*4 ierr, rank, np
        integer*4 incircle, samplesize

        parameter(samplesize=2000000)

        call MPI_INIT(ierr)
        call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
        call MPI_COMM_SIZE(MPI_COMM_WORLD, np, ierr)

!       seed random number generator
        x = drand(2 + 11*rank)

        incircle = 0
        do i = 1, samplesize
           x = drand(0)*2.0d0 - 1.0d0     ! generate a random point
           y = drand(0)*2.0d0 - 1.0d0

           if ((x*x + y*y) .lt. 1.0d0) then
              incircle = incircle+1       ! point is in the circle
           endif
        end do

        pi = 4.0d0 * DBLE(incircle) / DBLE(samplesize)

!       sum estimates at rank 0
        call MPI_REDUCE(pi, pisum, 1, MPI_DOUBLE_PRECISION, MPI_SUM,
     &          0, MPI_COMM_WORLD, ierr)

        if (rank .eq. 0) then
!          final estimate is the average
           pi = pisum / DBLE(np)
           print '(A,I4,A,F8.6,A)','Monte-Carlo estimate of pi by ',np,
     &          ' processes is ',pi,'.'
        endif

        call MPI_FINALIZE(ierr)
        end
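
To make the formula in the program's header comment concrete: if, hypothetically, a process found that 1570796 of its 2000000 sample points fell inside the unit circle, its estimate would be pi = (4 * 1570796) / 2000000 = 3.141592. The MPI_REDUCE call sums these per-process estimates at rank 0, which then divides by np to report their average.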