MPI using the C language

  • Published on
    19-Jun-2015

  • View
    156

  • Download
    1

DESCRIPTION

MPI using the C language

Transcript

  • 1. MPI use C language (1) Speaker Adviser Date 2006/10/27

2. Embedded and Parallel Systems Lab2 Outline MPI Introduction MPI function MPI function MPI_COMM_WORLD MPI point to point Blocking Non-blocking Message passing 3. Embedded and Parallel Systems Lab3 Outline Communication mode Standard Synchronous Buffered Ready Blocking Message Passing Hello.c Non-Blocking Message Passing Wait Test Isend-Irecv.c 4. Embedded and Parallel Systems Lab4 MPI Introduction MPI Message Passing Interface process process process process process process Distributed-Memory ( Shared-Memory) PVM (Parallel Virtual Machine) MPI MPICH2 5. Embedded and Parallel Systems Lab5 1. MPI mpdboot -n 4 -f mpd.hosts 2. MPI vi hello.c 3. Compile mpicc hello.c -o hello.o 4. mpiexec -n 4 ./hello.o 5. MPI mpdallexit 6. Embedded and Parallel Systems Lab6 7. Embedded and Parallel Systems Lab7 8. Embedded and Parallel Systems Lab8 9. Embedded and Parallel Systems Lab9 MPI #include "mpi.h" MPI_Init(); Do some work or MPI function example: MPI_Send() / MPI_Recv() MPI_Finalize(); 10. Embedded and Parallel Systems Lab10 MPI function int MPI_Init(int *argc, char ***argv) MPI function MPI_COMM_WORLD MPI_COMM_SELF (argc, argv) process int MPI_Comm_rank ( MPI_Comm comm, int *rank) process process ID Rank = Process ID double MPI_Wtime() int MPI_Finalize() MPI int MPI_Abort(MPI_Comm comm, int errorcode) MPI 11. Embedded and Parallel Systems Lab11 MPI function MPI_COMM_WORLD communicator Processes process process function call process process 12. Embedded and Parallel Systems Lab12 MPI function MPI function double MPI_Wtime() double MPI_Wtick() function int result; result = MPI_function(); function int MPI_Comm_size( MPI_Comm comm, int *size) process communicator parameters comm IN MPI_COMM_WORLD size OUT process return value int MPI_SUCCESS 13. 
Embedded and Parallel Systems Lab13 MPI function MPI_SUCCESS MPI function MPI_ERR_COMM Communicator Communicator NULL MPI_ERR_COUNT Count MPI_ERR_TYPE (Datatype) MPI Datatype MPI_ERR_BUFFER buffer MPI_ERR_ROOT root Rank(ID) communicator >= 0 && < communicator size 14. Embedded and Parallel Systems Lab14 MPI point to point Blocking Non-Blocking Send MPI_Send(buffer, count, datatype, dest, tag, comm) Receive MPI_Recv(buffer, count, datatype, source, tag, comm, status) Send MPI_Isend(buffer, count, datatype, dest, tag, comm, request) Receive MPI_Irecv(buffer, count, datatype, source, tag, comm, request) 15. Embedded and Parallel Systems Lab15 MPI_Status typedef struct MPI_Status { int count; int cancelled; int MPI_SOURCE; // ID int MPI_TAG; // tag int MPI_ERROR; // } MPI_Status; 16. Embedded and Parallel Systems Lab16 MPICH 17. Embedded and Parallel Systems Lab17 Blocking 18. Embedded and Parallel Systems Lab18 Non-Blocking 19. Embedded and Parallel Systems Lab19 Message passing MPI Send a b B a b Receive a b Send a b multiple threads process 0 process 2 process 1 process process receive process 20. Embedded and Parallel Systems Lab20 DataType C MPI_CHAR signed char MPI_SHORT signed short int MPI_INT signed int MPI_LONG signed long int MPI_UNSIGNED_CHAR unsigned char MPI_UNSIGNED_SHORT unsigned short int MPI_UNSIGNED unsigned int MPI_UNSIGNED_LONG unsigned long int MPI_FLOAT float MPI_DOUBLE double MPI_LONG_DOUBLE long double MPI_BYTE 8 binary digits MPI_PACKED data packed or unpacked with MPI_Pack()/ MPI_Unpack() 21. Embedded and Parallel Systems Lab21 Communication mode Standard Synchronous Buffered Ready 22. Embedded and Parallel Systems Lab22 Standard mode 23. Embedded and Parallel Systems Lab23 Synchronous mode 24. Embedded and Parallel Systems Lab24 Buffered mode 25. Embedded and Parallel Systems Lab25 Ready mode 26. 
Embedded and Parallel Systems Lab26 Blocking Message Passing int MPI_Send(void* buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm) int MPI_Recv(void* buf, int count, MPI_Datatype datatype, int source, int tag, MPI_Comm comm, MPI_Status *status) int MPI_Ssend(void* buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm) 27. Embedded and Parallel Systems Lab27 Blocking Message Passing int MPI_Bsend(void* buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm) int MPI_Buffer_attach(void* buffer_addr, int* size) int MPI_Buffer_detach(void* buffer_addr, int* size) int MPI_Rsend(void* buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm) int MPI_Get_count(MPI_Status *status, MPI_Datatype datatype, int *count) 28. Embedded and Parallel Systems Lab28 First program Hello.c Process blocking 29. Embedded and Parallel Systems Lab29 hello.c #include "mpi.h" #include <stdio.h> #define SIZE 20 int main(int argc,char *argv[]) { int numtasks, rank, dest, source, rc, count, tag=1; char inmsg[SIZE]; char outmsg[SIZE]; double starttime, endtime; MPI_Status Stat; MPI_Datatype strtype; MPI_Init(&argc,&argv); // MPI MPI_Comm_rank(MPI_COMM_WORLD, &rank); // process ID MPI_Type_contiguous(SIZE, MPI_CHAR, &strtype); // string MPI_Type_commit(&strtype); // string starttime=MPI_Wtime(); // 30. 
Embedded and Parallel Systems Lab30 hello.c if (rank == 0) { dest = 1; source = 1; strcpy(outmsg,"Who are you?"); // process 0 rc = MPI_Send(outmsg, 1, strtype, dest, tag, MPI_COMM_WORLD); printf("process %d has sent message: %s\n",rank, outmsg); // process 1 rc = MPI_Recv(inmsg, 1, strtype, source, tag, MPI_COMM_WORLD, &Stat); printf("process %d has received: %s\n",rank, inmsg); } else if (rank == 1) { dest = 0; source = 0; strcpy(outmsg,"I am process 1"); rc = MPI_Recv(inmsg, 1, strtype, source, tag, MPI_COMM_WORLD, &Stat); printf("process %d has received: %s\n",rank, inmsg); rc = MPI_Send(outmsg, 1 , strtype, dest, tag, MPI_COMM_WORLD); printf("process %d has sent message: %s\n",rank, outmsg); } 31. Embedded and Parallel Systems Lab31 hello.c endtime=MPI_Wtime(); // // MPI_CHAR rc = MPI_Get_count(&Stat, MPI_CHAR, &count); printf("Task %d: Received %d char(s) from task %d with tag %d and use time is %f\n", rank, count, Stat.MPI_SOURCE, Stat.MPI_TAG, endtime-starttime); MPI_Type_free(&strtype); // string MPI_Finalize(); // MPI } 1. Compile mpicc hello.c -o hello.o 2. mpiexec -n 4 ./hello.o 32. Embedded and Parallel Systems Lab32 hello.c process 0 has sent message: Who are you? process 1 has received: Who are you? process 1 has sent message: I am process 1 Task 1: Received 20 char(s) from task 0 with tag 1 and use time is 0.001302 process 0 has received: I am process 1 Task 0: Received 20 char(s) from task 1 with tag 1 and use time is 0.002133 33. 
Embedded and Parallel Systems Lab33 Non-blocking Message Passing int MPI_Isend(void* buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm, MPI_Request *request) int MPI_Irecv(void* buf, int count, MPI_Datatype datatype, int source, int tag, MPI_Comm comm, MPI_Request *request) int MPI_Issend(void* buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm, MPI_Request *request) int MPI_Ibsend(void* buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm, MPI_Request *request) int MPI_Irsend(void* buf, int count, MPI_Datatype datatype, int dest, int tag, MPI_Comm comm, MPI_Request *request) 34. Embedded and Parallel Systems Lab34 Wait int MPI_Wait(MPI_Request *request, MPI_Status *status) int MPI_Waitall(int count, MPI_Request *array_of_requests, MPI_Status *array_of_statuses) int MPI_Waitany(int count, MPI_Request *array_of_requests, int *index, MPI_Status *status) int MPI_Waitsome(int incount, MPI_Request *array_of_requests, int *outcount, int *array_of_indices, MPI_Status *array_of_statuses) 35. Embedded and Parallel Systems Lab35 Test int MPI_Test(MPI_Request *request, int *flag, MPI_Status *status) int MPI_Testall(int count, MPI_Request *array_of_requests, int *flag, MPI_Status *array_of_statuses) int MPI_Testany(int count, MPI_Request *array_of_requests, int *index, int *flag, MPI_Status *status) int MPI_Testsome(int incount, MPI_Request *array_of_requests, int *outcount, int *array_of_indices, MPI_Status *array_of_statuses) 36. Embedded and Parallel Systems Lab36 Isend-Irecv.c process receive process ID send process send process non-blocking non-blocking 37. 
Embedded and Parallel Systems Lab37 Isend-Irecv.c #include "mpi.h" #include <stdio.h> int main(int argc,char *argv[]) { int numtasks, rank, next, prev, buf[2], tag1=1, tag2=2; MPI_Request reqs[4]; MPI_Status stats[4]; int flag; MPI_Init(&argc,&argv); MPI_Comm_size(MPI_COMM_WORLD, &numtasks); MPI_Comm_rank(MPI_COMM_WORLD, &rank); prev = rank-1; next = rank+1; if (rank == 0) prev = numtasks - 1; if (rank == (numtasks - 1)) next = 0; 38. Embedded and Parallel Systems Lab38 Isend-Irecv.c // non-blocking receive process , handle reqs[0] MPI_Irecv(&buf[0], 1, MPI_INT, prev, tag1, MPI_COMM_WORLD, &reqs[0]); MPI_Irecv(&buf[1], 1, MPI_INT, next, tag2, MPI_COMM_WORLD, &reqs[1]); // non-blocking send process, handle reqs[2] MPI_Isend(&rank, 1, MPI_INT, prev, tag2, MPI_COMM_WORLD, &reqs[2]); MPI_Isend(&rank, 1, MPI_INT, next, tag1, MPI_COMM_WORLD, &reqs[3]); MPI_Waitall(4, reqs, stats); // reqs handle MPI_Test(&reqs[0],&flag, &stats[0]); // MPI_Irecv printf("Process %d: has received data %d from previous process %d\n", rank, buf[0], prev); printf("Process %d: has received data %d from next process %d\n", rank, buf[1], next); printf("Process %d: test %d\n",rank , flag); MPI_Finalize(); } 39. Embedded and Parallel Systems Lab39 Isend-Irecv.c Process 2: has received data 1 from previous process 1 Process 2: has received data 3 from next process 3 Process 2: test 1 Process 0: has received data 3 from previous process 3 Process 0: has received data 1 from next process 1 Process 0: test 1 Process 1: has received data 0 from previous process 0 Process 1: has received data 2 from next process 2 Process 1: test 1 Process 3: has received data 2 from previous process 2 Process 3: has received data 0 from next process 0 Process 3: test 1 40. Embedded and Parallel Systems Lab40 The End Thank you very much!