1 | include(dom.inc)
2 |
SUBROUTINE pmm_returnvectors(nnodes, nfacemax, nbfaces)

! ================================================================!
!                                                                 !
!  pmm_return.F : Controls the reception of the solution vectors  !
!                                                                 !
!  Master-side collector: loops over the pmm_n_p-1 worker         !
!  processes, waits for each one to announce itself (its rank is  !
!  sent as a single integer with tag PMM_RETURN), then receives   !
!  that worker's five solution vectors and merges them into the   !
!  global vectors via master_integrate.                           !
!                                                                 !
!  in  : nnodes   - number of mesh nodes (sizes Gtot, Lbtot,      !
!                   Q_rtot)                                       !
!        nfacemax - maximum faces per element (not used here;     !
!                   kept so the call interface stays unchanged)   !
!        nbfaces  - number of boundary faces (sizes Htot, Qw)     !
!  out :                                                          !
!                                                                 !
!  author : J. AMAYA (october 2007)                               !
!                                                                 !
! ================================================================!

USE mod_pmm
#ifdef USEPALM
USE palmlib
#endif

IMPLICIT NONE

include 'pmm_constants.h'

! IN
DOM_INT :: nnodes, nfacemax, nbfaces

! LOCAL
DOM_INT :: ierr, i, iproc
DOM_INT :: l_comm
DOM_INT :: status(MPI_STATUS_SIZE)

! Per-worker receive buffers, merged into the master vectors after
! each reception.
DOM_REAL,DIMENSION(3,nnodes) :: Q_rtot
DOM_REAL,DIMENSION(nnodes)   :: Gtot, Lbtot
DOM_REAL,DIMENSION(nbfaces)  :: Htot, Qw

! --------------------------------------------------------------!
! Select the communicator once: under PALM the coupler supplies !
! PL_COMM_EXEC, otherwise plain MPI is used. Doing this here     !
! avoids duplicating every MPI_RECV under #ifdef USEPALM.        !
! --------------------------------------------------------------!
#ifdef USEPALM
l_comm = PL_COMM_EXEC
#else
l_comm = MPI_COMM_WORLD
#endif

DO i=1, pmm_n_p-1

! ---------------------------!
! Capture the process number !
! ---------------------------!

! print*, " (",pmm_rank,") Waiting next message"
  CALL MPI_RECV(iproc, 1, MPI_INTEGER, MPI_ANY_SOURCE, &
     &          PMM_RETURN, l_comm, status, ierr)

! print*, " (",pmm_rank,") Message received from ", iproc
! ------------------------------!
! Receive the resulting vectors !
! ------------------------------!

! The announced rank is echoed back by workers only; the host rank
! sends no data, so its announcement is consumed without receives.
  IF (iproc.ne.PMM_HOST) THEN
    CALL MPI_RECV(Gtot, nnodes, MPI_DOUBLE_PRECISION, iproc, &
       &          PMM_RETURN, l_comm, status, ierr)
    CALL MPI_RECV(Htot, nbfaces, MPI_DOUBLE_PRECISION, iproc, &
       &          PMM_RETURN, l_comm, status, ierr)
    CALL MPI_RECV(Qw, nbfaces, MPI_DOUBLE_PRECISION, iproc, &
       &          PMM_RETURN, l_comm, status, ierr)
    CALL MPI_RECV(Lbtot, nnodes, MPI_DOUBLE_PRECISION, iproc, &
       &          PMM_RETURN, l_comm, status, ierr)
    CALL MPI_RECV(Q_rtot, 3*nnodes, MPI_DOUBLE_PRECISION, iproc, &
       &          PMM_RETURN, l_comm, status, ierr)

! ------------------------------------------!
! Integrate all results in the main vectors !
! ------------------------------------------!

    CALL master_integrate(Gtot, Lbtot, Htot, Q_rtot, Qw, iproc)

  ENDIF

ENDDO

END SUBROUTINE pmm_returnvectors
! pmm_returnvectors.F could be called by: