psmile_enddef_appl.F90

00001 !-----------------------------------------------------------------------
00002 ! Copyright 2006-2010, NEC Europe Ltd., London, UK.
00003 ! All rights reserved. Use is subject to OASIS4 license terms.
00004 !-----------------------------------------------------------------------
00005 !
00006 #ifdef DONT_HAVE_STDMPI2
00007 #undef PRISM_with_MPI2
00008 #endif
00009 !
00010 !BOP
00011 !
00012 ! !ROUTINE: PSMILe_Enddef_appl
00013 !
00014 ! !INTERFACE:
00015 
00016       subroutine psmile_enddef_appl (tag, my_icomp0_coll_comps, &
00017                                      n_active, ierror)
00018 !
00019 ! !USES:
00020 !
00021       use PRISM_constants
00022 !
00023       use PSMILe, dummy_interface => PSMILe_enddef_appl
00024 
00025       implicit none
00026 !
00027 ! !INPUT PARAMETERS:
00028 !
00029       Integer,            Intent (In)  :: tag
00030 
00031 !     Message tag to be used for point-to-point communication
00032 !
00033 ! !OUTPUT PARAMETERS:
00034 !
00035       Integer,            Intent (Out) :: my_icomp0_coll_comps
00036 
00037 !     Offset of the component information of the current application
00038 !     within the collected component information "all_comp_infos";
00039 !     i.e. the component information of the current application is stored in
00040 !     all_comp_infos(my_icomp0_coll_comps+1:
00041 !                    my_icomp0_coll_comps+n_active)
00042 
00043       Integer,            Intent (Out) :: n_active
00044 !
00045 !     Number of active components in the current application
00046 !     = Number_of_comps_per_appl(Appl%sequence_number)
00047 
00048       Integer,            Intent (Out) :: ierror
00049 
00050 !     Returns the error code of PSMILe_enddef_appl;
00051 !             ierror = 0 : No error
00052 !             ierror > 0 : Severe error
00053 !
00054 ! !LOCAL VARIABLES
00055 !
00056 !  ... for coloring
00057 
00058       integer                      :: color, key
00059       integer                      :: comm_appl_roots, rank, size
00060       integer                      :: master_rank ! rank of master in comm_psmile
00061       logical                      :: i_am_master
00062 
00063 !  ... for applications:
00064 
00065       integer                      :: iappl
00066       integer                      :: Number_of_comps_per_appl (noApplication)
00067       integer                      :: disp (noApplication)
00068       integer                      :: root_ranks (noApplication)
00069 
00070 !  ... for MPI_Gather
00071 
00072       Type (Enddef_comp)           :: dummy_comp_info
00073 
00074 !  ... for components:
00075 
00076       integer                      :: i, icomp, n
00077 
00078 !  ... Temporary vector used to determine the active component id's of the application
00079 
00080       Integer, allocatable         :: global_ids (:)
00081       Type (Enddef_comp), Pointer  :: b_comps (:)
00082       Integer                      :: comp_min, comp_max, n_miss
00083       Integer                      :: local_comp_min, local_comp_max
00084 
00085 #ifndef PRISM_with_MPI2
00086       Integer, allocatable         :: global_ids_in (:)
00087 #endif
00088 
00089 !  ... for error parameters:
00090 
00091       integer, parameter           :: nerrp = 3
00092       integer                      :: ierrp (nerrp)
00093 !
00094 ! !DESCRIPTION:
00095 !
00096 ! Subroutine "PSMILe_Enddef_appl" finishes the definition phase for the
00097 ! application. The data of all components is stored in the global array
00098 ! "all_comp_infos".
00099 !
00100 ! !REVISION HISTORY:
00101 !
00102 !   Date      Programmer   Description
00103 ! ----------  ----------   -----------
00104 ! 03.05.16    H. Ritzdorf  created
00105 !
00106 !EOP
00107 !----------------------------------------------------------------------
00108 !
00109 ! $Id: psmile_enddef_appl.F90 2687 2010-10-28 15:15:52Z coquart $
00110 ! $Author: coquart $
00111 !
00112    Character(len=len_cvs_string), save :: mycvs = &
00113        '$Id: psmile_enddef_appl.F90 2687 2010-10-28 15:15:52Z coquart $'
00114 !
00115 !----------------------------------------------------------------------
00116 
00117 #ifdef VERBOSE
00118       print 9990, trim(ch_id)
00119       call psmile_flushstd
00120 #endif /* VERBOSE */
00121 !
00122 !  Initialization
00123 !
00124       ierror = 0
00125       master_rank = 0
00126       i_am_master = .false.
00127 !
00128 !-----------------------------------------------------------------------
00129 !  Collect the data of active components of all application processes
00130 !  within the application communicator "Appl%comm".
00131 !
00132 !  (*) Get comp_min = Minimum of global component id's
00133 !  (*) Get comp_max = Maximum of global component id's
00134 !  (*) Allocate vector global_ids (comp_min:comp_max)
00135 !  (*) Get global_ids vector with
00136 !      global_ids (comp_id) = Appl%rank: Component of global component id
00137 !                                        "comp_id" is     active
00138 !                                        in an application process.
00139 !      global_ids (comp_id) = Appl%size: Component of global component id
00140 !                                        "comp_id" is not active in an
00141 !                                        application process.
00142 !-----------------------------------------------------------------------
00143 !
00144       if (n_act_comp > 0) then
00145          local_comp_min = comp_infos(1)%global_comp_id
00146          local_comp_max = comp_infos(n_act_comp)%global_comp_id
00147       else
00148 !
00149 ! surely there is a value in the SCC info that could be
00150 ! used here instead of huge
00151 !
00152          local_comp_min = huge (local_comp_max)
00153          local_comp_max = - local_comp_min
00154       endif
00155 !
00156       call MPI_Allreduce (local_comp_min, comp_min, 1, &
00157                           MPI_INTEGER, MPI_MIN, Appl%comm, ierror)
00158       if ( ierror /= MPI_SUCCESS ) then
00159          ierrp (1) = ierror
00160          ierror = PRISM_Error_MPI
00161 
00162          call psmile_error ( ierror, 'MPI_Allreduce(MPI_MIN)', &
00163                              ierrp, 1, __FILE__, __LINE__ )
00164          return
00165       endif
00166 !
00167       call MPI_Allreduce (local_comp_max, comp_max, 1, &
00168                           MPI_INTEGER, MPI_MAX, Appl%comm, ierror)
00169       if ( ierror /= MPI_SUCCESS ) then
00170          ierrp (1) = ierror
00171          ierror = PRISM_Error_MPI
00172 
00173          call psmile_error ( ierror, 'MPI_Allreduce(MPI_MAX)', &
00174                              ierrp, 1, __FILE__, __LINE__ )
00175          return
00176       endif
00177 !
00178 !  ... Is there an active component in the application ?
00179 !
00180       if (comp_min > comp_max) then
00181          n_active = 0
00182       else
00183 !
00184 !  Allocate vector for all possible global comp id's
00185 !
00186       Allocate (global_ids(comp_min:comp_max), STAT = ierror)
00187       if ( ierror > 0 ) then
00188          ierrp (1) = ierror
00189          ierrp (2) = comp_max - comp_min + 1
00190          call psmile_error ( PRISM_Error_Alloc, 'global_ids', &
00191                              ierrp, 2, __FILE__, __LINE__ )
00192          return
00193       endif
00194 !
00195 !  Set Mask of global id's on local process
00196 !
00197       global_ids (:) = Appl%size
00198 !
00199       global_ids (comp_infos(1:n_act_comp)%global_comp_id) = Appl%rank
00200 !
00201 !  Get global id's on all PSMILe processes
00202 !
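!  After the MIN reduction below, global_ids(comp_id) holds the smallest
!  application rank on which component "comp_id" is active; entries still
!  equal to Appl%size mark components that are active on no process of
!  this application.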
00203 #ifdef PRISM_with_MPI2
00204       call MPI_Allreduce (MPI_IN_PLACE, global_ids, comp_max-comp_min+1, &
00205                           MPI_INTEGER, MPI_MIN, Appl%comm, ierror)
00206 #else
00207       Allocate (global_ids_in(comp_min:comp_max), STAT = ierror)
00208       if ( ierror > 0 ) then
00209          ierrp (1) = ierror
00210          ierrp (2) = comp_max - comp_min + 1
00211          call psmile_error ( PRISM_Error_Alloc, 'global_ids_in', &
00212                              ierrp, 2, __FILE__, __LINE__ )
00213          return
00214       endif
00215 
00216       global_ids_in = global_ids
00217 
00218       call MPI_Allreduce (global_ids_in, global_ids, comp_max-comp_min+1, &
00219                           MPI_INTEGER, MPI_MIN, Appl%comm, ierror)
00220   
00221       Deallocate (global_ids_in)
00222 #endif
00223       if ( ierror /= MPI_SUCCESS ) then
00224          ierrp (1) = ierror
00225          ierror = PRISM_Error_MPI
00226 
00227          call psmile_error ( ierror, 'MPI_Allreduce(MPI_MIN)', &
00228                              ierrp, 1, __FILE__, __LINE__ )
00229          return
00230       endif
00231 !
00232 !  Determine n_active and n_miss (needed to create the b_comps vector)
00233 !
00234 !  n_active = Total number of active components
00235 !  n_miss   = Maximum number of active components that are unknown
00236 !             within an application process.
00237 !
00238       n_active = 0
00239          do i = comp_min, comp_max
00240          if (global_ids(i) /= Appl%size) n_active = n_active + 1
00241          end do
00242 
00243 #ifdef PRISM_ASSERTION
00244       if (n_active < n_act_comp .or. n_active > comp_max-comp_min+1) then
00245          print *, 'n_active', n_active, n_act_comp, comp_max-comp_min+1
00246 
00247          call psmile_assert ( __FILE__, __LINE__, &
00248              'Inconsistent number of active application components!')
00249       endif
00250 
00251       if (n_active > noComponents) then
00252          print *, 'n_active', n_active, noComponents
00253          call psmile_assert ( __FILE__, __LINE__, &
00254              'n_active > noComponents !')
00255       endif
00256 #endif
00257 
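!     n_active - n_act_comp is the number of active application components
!     that are unknown on the local process; its maximum over all
!     application processes decides whether the component information has
!     to be completed below.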
00258       call MPI_Allreduce (n_active-n_act_comp, n_miss, 1, &
00259                           MPI_INTEGER, MPI_MAX, Appl%comm, ierror)
00260       if ( ierror /= MPI_SUCCESS ) then
00261          ierrp (1) = ierror
00262          ierror = PRISM_Error_MPI
00263 
00264          call psmile_error ( ierror, 'MPI_Allreduce(MPI_MAX)', &
00265                              ierrp, 1, __FILE__, __LINE__ )
00266          return
00267       endif
00268 !
00269 !===> If a component is not known in all processes of an application,
00270 !     collect the data of all processes in the application on process 0.
00271 !     The data of the application is stored in "b_comps".
00272 !     "b_comps" is sorted relative to the global component id.
00273 !     (Is the sorting really needed?)
00274 !
00275       if (n_miss > 0) then
00276          Allocate (b_comps(n_active), STAT = ierror)
00277          if ( ierror > 0 ) then
00278             ierrp (1) = ierror
00279             ierrp (2) = n_active
00280             call psmile_error ( PRISM_Error_Alloc, 'b_comps', &
00281                                 ierrp, 2, __FILE__, __LINE__ )
00282             return
00283          endif
00284 !
00285          call psmile_enddef_appl_miss (global_ids, comp_min, comp_max, &
00286                                        b_comps, n_active, tag, ierror)
00287          if (ierror > 0) return
00288 !
00289       else
00290          b_comps => comp_infos
00291       endif
00292 !
00293       Deallocate (global_ids)
00294       endif
00295 !
00296 !-----------------------------------------------------------------------
00297 ! Create communicator for roots of applications
00298 ! Should this communicator be made permanent?
00299 !-----------------------------------------------------------------------
00300 !
00301       if (Appl%rank == PRISM_root) then
00302          color = 1
00303       else
00304          color = MPI_UNDEFINED
00305       endif
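!
!  Only the application root processes take part in the new communicator
!  (color 1); all other processes pass MPI_UNDEFINED and obtain
!  MPI_COMM_NULL. The key orders the roots by application sequence number,
!  so their rank in "comm_appl_roots" is Appl%sequence_number-1.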
00306 !
00307       key = Appl%sequence_number - 1
00308 !
00309       call MPI_Comm_Split (comm_psmile, color, key, &
00310                            comm_appl_roots, ierror)
00311       if ( ierror /= MPI_SUCCESS ) then
00312          ierrp (1) = ierror
00313          ierror = PRISM_Error_MPI
00314 
00315          call psmile_error ( ierror, 'MPI_Comm_Split', &
00316                              ierrp, 1, __FILE__, __LINE__ )
00317          return
00318       endif 
00319 
00320       if (Appl%rank == PRISM_root) then
00321          call MPI_Comm_rank (comm_appl_roots, rank, ierror)
00322          if ( ierror /= MPI_SUCCESS ) then
00323             ierrp (1) = ierror
00324             ierror = PRISM_Error_MPI
00325 
00326             call psmile_error ( ierror, 'MPI_Comm_Rank', &
00327                                 ierrp, 1, __FILE__, __LINE__ )
00328             return
00329          endif 
00330 !
00331          i_am_master = (rank == PRISM_root)
00332          if (i_am_master) master_rank = psmile_rank
00333 
00334 #ifdef PRISM_ASSERTION
00335 !
00336 !     Internal Control
00337 !
00338          call MPI_Comm_size (comm_appl_roots, size, ierror)
00339          if ( ierror /= MPI_SUCCESS ) then
00340             ierrp (1) = ierror
00341             ierror = PRISM_Error_MPI
00342 
00343             call psmile_error ( ierror, 'MPI_Comm_size', &
00344                                 ierrp, 1, __FILE__, __LINE__ )
00345             return
00346          endif
00347 
00348          if (size /= noApplication) then
00349             write (*, 9970) size, noApplication
00350             call psmile_assert ( __FILE__, __LINE__, &
00351                                  'size /= noApplication')
00352          endif
00353 
00354          if (rank /= Appl%sequence_number-1) then
00355             write (*, 9960) rank, Appl%sequence_number
00356             call psmile_assert ( __FILE__, __LINE__, &
00357                                  'rank /= Appl%sequence_number-1')
00358          endif
00359 #endif /* PRISM_ASSERTION */
00360 
00361       endif
00362 !
00363 !  Broadcast master_rank to all processes
00364 !
00365       i = master_rank
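!     master_rank is 0 on every process except the master, where it was set
!     to psmile_rank above; the SUM reduction therefore delivers the
!     master's rank to all processes of "comm_psmile".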
00366 
00367       call MPI_Allreduce (i, master_rank, 1, MPI_INTEGER, &
00368                           MPI_SUM, comm_psmile, ierror)
00369       if ( ierror /= MPI_SUCCESS ) then
00370          ierrp (1) = ierror
00371          ierror = PRISM_Error_MPI
00372 
00373          call psmile_error ( ierror, 'MPI_Allreduce', &
00374                              ierrp, 1, __FILE__, __LINE__ )
00375          return
00376       endif 
00377 !
00378 !  Determine mapping of global var id's to global grid extension id's
00379 !  for variables which are used in coupling.
00380 
00381       if ( .not. Appl%stand_alone ) then
00382          call psmile_field2grid (ierror)
00383          if (ierror > 0) return
00384       endif
00385 !
00386 !=======================================================================
00387 ! Collect the data within the roots of applications
00388 ! The data is collected on the root of communicator "comm_appl_roots"
00389 !
00390 ! Collected are:
00391 !
00392 ! Number_of_comps_per_appl : Number of active components per application.
00393 ! root_ranks               : Ranks of roots in psmile communicator
00394 !
00395 ! Number_of_Grids_Vector   : Number of grids per process for each component.
00396 ! psmile_ranks             : Rank in communicator "comm_psmile"
00397 !                            for each process in component.
00398 ! all_extents              : All grid extensions
00399 ! all_extent_infos         : All infos to extensions
00400 !=======================================================================
00401 !
00402 !  For stand_alone applications without the driver
00403 !  process we can already return here 
00404 
00405       if ( Appl%stand_alone ) then
00406 #ifdef VERBOSE
00407          print 9980, trim(ch_id), ierror
00408 #endif
00409          return
00410       endif
00411 
00412       if (Appl%rank == PRISM_root) then
00413 !
00414 !      Collect the number of components in the master process
00415 !
00416 !        call MPI_Gather (n_act_comp,               1, MPI_INTEGER, &
00417          call MPI_Gather (n_active,                 1, MPI_INTEGER, &
00418                           Number_of_comps_per_appl, 1, MPI_INTEGER, &
00419                           PRISM_root, comm_appl_roots, ierror)
00420          if ( ierror /= MPI_SUCCESS ) then
00421             ierrp (1) = ierror
00422             ierror = PRISM_Error_MPI
00423 
00424             call psmile_error ( ierror, 'MPI_Gather', &
00425                                 ierrp, 1, __FILE__, __LINE__ )
00426             return
00427          endif 
00428 !
00429 !      Collect the psmile ranks of root processes
00430 !
00431          call MPI_Gather (psmile_rank, 1, MPI_INTEGER, &
00432                           root_ranks,  1, MPI_INTEGER, &
00433                           PRISM_root, comm_appl_roots, ierror)
00434          if ( ierror /= MPI_SUCCESS ) then
00435             ierrp (1) = ierror
00436             ierror = PRISM_Error_MPI
00437 
00438             call psmile_error ( ierror, 'MPI_Gather', &
00439                                 ierrp, 1, __FILE__, __LINE__ )
00440             return
00441          endif 
00442 !
00443 !-----------------------------------------------------------------------
00444 !      Collect the number of processes per component in the master process
00445 !-----------------------------------------------------------------------
00446 !
00447          if (i_am_master) then
00448 ! Number_of_coll_comps == 0: end here ?
00449             Number_of_coll_comps = SUM (Number_of_comps_per_appl(:))
00450 !
00451             if (Number_of_coll_comps < 2 ) &
00452                Appl%stand_alone = .true.
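!           With fewer than two active components in total there is nothing
!           to couple; the application is flagged as stand-alone and will
!           return early after the broadcast of Appl%stand_alone below.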
00453 
00454 #ifdef PRISM_ASSERTION_not_needed_anymore
00455             if (Number_of_coll_comps < 2) then
00456                call psmile_assert ( __FILE__, __LINE__, &
00457                  'Total number of active components < 2')
00458             endif
00459 #endif /* PRISM_ASSERTION */
00460 !
00461             Allocate (all_comp_infos(1:Number_of_coll_comps), STAT = ierror)
00462             if ( ierror > 0 ) then
00463                ierrp (1) = ierror
00464                ierrp (2) = Number_of_coll_comps
00465 
00466                ierror = PRISM_Error_Alloc
00467                call psmile_error ( ierror, 'all_comp_infos', &
00468                                    ierrp, 2, __FILE__, __LINE__ )
00469                return
00470             endif
00471 !
00472 !    Get displacements for the collection of components
00473 !
00474             disp (1) = 0
00475 !cdir vector
00476                do iappl = 2, noApplication
00477                disp (iappl) = disp (iappl-1) + Number_of_comps_per_appl (iappl-1)
00478                enddo
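!
!    disp(iappl) is the exclusive prefix sum of Number_of_comps_per_appl,
!    i.e. the offset of application "iappl" within "all_comp_infos";
!    it serves as the displacement vector for MPI_Gatherv below.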
00479 
00480              call MPI_Gatherv (b_comps,  n_active, datatype_enddef_comp, &
00481                        all_comp_infos, Number_of_comps_per_appl,         &
00482                                        disp, datatype_enddef_comp,       &
00483                            PRISM_root, comm_appl_roots, ierror)
00484 
00485          else
00486 !
00487 !    ... Note: all_comp_infos is not allocated at this time since
00488 !              it is significant only on the root process.
00489 !              The SUN runtime system doesn't like passing
00490 !              the unallocated all_comp_infos to MPI_Gatherv.
00491 !
00492              call MPI_Gatherv (b_comps,  n_active, datatype_enddef_comp, &
00493                          dummy_comp_info, Number_of_comps_per_appl,      &
00494                                        disp, datatype_enddef_comp,       &
00495                            PRISM_root, comm_appl_roots, ierror)
00496          endif
00497 
00498          if ( ierror /= MPI_SUCCESS ) then
00499             ierrp (1) = ierror
00500             ierror = PRISM_Error_MPI
00501 
00502             call psmile_error ( ierror, 'MPI_Gatherv', &
00503                                 ierrp, 1, __FILE__, __LINE__ )
00504             return
00505          endif 
00506 !
00507       endif
00508 
00509       call MPI_Bcast ( Appl%stand_alone,1, MPI_Logical, PRISM_Root, Appl%comm, ierror )
00510 
00511       if ( Appl%stand_alone ) then
00512          print 9950, trim(ch_id)
00513 #ifdef VERBOSE
00514          print 9980, trim(ch_id), ierror
00515          call psmile_flushstd
00516 #endif
00517          return
00518       endif
00519 !
00520 !=======================================================================
00521 ! The way back:
00522 !
00523 ! Distribute the data collected to all processes in communicator
00524 ! "comm_psmile".
00525 !
00526 ! (*) Broadcast Number of components "Number_of_coll_comps"
00527 ! (*) Broadcast non-pointers of "all_comp_infos"
00528 !
00529 ! (*) Broadcast vectors
00530 !     Number_of_Grids_Vector, psmile_ranks, all_extents, all_extent_infos
00531 !     for all components from corresponding roots of applications.
00532 !
00533 ! In prism_enddef_comp one could collect only in the roots !
00534 !
00535 !=======================================================================
00536 !
00537       call MPI_Bcast (Number_of_coll_comps, 1, MPI_INTEGER, &
00538                       master_rank, comm_psmile, ierror)
00539       if ( ierror /= MPI_SUCCESS ) then
00540          ierrp (1) = ierror
00541          ierror = PRISM_Error_MPI
00542 
00543          call psmile_error ( ierror, 'MPI_Bcast', &
00544                              ierrp, 1, __FILE__, __LINE__ )
00545          return
00546       endif 
00547 !
00548 ! WARNING: Not ideal. Should be removed.
00549 !
00550       call MPI_Bcast (Number_of_comps_per_appl, NoApplication, MPI_INTEGER, &
00551                       master_rank, comm_psmile, ierror)
00552       if ( ierror /= MPI_SUCCESS ) then
00553          ierrp (1) = ierror
00554          ierror = PRISM_Error_MPI
00555 
00556          call psmile_error ( ierror, 'MPI_Bcast', &
00557                              ierrp, 1, __FILE__, __LINE__ )
00558          return
00559       endif 
00560 !
00561       call MPI_Bcast (root_ranks, NoApplication, MPI_INTEGER, &
00562                       master_rank, comm_psmile, ierror)
00563       if ( ierror /= MPI_SUCCESS ) then
00564          ierrp (1) = ierror
00565          ierror = PRISM_Error_MPI
00566 
00567          call psmile_error ( ierror, 'MPI_Bcast', &
00568                              ierrp, 1, __FILE__, __LINE__ )
00569          return
00570       endif 
00571 !
00572       if (.not. i_am_master) then
00573          Allocate (all_comp_infos(1:Number_of_coll_comps), STAT = ierror)
00574          if ( ierror > 0 ) then
00575             ierrp (1) = ierror
00576             ierrp (2) = Number_of_coll_comps
00577 
00578             ierror = PRISM_Error_Alloc
00579             call psmile_error ( ierror, 'all_comp_infos', &
00580                                 ierrp, 2, __FILE__, __LINE__ )
00581             return
00582          endif
00583       endif
00584 !
00585       call MPI_Bcast (all_comp_infos, Number_of_coll_comps, datatype_enddef_comp, &
00586                       master_rank, comm_psmile, ierror)
00587       if ( ierror /= MPI_SUCCESS ) then
00588          ierrp (1) = ierror
00589          ierror = PRISM_Error_MPI
00590 
00591          call psmile_error ( ierror, 'MPI_Bcast', &
00592                              ierrp, 1, __FILE__, __LINE__ )
00593          return
00594       endif 
00595 !
00596 !  Share pointers for the components of our own application
00597 !
00598 #ifdef PRISM_ASSERTION
00599       if (n_active /= Number_of_comps_per_appl(Appl%sequence_number)) then
00600          print *, 'n_active, Number_of_comps_per_appl ', &
00601                    n_active, Number_of_comps_per_appl(Appl%sequence_number)
00602 
00603          call psmile_assert ( __FILE__, __LINE__, &
00604              'Inconsistent number of active application components in Number_of_comps_per_appl!')
00605       endif
00606 #endif
00607 
00608       my_icomp0_coll_comps = &
00609          SUM (Number_of_comps_per_appl(1:Appl%sequence_number-1))
00610 
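!     The per-component vectors of our own application are already present
!     locally in "b_comps"; only the pointers are copied here. The vectors
!     of the other applications' components are allocated and received in
!     the loop further below.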
00611          do i = 1, n_active
00612 
00613          all_comp_infos(my_icomp0_coll_comps+i)%Number_of_Grids_Vector => &
00614                                      b_comps(i)%Number_of_Grids_Vector
00615 
00616          all_comp_infos(my_icomp0_coll_comps+i)%psmile_ranks => &
00617                                      b_comps(i)%psmile_ranks
00618 
00619          all_comp_infos(my_icomp0_coll_comps+i)%all_extents => &
00620                                      b_comps(i)%all_extents
00621 
00622          all_comp_infos(my_icomp0_coll_comps+i)%all_extent_infos => &
00623                                      b_comps(i)%all_extent_infos
00624 
00625          end do
00626 !
00627       if (n_miss > 0) then
00628          Deallocate (b_comps)
00629       endif
00630 !
00631 !     Allocate vectors of global component information which are not 
00632 !     contained on the current process and
00633 !     broadcast the data from the root of the application to all processes.
00634 !
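!     The broadcasts in this loop are collective over "comm_psmile" and are
!     rooted at root_ranks(iappl); every process therefore takes part, even
!     for the components of its own application.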
00635       icomp = 0
00636          do iappl = 1, NoApplication
00637             do i = 1, Number_of_comps_per_appl(iappl)
00638 
00639             icomp = icomp + 1
00640             size = all_comp_infos(icomp)%size
00641 !
00642             if (iappl /= Appl%sequence_number) then
00643                Allocate (all_comp_infos(icomp)%Number_of_Grids_Vector(1:size), &
00644                          STAT = ierror)
00645                if ( ierror > 0 ) then
00646                   ierrp (1) = ierror
00647                   ierrp (2) = size
00648 
00649                   ierror = PRISM_Error_Alloc
00650                   call psmile_error ( ierror, 'all_comp_infos()%Number_of_Grids_Vector', &
00651                                       ierrp, 2, __FILE__, __LINE__ )
00652                   return
00653                endif
00654                ! Initialisation
00655                all_comp_infos(icomp)%Number_of_Grids_Vector(1:size) = 0
00656 !
00657                Allocate (all_comp_infos(icomp)%psmile_ranks(1:size), &
00658                          STAT = ierror)
00659                if ( ierror > 0 ) then
00660                   ierrp (1) = ierror
00661                   ierrp (2) = size
00662 
00663                   ierror = PRISM_Error_Alloc
00664                   call psmile_error ( ierror, 'all_comp_infos()%psmile_ranks', &
00665                                       ierrp, 2, __FILE__, __LINE__ )
00666                   return
00667                endif
00668                ! Initialisation
00669                all_comp_infos(icomp)%psmile_ranks(1:size) = 0
00670 
00671 #ifdef PRISM_ASSERTION
00672             else if (icomp /= my_icomp0_coll_comps+i) then
00673                print *, 'icomp, my_icomp0_coll_comps, i', &
00674                          icomp, my_icomp0_coll_comps, i
00675 
00676                call psmile_assert ( __FILE__, __LINE__, &
00677                                     'inconsistent indices')
00678 #endif
00679             endif
00680 !
00681 !     Broadcast the data which depends on the size of the component communicator
00682 !
00683             call MPI_Bcast (all_comp_infos(icomp)%Number_of_Grids_Vector, &
00684                             size, MPI_INTEGER, &
00685                             root_ranks(iappl), comm_psmile, ierror)
00686             if ( ierror /= MPI_SUCCESS ) then
00687                ierrp (1) = ierror
00688                ierror = PRISM_Error_MPI
00689 
00690                call psmile_error ( ierror, 'MPI_Bcast', &
00691                                    ierrp, 1, __FILE__, __LINE__ )
00692                return
00693             endif 
00694 
00695             call MPI_Bcast (all_comp_infos(icomp)%psmile_ranks, size, &
00696                             MPI_INTEGER, &
00697                             root_ranks(iappl), comm_psmile, ierror)
00698             if ( ierror /= MPI_SUCCESS ) then
00699                ierrp (1) = ierror
00700                ierror = PRISM_Error_MPI
00701 
00702                call psmile_error ( ierror, 'MPI_Bcast', &
00703                                    ierrp, 1, __FILE__, __LINE__ )
00704                return
00705             endif 
00706 !
00707 !     Allocate the data which depends on the number of grids
00708 !
00709             n = SUM (all_comp_infos(icomp)%Number_of_Grids_Vector(:))
00710 
00711             if (iappl /= Appl%sequence_number) then
00712 
00713                Allocate (all_comp_infos(icomp)%all_extents(1:2, 1:ndim_3d, 1:n), &
00714                          STAT = ierror)
00715                if ( ierror > 0 ) then
00716                   ierrp (1) = ierror
00717                   ierrp (2) = n * (2 * ndim_3d)
00718 
00719                   ierror = PRISM_Error_Alloc
00720                   call psmile_error ( ierror, 'all_comp_infos()%all_extents', &
00721                                       ierrp, 2, __FILE__, __LINE__ )
00722                   return
00723                endif
00724                ! Initialisation
00725                all_comp_infos(icomp)%all_extents(1:2, 1:ndim_3d, 1:n) = 0.
00726 !
00727                Allocate (all_comp_infos(icomp)%all_extent_infos(nd_extent_infos, 1:n), &
00728                          STAT = ierror)
00729                if ( ierror > 0 ) then
00730                   ierrp (1) = ierror
00731                   ierrp (2) = n * nd_extent_infos
00732 
00733                   ierror = PRISM_Error_Alloc
00734                   call psmile_error ( ierror, 'all_comp_infos()%all_extent_infos', &
00735                                       ierrp, 2, __FILE__, __LINE__ )
00736                   return
00737                endif
00738                ! Initialisation
00739                all_comp_infos(icomp)%all_extent_infos(1:nd_extent_infos, 1:n) = 0
00740             endif
00741 !
00742 !     Broadcast the data which depends on the number of grids
00743 !
00744             call MPI_Bcast (all_comp_infos(icomp)%all_extent_infos, &
00745                             n*nd_extent_infos, MPI_INTEGER, &
00746                             root_ranks(iappl), comm_psmile, ierror)
00747             if ( ierror /= MPI_SUCCESS ) then
00748                ierrp (1) = ierror
00749                ierror = PRISM_Error_MPI
00750 
00751                call psmile_error ( ierror, 'MPI_Bcast', &
00752                                    ierrp, 1, __FILE__, __LINE__ )
00753                return
00754             endif 
00755 
00756             call MPI_Bcast (all_comp_infos(icomp)%all_extents, n*2*ndim_3d, &
00757                             PSMILe_float_datatype, &
00758                             root_ranks(iappl), comm_psmile, ierror)
00759             if ( ierror /= MPI_SUCCESS ) then
00760                ierrp (1) = ierror
00761                ierror = PRISM_Error_MPI
00762 
00763                call psmile_error ( ierror, 'MPI_Bcast', &
00764                                    ierrp, 1, __FILE__, __LINE__ )
00765                return
00766             endif 
00767 !
00768             end do ! Component
00769          end do ! Application
00770 !
00771 !=======================================================================
00772 !     Free
00773 !=======================================================================
00774 !
00775 !     Free the communicator created above (really ?)
00776 !
00777       if (comm_appl_roots /= MPI_COMM_NULL) then
00778          call MPI_Comm_free (comm_appl_roots, ierror)
00779          if ( ierror /= MPI_SUCCESS ) then
00780             ierrp (1) = ierror
00781             ierror = PRISM_Error_MPI
00782 
00783             call psmile_error ( ierror, 'MPI_Comm_free', &
00784                                 ierrp, 1, __FILE__, __LINE__ )
00785             return
00786          endif 
00787       endif
00788 !
00789 !===> All done
00790 !
00791 #ifdef VERBOSE
00792       call psmile_print_comp_info ( all_comp_infos, Number_of_coll_comps, &
00793                                     'End of PSMILe_enddef_appl' )
00794 
00795       print 9980, trim(ch_id), ierror
00796       call psmile_flushstd
00797 #endif /* VERBOSE */
00798 !
00799 !  Formats
00800 !
00801 9990 format (1x, a, ': psmile_enddef_appl')
00802 9980 format (1x, a, ': psmile_enddef_appl: eof ierror =', i3)
00803 9950 format (1x, a, ': psmile_enddef_appl: WARNING: Reset Appl%stand_alone to .true.!')
00804 !
00805 #ifdef PRISM_ASSERTION
00806 
00807 9970 format (/1x, 'psmile_enddef_appl: inconsistent number of applications:', &
00808                   ' size = ', i7, '; noApplication =', i7)
00809 9960 format (/1x, 'psmile_enddef_appl: wrong rank: rank = ', i7, &
00810                   '; Appl%sequence_number =', i7)
00811 #endif /* PRISM_ASSERTION */
00812 !
00813       end subroutine PSMILe_enddef_appl
