! mp.fpp Source File
!
! Contents:
!   Source Code

#include "unused_dummy.inc"
!> Easier Fortran90 interface to the MPI Message Passing Library.
!>
!>     (c) Copyright 1991 to 1998 by Michael A. Beer, William D. Dorland,
!>     P. B. Snyder, Q. P. Liu, and Gregory W. Hammett. ALL RIGHTS RESERVED.
!>
!> @note mp_mpi_r8.f90 is a version of mp_mpi.f90 to use when compiling
!> with -r8 (where the default real type is taken to be 8 bytes).  Just
!> replaced all occurances of MPI_REAL with MPI_DOUBLE_PRECISION and
!> MPI_COMPLEX with MPI_DOUBLE_COMPLEX.
module mp
  use, intrinsic :: iso_fortran_env, only : error_unit
#ifdef MPI
# ifndef MPIINC
  use mpi
# endif
#endif
  implicit none

  private

  public :: init_mp, finish_mp
  public :: mp_initialized, timer_local, time_message
  public :: get_mp_times, reset_mp_times
  public :: broadcast, sum_reduce, nb_sum_reduce, sum_allreduce, nb_sum_allreduce
  public :: sum_allreduce_sub, nb_sum_allreduce_sub
  public :: broadcast_sub, sum_reduce_sub, nb_sum_reduce_sub
  public :: max_reduce, max_allreduce, nb_max_allreduce
  public :: min_reduce, min_allreduce, min_allreduce_sub
  public :: maxloc_allreduce
  public :: nproc, iproc, proc0, job
  public :: send, ssend, receive
  public :: send_init, recv_init, start_comm, free_request !For persistent comms
  public :: barrier
  public :: comm_type
  public :: waitany
  public :: mp_comm_self
  public :: mp_undefined, mp_status_size
! JH> new abort method
  public :: mp_abort
! <JH
! MAB> needed by Trinity
  public :: scope, allprocs, subprocs
  public :: all_to_group, group_to_all
! <MAB
 !MRH needed by Multigs2
  public :: multigs2procs
  public :: land_allreduce
  ! EGH: logical used when underusing procs
  public :: included
  public :: split
  public :: free_comm
  public :: wait
  public :: waitall, testall
  public :: nbsend
  public :: nbrecv
  public :: nproc_comm
  public :: rank_comm
  public :: allgatherv, allgather, nb_allgatherv
  public :: init_jobs
  public :: mp_comm
  public :: mp_info, mp_comm_null
!AJ
  public :: initialise_requests
  public :: mp_request_null

  public :: use_nproc
  public :: split_all
  public :: unsplit_all
  public :: get_proc_name
  public :: set_default_error_file_unit

  ! <EGH needed for functional_tests
  public :: grp0

# ifdef MPI
# ifdef MPIINC
! CMR: defined MPIINC for machines where need to include mpif.h
  include 'mpif.h'
#endif
  integer, pointer :: nproc
  integer, target :: ntot_proc, ngroup_proc, mulntot_proc

  integer, pointer :: iproc
  integer, target :: aproc, gproc, mulproc

  logical, pointer :: proc0
  logical, target :: aproc0, gproc0, mulproc0

  integer, parameter :: mp_info = MPI_INFO_NULL
  integer, parameter :: mp_comm_null = MPI_COMM_NULL
  integer, parameter :: mp_request_null = MPI_REQUEST_NULL
  integer, parameter :: mp_comm_self = MPI_COMM_SELF
  integer, parameter :: mp_undefined = MPI_UNDEFINED
  integer, parameter :: mp_status_size = MPI_STATUS_SIZE

  !> Currently active communicator
  integer, pointer :: mp_comm
  !> Communicator for all processors
  integer, target :: comm_all = mp_comm_null
  !> Communicator for ensemble simulations
  integer, target :: comm_group = mp_comm_null
  !> Communicator for multiscale GS2
  integer, target :: comm_multigs2 = mp_comm_null

  integer (kind(MPI_REAL)) :: mpireal, mpicmplx, mpi2real
#ifndef SINGLE_PRECISION
  integer (kind(MPI_REAL)) :: mpicmplx8
#endif
# else
  integer, target :: nproc_actual = 1, iproc_actual = 0
  logical, target :: proc0_actual = .true.
  integer, pointer :: nproc => nproc_actual, iproc => iproc_actual
  logical, pointer :: proc0 => proc0_actual

  integer, target :: mp_comm_actual = -1
  integer, pointer :: mp_comm => mp_comm_actual

  integer, parameter :: mp_info = -1
  integer, parameter :: mp_comm_null = -1
  integer, parameter :: mp_request_null = -1
  integer, parameter :: mp_comm_self = -1
  integer, parameter :: mp_undefined = -1
  integer, parameter :: mp_status_size = 1

# endif
  !> Selectors for different communicator focuses
  integer, parameter :: allprocs = 0, subprocs = 1, multigs2procs = 2

  integer :: job = 0

  ! needed for Trinity -- MAB
  integer, dimension (:), allocatable :: grp0

  !> True if the communicator has been initialised.
  !! Used for unit tests to work out whether to
  !! call mp_abort or stop
  logical :: mp_initialized = .false.

  !> If using nprocs<nprocs available,
  !! this is true for procs that take part
  !! and false for procs that lie idle
  logical :: included = .true.

  !> File unit for the error file for [[mp_abort]]
  integer :: err_unit = error_unit

  !> Timers for mpi routines
  real, dimension(2) :: time_mp_other = 0., time_mp_collectives = 0.
  real, dimension(2) :: time_mp_ptp = 0., time_mp_sync = 0.

  !> A simple object for storing details of a communicator
  type comm_type
     sequence
     integer :: id = mp_comm_null !< The communicator id, used in calls to MPI routines
     integer :: iproc = -1 !< The procs local rank
     integer :: nproc = -1 !< The total number of processors in the communicator
     logical :: proc0 = .false. !< Is iproc equal to 0?
  end type comm_type

  interface wait
     module procedure wait_stat
     module procedure wait_nostat
  end interface

  interface waitall
     module procedure waitall_stat
     module procedure waitall_nostat
  end interface

  interface waitany
     module procedure waitany_stat
     module procedure waitany_nostat
  end interface

  interface testall
     module procedure testall_stat
     module procedure testall_nostat
     module procedure testall_nostat_noflag
  end interface

  interface nbsend
     module procedure nbsend_real_array
     module procedure nbsend_real_array_count
     module procedure nbsend_complex_array
     module procedure nbsend_complex_2d_array
     module procedure nbsend_complex_2d_array_count
     module procedure nbsend_complex_3d_array
     module procedure nbsend_complex_3d_array_count
     module procedure nbsend_complex_array_sub
     module procedure nbsend_complex_array_count
  end interface

  interface nbrecv
     module procedure nbrecv_real_array
     module procedure nbrecv_real_array_count
     module procedure nbrecv_complex_array
     module procedure nbrecv_complex_2d_array
     module procedure nbrecv_complex_2d_array_count
     module procedure nbrecv_complex_3d_array
     module procedure nbrecv_complex_3d_array_count
     module procedure nbrecv_complex_array_sub
     module procedure nbrecv_complex_array_count
  end interface

  interface send_init
     module procedure send_init_complex_array
     module procedure send_init_real_array
  end interface

  interface recv_init
     module procedure recv_init_complex_array
     module procedure recv_init_real_array
  end interface

  interface start_comm
     module procedure start_persist
     module procedure startall_persist
  end interface

  interface free_request
     module procedure free_handle_persist
     module procedure free_handles_persist
  end interface

  interface split
     module procedure split_nokey
     module procedure split_key
     module procedure split_nokey_to_commtype
     module procedure split_key_to_commtype
     module procedure split_nokey_to_commtype_sub
     module procedure split_key_to_commtype_sub
  end interface

  interface free_comm
     module procedure free_comm_id
     module procedure free_comm_type
  end interface

  interface allgather
     module procedure allgather_integer_array_1to1
  end interface allgather

  interface allgatherv
     module procedure allgatherv_complex_array_1to1
     module procedure allgatherv_complex_array_1to3
     module procedure allgatherv_complex_array_1to1_sub
     module procedure allgatherv_complex_array_1to3_sub
  end interface

  interface nb_allgatherv
     module procedure nb_allgatherv_complex_array_1to1
     module procedure nb_allgatherv_complex_array_1to3_sub
  end interface

  interface broadcast
     module procedure broadcast_integer
     module procedure broadcast_integer_array
     module procedure broadcast_integer_2array

     module procedure broadcast_real
     module procedure broadcast_real_array
     module procedure broadcast_real_2array
     module procedure broadcast_real_3array
     module procedure broadcast_real_4array
     module procedure broadcast_real_5array

     module procedure broadcast_complex
     module procedure broadcast_complex_array
#ifndef SINGLE_PRECISION
     ! This is needed for the gs2_gryfx_zonal module
     module procedure broadcast_complex8_array
#endif
     module procedure broadcast_complex_2array
     module procedure broadcast_complex_3array
     module procedure broadcast_complex_4array

     module procedure broadcast_logical
     module procedure broadcast_logical_array
     module procedure broadcast_logical_2array

     module procedure bcastfrom_integer
     module procedure bcastfrom_integer_array

     module procedure bcastfrom_real
     module procedure bcastfrom_real_array

     module procedure bcastfrom_complex
     module procedure bcastfrom_complex_array
     module procedure bcastfrom_complex_2array
     module procedure bcastfrom_complex_3array

     module procedure bcastfrom_logical
     module procedure bcastfrom_logical_array

     module procedure broadcast_character
     module procedure broadcast_character_array
     module procedure bcastfrom_character

  end interface

  interface broadcast_sub
     module procedure bcastfrom_complex_array_sub
     module procedure bcastfrom_complex_2array_sub
     module procedure bcastfrom_complex_3array_sub
     module procedure bcastfrom_complex_4array_sub
  end interface

  interface sum_reduce_sub
     module procedure sum_reduce_complex_array_sub
     module procedure sum_reduce_complex_2array_sub
     module procedure sum_reduce_complex_3array_sub
     module procedure sum_reduce_complex_4array_sub
  end interface

  interface nb_sum_reduce_sub
     module procedure nb_sum_reduce_complex_array_sub
     module procedure nb_sum_reduce_complex_4array_sub
  end interface

  interface sum_reduce
     module procedure sum_reduce_logical

     module procedure sum_reduce_integer
     module procedure sum_reduce_integer_array
     module procedure sum_reduce_integer_2array

     module procedure sum_reduce_real
     module procedure sum_reduce_real_array
     module procedure sum_reduce_real_2array
     module procedure sum_reduce_real_3array
     module procedure sum_reduce_real_4array
     module procedure sum_reduce_real_5array

     module procedure sum_reduce_complex
     module procedure sum_reduce_complex_array
     module procedure sum_reduce_complex_2array
     module procedure sum_reduce_complex_3array
     module procedure sum_reduce_complex_4array
     module procedure sum_reduce_complex_5array
  end interface

  interface nb_sum_reduce
     module procedure nb_sum_reduce_complex
     module procedure nb_sum_reduce_complex_array
     module procedure nb_sum_reduce_complex_2array
     module procedure nb_sum_reduce_complex_3array
     module procedure nb_sum_reduce_complex_4array
     module procedure nb_sum_reduce_complex_5array
  end interface

  interface sum_allreduce
     module procedure sum_allreduce_integer
     module procedure sum_allreduce_integer_array
     module procedure sum_allreduce_integer_2array
     module procedure sum_allreduce_integer_3array

     module procedure sum_allreduce_real
     module procedure sum_allreduce_real_array
     module procedure sum_allreduce_real_2array
     module procedure sum_allreduce_real_3array
     module procedure sum_allreduce_real_4array
     module procedure sum_allreduce_real_5array

     module procedure sum_allreduce_complex
     module procedure sum_allreduce_complex_array
     module procedure sum_allreduce_complex_2array
     module procedure sum_allreduce_complex_3array
     module procedure sum_allreduce_complex_4array
     module procedure sum_allreduce_complex_5array
  end interface

  interface nb_sum_allreduce
     module procedure nb_sum_allreduce_integer
     module procedure nb_sum_allreduce_complex_array
     module procedure nb_sum_allreduce_complex_3array
     module procedure nb_sum_allreduce_complex_4array
     module procedure nb_sum_allreduce_complex_5array
  end interface

  interface sum_allreduce_sub
     module procedure sum_allreduce_sub_integer
     module procedure sum_allreduce_sub_integer_array

     module procedure sum_allreduce_sub_real
     module procedure sum_allreduce_sub_real_array
     module procedure sum_allreduce_sub_real_2array
     module procedure sum_allreduce_sub_real_3array
     module procedure sum_allreduce_sub_real_4array
     module procedure sum_allreduce_sub_real_5array

     module procedure sum_allreduce_sub_complex
     module procedure sum_allreduce_sub_complex_array
     module procedure sum_allreduce_sub_complex_2array
     module procedure sum_allreduce_sub_complex_3array
     module procedure sum_allreduce_sub_complex_4array
     module procedure sum_allreduce_sub_complex_5array
  end interface

  interface nb_sum_allreduce_sub
     module procedure nb_sum_allreduce_sub_complex_2array
     module procedure nb_sum_allreduce_sub_complex_4array
  end interface

  interface max_reduce
     module procedure max_reduce_integer
     module procedure max_reduce_integer_array

     module procedure max_reduce_real
     module procedure max_reduce_real_array
  end interface

  interface max_allreduce
     module procedure max_allreduce_integer
     module procedure max_allreduce_integer_array

     module procedure max_allreduce_real
     module procedure max_allreduce_real_array
  end interface

  interface nb_max_allreduce
     module procedure nb_max_allreduce_integer
     module procedure nb_max_allreduce_integer_array
     module procedure nb_max_allreduce_real
     module procedure nb_max_allreduce_real_array
  end interface

  interface min_reduce
     module procedure min_reduce_integer
     module procedure min_reduce_integer_array

     module procedure min_reduce_real
     module procedure min_reduce_real_array
  end interface

  interface min_allreduce
     module procedure min_allreduce_integer
     module procedure min_allreduce_integer_array

     module procedure min_allreduce_real
     module procedure min_allreduce_real_array
  end interface

  interface min_allreduce_sub
     module procedure min_allreduce_sub_integer
  end interface

! MRH
  interface maxloc_allreduce
     module procedure maxloc_allreduce_real
     module procedure maxloc_allreduce_real_array
  end interface

  interface land_allreduce
     module procedure land_allreduce_single_element
  end interface
! MRH
  interface send
     module procedure send_integer
     module procedure send_integer_array

     module procedure send_real
     module procedure send_real_array
     module procedure send_real_4d_array
     module procedure send_real_5d_array

     module procedure send_complex
     module procedure send_complex_array
     module procedure send_complex_2d_array
     module procedure send_complex_3d_array

     module procedure nonblocking_send_complex_array

     module procedure send_logical
     module procedure send_logical_array

     module procedure send_character
  end interface

  interface receive
     module procedure receive_integer
     module procedure receive_integer_array

     module procedure receive_real
     module procedure receive_real_array
     module procedure receive_real_4d_array
     module procedure receive_real_5d_array

     module procedure receive_complex
     module procedure receive_complex_array
     module procedure receive_complex_2array
     module procedure receive_complex_3d_array
     module procedure nonblocking_receive_complex_array

     module procedure receive_logical
     module procedure receive_logical_array

     module procedure receive_character
  end interface

! MAB> needed for Trinity
! synchronous sends
  interface ssend
     module procedure ssend_character

     module procedure ssend_integer
     module procedure ssend_integer_array

     module procedure ssend_real
     module procedure ssend_real_array

     module procedure ssend_complex
     module procedure ssend_complex_array
     module procedure ssend_complex_2array

     module procedure ssend_logical
     module procedure ssend_logical_array
  end interface

! send stuff from global proc0 to group proc0s
  interface all_to_group
     module procedure all_to_group_real
     module procedure all_to_group_real_array
  end interface

! send stuff from group proc0s to global proc0
  interface group_to_all
     module procedure group_to_all_real
     module procedure group_to_all_real_array
  end interface
! <MAB

  interface barrier
     module procedure barrier_nocomm
     module procedure barrier_comm
  end interface
contains
  !/ Timer related routines

  !> Retrieve the accumulated MPI timer values (in seconds).
  !>
  !> Each argument is only written when present, so callers may request
  !> any subset of the timers.
  subroutine get_mp_times(total_time, overheads_time, collectives_time, ptp_time, sync_time)
    implicit none
    !> Cumulative times: everything combined, overheads, collectives,
    !> point-to-point and synchronisation respectively.
    real, intent(out), optional :: total_time, overheads_time, collectives_time, ptp_time, sync_time
    real :: overheads, collectives, ptp, sync

    ! Snapshot the cumulative component timers once.
    overheads = time_mp_other(1)
    collectives = time_mp_collectives(1)
    ptp = time_mp_ptp(1)
    sync = time_mp_sync(1)

    if (present(overheads_time)) overheads_time = overheads
    if (present(collectives_time)) collectives_time = collectives
    if (present(ptp_time)) ptp_time = ptp
    if (present(sync_time)) sync_time = sync
    if (present(total_time)) total_time = overheads + collectives + ptp + sync
  end subroutine get_mp_times

  !> Reset all of the accumulated mp timers (both the cumulative entry
  !> and the in-progress timestamp entry) back to zero.
  subroutine reset_mp_times
    implicit none
    time_mp_sync = 0.
    time_mp_ptp = 0.
    time_mp_collectives = 0.
    time_mp_other = 0.
  end subroutine reset_mp_times

  !> Returns CPU time in seconds
  !>
  !> Uses the best timer available for the build configuration:
  !> omp_get_wtime with OpenMP, mpi_wtime for a plain MPI build,
  !> otherwise the standard cpu_time intrinsic.
  function timer_local()
# ifdef OPENMP
!$    use omp_lib, only: omp_get_wtime
# endif
    real :: timer_local

    ! Default value; overwritten by the branches below.
    timer_local=0.

# ifdef OPENMP
    timer_local=omp_get_wtime()
# else
# if defined MPI && !defined MPIINC && !defined SINGLE_PRECISION
    ! mpi_wtime is only visible here through "use mpi" (not mpif.h),
    ! hence the MPIINC guard.
    timer_local=mpi_wtime()
# else
    ! this routine is F95 standard
    call cpu_time(timer_local)
# endif
# endif
  end function timer_local

  !> Accumulate elapsed time between paired calls.
  !>
  !> `targ(1)` holds the running total while `targ(2)` acts as both the
  !> timestamp of the opening call and a flag: zero means "not
  !> currently timing", non-zero means "timing in progress". The first
  !> call of a pair records the current time in `targ(2)`; the second
  !> adds the elapsed interval to `targ(1)`, optionally prints it, and
  !> clears `targ(2)` ready for the next pair.
  subroutine time_message(lprint,targ,chmessage)
    use warning_helpers, only: is_zero
    implicit none
    character (len=*), intent(in) :: chmessage !< Label printed with the interval
    logical, intent(in) :: lprint !< If true, print the interval on the closing call
    real, intent(in out) :: targ(2) !< (1) cumulative time, (2) start time / timing flag
    real :: now
    real, parameter :: small_number=1.e-10

    now=timer_local()

    if (.not. is_zero(targ(2))) then
       ! Closing call of a pair: accumulate the interval and clear the flag.
       targ(1)=targ(1)+now-targ(2)
       if (lprint) print *, chmessage,': ',now-targ(2),' seconds'
       targ(2)=0.
    else
       ! Opening call: store the start time, which must be non-zero so
       ! it is distinguishable from the "not timing" state.
       if (is_zero(now)) now = small_number
       targ(2) = now
    end if
  end subroutine time_message

  !/ MPI related routines

  !> Query the name of the processor (host) this process is running on.
  !>
  !> With MPI this wraps mpi_get_processor_name; in a serial build the
  !> fixed string "local" is returned instead.
  subroutine get_proc_name(nm)
    implicit none
#ifdef MPI
    !> Receives the processor name (blank padded)
    character(len=MPI_MAX_PROCESSOR_NAME), intent(out) :: nm
    ! Note: "len" renamed to name_length to avoid shadowing the len
    ! intrinsic; character*(...) replaced by the standard len= form.
    integer :: ierr, name_length
#else
    !> Receives the fixed name "local"
    character(len=5), intent(out) :: nm
#endif
#ifdef MPI
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    ! The significant length of nm is not needed by callers, so
    ! name_length is simply discarded here.
    call mpi_get_processor_name(nm,name_length,ierr)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
#else
    nm="local"
#endif
  end subroutine get_proc_name

  !> Initialise the MPI library, communicators, and related variables
  !>
  !> Sets up either the standard (comm_all) or multiscale
  !> (comm_multigs2) communicator, selects the matching scope, and
  !> determines the MPI datatypes corresponding to the default real and
  !> complex kinds. Safe to call when MPI has already been initialised.
  subroutine init_mp (comm_in, multigs2, err_unit_in)
#ifdef MPI
    use constants, only: pi, kind_rs, kind_rd
#ifdef SHMEM
    use shm_mpi3
#endif
#endif
    use optionals, only : get_option_with_default
    implicit none
    !> Communicator to use instead of `MPI_COMM_WORLD`. If present and
    !> set to `MPI_COMM_NULL`, gets set to `MPI_COMM_WORLD`
    integer, intent (inout), optional :: comm_in
    !> If true, initialise multiscale communicator and variables
    logical, intent (in), optional :: multigs2
    !> Unit of open file to write any error messages to. Defaults to stderr
    integer, intent (in), optional :: err_unit_in
# ifdef MPI
    integer :: ierror, err_unit_local
#ifdef OPENMP
    integer :: provided
#endif
    logical :: is_initialised, is_multiscale

    err_unit_local = get_option_with_default(err_unit_in, err_unit)
    call mpi_initialized (is_initialised, ierror)
#ifdef OPENMP
    if (.not. is_initialised) then
       ! Threaded builds require full thread support from the MPI library.
       call mpi_init_thread(MPI_THREAD_MULTIPLE, provided, ierror)
       if(provided .ne. MPI_THREAD_MULTIPLE) then
          write(err_unit_local,*) 'Problem with MPI_INIT_THREAD, stopping'
          ! error stop (not plain stop) so the program exits with a
          ! non-zero status, consistent with the precision check below.
          error stop 'Problem with MPI_INIT_THREAD'
       end if
    end if
#else
    if (.not. is_initialised) call mpi_init (ierror)
#endif

    is_multiscale = get_option_with_default(multigs2, .false.)

    if (is_multiscale) then
      call init_comm(comm_multigs2, mulntot_proc, mulproc, mulproc0, comm_in)
    else
      call init_comm(comm_all, ntot_proc, aproc, aproc0, comm_in)
    end if

    if (is_multiscale) then
      call scope(multigs2procs)
    else
      call scope(allprocs)
#ifdef SHMEM
      call shm_init(comm_all)
#endif
    end if

#ifndef SINGLE_PRECISION
    mpicmplx8 = MPI_COMPLEX
#endif
    ! Select the MPI datatypes that match the compiled default real kind.
    if ( (kind(pi)==kind_rs) .and. (kind_rs/=kind_rd) ) then
       mpireal = MPI_REAL
       mpi2real = MPI_2REAL
       mpicmplx = MPI_COMPLEX
    else if (kind(pi)==kind_rd) then
       mpireal = MPI_DOUBLE_PRECISION
       mpi2real = MPI_2DOUBLE_PRECISION
       mpicmplx = MPI_DOUBLE_COMPLEX
    else
       write (err_unit_local, *) 'ERROR: precision mismatch in mpi'
       error stop 'ERROR: precision mismatch in mpi'
    end if

# else
    UNUSED_DUMMY(comm_in); UNUSED_DUMMY(multigs2); UNUSED_DUMMY(err_unit_in)
# endif
    mp_initialized = .true.
  end subroutine init_mp

#ifdef MPI
  !> Initialise a communicator and associated variables
  !>
  !> Defaults to using `MPI_COMM_WORLD`, but can be set to another
  !> communicator `comm_in`.
  subroutine init_comm(comm, total_procs, rank, is_rank0, comm_in)
    !> Communicator to initialise
    integer, intent(out) :: comm
    !> Total number of processors in communicator
    integer, intent(out) :: total_procs
    !> This processor's rank
    integer, intent(out) :: rank
    !> True if this processor's rank is zero
    logical, intent(out) :: is_rank0
    !> Communicator to use instead of `MPI_COMM_WORLD`: if this is
    !> `MPI_COMM_NULL`, this is also initialised to `MPI_COMM_WORLD`
    integer, intent(inout), optional :: comm_in

    integer :: mpi_err

    ! Select the communicator: fall back to MPI_COMM_WORLD unless the
    ! caller provides a communicator of their own (a null one is first
    ! replaced by MPI_COMM_WORLD, and that replacement is passed back).
    comm = mpi_comm_world
    if (present(comm_in)) then
      if (comm_in == mp_comm_null) comm_in = mpi_comm_world
      comm = comm_in
    end if

!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER

    call mpi_comm_size(comm, total_procs, mpi_err)
    call mpi_comm_rank(comm, rank, mpi_err)
    is_rank0 = rank == 0

!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
  end subroutine init_comm
#endif

  !> How many procs are in passed communicator
  !>
  !> In a serial (no MPI) build this is always 1.
  subroutine nproc_comm(comm,nproc)
    integer, intent(in) :: comm !< Communicator handle to query
    integer, intent(out) :: nproc !< Number of processors in comm
#ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    call mpi_comm_size(comm,nproc,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
#else
    UNUSED_DUMMY(comm)
    nproc = 1
#endif
  end subroutine nproc_comm

  !> What is rank of current proc in passed communicator
  !>
  !> In a serial (no MPI) build this is always 0.
  subroutine rank_comm(comm,rank)
    integer, intent(in) :: comm !< Communicator handle to query
    integer, intent(out) :: rank !< This processor's rank within comm
#ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    call mpi_comm_rank(comm,rank,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
#else
    UNUSED_DUMMY(comm)
    rank = 0
#endif
  end subroutine rank_comm

  !> Switch the module communicator (and size/rank variables) between different scopes.
  subroutine scope (focus)
    !> Which scope to use. Should be one of [[allprocs]], [[multigs2procs]],
    !> [[subprocs]]. Other values are equivalent to [[subprocs]]
    integer, intent (in) :: focus
# ifdef MPI
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    ! Retarget the module-level pointers at the variables describing
    ! the requested communicator.
    select case (focus)
    case (allprocs)
       mp_comm => comm_all
       nproc => ntot_proc
       iproc => aproc
       proc0 => aproc0
    case (multigs2procs)
       mp_comm => comm_multigs2
       nproc => mulntot_proc
       iproc => mulproc
       proc0 => mulproc0
    case default
       ! subprocs, and any other value, selects the group communicator.
       mp_comm => comm_group
       nproc => ngroup_proc
       iproc => gproc
       proc0 => gproc0
    end select
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
# else
    UNUSED_DUMMY(focus)
# endif
  end subroutine scope

  !> Finalise MPI library if it hasn't been finalised already
  !>
  !> Also cleans up shared-memory state when built with SHMEM. Safe to
  !> call if MPI was already finalised elsewhere (mpi_finalized is
  !> checked first). Always clears [[mp_initialized]].
  subroutine finish_mp
#ifdef MPI
# ifdef SHMEM
    use shm_mpi3, only : shm_clean
# endif
    implicit none
    integer :: ierror
    logical :: fin
# ifdef SHMEM
    call shm_clean
# endif
    call mpi_finalized (fin, ierror)
    if(.not.fin) call mpi_finalize (ierror)
#endif
    mp_initialized = .false.
  end subroutine finish_mp

! ************** allgathers *****************************
  !> A subroutine to do an allgather operation, gathering `count`
  !! elements from every processor into `out` on all processors.
  !! (Note: unlike the allgatherv routines below, the receive count is
  !! a single scalar -- every processor contributes the same amount.)
  subroutine allgather_integer_array_1to1(arr,count,out,recvcnts)
    implicit none
    integer, dimension(:), intent(in) :: arr  !< The data to gather
    integer, intent(in) :: count !< How much data to gather, <=SIZE(arr)
    integer, dimension(:), intent(out) :: out !< The gathered data
    integer, intent(in) :: recvcnts !< Number of elements received from each proc (scalar, same for all procs)
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER

    !Do the gather
    call mpi_allgather(arr,count,MPI_INTEGER,out,recvcnts,&
         MPI_INTEGER,mp_comm,ierror)

!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    ! Serial build: the "gather" is just a copy of the local data.
    out = reshape(arr, shape(out))
    UNUSED_DUMMY(count); UNUSED_DUMMY(recvcnts)
#endif
  end subroutine allgather_integer_array_1to1

  !> A subroutine to do a allgatherv operation, sending recvcnts(iproc)
  !! data from the iproc'th processor to all others starting at arr(start).
  subroutine allgatherv_complex_array_1to1(arr,count,out,recvcnts,displs)
    implicit none
    complex, dimension(:), intent(in) :: arr  !< The data to gather
    integer, intent(in) :: count !< How much data to gather, <=SIZE(arr)
    complex, dimension(:), intent(out) :: out !< The gathered data
    integer, dimension(:), intent(in) :: recvcnts !< Array detailing how much data to expect from each proc
    integer, dimension(:), intent(in) :: displs !< Array detailing offset in array where gathered data is to be stored
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER

    !Do the gather
    call mpi_allgatherv(arr,count,mpicmplx,out,recvcnts,displs,&
         mpicmplx,mp_comm,ierror)

!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    ! Serial build: the "gather" is just a copy of the local data.
    out = reshape(arr, shape(out))
    UNUSED_DUMMY(count); UNUSED_DUMMY(recvcnts); UNUSED_DUMMY(displs)
#endif
  end subroutine allgatherv_complex_array_1to1

  !> A subroutine to do a non-blocking allgatherv operation, sending
  !! recvcnts(iproc) data from the iproc'th processor to all others
  !! starting at arr(start).
  !!
  !! Requires MPI-3 for mpi_iallgatherv; without it this silently falls
  !! back to the blocking variant and returns a null request.
  subroutine nb_allgatherv_complex_array_1to1(arr,count,out,recvcnts,displs,request)
    implicit none
    complex, dimension(:), intent(in) :: arr  !< The data to gather
    integer, intent(in) :: count !< How much data to gather, <=SIZE(arr)
    complex, dimension(:), intent(out) :: out !< The gathered data
    integer, dimension(:), intent(in) :: recvcnts !< Array detailing how much data to expect from each proc
    integer, dimension(:), intent(in) :: displs !< Array detailing offset in array where gathered data is to be stored
    integer, intent(out) :: request !< Request handle to wait on (mp_request_null if the fallback ran)
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER

    !Do the gather
    call mpi_iallgatherv(arr,count,mpicmplx,out,recvcnts,displs,&
         mpicmplx,mp_comm,request,ierror)

!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    ! No MPI-3: complete the operation immediately with the blocking
    ! version; the null request makes any later wait a no-op.
    call allgatherv_complex_array_1to1(arr,count,out,recvcnts,displs)
    request = mp_request_null
#endif
  end subroutine nb_allgatherv_complex_array_1to1

  !> A subroutine to do a allgatherv operation, sending recvcnts(iproc)
  !! data from the iproc'th processor to all others starting at arr(start).
  !! The gathered data is stored in a rank-3 output array.
  subroutine allgatherv_complex_array_1to3(arr,count,out,recvcnts,displs)
    implicit none
    complex, dimension(:), intent(in) :: arr  !< The data to gather
    integer, intent(in) :: count !< How much data to gather, <=SIZE(arr)
    complex, dimension(:,:,:), intent(out) :: out !< The gathered data
    integer, dimension(:), intent(in) :: recvcnts !< Array detailing how much data to expect from each proc
    integer, dimension(:), intent(in) :: displs !< Array detailing offset in array where gathered data is to be stored
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    !Do the gather
    call mpi_allgatherv(arr,count,mpicmplx,out,recvcnts,displs,&
         mpicmplx,mp_comm,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    ! Serial build: the "gather" is just a reshaped copy of the local data.
    out = reshape(arr, shape(out))
    UNUSED_DUMMY(count); UNUSED_DUMMY(recvcnts); UNUSED_DUMMY(displs)
#endif
  end subroutine allgatherv_complex_array_1to3

  !> A subroutine to do a allgatherv operation, sending recvcnts(iproc)
  !! data from the iproc'th processor to all others starting at arr(start).
  !! Operates over the supplied sub-communicator rather than the module
  !! communicator mp_comm.
  subroutine allgatherv_complex_array_1to1_sub(arr,count,out,recvcnts,displs,sub_comm)
    implicit none
    complex, dimension(:), intent(in) :: arr  !< The data to gather
    integer, intent(in) :: count !< How much data to gather, <=SIZE(arr)
    complex, dimension(:), intent(out) :: out !< The gathered data
    integer, dimension(:), intent(in) :: recvcnts !< Array detailing how much data to expect from each proc
    integer, dimension(:), intent(in) :: displs !< Array detailing offset in array where gathered data is to be stored
    integer, intent(in) :: sub_comm !< Sub-communicator handle
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    !Do the gather
    call mpi_allgatherv(arr,count,mpicmplx,out,recvcnts,displs,&
         mpicmplx,sub_comm,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    ! Serial build: the "gather" is just a copy of the local data.
    out = reshape(arr, shape(out))
    UNUSED_DUMMY(count); UNUSED_DUMMY(recvcnts); UNUSED_DUMMY(displs); UNUSED_DUMMY(sub_comm)
#endif
  end subroutine allgatherv_complex_array_1to1_sub

  !> Perform an allgatherv over the sub-communicator sub_comm, gathering
  !! 1D contributions (the first count elements of arr on each processor)
  !! into the 3D array out on all members. recvcnts(i)/displs(i) give the
  !! amount and (flattened) offset of the data from processor i.
  !! Without MPI this reduces to copying arr into out (reshaped).
  subroutine allgatherv_complex_array_1to3_sub(arr,count,out,recvcnts,displs,sub_comm)
    implicit none
    complex, dimension(:), intent(in) :: arr  !< The data to gather
    integer, intent(in) :: count !< How much data to gather, <=SIZE(arr)
    complex, dimension(:,:,:), intent(out) :: out !< The gathered data
    integer, dimension(:), intent(in) :: recvcnts !< Array detailing how much data to expect from each proc
    integer, dimension(:), intent(in) :: displs !< Array detailing offset in array where gathered data is to be stored
    integer, intent(in) :: sub_comm !< Sub-communicator handle
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    !Do the gather
    call mpi_allgatherv(arr,count,mpicmplx,out,recvcnts,displs,&
         mpicmplx,sub_comm,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
#else
    out = reshape(arr, shape(out))
    UNUSED_DUMMY(count); UNUSED_DUMMY(recvcnts); UNUSED_DUMMY(displs); UNUSED_DUMMY(sub_comm)
#endif
  end subroutine allgatherv_complex_array_1to3_sub

  !> Non-blocking variant of allgatherv_complex_array_1to3_sub, using
  !! mpi_iallgatherv when compiled with MPI3 support. The returned
  !! request must later be completed (e.g. with wait/waitall). Without
  !! MPI3 the blocking version is called and request is set to
  !! mp_request_null.
  subroutine nb_allgatherv_complex_array_1to3_sub(arr,count,out,recvcnts,displs,sub_comm,request)
    implicit none
    complex, dimension(:), intent(in) :: arr  !< The data to gather
    integer, intent(in) :: count !< How much data to gather, <=SIZE(arr)
    complex, dimension(:,:,:), intent(out) :: out !< The gathered data
    integer, dimension(:), intent(in) :: recvcnts !< Array detailing how much data to expect from each proc
    integer, dimension(:), intent(in) :: displs !< Array detailing offset in array where gathered data is to be stored
    integer, intent(in) :: sub_comm !< Sub-communicator handle
    integer, intent(out) :: request !< Handle for the outstanding non-blocking gather
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    !Do the gather
    call mpi_iallgatherv(arr,count,mpicmplx,out,recvcnts,displs,&
         mpicmplx,sub_comm,request,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
#else
    call allgatherv_complex_array_1to3_sub(arr,count,out,recvcnts,displs,sub_comm)
    request = mp_request_null
#endif
  end subroutine nb_allgatherv_complex_array_1to3_sub


! ************** comm utils *****************************
  !> Free the MPI communicator with handle comm and set the handle to
  !! mp_comm_null on return. Without MPI only the handle is reset.
  subroutine free_comm_id (comm)
    implicit none
    integer, intent(inout) :: comm !< Communicator id
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    call mpi_comm_free(comm,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
# endif
    comm = mp_comm_null
  end subroutine free_comm_id

  !> Free the MPI communicator held in the comm_type object comm and
  !! reset all of its members (id to mp_comm_null, rank/size to -1,
  !! proc0 to false). Without MPI only the members are reset.
  subroutine free_comm_type (comm)
    implicit none
    type(comm_type), intent(inout) :: comm !< Communicator object
#ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    call mpi_comm_free(comm%id,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
#endif
    comm%id = mp_comm_null
    comm%iproc = -1
    comm%nproc = -1
    comm%proc0 = .false.
  end subroutine free_comm_type

  !> Split the global communicator into two pieces: one containing the
  !! first nprocs_new processors (colour 0), and one containing the
  !! remainder (colour 1). For the remainder, the module flag `included`
  !! is set to false, meaning those processors will lie idle.
  subroutine use_nproc(nprocs_new)
    implicit none
    integer, intent(in) :: nprocs_new
    integer :: colour
    !Processors with rank below nprocs_new take part in the work
    included = (iproc < nprocs_new)
    colour = 1
    if (included) colour = 0
    call split_all(colour)
  end subroutine use_nproc

  !> Undo a previous split_all: free the current global communicator
  !! comm_all, restore it to old_comm (the handle saved before the
  !! split), recompute the processor count and rank, mark all
  !! processors as included, and reset the scope to allprocs.
  subroutine unsplit_all(old_comm)
    implicit none
    integer, intent(in) :: old_comm !< Communicator handle saved before split_all
#ifdef MPI
    integer :: ierror
    call free_comm_id(comm_all)
    comm_all = old_comm
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    call mpi_comm_size (comm_all, ntot_proc, ierror)
    call mpi_comm_rank (comm_all, aproc, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    aproc0 = aproc == 0
    included = .true.
    call scope (allprocs)
#else
    UNUSED_DUMMY(old_comm)
#endif
  end subroutine unsplit_all

  !> A routine to split the global communicator into sub-groups
  !! based on each procs specific colour "col". comm_all is then overwritten
  !! to be the new split communicator.
  !! This is different to job fork, which has the group and global communicators.
  !! The global communicator is replaced.
  !! This action can be undone with unsplit_all.
  !! If the old comm_all is not mpi_comm_world, you should save its value
  !! somewhere before calling this so that it can later be restored
  !! (and freed) via unsplit_all.
  subroutine split_all (col)
    implicit none
    integer, intent(inout) :: col !< Processors colour
#ifdef MPI
    integer :: ierror, new_comm
!    if (scope == subprocs) then
!      write (*,*) 'Can only call split_all with global scope'
!      call mpi_abort(comm_all, 1, ierror)
!    end if
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !Split the comm group. The current rank aproc is used as the key so
    !the rank order is preserved in the new communicator.
    call mpi_comm_split(comm_all,col,aproc,new_comm,ierror)
    comm_all = new_comm
    call mpi_comm_size (comm_all, ntot_proc, ierror)
    call mpi_comm_rank (comm_all, aproc, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    aproc0 = aproc == 0
    call scope (allprocs)
#else
    UNUSED_DUMMY(col)
#endif
  end subroutine split_all

  !> A routine to split the mp_comm communicator into sub-groups
  !! based on each procs specific colour "col". The sub communicator's
  !! handle is passed back in new_comm. A constant key of 0 is used, so
  !! ranks in the new communicator follow the old rank order.
  !!
  !! In future we may wish to make split an interface to allow for
  !! user specific keys (to reorder processor ranks) and to specify
  !! a different communicator to split.
  subroutine split_nokey (col,new_comm)
    implicit none
    !NOTE(review): col is never modified here; intent(inout) looks like
    !it could be intent(in) — confirm against the UNUSED_DUMMY macro
    !expansion before changing.
    integer, intent(inout) :: col !< Processors colour
    integer, intent(out) :: new_comm !< The new sub communicator's handle
#ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !Split the comm group, note we use a constant key of 0 across all procs
    !meaning that the rank order is the same in the old and new communicators
    call mpi_comm_split(mp_comm,col,0,new_comm,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
#else
    UNUSED_DUMMY(col)
    new_comm=-1
#endif
  end subroutine split_nokey

  !> A routine to split the mp_comm communicator into sub-groups
  !! based on each procs specific colour "col" and ranked by key. The sub
  !! communicator's handle is passed back in new_comm. Without MPI,
  !! new_comm is set to -1.
  subroutine split_key (col,key,new_comm)
    implicit none
    integer, intent(in) :: col !< Processors colour
    integer, intent(in) :: key !< Processors key, used to determine rank
    integer, intent(out) :: new_comm !< The new sub communicator's handle
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !Split the comm group
    call mpi_comm_split(mp_comm,col,key,new_comm,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
# else
    UNUSED_DUMMY(col); UNUSED_DUMMY(key)
    new_comm = -1
# endif
  end subroutine split_key

  !> A routine to split the mp_comm communicator into sub-groups
  !! based on each procs specific colour "col". The sub communicator is
  !! passed back in new_comm as a comm_type, populated with its id, size,
  !! this processor's rank and a proc0 flag. If the split yields the null
  !! communicator (or MPI is not compiled in) only the id is set.
  subroutine split_nokey_to_commtype (col,new_comm)
    implicit none
    integer, intent(in) :: col !< Processors colour
    type(comm_type), intent(out) :: new_comm !< The new sub communicator's handle
# ifdef MPI
    integer :: ierror
# endif
    integer :: comm_id, nproc, iproc
# ifdef MPI
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !Split the comm group, note we use a constant key of 0 across all procs
    !meaning that the rank order is the same in the old and new communicators
    call mpi_comm_split(mp_comm,col,0,comm_id,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !If the resulting communicator is the null communicator then
    !just return the default type with the appropriate id.
    if(comm_id == mp_comm_null) then
       new_comm%id = comm_id
       return
    end if
# else
    UNUSED_DUMMY(col)
    comm_id = mp_comm_null
# endif
    new_comm%id=comm_id
    call nproc_comm(comm_id,nproc)
    new_comm%nproc=nproc
    call rank_comm(comm_id,iproc)
    new_comm%iproc=iproc
    new_comm%proc0=iproc.eq.0
  end subroutine split_nokey_to_commtype

  !> A routine to split the mp_comm communicator into sub-groups
  !! based on each procs specific colour "col" and ranked by key. The sub
  !! communicator is passed back in new_comm as a comm_type, populated
  !! with its id, size, this processor's rank and a proc0 flag. If the
  !! split yields the null communicator (or MPI is not compiled in) only
  !! the id is set.
  subroutine split_key_to_commtype (col,key,new_comm)
    implicit none
    integer, intent(in) :: col !< Processors colour
    integer, intent(in) :: key !< Processors key, used to determine rank
    type(comm_type), intent(out) :: new_comm !< The new sub communicator's handle
# ifdef MPI
    integer :: ierror
# endif
    integer :: comm_id, nproc, iproc
# ifdef MPI
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !Split the comm group
    call mpi_comm_split(mp_comm,col,key,comm_id,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !If the resulting communicator is the null communicator then
    !just return the default type with the appropriate id.
    if(comm_id == mp_comm_null) then
       new_comm%id = comm_id
       return
    end if
# else
    UNUSED_DUMMY(col); UNUSED_DUMMY(key)
    comm_id = mp_comm_null
# endif
    new_comm%id=comm_id
    !Use distinct variables for the size and rank queries; previously a
    !single variable was reused for both, which obscured intent (see
    !split_nokey_to_commtype for the matching pattern).
    call nproc_comm(comm_id,nproc)
    new_comm%nproc=nproc
    call rank_comm(comm_id,iproc)
    new_comm%iproc=iproc
    new_comm%proc0=iproc.eq.0
  end subroutine split_key_to_commtype

  !> A routine to split the subcommunicator sub into sub-groups
  !! based on each procs specific colour "col". The sub communicator is
  !! passed back in new_comm as a comm_type, populated with its id, size,
  !! this processor's rank and a proc0 flag. If the split yields the null
  !! communicator (or MPI is not compiled in) only the id is set.
  subroutine split_nokey_to_commtype_sub (col,new_comm,sub)
    implicit none
    integer, intent(in) :: col !< Processors colour
    type(comm_type), intent(out) :: new_comm !< The new sub communicator's handle
    integer, intent(in) :: sub !< Subcommunicator to split
# ifdef MPI
    integer :: ierror
# endif
    integer :: comm_id, nproc, iproc
    !Split the comm group, note we use a constant key of 0 across all procs
    !meaning that the rank order is the same in the old and new communicators
# ifdef MPI
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    call mpi_comm_split(sub,col,0,comm_id,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !If the resulting communicator is the null communicator then
    !just return the default type with the appropriate id.
    if(comm_id == mp_comm_null) then
       new_comm%id = comm_id
       return
    end if
# else
    UNUSED_DUMMY(col); UNUSED_DUMMY(sub)
    comm_id = mp_comm_null
# endif
    new_comm%id=comm_id
    call nproc_comm(comm_id,nproc)
    new_comm%nproc=nproc
    call rank_comm(comm_id,iproc)
    new_comm%iproc=iproc
    new_comm%proc0=iproc.eq.0
  end subroutine split_nokey_to_commtype_sub

  !> A routine to split the subcommunicator sub into sub-groups
  !! based on each procs specific colour "col" and ranked by key. The sub
  !! communicator is passed back in new_comm as a comm_type, populated
  !! with its id, size, this processor's rank and a proc0 flag. If the
  !! split yields the null communicator (or MPI is not compiled in) only
  !! the id is set.
  subroutine split_key_to_commtype_sub (col,key,new_comm,sub)
    implicit none
    integer, intent(in) :: col !< Processors colour
    integer, intent(in) :: key !< Processors key, used to determine rank
    type(comm_type), intent(out) :: new_comm !< The new sub communicator's handle
    integer, intent(in) :: sub !< Subcommunicator to split
# ifdef MPI
    integer :: ierror
# endif
    integer :: comm_id, nproc, iproc
# ifdef MPI
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER
    !Split the comm group
    call mpi_comm_split(sub,col,key,comm_id,ierror)
!$OMP MASTER
    call time_message(.false., time_mp_other, ' MPI Overheads')
!$OMP END MASTER

    !If the resulting communicator is the null communicator then
    !just return the default type with the appropriate id.
    if(comm_id == mp_comm_null) then
       new_comm%id = comm_id
       return
    end if
# else
    UNUSED_DUMMY(col); UNUSED_DUMMY(key); UNUSED_DUMMY(sub)
    comm_id = mp_comm_null
# endif
    new_comm%id=comm_id
    !Use distinct variables for the size and rank queries; previously a
    !single variable was reused for both, which obscured intent (see
    !split_nokey_to_commtype_sub for the matching pattern).
    call nproc_comm(comm_id,nproc)
    new_comm%nproc=nproc
    call rank_comm(comm_id,iproc)
    new_comm%iproc=iproc
    new_comm%proc0=iproc.eq.0
  end subroutine split_key_to_commtype_sub

! ************** broadcasts *****************************

  !> Broadcast the character string char from processor 0 of mp_comm to
  !! all other processors. No-op without MPI.
  subroutine broadcast_character (char)
    implicit none
    character(*), intent (in out) :: char
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (char, len(char), MPI_CHARACTER, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(char)
# endif
  end subroutine broadcast_character

  !> Broadcast an array of character strings (each of the same length)
  !! from processor 0 of mp_comm to all other processors. No-op without
  !! MPI.
  subroutine broadcast_character_array (char)
    implicit none
    character(len = *), dimension(:), intent (in out) :: char
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    !Total element count is number of strings times the (common) length
    call mpi_bcast (char, size(char) * len(char(1)), MPI_CHARACTER, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    !Use the standard UNUSED_DUMMY macro, consistent with the other
    !broadcast routines, instead of a dead write statement.
    UNUSED_DUMMY(char)
# endif
  end subroutine broadcast_character_array

  !> Broadcast the integer i from processor 0 of mp_comm to all other
  !! processors. No-op without MPI.
  subroutine broadcast_integer (i)
    implicit none
    integer, intent (in out) :: i
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (i, 1, MPI_INTEGER, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine broadcast_integer

  !> Broadcast the integer array i from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_integer_array (i)
    implicit none
    integer, dimension (:), intent (in out) :: i
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (i, size(i), MPI_INTEGER, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine broadcast_integer_array

  !> Broadcast the integer 2D array i from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_integer_2array (i)
    implicit none
    integer, dimension (:,:), intent (in out) :: i
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (i, size(i), MPI_INTEGER, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine broadcast_integer_2array

  !> Broadcast the real a from processor 0 of mp_comm to all other
  !! processors. No-op without MPI.
  subroutine broadcast_real (a)
    implicit none
    real, intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (a, 1, mpireal, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine broadcast_real

  !> Broadcast the real array a from processor 0 of mp_comm to all other
  !! processors. No-op without MPI.
  subroutine broadcast_real_array (a)
    implicit none
    real, dimension (:), intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (a, size(a), mpireal, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine broadcast_real_array

  !> Broadcast the real 2D array a from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_real_2array(a)
    implicit none
    real, dimension(:,:), intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (a, size(a), mpireal, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine broadcast_real_2array

  !> Broadcast the real 3D array a from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_real_3array(a)
    implicit none
    real, dimension(:,:,:), intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (a, size(a), mpireal, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine broadcast_real_3array

  !> Broadcast the real 4D array a from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_real_4array(a)
    implicit none
    real, dimension(:,:,:,:), intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (a, size(a), mpireal, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine broadcast_real_4array

  !> Broadcast the real 5D array a from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_real_5array(a)
    implicit none
    real, dimension(:,:,:,:,:), intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (a, size(a), mpireal, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine broadcast_real_5array

  !> Broadcast the complex z from processor 0 of mp_comm to all other
  !! processors. No-op without MPI.
  subroutine broadcast_complex (z)
    implicit none
    complex, intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, 1, mpicmplx, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine broadcast_complex

  !> Broadcast the complex array z from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_complex_array (z)
    implicit none
    complex, dimension (:), intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine broadcast_complex_array

#ifndef SINGLE_PRECISION
  !> Broadcast the single-precision (kind_rs) complex array z from
  !! processor 0 of mp_comm to all other processors, using the mpicmplx8
  !! datatype. Only compiled when the default precision is not single.
  !! No-op without MPI.
  subroutine broadcast_complex8_array (z)
    use constants, only: kind_rs
    implicit none
    complex (kind=kind_rs), dimension (:), intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx8, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine broadcast_complex8_array
#endif

  !> Broadcast the complex 2D array z from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_complex_2array (z)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine broadcast_complex_2array

  !> Broadcast the complex 3D array z from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_complex_3array (z)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine broadcast_complex_3array

  !> Broadcast the complex 4D array z from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_complex_4array (z)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine broadcast_complex_4array

  !> Broadcast the logical f from processor 0 of mp_comm to all other
  !! processors, aborting the whole job if the broadcast reports an
  !! error. No-op without MPI.
  subroutine broadcast_logical (f)
    implicit none
    logical, intent (in out) :: f
# ifdef MPI
    integer :: ierror, rc
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (f, 1, MPI_LOGICAL, 0, mp_comm, ierror)
    if (ierror .ne. MPI_SUCCESS) then
       !Pass the actual bcast error code as the abort code; previously
       !an uninitialised variable was passed here.
       rc = ierror
       call MPI_ABORT(MPI_COMM_WORLD, rc, ierror)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(f)
# endif
  end subroutine broadcast_logical

  !> Broadcast the logical array f from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_logical_array (f)
    implicit none
    logical, dimension (:), intent (in out) :: f
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (f, size(f), MPI_LOGICAL, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(f)
# endif
  end subroutine broadcast_logical_array

  !> Broadcast the logical 2D array f from processor 0 of mp_comm to all
  !! other processors. No-op without MPI.
  subroutine broadcast_logical_2array (f)
    implicit none
    logical, dimension (:,:), intent (in out) :: f
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (f, size(f), MPI_LOGICAL, 0, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(f)
# endif
  end subroutine broadcast_logical_2array

  !> Broadcast the logical f from processor src of mp_comm to all other
  !! processors. Without MPI this aborts if src is nonzero (such a
  !! processor cannot exist in serial).
  subroutine bcastfrom_logical (f, src)
    implicit none
    logical, intent (in out) :: f
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (f, 1, MPI_LOGICAL, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(f)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_logical

  !> Broadcast the logical array f from processor src of mp_comm to all
  !! other processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_logical_array (f, src)
    implicit none
    logical, dimension (:), intent (in out) :: f
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (f, size(f), MPI_LOGICAL, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(f)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_logical_array

  !> Broadcast the character string c from processor src of mp_comm to
  !! all other processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_character (c, src)
    implicit none
    character(*), intent (in out) :: c
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (c, len(c), MPI_CHARACTER, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(c)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_character

  !> Broadcast the integer i from processor src of mp_comm to all other
  !! processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_integer (i, src)
    implicit none
    integer, intent (in out) :: i
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (i, 1, MPI_INTEGER, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_integer

  !> Broadcast the integer array i from processor src of mp_comm to all
  !! other processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_integer_array (i, src)
    implicit none
    integer, dimension (:), intent (in out) :: i
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (i, size(i), MPI_INTEGER, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_integer_array

  !> Broadcast the real a from processor src of mp_comm to all other
  !! processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_real (a, src)
    implicit none
    real, intent (in out) :: a
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (a, 1, mpireal, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_real

  !> Broadcast the real array a from processor src of mp_comm to all
  !! other processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_real_array (a, src)
    implicit none
    real, dimension (:), intent (in out) :: a
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (a, size(a), mpireal, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_real_array

  !> Broadcast the complex z from processor src of mp_comm to all other
  !! processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_complex (z, src)
    implicit none
    complex, intent (in out) :: z
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, 1, mpicmplx, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_complex

  !> Broadcast the complex array z from processor src of mp_comm to all
  !! other processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_complex_array (z, src)
    implicit none
    complex, dimension (:), intent (in out) :: z
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_complex_array

  !> Broadcast the complex 2D array z from processor src of mp_comm to
  !! all other processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_complex_2array (z, src)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_complex_2array

  !> Broadcast the complex 3D array z from processor src of mp_comm to
  !! all other processors. Without MPI this aborts if src is nonzero.
  subroutine bcastfrom_complex_3array (z, src)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
    integer, intent (in) :: src
# ifdef MPI
    integer :: ierror
    !Guard the timing calls with OMP MASTER, matching every sibling
    !broadcast routine, so timers are updated once per team rather than
    !once per thread when called inside a parallel region.
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, src, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (src /= 0) call mp_abort ("broadcast from")
# endif
  end subroutine bcastfrom_complex_3array

!******************BROADCAST SUB*************************

  !> Broadcast the complex array z from processor src of the
  !! sub-communicator sub to all other members of sub. Without MPI this
  !! aborts if src is nonzero.
  subroutine bcastfrom_complex_array_sub (z, src, sub)
    implicit none
    complex, dimension (:), intent (in out) :: z
    integer, intent (in) :: src
    integer, intent (in) :: sub
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, src, sub, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(sub)
    if (src /= 0) call mp_abort ("broadcast from sub")
# endif
  end subroutine bcastfrom_complex_array_sub

  !> Broadcast the complex 2D array z from processor src of the
  !! sub-communicator sub to all other members of sub. Without MPI this
  !! aborts if src is nonzero.
  subroutine bcastfrom_complex_2array_sub (z, src, sub)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
    integer, intent (in) :: src
    integer, intent (in) :: sub
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, src, sub, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(sub)
    if (src /= 0) call mp_abort ("broadcast from sub")
# endif
  end subroutine bcastfrom_complex_2array_sub

  !> Broadcast a 3D complex array from rank src over the communicator
  !> handle sub (an integer MPI communicator, not a comm_type).
  subroutine bcastfrom_complex_3array_sub (z, src, sub)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
    integer, intent (in) :: src
    integer, intent (in) :: sub
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, src, sub, &
         mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(sub)
    if (src /= 0) call mp_abort ("broadcast from sub")
# endif
  end subroutine bcastfrom_complex_3array_sub

  !> Broadcast a 4D complex array from rank src over the communicator
  !> handle sub (an integer MPI communicator, not a comm_type).
  subroutine bcastfrom_complex_4array_sub (z, src, sub)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
    integer, intent (in) :: src
    integer, intent (in) :: sub
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_bcast (z, size(z), mpicmplx, src, sub, &
         mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(sub)
    if (src /= 0) call mp_abort ("broadcast from sub")
# endif
  end subroutine bcastfrom_complex_4array_sub

! ************** reductions ***********************

  !> MPI_SUM reduce of a scalar integer onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites i.
  subroutine sum_reduce_integer (i, dest)
    implicit none
    integer, intent (in out) :: i
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (i, i, 1, MPI_INTEGER, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_integer

  !> MPI_SUM reduce of a 1D integer array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites i.
  subroutine sum_reduce_integer_array (i, dest)
    implicit none
    integer, dimension (:), intent (in out) :: i
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (i, i, size(i), MPI_INTEGER, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_integer_array

  !> MPI_SUM reduce of a 2D integer array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites a.
  subroutine sum_reduce_integer_2array (a, dest)
    implicit none
    integer, dimension (:,:), intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, a, size(a), MPI_INTEGER, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (a, a, size(a), MPI_INTEGER, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_integer_2array

  !> Reduce a scalar logical onto rank dest of mp_comm. Note that,
  !> despite the "sum" interface name, the combining operation is
  !> MPI_LOR (logical OR) — the natural "sum" for logicals.
  subroutine sum_reduce_logical (a, dest)
    implicit none
    logical, intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, a, 1, MPI_LOGICAL, MPI_LOR, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (a, a, 1, MPI_LOGICAL, MPI_LOR, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_logical

  !> MPI_SUM reduce of a scalar real onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites a.
  subroutine sum_reduce_real (a, dest)
    implicit none
    real, intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, a, 1, mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (a, a, 1, mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_real

  !> MPI_SUM reduce of a 1D real array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites a.
  subroutine sum_reduce_real_array (a, dest)
    implicit none
    real, dimension (:), intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (a, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_real_array

  !> MPI_SUM reduce of a 2D real array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites a.
  subroutine sum_reduce_real_2array (a, dest)
    implicit none
    real, dimension (:,:), intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (a, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_real_2array

  !> MPI_SUM reduce of a 3D real array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites a.
  subroutine sum_reduce_real_3array (a, dest)
    implicit none
    real, dimension (:,:,:), intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (a, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_real_3array

  !> MPI_SUM reduce of a 4D real array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites a.
  subroutine sum_reduce_real_4array (a, dest)
    implicit none
    real, dimension (:,:,:,:), intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (a, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_real_4array

  !> MPI_SUM reduce of a 5D real array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites a.
  subroutine sum_reduce_real_5array (a, dest)
    implicit none
    real, dimension (:,:,:,:,:), intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (a, a, size(a), mpireal, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_real_5array

  !> MPI_SUM reduce of a scalar complex onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex (z, dest)
    implicit none
    complex, intent (in out) :: z
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, 1, mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (z, z, 1, mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex

  !> MPI_SUM reduce of a 1D complex array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_array (z, dest)
    implicit none
    complex, dimension (:), intent (in out) :: z
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_array

  !> MPI_SUM reduce of a 2D complex array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_2array (z, dest)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_2array

  !> MPI_SUM reduce of a 3D complex array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_3array (z, dest)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_3array

  !> MPI_SUM reduce of a 4D complex array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_4array (z, dest)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_4array

  !> MPI_SUM reduce of a 5D complex array onto rank dest of mp_comm.
  !> The destination rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_5array (z, dest)
    implicit none
    complex, dimension (:,:,:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_5array

!******************NON-BLOCKING SUM REDUCE*************************

  !> Non-blocking MPI_SUM reduce of a scalar complex onto rank dest;
  !> request must later be completed (wait/waitall). Without MPI3 this
  !> falls back to the blocking reduce and returns a null request.
  subroutine nb_sum_reduce_complex (z, dest, request)
    implicit none
    complex, intent (in out) :: z
    integer, intent (in) :: dest
    integer, intent (out) :: request
# ifdef MPI3
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_ireduce (MPI_IN_PLACE, z, 1, mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    else
       call mpi_ireduce (z, z, 1, mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_reduce(z, dest)
    request = mp_request_null
# endif
  end subroutine nb_sum_reduce_complex

  !> Non-blocking MPI_SUM reduce of a 1D complex array onto rank dest;
  !> request must later be completed (wait/waitall). Without MPI3 this
  !> falls back to the blocking reduce and returns a null request.
  subroutine nb_sum_reduce_complex_array (z, dest, request)
    implicit none
    complex, dimension (:), intent (in out) :: z
    integer, intent (in) :: dest
    integer, intent (out) :: request
# ifdef MPI3
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_ireduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    else
       call mpi_ireduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_reduce(z, dest)
    request = mp_request_null
# endif
  end subroutine nb_sum_reduce_complex_array

  !> Non-blocking MPI_SUM reduce of a 2D complex array onto rank dest;
  !> request must later be completed (wait/waitall). Without MPI3 this
  !> falls back to the blocking reduce and returns a null request.
  subroutine nb_sum_reduce_complex_2array (z, dest, request)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
    integer, intent (in) :: dest
    integer, intent (out) :: request
# ifdef MPI3
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_ireduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    else
       call mpi_ireduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_reduce(z, dest)
    request = mp_request_null
# endif
  end subroutine nb_sum_reduce_complex_2array

  !> Non-blocking MPI_SUM reduce of a 3D complex array onto rank dest;
  !> request must later be completed (wait/waitall). Without MPI3 this
  !> falls back to the blocking reduce and returns a null request.
  subroutine nb_sum_reduce_complex_3array (z, dest, request)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
    integer, intent (out) :: request
# ifdef MPI3
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_ireduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    else
       call mpi_ireduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_reduce_complex_3array(z, dest)
    request = mp_request_null
# endif
  end subroutine nb_sum_reduce_complex_3array

  !> Non-blocking MPI_SUM reduce of a 4D complex array onto rank dest;
  !> request must later be completed (wait/waitall). Without MPI3 this
  !> falls back to the blocking reduce and returns a null request.
  subroutine nb_sum_reduce_complex_4array (z, dest, request)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
    integer, intent (out) :: request
# ifdef MPI3
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_ireduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    else
       call mpi_ireduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_reduce_complex_4array(z, dest)
    request = mp_request_null
# endif
  end subroutine nb_sum_reduce_complex_4array

  !> Non-blocking MPI_SUM reduce of a 5D complex array onto rank dest;
  !> request must later be completed (wait/waitall). Without MPI3 this
  !> falls back to the blocking reduce and returns a null request.
  subroutine nb_sum_reduce_complex_5array (z, dest, request)
    implicit none
    complex, dimension (:,:,:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
    integer, intent (out) :: request
# ifdef MPI3
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (iproc == dest) then
       call mpi_ireduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    else
       call mpi_ireduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            mp_comm, request, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_reduce(z, dest)
    request = mp_request_null
# endif
  end subroutine nb_sum_reduce_complex_5array


!******************SUM REDUCE SUB*************************

  !> MPI_SUM reduce of a 1D complex array onto rank dest of the
  !> sub-communicator sub; dest is a rank within sub. The destination
  !> rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_array_sub (z, dest, sub)
    implicit none
    complex, dimension (:), intent (in out) :: z
    integer, intent (in) :: dest
    type(comm_type), intent(in) :: sub
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (sub%iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(sub)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_array_sub

  !> MPI_SUM reduce of a 2D complex array onto rank dest of the
  !> sub-communicator sub; dest is a rank within sub. The destination
  !> rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_2array_sub (z, dest, sub)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
    integer, intent (in) :: dest
    type(comm_type), intent(in) :: sub
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (sub%iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(sub)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_2array_sub

  !> MPI_SUM reduce of a 3D complex array onto rank dest of the
  !> sub-communicator sub; dest is a rank within sub. The destination
  !> rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_3array_sub (z, dest, sub)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
    type(comm_type), intent(in) :: sub
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (sub%iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(sub)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_3array_sub

  !> MPI_SUM reduce of a 4D complex array onto rank dest of the
  !> sub-communicator sub; dest is a rank within sub. The destination
  !> rank passes MPI_IN_PLACE so the result overwrites z.
  subroutine sum_reduce_complex_4array_sub (z, dest, sub)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
    type(comm_type), intent(in) :: sub
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (sub%iproc == dest) then
       call mpi_reduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, mpi_err)
    else
       call mpi_reduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(sub)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine sum_reduce_complex_4array_sub

!******************NON-BLOCKING SUM REDUCE SUB*************************

  !> Non-blocking MPI_SUM reduce of a 1D complex array onto rank dest of
  !> the sub-communicator sub; request must later be completed. Without
  !> MPI3 this falls back to the blocking sub-reduce and a null request.
  subroutine nb_sum_reduce_complex_array_sub (z, dest, sub, request)
    implicit none
    complex, dimension (:), intent (in out) :: z
    integer, intent (in) :: dest
    type(comm_type), intent(in) :: sub
    integer, intent (out) :: request
# ifdef MPI3
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (sub%iproc == dest) then
       call mpi_ireduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, request, mpi_err)
    else
       call mpi_ireduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, request, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_reduce_complex_array_sub(z, dest, sub)
    request = mp_request_null
# endif
  end subroutine nb_sum_reduce_complex_array_sub

  !> Non-blocking MPI_SUM reduce of a 4D complex array onto rank dest of
  !> the sub-communicator sub; request must later be completed. Without
  !> MPI3 this falls back to the blocking sub-reduce and a null request.
  subroutine nb_sum_reduce_complex_4array_sub (z, dest, sub, request)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
    integer, intent (in) :: dest
    type(comm_type), intent(in) :: sub
    integer, intent (out) :: request
# ifdef MPI3
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    if (sub%iproc == dest) then
       call mpi_ireduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, request, mpi_err)
    else
       call mpi_ireduce (z, z, size(z), mpicmplx, MPI_SUM, dest, &
            sub%id, request, mpi_err)
    end if
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_reduce_complex_4array_sub(z, dest, sub)
    request = mp_request_null
# endif
  end subroutine nb_sum_reduce_complex_4array_sub


!******************SUM ALLREDUCE*************************

  !> In-place MPI_SUM allreduce of a scalar integer over mp_comm.
  subroutine sum_allreduce_integer (i)
    implicit none
    integer, intent (in out) :: i
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine sum_allreduce_integer

  !> In-place MPI_SUM allreduce of a 1D integer array over mp_comm.
  subroutine sum_allreduce_integer_array (i)
    implicit none
    integer, dimension (:), intent (in out) :: i
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine sum_allreduce_integer_array

  !> In-place MPI_SUM allreduce of a 2D integer array over mp_comm.
  subroutine sum_allreduce_integer_2array (i)
    implicit none
    integer, dimension (:,:), intent (in out) :: i
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine sum_allreduce_integer_2array

  !> In-place MPI_SUM allreduce of a 3D integer array over mp_comm.
  subroutine sum_allreduce_integer_3array (i)
    implicit none
    integer, dimension (:,:,:), intent (in out) :: i
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine sum_allreduce_integer_3array

  !> In-place MPI_SUM allreduce of a scalar real over mp_comm.
  subroutine sum_allreduce_real (a)
    implicit none
    real, intent (in out) :: a
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, a, 1, mpireal, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_real

  !> In-place MPI_SUM allreduce of a 1D real array over mp_comm.
  subroutine sum_allreduce_real_array (a)
    implicit none
    real, dimension (:), intent (in out) :: a
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_real_array

  !> In-place MPI_SUM allreduce of a 2D real array over mp_comm.
  subroutine sum_allreduce_real_2array (a)
    implicit none
    real, dimension (:,:), intent (in out) :: a
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_real_2array

  !> In-place MPI_SUM allreduce of a 3D real array over mp_comm.
  subroutine sum_allreduce_real_3array (a)
    implicit none
    real, dimension (:,:,:), intent (in out) :: a
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_real_3array

  !> In-place MPI_SUM allreduce of a 4D real array over mp_comm.
  subroutine sum_allreduce_real_4array (a)
    implicit none
    real, dimension (:,:,:,:), intent (in out) :: a
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_real_4array

  !> In-place MPI_SUM allreduce of a 5D real array over mp_comm.
  subroutine sum_allreduce_real_5array (a)
    implicit none
    real, dimension (:,:,:,:,:), intent (in out) :: a
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_real_5array

  !> In-place MPI_SUM allreduce of a scalar complex over mp_comm.
  subroutine sum_allreduce_complex (z)
    implicit none
    complex, intent (in out) :: z
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, z, 1, mpicmplx, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_complex

  !> In-place MPI_SUM allreduce of a 1D complex array over mp_comm.
  subroutine sum_allreduce_complex_array (z)
    implicit none
    complex, dimension (:), intent (in out) :: z
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_complex_array

  !> In-place MPI_SUM allreduce of a 2D complex array over mp_comm.
  subroutine sum_allreduce_complex_2array (z)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
# ifdef MPI
    integer :: mpi_err
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, &
         mp_comm, mpi_err)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_complex_2array

  !> In-place element-wise MPI_SUM allreduce of a rank-3 complex array over
  !> mp_comm. No-op without MPI.
  subroutine sum_allreduce_complex_3array (z)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_complex_3array

  !> In-place element-wise MPI_SUM allreduce of a rank-4 complex array over
  !> mp_comm. No-op without MPI.
  subroutine sum_allreduce_complex_4array (z)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_complex_4array

  !> In-place element-wise MPI_SUM allreduce of a rank-5 complex array over
  !> mp_comm. No-op without MPI.
  subroutine sum_allreduce_complex_5array (z)
    implicit none
    complex, dimension (:,:,:,:,:), intent (in out) :: z
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_complex_5array
  !/Sub-communicator allreduce

  !> In-place MPI_SUM allreduce of an integer scalar over the supplied
  !> sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_integer (i,sub_comm)
    implicit none
    integer, intent (in out) :: i
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(i)
# endif
  end subroutine sum_allreduce_sub_integer

  !> In-place element-wise MPI_SUM allreduce of a rank-1 integer array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_integer_array (i,sub_comm)
    implicit none
    integer, dimension (:), intent (in out) :: i
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(i)
# endif
  end subroutine sum_allreduce_sub_integer_array

  !> In-place MPI_SUM allreduce of a real scalar over the supplied
  !> sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_real (a,sub_comm)
    implicit none
    real, intent (in out) :: a
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, 1, mpireal, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_sub_real

  !> In-place element-wise MPI_SUM allreduce of a rank-1 real array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_real_array (a,sub_comm)
    implicit none
    real, dimension (:), intent (in out) :: a
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_sub_real_array

  !> In-place element-wise MPI_SUM allreduce of a rank-2 real array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_real_2array (a,sub_comm)
    implicit none
    real, dimension (:,:), intent (in out) :: a
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_sub_real_2array

  !> In-place element-wise MPI_SUM allreduce of a rank-3 real array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_real_3array (a,sub_comm)
    implicit none
    real, dimension (:,:,:), intent (in out) :: a
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_sub_real_3array

  !> In-place element-wise MPI_SUM allreduce of a rank-4 real array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_real_4array (a,sub_comm)
    implicit none
    real, dimension (:,:,:,:), intent (in out) :: a
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_sub_real_4array

  !> In-place element-wise MPI_SUM allreduce of a rank-5 real array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_real_5array (a,sub_comm)
    implicit none
    real, dimension (:,:,:,:,:), intent (in out) :: a
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, size(a), mpireal, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(a)
# endif
  end subroutine sum_allreduce_sub_real_5array

  !> In-place MPI_SUM allreduce of a complex scalar over the supplied
  !> sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_complex (z,sub_comm)
    implicit none
    complex, intent (in out) :: z
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, 1, mpicmplx, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_sub_complex

  !> In-place element-wise MPI_SUM allreduce of a rank-1 complex array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_complex_array (z,sub_comm)
    implicit none
    complex, dimension (:), intent (in out) :: z
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_sub_complex_array

  !> In-place element-wise MPI_SUM allreduce of a rank-2 complex array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_complex_2array (z,sub_comm)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_sub_complex_2array

  !> In-place element-wise MPI_SUM allreduce of a rank-3 complex array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_complex_3array (z,sub_comm)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_sub_complex_3array

  !> In-place element-wise MPI_SUM allreduce of a rank-4 complex array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_complex_4array (z,sub_comm)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_sub_complex_4array

  !> In-place element-wise MPI_SUM allreduce of a rank-5 complex array over
  !> the supplied sub-communicator sub_comm. No-op without MPI.
  subroutine sum_allreduce_sub_complex_5array (z,sub_comm)
    implicit none
    complex, dimension (:,:,:,:,:), intent (in out) :: z
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(sub_comm); UNUSED_DUMMY(z)
# endif
  end subroutine sum_allreduce_sub_complex_5array

!******************NON-BLOCKING SUM ALLREDUCE SUB*************************

  !> Non-blocking in-place MPI_SUM allreduce of a rank-2 complex array over
  !> sub_comm. With MPI-3 support, starts an mpi_iallreduce and returns the
  !> request handle for a later wait; otherwise falls back to the blocking
  !> version and returns a null request.
  subroutine nb_sum_allreduce_sub_complex_2array (z,sub_comm,request)
    implicit none
    complex, dimension (:,:), intent (in out) :: z
    integer, intent(in) :: sub_comm
    integer, intent(out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, sub_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    ! No MPI-3: complete the reduction immediately so the caller's wait
    ! on the returned null request is a no-op.
    call sum_allreduce_sub(z, sub_comm)
    request = mp_request_null
# endif
  end subroutine nb_sum_allreduce_sub_complex_2array

  !> Non-blocking in-place MPI_SUM allreduce of a rank-4 complex array over
  !> sub_comm; blocking fallback with a null request when MPI-3 is absent.
  subroutine nb_sum_allreduce_sub_complex_4array (z,sub_comm,request)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
    integer, intent(in) :: sub_comm
    integer, intent(out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, sub_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_allreduce_sub(z, sub_comm)
    request = mp_request_null
# endif
  end subroutine nb_sum_allreduce_sub_complex_4array

!******************NON-BLOCKING SUM ALLREDUCE*************************

  !> Non-blocking in-place MPI_SUM allreduce of an integer scalar over
  !> mp_comm; blocking fallback with a null request when MPI-3 is absent.
  subroutine nb_sum_allreduce_integer (i, request)
    implicit none
    integer, intent (in out) :: i
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_SUM, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_allreduce(i)
    request = mp_request_null
# endif
  end subroutine nb_sum_allreduce_integer

  !> Non-blocking in-place MPI_SUM allreduce of a rank-1 complex array over
  !> mp_comm; blocking fallback with a null request when MPI-3 is absent.
  subroutine nb_sum_allreduce_complex_array (z, request)
    implicit none
    complex, dimension (:), intent (in out) :: z
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_allreduce(z)
    request = mp_request_null
# endif
  end subroutine nb_sum_allreduce_complex_array

  !> Non-blocking in-place MPI_SUM allreduce of a rank-3 complex array over
  !> mp_comm; blocking fallback with a null request when MPI-3 is absent.
  subroutine nb_sum_allreduce_complex_3array (z, request)
    implicit none
    complex, dimension (:,:,:), intent (in out) :: z
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_allreduce(z)
    request = mp_request_null
# endif
  end subroutine nb_sum_allreduce_complex_3array

  !> Non-blocking in-place MPI_SUM allreduce of a rank-4 complex array over
  !> mp_comm; blocking fallback with a null request when MPI-3 is absent.
  subroutine nb_sum_allreduce_complex_4array (z, request)
    implicit none
    complex, dimension (:,:,:,:), intent (in out) :: z
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_allreduce(z)
    request = mp_request_null
# endif
  end subroutine nb_sum_allreduce_complex_4array

  !> Non-blocking in-place MPI_SUM allreduce of a rank-5 complex array over
  !> mp_comm; blocking fallback with a null request when MPI-3 is absent.
  subroutine nb_sum_allreduce_complex_5array (z, request)
    implicit none
    complex, dimension (:,:,:,:,:), intent (in out) :: z
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, z, size(z), mpicmplx, MPI_SUM, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call sum_allreduce(z)
    request = mp_request_null
# endif
  end subroutine nb_sum_allreduce_complex_5array

!*******************MAX REDUCE*************************

  !> MPI_MAX reduction of an integer scalar to rank dest of mp_comm; only
  !> dest receives the maximum, other ranks' i is left unchanged by MPI.
  !> Without MPI this is a no-op unless dest /= 0, which aborts.
  subroutine max_reduce_integer (i, dest)
    implicit none
    integer, intent (in out) :: i
    integer, intent (in) :: dest
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    ! MPI_IN_PLACE is only valid as sendbuf at the root, hence the branch.
    if(iproc.eq.dest)then
       call mpi_reduce &
            (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_MAX, dest, mp_comm, ierror)
    else
       ! NOTE(review): recvbuf aliases sendbuf here; recvbuf is ignored on
       ! non-root ranks but strict MPI implementations may flag the aliasing.
       call mpi_reduce &
            (i, i, 1, MPI_INTEGER, MPI_MAX, dest, mp_comm, ierror)
    endif
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine max_reduce_integer

  !> Element-wise MPI_MAX reduction of a rank-1 integer array to rank dest
  !> of mp_comm. Without MPI this is a no-op unless dest /= 0, which aborts.
  subroutine max_reduce_integer_array (i, dest)
    implicit none
    integer, dimension (:), intent (in out) :: i
    integer, intent (in) :: dest
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    ! MPI_IN_PLACE is only valid as sendbuf at the root, hence the branch.
    if(iproc.eq.dest)then
       call mpi_reduce &
            (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_MAX, dest, mp_comm, ierror)
    else
       call mpi_reduce &
            (i, i, size(i), MPI_INTEGER, MPI_MAX, dest, mp_comm, ierror)
    endif
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine max_reduce_integer_array

  !> MPI_MAX reduction of a real scalar to rank dest of mp_comm. Without
  !> MPI this is a no-op unless dest /= 0, which aborts.
  subroutine max_reduce_real (a, dest)
    implicit none
    real, intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    ! MPI_IN_PLACE is only valid as sendbuf at the root, hence the branch.
    if(iproc.eq.dest)then
       call mpi_reduce &
            (MPI_IN_PLACE, a, 1, mpireal, MPI_MAX, dest, mp_comm, ierror)
    else
       call mpi_reduce &
            (a, a, 1, mpireal, MPI_MAX, dest, mp_comm, ierror)
    endif
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine max_reduce_real

  !> Element-wise MPI_MAX reduction of a rank-1 real array to rank dest of
  !> mp_comm. Without MPI this is a no-op unless dest /= 0, which aborts.
  subroutine max_reduce_real_array (a, dest)
    implicit none
    real, dimension (:), intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    ! MPI_IN_PLACE is only valid as sendbuf at the root, hence the branch.
    if(iproc.eq.dest)then
       call mpi_reduce &
            (MPI_IN_PLACE, a, size(a), mpireal, MPI_MAX, dest, mp_comm, ierror)
    else
       call mpi_reduce &
            (a, a, size(a), mpireal, MPI_MAX, dest, mp_comm, ierror)
    endif
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine max_reduce_real_array

  !> In-place MPI_MAX allreduce of an integer scalar over mp_comm; every
  !> rank receives the global maximum. No-op without MPI.
  subroutine max_allreduce_integer (i)
    implicit none
    integer, intent (in out) :: i
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_MAX, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine max_allreduce_integer

  !> In-place element-wise MPI_MAX allreduce of a rank-1 integer array over
  !> mp_comm. No-op without MPI.
  subroutine max_allreduce_integer_array (i)
    implicit none
    integer, dimension (:), intent (in out) :: i
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_MAX, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine max_allreduce_integer_array

  !> In-place MPI_MAX allreduce of a real scalar over mp_comm. No-op
  !> without MPI.
  subroutine max_allreduce_real (a)
    implicit none
    real, intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, 1, mpireal, MPI_MAX, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine max_allreduce_real

  !> In-place element-wise MPI_MAX allreduce of a rank-1 real array over
  !> mp_comm. No-op without MPI.
  subroutine max_allreduce_real_array (a)
    implicit none
    real, dimension (:), intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, size(a), mpireal, MPI_MAX, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine max_allreduce_real_array

  !> MPI_MAXLOC allreduce of a single (value, index) pair over mp_comm:
  !> on return a holds the global maximum and i the index tag carried with
  !> it. The index is round-tripped through a real to match the mpi2real
  !> pair datatype. No-op without MPI.
  subroutine maxloc_allreduce_real(a,i)
    implicit none
    real, intent (in out) :: a
    integer, intent (in out) :: i
# ifdef MPI
    real, dimension(:,:), allocatable :: ai
    integer :: ierror

    ! For a single pair, ai(1,1) (value) and ai(1,2) (index) are adjacent
    ! in memory, matching the contiguous pair layout mpi2real requires.
    allocate (ai(1,2))
    ai(1,1)=a
    ai(1,2)=real(i,kind=kind(a))
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, ai, 1, mpi2real, MPI_MAXLOC, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    a=ai(1,1)
    i=int(ai(1,2))
    deallocate(ai)
# else
    UNUSED_DUMMY(a) ; UNUSED_DUMMY(i)
# endif
  end subroutine maxloc_allreduce_real

  !> Element-wise MPI_MAXLOC allreduce over mp_comm: for each element,
  !> a(k) returns the global maximum of a(k) and i(k) the index tag carried
  !> with that maximum. The index is round-tripped through a real to match
  !> the mpi2real pair datatype. No-op without MPI.
  subroutine maxloc_allreduce_real_array (a,i)
    implicit none
    real, dimension (:), intent (in out) :: a
    integer, dimension (:), intent (in out) :: i
# ifdef MPI
    real, dimension(:,:), allocatable :: ai
    integer :: ierror

    ! MPI_MAXLOC with a pair datatype (mpi2real) requires the buffer to
    ! hold size(a) contiguous (value, index) pairs.  With Fortran
    ! column-major storage the pair index must therefore be the FIRST
    ! dimension: ai(2, n) gives val1, idx1, val2, idx2, ...  (The previous
    ! ai(n, 2) layout stored all values followed by all indices, which MPI
    ! would misread as pairs (a(1),a(2)), (a(3),a(4)), ... for n > 1.)
    allocate (ai(2,size(a)))
    ai(1,:)=a(:)
    ai(2,:)=real(i(:),kind=kind(a(1)))
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, ai, size(a), mpi2real, MPI_MAXLOC, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    a(:)=ai(1,:)
    i(:)=int(ai(2,:))
    deallocate(ai)
# else
    UNUSED_DUMMY(a) ; UNUSED_DUMMY(i)
# endif
  end subroutine maxloc_allreduce_real_array

  !> Non-blocking in-place MPI_MAX allreduce of an integer scalar over
  !> mp_comm; blocking fallback with a null request when MPI-3 is absent.
  subroutine nb_max_allreduce_integer (i,request)
    implicit none
    integer, intent (in out) :: i
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_MAX, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call max_allreduce(i)
    request = mp_request_null
# endif
  end subroutine nb_max_allreduce_integer

  !> Non-blocking in-place element-wise MPI_MAX allreduce of a rank-1
  !> integer array over mp_comm; blocking fallback with a null request when
  !> MPI-3 is absent.
  subroutine nb_max_allreduce_integer_array (i, request)
    implicit none
    integer, dimension (:), intent (in out) :: i
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_MAX, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call max_allreduce(i)
    request = mp_request_null
# endif
  end subroutine nb_max_allreduce_integer_array

  !> Non-blocking in-place MPI_MAX allreduce of a real scalar over mp_comm;
  !> blocking fallback with a null request when MPI-3 is absent.
  subroutine nb_max_allreduce_real (a, request)
    implicit none
    real, intent (in out) :: a
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, a, 1, mpireal, MPI_MAX, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call max_allreduce(a)
    request = mp_request_null
# endif
  end subroutine nb_max_allreduce_real

  !> Non-blocking in-place element-wise MPI_MAX allreduce of a rank-1 real
  !> array over mp_comm; blocking fallback with a null request when MPI-3
  !> is absent.
  subroutine nb_max_allreduce_real_array (a, request)
    implicit none
    real, dimension (:), intent (in out) :: a
    integer, intent (out) :: request
# ifdef MPI3
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_iallreduce &
         (MPI_IN_PLACE, a, size(a), mpireal, MPI_MAX, mp_comm, request, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    call max_allreduce(a)
    request = mp_request_null
# endif
  end subroutine nb_max_allreduce_real_array

  !> MPI_MIN reduction of an integer scalar to rank dest of mp_comm; only
  !> dest receives the minimum. Without MPI this is a no-op unless
  !> dest /= 0, which aborts.
  subroutine min_reduce_integer (i, dest)
    implicit none
    integer, intent (in out) :: i
    integer, intent (in) :: dest
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    ! MPI_IN_PLACE is only valid as sendbuf at the root, hence the branch.
    if(iproc.eq.dest)then
       call mpi_reduce &
            (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_MIN, dest, mp_comm, ierror)
    else
       call mpi_reduce &
            (i, i, 1, MPI_INTEGER, MPI_MIN, dest, mp_comm, ierror)
    endif
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine min_reduce_integer

  !> Element-wise MPI_MIN reduction of a rank-1 integer array to rank dest
  !> of mp_comm. Without MPI this is a no-op unless dest /= 0, which aborts.
  subroutine min_reduce_integer_array (i, dest)
    implicit none
    integer, dimension (:), intent (in out) :: i
    integer, intent (in) :: dest
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    ! MPI_IN_PLACE is only valid as sendbuf at the root, hence the branch.
    if(iproc.eq.dest)then
       call mpi_reduce &
            (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_MIN, dest, mp_comm, ierror)
    else
       call mpi_reduce &
            (i, i, size(i), MPI_INTEGER, MPI_MIN, dest, mp_comm, ierror)
    endif
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine min_reduce_integer_array

  !> MPI_MIN reduction of a real scalar to rank dest of mp_comm. Without
  !> MPI this is a no-op unless dest /= 0, which aborts.
  subroutine min_reduce_real (a, dest)
    implicit none
    real, intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    ! MPI_IN_PLACE is only valid as sendbuf at the root, hence the branch.
    if(iproc.eq.dest)then
       call mpi_reduce &
            (MPI_IN_PLACE, a, 1, mpireal, MPI_MIN, dest, mp_comm, ierror)
    else
       call mpi_reduce &
            (a, a, 1, mpireal, MPI_MIN, dest, mp_comm, ierror)
    endif
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine min_reduce_real

  !> Element-wise MPI_MIN reduction of a rank-1 real array to rank dest of
  !> mp_comm. Without MPI this is a no-op unless dest /= 0, which aborts.
  subroutine min_reduce_real_array (a, dest)
    implicit none
    real, dimension (:), intent (in out) :: a
    integer, intent (in) :: dest
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    ! MPI_IN_PLACE is only valid as sendbuf at the root, hence the branch.
    if(iproc.eq.dest)then
       call mpi_reduce &
            (MPI_IN_PLACE, a, size(a), mpireal, MPI_MIN, dest, mp_comm, ierror)
    else
       call mpi_reduce &
            (a, a, size(a), mpireal, MPI_MIN, dest, mp_comm, ierror)
    endif
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
    if (dest /= 0) call mp_abort ("reduce to")
# endif
  end subroutine min_reduce_real_array

  !> In-place MPI_MIN allreduce of an integer scalar over mp_comm; every
  !> rank receives the global minimum. No-op without MPI.
  subroutine min_allreduce_integer (i)
    implicit none
    integer, intent (in out) :: i
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_MIN, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine min_allreduce_integer

  !> In-place element-wise MPI_MIN allreduce of a rank-1 integer array over
  !> mp_comm. No-op without MPI.
  subroutine min_allreduce_integer_array (i)
    implicit none
    integer, dimension (:), intent (in out) :: i
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, i, size(i), MPI_INTEGER, MPI_MIN, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i)
# endif
  end subroutine min_allreduce_integer_array

  !> In-place MPI_MIN allreduce of a real scalar over mp_comm. No-op
  !> without MPI.
  subroutine min_allreduce_real (a)
    implicit none
    real, intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, 1, mpireal, MPI_MIN, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine min_allreduce_real

  !> In-place element-wise MPI_MIN allreduce of a rank-1 real array over
  !> mp_comm. No-op without MPI.
  subroutine min_allreduce_real_array (a)
    implicit none
    real, dimension (:), intent (in out) :: a
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, a, size(a), mpireal, MPI_MIN, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a)
# endif
  end subroutine min_allreduce_real_array

  !> In-place MPI_MIN allreduce of an integer scalar over the supplied
  !> sub-communicator sub_comm. No-op without MPI.
  subroutine min_allreduce_sub_integer (i, sub_comm)
    implicit none
    integer, intent (in out) :: i
    integer, intent(in) :: sub_comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, i, 1, MPI_INTEGER, MPI_MIN, sub_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i); UNUSED_DUMMY(sub_comm)
# endif
  end subroutine min_allreduce_sub_integer

! ****************** LAND ALLREDUCE*******************

  !> In-place logical-AND (MPI_LAND) allreduce of a logical scalar over
  !> mp_comm: l becomes .true. only if it was .true. on every rank.
  !> No-op without MPI.
  subroutine land_allreduce_single_element (l)
    implicit none
    logical, intent (in out) :: l
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
    call mpi_allreduce &
         (MPI_IN_PLACE, l, 1, MPI_LOGICAL, MPI_LAND, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_collectives, ' MPI Collectives')
!$OMP END MASTER
# else
    UNUSED_DUMMY(l)
# endif
  end subroutine land_allreduce_single_element
! ********************* barrier **********************

  !> Block until all processors in the global communicator mp_comm have
  !> reached this point. The time spent waiting is accumulated in the
  !> ' MPI Sync' timer. No-op without MPI.
  subroutine barrier_nocomm
    ! implicit none was previously inside '# ifdef MPI', leaving the
    ! non-MPI build of this subroutine without it; hoisted for consistency
    ! with every other subroutine in this module (behavior unchanged).
    implicit none
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_sync, ' MPI Sync')
!$OMP END MASTER
    call mpi_barrier (mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_sync, ' MPI Sync')
!$OMP END MASTER
# endif
  end subroutine barrier_nocomm

  !> Block until all processors in the supplied communicator comm have
  !> reached this point; waiting time goes to the ' MPI Sync' timer.
  !> No-op without MPI.
  subroutine barrier_comm(comm)
    implicit none
    integer,intent(in)::comm
# ifdef MPI
    integer :: ierror
!$OMP MASTER
    call time_message(.false., time_mp_sync, ' MPI Sync')
!$OMP END MASTER
    call mpi_barrier (comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_sync, ' MPI Sync')
!$OMP END MASTER
# else
    UNUSED_DUMMY(comm)
# endif
  end subroutine barrier_comm

! ********************* sends **********************

  !> Blocking send of a single integer to rank `dest` on mp_comm.
  !> `tag` defaults to 0 when absent. Point-to-point time is accumulated
  !> in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_integer (i, dest, tag)
    implicit none
    integer, intent (in) :: i
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(i, 1, MPI_INTEGER, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_integer

  !> Blocking send of a 1D integer array to rank `dest` on mp_comm.
  !> `tag` defaults to 0 when absent. Point-to-point time is accumulated
  !> in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_integer_array (i, dest, tag)
    implicit none
    integer, dimension (:), intent (in) :: i
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(i, size(i), MPI_INTEGER, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_integer_array

  !> Blocking send of a single real (module MPI type `mpireal`) to rank
  !> `dest` on mp_comm. `tag` defaults to 0 when absent. Point-to-point
  !> time is accumulated in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_real (a, dest, tag)
    implicit none
    real, intent (in) :: a
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(a, 1, mpireal, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_real

  !> Blocking send of a 1D real array to rank `dest` on mp_comm.
  !> `tag` defaults to 0 when absent. Point-to-point time is accumulated
  !> in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_real_array (a, dest, tag)
    implicit none
    real, dimension (:), intent (in) :: a
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(a, size(a), mpireal, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_real_array

  !> Blocking send of a 4D real array (sent contiguously as size(a)
  !> elements) to rank `dest` on mp_comm. `tag` defaults to 0 when absent.
  !> Point-to-point time is accumulated in time_mp_ptp. No-op without MPI.
  subroutine send_real_4d_array (a, dest, tag)
    implicit none
    real, dimension (:,:,:,:), intent (in) :: a
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(a, size(a), mpireal, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_real_4d_array

  !> Blocking send of a 5D real array (sent contiguously as size(a)
  !> elements) to rank `dest` on mp_comm. `tag` defaults to 0 when absent.
  !> Point-to-point time is accumulated in time_mp_ptp. No-op without MPI.
  subroutine send_real_5d_array (a, dest, tag)
    implicit none
    real, dimension (:,:,:,:,:), intent (in) :: a
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(a, size(a), mpireal, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(a); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_real_5d_array

  !> Blocking send of a single complex (module MPI type `mpicmplx`) to
  !> rank `dest` on mp_comm. `tag` defaults to 0 when absent. Point-to-point
  !> time is accumulated in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_complex (z, dest, tag)
    implicit none
    complex, intent (in) :: z
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(z, 1, mpicmplx, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_complex

  !> Blocking send of a 1D complex array to rank `dest` on mp_comm.
  !> `tag` defaults to 0 when absent. Point-to-point time is accumulated
  !> in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_complex_array (z, dest, tag)
    implicit none
    complex, dimension (:), intent (in) :: z
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(z, size(z), mpicmplx, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_complex_array

  !> Blocking send of a 2D complex array (sent contiguously as size(z)
  !> elements) to rank `dest` on mp_comm. `tag` defaults to 0 when absent.
  !> Point-to-point time is accumulated in time_mp_ptp. No-op without MPI.
  subroutine send_complex_2d_array (z, dest, tag)
    implicit none
    complex, dimension (:,:), intent (in) :: z
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(z, size(z), mpicmplx, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_complex_2d_array

  !> Blocking send of a 3D complex array (sent contiguously as size(z)
  !> elements) to rank `dest` on mp_comm. `tag` defaults to 0 when absent.
  !> Point-to-point time is accumulated in time_mp_ptp. No-op without MPI.
  subroutine send_complex_3d_array (z, dest, tag)
    implicit none
    complex, dimension (:,:,:), intent (in) :: z
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(z, size(z), mpicmplx, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_complex_3d_array

  !> Non-blocking send (mpi_isend) of a 1D complex array to rank `dest`
  !> on mp_comm. `tag` defaults to 0 when absent. The returned `request`
  !> handle must later be completed (e.g. via wait/waitall). Without MPI
  !> the send is skipped and `request` is set to 0.
  subroutine nonblocking_send_complex_array (z, dest, tag, request)
    implicit none
    complex, dimension (:), intent (in) :: z
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
    integer, intent (out) :: request
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_isend(z, size(z), mpicmplx, dest, msg_tag, mp_comm, &
         request, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(z); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
    request = 0
# endif
  end subroutine nonblocking_send_complex_array

  !> Blocking send of a single logical to rank `dest` on mp_comm.
  !> `tag` defaults to 0 when absent. Point-to-point time is accumulated
  !> in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_logical (f, dest, tag)
    implicit none
    logical, intent (in) :: f
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(f, 1, MPI_LOGICAL, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(f); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_logical

  !> Blocking send of a 1D logical array to rank `dest` on mp_comm.
  !> `tag` defaults to 0 when absent. Point-to-point time is accumulated
  !> in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_logical_array (f, dest, tag)
    implicit none
    logical, dimension (:), intent (in) :: f
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(f, size(f), MPI_LOGICAL, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(f); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_logical_array

  !> Blocking send of a character string (len(s) characters) to rank
  !> `dest` on mp_comm. `tag` defaults to 0 when absent. Point-to-point
  !> time is accumulated in time_mp_ptp. No-op when compiled without MPI.
  subroutine send_character (s, dest, tag)
    implicit none
    character(*), intent (in) :: s
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_send(s, len(s), MPI_CHARACTER, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(s); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine send_character

! MAB> needed for Trinity
! ********************* synchronous sends **********************

  !> Synchronous (mpi_ssend) send of a single integer to rank `dest` on
  !> mp_comm; does not return until the matching receive has started.
  !> `tag` defaults to 0 when absent. Point-to-point time is accumulated
  !> in time_mp_ptp. No-op when compiled without MPI.
  subroutine ssend_integer (i, dest, tag)
    implicit none
    integer, intent (in) :: i
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_ssend(i, 1, MPI_INTEGER, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine ssend_integer

  !> Synchronous (mpi_ssend) send of a 1D integer array to rank `dest`
  !> on mp_comm; does not return until the matching receive has started.
  !> `tag` defaults to 0 when absent. Point-to-point time is accumulated
  !> in time_mp_ptp. No-op when compiled without MPI.
  subroutine ssend_integer_array (i, dest, tag)
    implicit none
    integer, dimension (:), intent (in) :: i
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierr, msg_tag
    if (present(tag)) then
       msg_tag = tag
    else
       msg_tag = 0
    end if
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_ssend(i, size(i), MPI_INTEGER, dest, msg_tag, mp_comm, ierr)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(i); UNUSED_DUMMY(dest); UNUSED_DUMMY(tag)
# endif
  end subroutine ssend_integer_array

  !> FIXME : Add documentation
  subroutine ssend_real (a, dest, tag)
    implicit none
    real, intent (in) :: a
    integer, intent (in) :: dest
    integer, intent (in), optional :: tag
# ifdef MPI
    integer :: ierror
    integer :: tagp
    tagp = 0
    if (present(tag)) tagp = tag
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
    call mpi_ssend (a, 1, MPI_DOUBLE_PRECISION, dest, tagp, mp_comm, ierror)
!$OMP MASTER
    call time_message(.false., time_mp_ptp, ' MPI Point-to-point')
!$OMP END MASTER
# else
    UNUSED_DUMMY(dest) ; UNUSED_DUMMY(tag); UNUSED_DUMMY(a)
# endif
  end su