negf_subgroup_types.F  (git:1f285aa)
!--------------------------------------------------------------------------------------------------!
!   CP2K: A general program to perform molecular dynamics simulations                               !
!   Copyright 2000-2024 CP2K developers group <https://cp2k.org>                                     !
!                                                                                                    !
!   SPDX-License-Identifier: GPL-2.0-or-later                                                        !
!--------------------------------------------------------------------------------------------------!

! **************************************************************************************************
!> \brief Environment for NEGF based quantum transport calculations
!> \author Sergey Chulkov
! **************************************************************************************************

MODULE negf_subgroup_types
   USE cp_blacs_env,                    ONLY: cp_blacs_env_create,&
                                              cp_blacs_env_release,&
                                              cp_blacs_env_type
   USE message_passing,                 ONLY: mp_comm_type,&
                                              mp_para_env_release,&
                                              mp_para_env_type
   USE negf_control_types,              ONLY: negf_control_type
#include "./base/base_uses.f90"

   IMPLICIT NONE
   PRIVATE

   CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'negf_subgroup_types'
   LOGICAL, PARAMETER, PRIVATE :: debug_this_module = .TRUE.

   PUBLIC :: negf_subgroup_env_type, negf_sub_env_create, negf_sub_env_release

! **************************************************************************************************
!> \brief Parallel (sub)group environment.
!> \par History
!>      * 06.2017 created [Sergey Chulkov]
! **************************************************************************************************
   TYPE negf_subgroup_env_type
      !> number of parallel groups.
      !> If it is > 1, the global MPI communicator has actually been split into subgroups.
      !> All other components of the structure are always initialised regardless of the split status
      !> (they simply point to the corresponding global variables if no splitting has been made).
      INTEGER :: ngroups = -1
      !> global MPI rank of the given processor. The local MPI rank can be obtained as para_env%mepos.
      !> Useful for finding the current group index by accessing the 'group_distribution' array.
      INTEGER :: mepos_global = -1
      !> global MPI communicator
      TYPE(mp_comm_type) :: mpi_comm_global = mp_comm_type()
      !> group_distribution(0:num_pe-1) : a process with rank 'i' belongs to the parallel group
      !> with index 'group_distribution(i)'
      INTEGER, DIMENSION(:), ALLOCATABLE :: group_distribution
      !> group-specific BLACS parallel environment
      TYPE(cp_blacs_env_type), POINTER :: blacs_env => NULL()
      !> group-specific MPI parallel environment
      TYPE(mp_para_env_type), POINTER :: para_env => NULL()
   END TYPE negf_subgroup_env_type
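
   ! Usage sketch (the names 'sub_env' and 'my_group' below are illustrative, not part of this module):
   ! a rank finds the index of the parallel group it belongs to by combining 'group_distribution'
   ! with 'mepos_global', e.g.
   !
   !    my_group = sub_env%group_distribution(sub_env%mepos_global)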

CONTAINS

! **************************************************************************************************
!> \brief Split the MPI communicator to create a set of parallel (sub)groups.
!> \param sub_env            parallel (sub)group environment (initialised on exit)
!> \param negf_control       NEGF input control
!> \param blacs_env_global   BLACS environment for all the processors
!> \param blacs_grid_layout  BLACS grid layout
!> \param blacs_repeatable   BLACS repeatable layout
!> \par History
!>      * 06.2017 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE negf_sub_env_create(sub_env, negf_control, blacs_env_global, blacs_grid_layout, blacs_repeatable)
      TYPE(negf_subgroup_env_type), INTENT(out) :: sub_env
      TYPE(negf_control_type), POINTER :: negf_control
      TYPE(cp_blacs_env_type), POINTER :: blacs_env_global
      INTEGER, INTENT(in) :: blacs_grid_layout
      LOGICAL, INTENT(in) :: blacs_repeatable

      CHARACTER(LEN=*), PARAMETER :: routineN = 'negf_sub_env_create'

      INTEGER :: handle
      LOGICAL :: is_split
      TYPE(mp_para_env_type), POINTER :: para_env_global

      CALL timeset(routineN, handle)

      CALL blacs_env_global%get(para_env=para_env_global)
      sub_env%mepos_global = para_env_global%mepos
      sub_env%mpi_comm_global = para_env_global

      ! ++ split the MPI communicator if
      !    a) the requested number of processors per group is > 0
      !       (i.e. the split has been requested explicitly), and
      !    b) at least two such subgroups can be formed
      is_split = negf_control%nprocs > 0 .AND. negf_control%nprocs*2 <= para_env_global%num_pe

      IF (is_split) THEN
         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))

         ALLOCATE (sub_env%para_env)
         CALL sub_env%para_env%from_split(comm=para_env_global, ngroups=sub_env%ngroups, &
                                          group_distribution=sub_env%group_distribution, subgroup_min_size=negf_control%nprocs)

         ! ++ create a new BLACS parallel environment based on the group-specific sub-communicator
         NULLIFY (sub_env%blacs_env)

         ! the given BLACS grid layout and repeatability flag are passed on to the new environment
         CALL cp_blacs_env_create(sub_env%blacs_env, sub_env%para_env, blacs_grid_layout, blacs_repeatable)
      ELSE
         sub_env%ngroups = 1

         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))
         sub_env%group_distribution(:) = 0

         sub_env%blacs_env => blacs_env_global
         CALL sub_env%blacs_env%retain()

         sub_env%para_env => para_env_global
         CALL sub_env%para_env%retain()
      END IF

      CALL timestop(handle)
   END SUBROUTINE negf_sub_env_create

! **************************************************************************************************
!> \brief Release a parallel (sub)group environment.
!> \param sub_env   parallel (sub)group environment to release
!> \par History
!>      * 06.2017 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE negf_sub_env_release(sub_env)
      TYPE(negf_subgroup_env_type), INTENT(inout) :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'negf_sub_env_release'

      INTEGER :: handle

      CALL timeset(routineN, handle)

      CALL cp_blacs_env_release(sub_env%blacs_env)
      CALL mp_para_env_release(sub_env%para_env)

      IF (ALLOCATED(sub_env%group_distribution)) &
         DEALLOCATE (sub_env%group_distribution)

      sub_env%ngroups = 0

      CALL timestop(handle)
   END SUBROUTINE negf_sub_env_release
END MODULE negf_subgroup_types
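
A minimal usage sketch (not taken from CP2K itself): it assumes the caller already holds pointers
'negf_control' (negf_control_type) and 'blacs_env_global' (cp_blacs_env_type) and simply forwards
its own BLACS settings; the routine name 'demo_negf_subgroups' and all local variables are
hypothetical.

SUBROUTINE demo_negf_subgroups(negf_control, blacs_env_global, blacs_grid_layout, blacs_repeatable)
   USE cp_blacs_env, ONLY: cp_blacs_env_type
   USE negf_control_types, ONLY: negf_control_type
   USE negf_subgroup_types, ONLY: negf_subgroup_env_type, &
                                  negf_sub_env_create, negf_sub_env_release
   IMPLICIT NONE
   TYPE(negf_control_type), POINTER :: negf_control
   TYPE(cp_blacs_env_type), POINTER :: blacs_env_global
   INTEGER, INTENT(in) :: blacs_grid_layout
   LOGICAL, INTENT(in) :: blacs_repeatable

   TYPE(negf_subgroup_env_type) :: sub_env
   INTEGER :: my_group

   ! split the global communicator into subgroups of at least negf_control%nprocs ranks;
   ! if no split is requested, sub_env simply retains the global environments
   CALL negf_sub_env_create(sub_env, negf_control, blacs_env_global, blacs_grid_layout, blacs_repeatable)

   ! index of the subgroup this rank has been assigned to
   my_group = sub_env%group_distribution(sub_env%mepos_global)

   ! ... perform group-local work using sub_env%para_env and sub_env%blacs_env ...

   ! release the newly created or retained (sub)group environments
   CALL negf_sub_env_release(sub_env)
END SUBROUTINE demo_negf_subgroups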