qs_tddfpt2_subgroups.F (git:ddb311d)
!--------------------------------------------------------------------------------------------------!
!   CP2K: A general program to perform molecular dynamics simulations                              !
!   Copyright 2000-2025 CP2K developers group <https://cp2k.org>                                   !
!                                                                                                  !
!   SPDX-License-Identifier: GPL-2.0-or-later                                                      !
!--------------------------------------------------------------------------------------------------!

MODULE qs_tddfpt2_subgroups
   USE admm_types,                      ONLY: admm_type,&
                                              get_admm_env
   USE cell_types,                      ONLY: cell_type
   USE cp_dbcsr_api,                    ONLY: dbcsr_create,&
                                              dbcsr_distribution_release,&
                                              dbcsr_distribution_type,&
                                              dbcsr_get_info,&
                                              dbcsr_release,&
                                              dbcsr_type
   USE kinds,                           ONLY: default_string_length,&
                                              dp
   USE pw_env_methods,                  ONLY: pw_env_create,&
                                              pw_env_rebuild
   USE pw_env_types,                    ONLY: pw_env_release,&
                                              pw_env_retain,&
                                              pw_env_type
   USE qs_kind_types,                   ONLY: get_qs_kind,&
                                              qs_kind_type
   USE qs_rho0_methods,                 ONLY: init_rho0
#include "./base/base_uses.f90"

   IMPLICIT NONE

   PRIVATE

   CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'qs_tddfpt2_subgroups'
   LOGICAL, PARAMETER, PRIVATE :: debug_this_module = .TRUE.

   PUBLIC :: tddfpt_subgroup_env_type, tddfpt_sub_env_init, tddfpt_sub_env_release
   PUBLIC :: tddfpt_dbcsr_create_by_dist, tddfpt_fm_replicate_across_subgroups

! **************************************************************************************************
!> \brief Parallel (sub)group environment.
!> \par History
!>   * 01.2017 created [Sergey Chulkov]
! **************************************************************************************************
   TYPE tddfpt_subgroup_env_type
      !> indicates that the global MPI communicator has been split into subgroups; if it is .FALSE.,
      !> certain components of the structure (blacs_env, para_env, admm_A, and mos_occ)
      !> can still be accessed; in this case they simply point to the corresponding global variables
      LOGICAL :: is_split = .FALSE.
      !> number of parallel groups
      INTEGER :: ngroups = -1
      !> group_distribution(0:ngroups-1): a process with rank 'i' belongs to the parallel group
      !> with index 'group_distribution(i)'
      INTEGER, DIMENSION(:), ALLOCATABLE :: group_distribution
      !> group-specific BLACS parallel environment
      TYPE(cp_blacs_env_type), POINTER :: blacs_env => NULL()
      !> group-specific MPI parallel environment
      TYPE(mp_para_env_type), POINTER :: para_env => NULL()
      !> (active) occupied MOs stored in matrix form [nao x nmo_occ(spin)], distributed across
      !> the processes in the parallel group
      TYPE(cp_fm_type), ALLOCATABLE, DIMENSION(:) :: mos_occ
      TYPE(cp_fm_type), ALLOCATABLE, DIMENSION(:) :: mos_active
      !> group-specific copy of the ADMM A matrix 'admm_type%A'
      TYPE(cp_fm_type), POINTER :: admm_A => NULL()
      !
      !> indicates that a set of multi-grids has been allocated; if it is .FALSE., all the
      !> components below point to the corresponding global variables and can be accessed
      LOGICAL :: is_mgrid = .FALSE.
      !> group-specific DBCSR distribution
      TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist => NULL()
      !> group-specific two-dimensional distribution of pairs of particles
      TYPE(distribution_2d_type), POINTER :: dist_2d => NULL()
      !> group-specific plane-wave environment
      TYPE(pw_env_type), POINTER :: pw_env => NULL()
      !> lists of neighbours in auxiliary and primary basis sets
      TYPE(neighbor_list_set_p_type), &
         DIMENSION(:), POINTER :: sab_aux_fit => NULL(), sab_orb => NULL()
      !> task lists in auxiliary and primary basis sets
      TYPE(task_list_type), POINTER :: task_list_aux_fit => NULL(), task_list_orb => NULL()
      !> soft task lists in auxiliary and primary basis sets
      TYPE(task_list_type), POINTER :: task_list_aux_fit_soft => NULL(), task_list_orb_soft => NULL()
      !> GAPW local atomic grids
      TYPE(hartree_local_type), POINTER :: hartree_local => NULL()
      TYPE(local_rho_type), POINTER :: local_rho_set => NULL()
      TYPE(local_rho_type), POINTER :: local_rho_set_admm => NULL()
   END TYPE tddfpt_subgroup_env_type

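! **************************************************************************************************
!> \note Illustrative access pattern (a sketch, not part of the original file): because the
!>       fallback branch of tddfpt_sub_env_init() aliases the group-specific pointers to the
!>       global objects, caller code does not need to distinguish the two cases, e.g.
!>
!>          REAL(kind=dp) :: t  ! some group-local quantity
!>          CALL sub_env%para_env%sum(t)  ! subgroup-wide sum if is_split, global sum otherwise
! **************************************************************************************************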
! **************************************************************************************************
!> \brief Structure to save global multi-grid related parameters.
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   TYPE mgrid_saved_parameters
      !> create commensurate grids
      LOGICAL :: commensurate_mgrids = .FALSE.
      !> create real-space grids
      LOGICAL :: realspace_mgrids = .FALSE.
      !> do not perform load balancing
      LOGICAL :: skip_load_balance = .FALSE.
      !> cutoff value at the finest grid level
      REAL(kind=dp) :: cutoff = 0.0_dp
      !> inverse scale factor
      REAL(kind=dp) :: progression_factor = 0.0_dp
      !> relative cutoff
      REAL(kind=dp) :: relative_cutoff = 0.0_dp
      !> list of explicitly given cutoff values
      REAL(kind=dp), DIMENSION(:), POINTER :: e_cutoff => NULL()
   END TYPE mgrid_saved_parameters
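! **************************************************************************************************
!> \note Illustrative usage (a sketch, not part of the original file): this structure brackets a
!>       temporary override of the global grid parameters, as done in tddfpt_sub_env_init():
!>
!>          TYPE(mgrid_saved_parameters) :: mgrid_saved
!>          CALL init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)  ! save + override
!>          ! ... build TDDFPT-specific plane-wave grids and task lists ...
!>          CALL restore_qs_mgrid(qs_control, mgrid_saved)                   ! restore originals
! **************************************************************************************************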

CONTAINS

! **************************************************************************************************
!> \brief Split the MPI communicator to create a set of parallel (sub)groups.
!> \param sub_env     parallel group environment (initialised on exit)
!> \param qs_env      Quickstep environment
!> \param mos_occ     occupied ground-state molecular orbitals in the primary atomic basis set
!> \param mos_active  active ground-state molecular orbitals in the primary atomic basis set
!> \param kernel      type of kernel (full/sTDA) that will be used
!> \par History
!>   * 01.2017 (sub)group-related code has been moved here from the main subroutine tddfpt()
!>     [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_init(sub_env, qs_env, mos_occ, mos_active, kernel)
      TYPE(tddfpt_subgroup_env_type), INTENT(out)        :: sub_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      TYPE(cp_fm_type), DIMENSION(:), INTENT(in)         :: mos_occ, mos_active
      INTEGER, INTENT(in)                                :: kernel

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_init'

      INTEGER :: handle, ispin, nao, nao_aux, natom, &
                 nmo_active, nmo_occ, nspins
      TYPE(admm_type), POINTER                           :: admm_env
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global
      TYPE(cp_fm_struct_type), POINTER                   :: fm_struct
      TYPE(dft_control_type), POINTER                    :: dft_control
      TYPE(mgrid_saved_parameters)                       :: mgrid_saved
      TYPE(mp_para_env_type), POINTER                    :: para_env_global
      TYPE(pw_env_type), POINTER                         :: pw_env_global
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control

      CALL timeset(routineN, handle)

      nspins = SIZE(mos_occ)

      CALL get_qs_env(qs_env, blacs_env=blacs_env_global, dft_control=dft_control, &
                      para_env=para_env_global, pw_env=pw_env_global)

      tddfpt_control => dft_control%tddfpt2_control
      qs_control => dft_control%qs_control

      ! ++ split the MPI communicator if
      !    a) the requested number of processors per group is > 0
      !       (meaning that the split has been requested explicitly), and
      !    b) the resulting number of subgroups is >= 2
      sub_env%is_split = tddfpt_control%nprocs > 0 .AND. tddfpt_control%nprocs*2 <= para_env_global%num_pe
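      ! (illustrative numbers, not part of the original file: with 16 MPI ranks and NPROCS 4,
      ! the test 4*2 <= 16 holds and the split below yields four subgroups of four ranks each)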

      ALLOCATE (sub_env%mos_occ(nspins))
      ALLOCATE (sub_env%mos_active(nspins))
      NULLIFY (sub_env%admm_A)

      IF (sub_env%is_split) THEN
         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))

         ALLOCATE (sub_env%para_env)
         CALL sub_env%para_env%from_split(comm=para_env_global, ngroups=sub_env%ngroups, &
                                          group_distribution=sub_env%group_distribution, &
                                          subgroup_min_size=tddfpt_control%nprocs)

         ! ++ create a new parallel environment based on the given sub-communicator
         NULLIFY (sub_env%blacs_env)

         ! use the default (SQUARE) BLACS grid layout and non-repeatable BLACS collective operations
         ! by omitting the optional parameters 'blacs_grid_layout' and 'blacs_repeatable'.
         ! Ideally we should take these parameters from the variables globenv%blacs_grid_layout and
         ! globenv%blacs_repeatable; however, the global environment is not available
         ! from the subroutine 'qs_energies_properties'.
         CALL cp_blacs_env_create(sub_env%blacs_env, sub_env%para_env)

         NULLIFY (fm_struct)

         DO ispin = 1, nspins
            CALL cp_fm_get_info(mos_occ(ispin), nrow_global=nao, ncol_global=nmo_occ)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_occ, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_occ(ispin), fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_occ(ispin), &
                                                      fm_dest_sub=sub_env%mos_occ(ispin), sub_env=sub_env)
         END DO

         DO ispin = 1, nspins
            CALL cp_fm_get_info(mos_active(ispin), nrow_global=nao, ncol_global=nmo_active)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_active, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_active(ispin), fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_active(ispin), &
                                                      fm_dest_sub=sub_env%mos_active(ispin), sub_env=sub_env)
         END DO

         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            CALL cp_fm_get_info(admm_env%A, nrow_global=nao_aux, ncol_global=nao)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao_aux, ncol_global=nao, context=sub_env%blacs_env)
            ALLOCATE (sub_env%admm_A)
            CALL cp_fm_create(sub_env%admm_A, fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=admm_env%A, fm_dest_sub=sub_env%admm_A, sub_env=sub_env)
         END IF
      ELSE
         CALL para_env_global%retain()
         sub_env%para_env => para_env_global

         CALL blacs_env_global%retain()
         sub_env%blacs_env => blacs_env_global

         sub_env%mos_occ(:) = mos_occ(:)
         sub_env%mos_active(:) = mos_active(:)

         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            sub_env%admm_A => admm_env%A
         END IF
      END IF

      IF (kernel == tddfpt_kernel_full) THEN
         ! ++ allocate a new plane-wave environment
         sub_env%is_mgrid = sub_env%is_split .OR. tddfpt_control%mgrid_is_explicit

         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_aux_fit)
         NULLIFY (sub_env%task_list_orb_soft, sub_env%task_list_aux_fit_soft)

         IF (sub_env%is_mgrid) THEN
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)

            NULLIFY (sub_env%pw_env)

            CALL pw_env_create(sub_env%pw_env)
            CALL pw_env_rebuild(sub_env%pw_env, qs_env, sub_env%para_env)

            CALL tddfpt_build_distribution_2d(distribution_2d=sub_env%dist_2d, dbcsr_dist=sub_env%dbcsr_dist, &
                                              blacs_env=sub_env%blacs_env, qs_env=qs_env)

            CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb, sab=sub_env%sab_orb, basis_type="ORB", &
                                       distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                       skip_load_balance=qs_control%skip_load_balance_distributed, &
                                       reorder_grid_ranks=.TRUE.)

            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb_soft, sab=sub_env%sab_orb, &
                                          basis_type="ORB_SOFT", &
                                          distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.TRUE.)
            END IF

            IF (dft_control%do_admm) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit, sab=sub_env%sab_aux_fit, &
                                          basis_type="AUX_FIT", distribution_2d=sub_env%dist_2d, &
                                          pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.FALSE.)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit_soft, sab=sub_env%sab_aux_fit, &
                                             basis_type="AUX_FIT_SOFT", distribution_2d=sub_env%dist_2d, &
                                             pw_env=sub_env%pw_env, qs_env=qs_env, &
                                             skip_load_balance=qs_control%skip_load_balance_distributed, &
                                             reorder_grid_ranks=.FALSE.)
               END IF
            END IF

            IF (tddfpt_control%mgrid_is_explicit) &
               CALL restore_qs_mgrid(qs_control, mgrid_saved)
         ELSE
            CALL pw_env_retain(pw_env_global)
            sub_env%pw_env => pw_env_global

            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, &
                            sab_orb=sub_env%sab_orb, task_list=sub_env%task_list_orb)
            IF (dft_control%do_admm) THEN
               CALL get_admm_env(admm_env, sab_aux_fit=sub_env%sab_aux_fit, &
                                 task_list_aux_fit=sub_env%task_list_aux_fit)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  sub_env%task_list_aux_fit_soft => admm_env%admm_gapw_env%task_list
               END IF
            END IF
            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, task_list_soft=sub_env%task_list_orb_soft)
            END IF
         END IF

         ! GAPW initialisations
         IF (dft_control%qs_control%gapw) THEN
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            natom=natom, &
                            qs_kind_set=qs_kind_set)

            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)

            CALL init_rho0(sub_env%local_rho_set, qs_env, dft_control%qs_control%gapw_control, &
                           zcore=0.0_dp)
            CALL rho0_s_grid_create(sub_env%pw_env, sub_env%local_rho_set%rho0_mpole)
            CALL hartree_local_create(sub_env%hartree_local)
            CALL init_coulomb_local(sub_env%hartree_local, natom)
         ELSEIF (dft_control%qs_control%gapw_xc) THEN
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            qs_kind_set=qs_kind_set)
            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)
         END IF

         ! ADMM/GAPW
         IF (dft_control%do_admm) THEN
            IF (dft_control%qs_control%gapw .OR. dft_control%qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set)
               CALL local_rho_set_create(sub_env%local_rho_set_admm)
               CALL allocate_rho_atom_internals(sub_env%local_rho_set_admm%rho_atom_set, atomic_kind_set, &
                                                admm_env%admm_gapw_env%admm_kind_set, &
                                                dft_control, sub_env%para_env)
            END IF
         END IF

      ELSE IF (kernel == tddfpt_kernel_stda) THEN
         sub_env%is_mgrid = .FALSE.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft)
         NULLIFY (sub_env%task_list_aux_fit, sub_env%task_list_aux_fit_soft)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            CPABORT('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE IF (kernel == tddfpt_kernel_none) THEN
         sub_env%is_mgrid = .FALSE.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft)
         NULLIFY (sub_env%task_list_aux_fit, sub_env%task_list_aux_fit_soft)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            CPABORT('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE
         CPABORT("Unknown kernel type")
      END IF

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_init
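! **************************************************************************************************
!> \note Illustrative call sequence (a sketch, not part of the original file; it assumes that
!>       'qs_env', 'mos_occ' and 'mos_active' have been prepared by the TDDFPT driver):
!>
!>          TYPE(tddfpt_subgroup_env_type) :: sub_env
!>          CALL tddfpt_sub_env_init(sub_env, qs_env, mos_occ, mos_active, &
!>                                   kernel=tddfpt_kernel_full)
!>          ! ... group-local work using sub_env%para_env, sub_env%pw_env, ...
!>          CALL tddfpt_sub_env_release(sub_env)
! **************************************************************************************************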

! **************************************************************************************************
!> \brief Release the parallel group environment.
!> \param sub_env parallel group environment (modified on exit)
!> \par History
!>   * 01.2017 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_release(sub_env)
      TYPE(tddfpt_subgroup_env_type), INTENT(inout)      :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_release'

      INTEGER :: handle, i

      CALL timeset(routineN, handle)

      IF (sub_env%is_mgrid) THEN
         IF (ASSOCIATED(sub_env%task_list_aux_fit)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit)

         IF (ASSOCIATED(sub_env%task_list_aux_fit_soft)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit_soft)

         IF (ASSOCIATED(sub_env%task_list_orb)) &
            CALL deallocate_task_list(sub_env%task_list_orb)

         IF (ASSOCIATED(sub_env%task_list_orb_soft)) &
            CALL deallocate_task_list(sub_env%task_list_orb_soft)

         CALL release_neighbor_list_sets(sub_env%sab_aux_fit)
         CALL release_neighbor_list_sets(sub_env%sab_orb)

         IF (ASSOCIATED(sub_env%dbcsr_dist)) THEN
            CALL dbcsr_distribution_release(sub_env%dbcsr_dist)
            DEALLOCATE (sub_env%dbcsr_dist)
         END IF

         IF (ASSOCIATED(sub_env%dist_2d)) &
            CALL distribution_2d_release(sub_env%dist_2d)
      END IF

      ! GAPW
      IF (ASSOCIATED(sub_env%local_rho_set)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set)
      END IF
      IF (ASSOCIATED(sub_env%hartree_local)) THEN
         CALL hartree_local_release(sub_env%hartree_local)
      END IF
      IF (ASSOCIATED(sub_env%local_rho_set_admm)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set_admm)
      END IF

      ! if a TDDFPT-specific plane-wave environment has not been requested,
      ! the pointers sub_env%dbcsr_dist, sub_env%sab_*, and sub_env%task_list_*
      ! point to the corresponding ground-state variables from qs_env
      ! and should not be deallocated

      CALL pw_env_release(sub_env%pw_env)

      sub_env%is_mgrid = .FALSE.

      IF (sub_env%is_split .AND. ASSOCIATED(sub_env%admm_A)) THEN
         CALL cp_fm_release(sub_env%admm_A)
         DEALLOCATE (sub_env%admm_A)
         NULLIFY (sub_env%admm_A)
      END IF

      IF (sub_env%is_split) THEN
         DO i = SIZE(sub_env%mos_occ), 1, -1
            CALL cp_fm_release(sub_env%mos_occ(i))
         END DO
         DO i = SIZE(sub_env%mos_active), 1, -1
            CALL cp_fm_release(sub_env%mos_active(i))
         END DO
      END IF
      DEALLOCATE (sub_env%mos_occ)
      DEALLOCATE (sub_env%mos_active)

      CALL cp_blacs_env_release(sub_env%blacs_env)
      CALL mp_para_env_release(sub_env%para_env)

      IF (ALLOCATED(sub_env%group_distribution)) &
         DEALLOCATE (sub_env%group_distribution)

      sub_env%is_split = .FALSE.

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_release

! **************************************************************************************************
!> \brief Replace the global multi-grid related parameters in qs_control by the ones given in the
!>        TDDFPT/MGRID subsection. The original parameters are stored in the 'mgrid_saved'
!>        variable.
!> \param qs_control      Quickstep control parameters (modified on exit)
!> \param tddfpt_control  TDDFPT control parameters
!> \param mgrid_saved     structure to hold the global MGRID-related parameters (initialised on exit)
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
!> \note the code to build the 'e_cutoff' list was taken from the subroutine read_mgrid_section()
! **************************************************************************************************
   SUBROUTINE init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control
      TYPE(mgrid_saved_parameters), INTENT(out)          :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'init_tddfpt_mgrid'

      INTEGER :: handle, igrid, ngrids

      CALL timeset(routineN, handle)

      ! ++ save the global plane-wave grid parameters to the variable 'mgrid_saved'
      mgrid_saved%commensurate_mgrids = qs_control%commensurate_mgrids
      mgrid_saved%realspace_mgrids = qs_control%realspace_mgrids
      mgrid_saved%skip_load_balance = qs_control%skip_load_balance_distributed
      mgrid_saved%cutoff = qs_control%cutoff
      mgrid_saved%progression_factor = qs_control%progression_factor
      mgrid_saved%relative_cutoff = qs_control%relative_cutoff
      mgrid_saved%e_cutoff => qs_control%e_cutoff

      ! ++ set the parameters from 'tddfpt_control' as defaults for all newly allocated plane-wave grids
      qs_control%commensurate_mgrids = tddfpt_control%mgrid_commensurate_mgrids
      qs_control%realspace_mgrids = tddfpt_control%mgrid_realspace_mgrids
      qs_control%skip_load_balance_distributed = tddfpt_control%mgrid_skip_load_balance
      qs_control%cutoff = tddfpt_control%mgrid_cutoff
      qs_control%progression_factor = tddfpt_control%mgrid_progression_factor
      qs_control%relative_cutoff = tddfpt_control%mgrid_relative_cutoff

      ALLOCATE (qs_control%e_cutoff(tddfpt_control%mgrid_ngrids))
      ngrids = tddfpt_control%mgrid_ngrids
      IF (ASSOCIATED(tddfpt_control%mgrid_e_cutoff)) THEN
         ! following read_mgrid_section(), a magic scale factor (0.5_dp) is applied here
         DO igrid = 1, ngrids
            qs_control%e_cutoff(igrid) = tddfpt_control%mgrid_e_cutoff(igrid)*0.5_dp
         END DO
         ! ++ round 'qs_control%cutoff' upward to the nearest sub-grid cutoff value;
         !    here we take advantage of the fact that the array 'e_cutoff' has been sorted in
         !    descending order (see the worked example after this IF construct)
         DO igrid = ngrids, 1, -1
            IF (qs_control%cutoff <= qs_control%e_cutoff(igrid)) THEN
               qs_control%cutoff = qs_control%e_cutoff(igrid)
               EXIT
            END IF
         END DO
         ! igrid == 0 if qs_control%cutoff is larger than the largest explicitly provided cutoff
         ! value; use the largest actual value
         IF (igrid <= 0) &
            qs_control%cutoff = qs_control%e_cutoff(1)
      ELSE
         qs_control%e_cutoff(1) = qs_control%cutoff
         DO igrid = 2, ngrids
            qs_control%e_cutoff(igrid) = qs_control%e_cutoff(igrid - 1)/qs_control%progression_factor
         END DO
      END IF
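      ! Worked examples (illustrative numbers, not part of the original file):
      !  * explicit list: mgrid_e_cutoff = (/600, 300, 150/) gives e_cutoff = (/300, 150, 75/)
      !    after the 0.5 scaling; a requested cutoff of 280 is then rounded upward to 300, while
      !    a requested cutoff of 400 leaves the loop with igrid == 0 and falls back to 300.
      !  * derived list: cutoff = 400 with progression_factor = 3 yields
      !    e_cutoff = (/400.0, 133.3, 44.4/) (finest to coarsest).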

      CALL timestop(handle)
   END SUBROUTINE init_tddfpt_mgrid

! **************************************************************************************************
!> \brief Restore the global multi-grid related parameters stored in the 'mgrid_saved' variable.
!> \param qs_control   Quickstep control parameters (modified on exit)
!> \param mgrid_saved  structure that holds the global MGRID-related parameters
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE restore_qs_mgrid(qs_control, mgrid_saved)
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(mgrid_saved_parameters), INTENT(in)           :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'restore_qs_mgrid'

      INTEGER :: handle

      CALL timeset(routineN, handle)

      IF (ASSOCIATED(qs_control%e_cutoff)) &
         DEALLOCATE (qs_control%e_cutoff)

      qs_control%commensurate_mgrids = mgrid_saved%commensurate_mgrids
      qs_control%realspace_mgrids = mgrid_saved%realspace_mgrids
      qs_control%skip_load_balance_distributed = mgrid_saved%skip_load_balance
      qs_control%cutoff = mgrid_saved%cutoff
      qs_control%progression_factor = mgrid_saved%progression_factor
      qs_control%relative_cutoff = mgrid_saved%relative_cutoff
      qs_control%e_cutoff => mgrid_saved%e_cutoff

      CALL timestop(handle)
   END SUBROUTINE restore_qs_mgrid

! **************************************************************************************************
!> \brief Distribute atoms across a two-dimensional grid of processors.
!> \param distribution_2d  new two-dimensional distribution of pairs of particles
!>                         (allocated and initialised on exit)
!> \param dbcsr_dist       new DBCSR distribution (allocated and initialised on exit)
!> \param blacs_env        BLACS parallel environment
!> \param qs_env           Quickstep environment
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_distribution_2d(distribution_2d, dbcsr_dist, blacs_env, qs_env)
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(dbcsr_distribution_type), POINTER             :: dbcsr_dist
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env
      TYPE(qs_environment_type), POINTER                 :: qs_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_distribution_2d'

      INTEGER :: handle
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cell_type), POINTER                           :: cell
      TYPE(molecule_kind_type), DIMENSION(:), POINTER    :: molecule_kind_set
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecule_set
      TYPE(particle_type), DIMENSION(:), POINTER         :: particle_set
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(section_vals_type), POINTER                   :: input

      CALL timeset(routineN, handle)

      CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
                      molecule_kind_set=molecule_kind_set, molecule_set=molecule_set, &
                      particle_set=particle_set, qs_kind_set=qs_kind_set)

      NULLIFY (distribution_2d)
      CALL distribute_molecules_2d(cell=cell, &
                                   atomic_kind_set=atomic_kind_set, &
                                   particle_set=particle_set, &
                                   qs_kind_set=qs_kind_set, &
                                   molecule_kind_set=molecule_kind_set, &
                                   molecule_set=molecule_set, &
                                   distribution_2d=distribution_2d, &
                                   blacs_env=blacs_env, &
                                   force_env_section=input)

      ALLOCATE (dbcsr_dist)
      CALL cp_dbcsr_dist2d_to_dist(distribution_2d, dbcsr_dist)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_distribution_2d
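! **************************************************************************************************
!> \note Illustrative pairing (a sketch, not part of the original file): the objects created here
!>       are group-local and are torn down in tddfpt_sub_env_release(), i.e.
!>
!>          CALL tddfpt_build_distribution_2d(dist_2d, dbcsr_dist, sub_env%blacs_env, qs_env)
!>          ! ... use dbcsr_dist, e.g. via tddfpt_dbcsr_create_by_dist() ...
!>          CALL dbcsr_distribution_release(dbcsr_dist)
!>          DEALLOCATE (dbcsr_dist)
!>          CALL distribution_2d_release(dist_2d)
! **************************************************************************************************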

! **************************************************************************************************
!> \brief Build task and neighbour lists for the given plane-wave environment and basis set.
!> \param task_list           new task list (allocated and initialised on exit)
!> \param sab                 new list of neighbours (allocated and initialised on exit)
!> \param basis_type          type of the basis set
!> \param distribution_2d     two-dimensional distribution of pairs of particles
!> \param pw_env              plane-wave environment
!> \param qs_env              Quickstep environment
!> \param skip_load_balance   do not perform load balancing
!> \param reorder_grid_ranks  re-optimise grid ranks and re-create the real-space grid descriptors
!>                            as well as the grids
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_tasklist(task_list, sab, basis_type, distribution_2d, pw_env, qs_env, &
                                    skip_load_balance, reorder_grid_ranks)
      TYPE(task_list_type), POINTER                      :: task_list
      TYPE(neighbor_list_set_p_type), DIMENSION(:), &
         POINTER                                         :: sab
      CHARACTER(len=*), INTENT(in)                       :: basis_type
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(pw_env_type), POINTER                         :: pw_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      LOGICAL, INTENT(in)                                :: skip_load_balance, reorder_grid_ranks

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_tasklist'

      INTEGER :: handle, ikind, nkinds
      LOGICAL, ALLOCATABLE, DIMENSION(:) :: orb_present
      REAL(kind=dp) :: subcells
      REAL(kind=dp), ALLOCATABLE, DIMENSION(:) :: orb_radius
      REAL(kind=dp), ALLOCATABLE, DIMENSION(:, :) :: pair_radius
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cell_type), POINTER                           :: cell
      TYPE(distribution_1d_type), POINTER                :: local_particles
      TYPE(gto_basis_set_type), POINTER                  :: orb_basis_set
      TYPE(local_atoms_type), ALLOCATABLE, DIMENSION(:)  :: atom2d
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecule_set
      TYPE(particle_type), DIMENSION(:), POINTER         :: particle_set
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(qs_ks_env_type), POINTER                      :: ks_env
      TYPE(section_vals_type), POINTER                   :: input

      CALL timeset(routineN, handle)

      CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
                      ks_env=ks_env, local_particles=local_particles, molecule_set=molecule_set, &
                      particle_set=particle_set, qs_kind_set=qs_kind_set)

      nkinds = SIZE(atomic_kind_set)

      IF (.NOT. ASSOCIATED(sab)) THEN
         ALLOCATE (atom2d(nkinds))
         CALL atom2d_build(atom2d, local_particles, distribution_2d, atomic_kind_set, &
                           molecule_set, molecule_only=.FALSE., particle_set=particle_set)

         ALLOCATE (orb_present(nkinds))
         ALLOCATE (orb_radius(nkinds))
         ALLOCATE (pair_radius(nkinds, nkinds))

         DO ikind = 1, nkinds
            CALL get_qs_kind(qs_kind_set(ikind), basis_set=orb_basis_set, basis_type=basis_type)
            IF (ASSOCIATED(orb_basis_set)) THEN
               orb_present(ikind) = .TRUE.
               CALL get_gto_basis_set(gto_basis_set=orb_basis_set, kind_radius=orb_radius(ikind))
            ELSE
               orb_present(ikind) = .FALSE.
               orb_radius(ikind) = 0.0_dp
            END IF
         END DO

         CALL pair_radius_setup(orb_present, orb_present, orb_radius, orb_radius, pair_radius)

         NULLIFY (sab)
         CALL section_vals_val_get(input, "DFT%SUBCELLS", r_val=subcells)
         CALL build_neighbor_lists(sab, particle_set, atom2d, cell, pair_radius, &
                                   mic=.FALSE., subcells=subcells, molecular=.FALSE., nlname="sab_orb")

         CALL atom2d_cleanup(atom2d)
         DEALLOCATE (atom2d, orb_present, orb_radius, pair_radius)
      END IF

      CALL allocate_task_list(task_list)
      CALL generate_qs_task_list(ks_env, task_list, basis_type=basis_type, &
                                 reorder_rs_grid_ranks=reorder_grid_ranks, &
                                 skip_load_balance_distributed=skip_load_balance, &
                                 pw_env_external=pw_env, sab_orb_external=sab)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_tasklist

! **************************************************************************************************
!> \brief Create a DBCSR matrix based on a template matrix, a distribution object, and a list of
!>        neighbours.
!> \param matrix      matrix to create
!> \param template    template matrix
!> \param dbcsr_dist  DBCSR distribution
!> \param sab         list of neighbours
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_dbcsr_create_by_dist(matrix, template, dbcsr_dist, sab)
      TYPE(dbcsr_type), POINTER                          :: matrix, template
      TYPE(dbcsr_distribution_type), POINTER             :: dbcsr_dist
      TYPE(neighbor_list_set_p_type), DIMENSION(:), &
         POINTER                                         :: sab

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_dbcsr_create_by_dist'

      CHARACTER :: matrix_type
      CHARACTER(len=default_string_length) :: matrix_name
      INTEGER :: handle
      INTEGER, DIMENSION(:), POINTER :: col_blk_sizes, row_blk_sizes

      CALL timeset(routineN, handle)

      CPASSERT(ASSOCIATED(template))
      CALL dbcsr_get_info(template, row_blk_size=row_blk_sizes, col_blk_size=col_blk_sizes, &
                          name=matrix_name, matrix_type=matrix_type)

      IF (ASSOCIATED(matrix)) THEN
         CALL dbcsr_release(matrix)
      ELSE
         ALLOCATE (matrix)
      END IF

      CALL dbcsr_create(matrix, matrix_name, dbcsr_dist, matrix_type, row_blk_sizes, col_blk_sizes)
      CALL cp_dbcsr_alloc_block_from_nbl(matrix, sab)

      CALL timestop(handle)

   END SUBROUTINE tddfpt_dbcsr_create_by_dist
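! **************************************************************************************************
!> \note Illustrative usage (a sketch, not part of the original file; 'matrix_s' stands for any
!>       globally available template, e.g. the overlap matrix, and 'sub_env' for an initialised
!>       subgroup environment):
!>
!>          TYPE(dbcsr_type), POINTER :: matrix_sub
!>          NULLIFY (matrix_sub)
!>          CALL tddfpt_dbcsr_create_by_dist(matrix_sub, template=matrix_s, &
!>                                           dbcsr_dist=sub_env%dbcsr_dist, sab=sub_env%sab_orb)
! **************************************************************************************************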

! **************************************************************************************************
!> \brief Replicate a globally distributed matrix across all sub-groups. At the end
!>        every sub-group holds a local copy of the original globally distributed matrix.
!>
!>                                 |--------------------|
!>                      fm_src     |  0    1    2    3  |
!>                                 |--------------------|
!>                                    /  MPI ranks  \
!>                                  |/_            _\|
!>                   |--------------------|  |--------------------|
!> fm_dest_subgroup0 |    0    1          |  |          2    3    | fm_dest_subgroup1
!>                   |--------------------|  |--------------------|
!>                         subgroup 0              subgroup 1
!>
!> \param fm_src       globally distributed matrix to replicate
!> \param fm_dest_sub  subgroup-specific copy of the replicated matrix
!> \param sub_env      subgroup environment
!> \par History
!>   * 09.2016 created [Sergey Chulkov]
!>   * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_fm_replicate_across_subgroups(fm_src, fm_dest_sub, sub_env)
      TYPE(cp_fm_type), INTENT(IN)                       :: fm_src, fm_dest_sub
      TYPE(tddfpt_subgroup_env_type), INTENT(in)         :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_fm_replicate_across_subgroups'

      INTEGER :: handle, igroup, igroup_local, ncols_global_dest, ncols_global_src, ngroups, &
                 nrows_global_dest, nrows_global_src
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global
      TYPE(cp_fm_type)                                   :: fm_null
      TYPE(mp_para_env_type), POINTER                    :: para_env_global

      IF (sub_env%is_split) THEN
         CALL timeset(routineN, handle)

         CALL cp_fm_get_info(fm_src, nrow_global=nrows_global_src, ncol_global=ncols_global_src, &
                             context=blacs_env_global, para_env=para_env_global)
         CALL cp_fm_get_info(fm_dest_sub, nrow_global=nrows_global_dest, ncol_global=ncols_global_dest)

         IF (debug_this_module) THEN
            CPASSERT(nrows_global_src == nrows_global_dest)
            CPASSERT(ncols_global_src == ncols_global_dest)
         END IF

         igroup_local = sub_env%group_distribution(para_env_global%mepos)
         ngroups = sub_env%ngroups
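         ! every MPI rank takes part in all 'ngroups' copy operations below; ranks outside the
         ! destination subgroup pass the dummy matrix 'fm_null' so that the collective calls to
         ! cp_fm_copy_general() stay matched across the global communicator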

         DO igroup = 0, ngroups - 1
            IF (igroup == igroup_local) THEN
               CALL cp_fm_copy_general(fm_src, fm_dest_sub, para_env_global)
            ELSE
               CALL cp_fm_copy_general(fm_src, fm_null, para_env_global)
            END IF
         END DO

         CALL timestop(handle)
      END IF
   END SUBROUTINE tddfpt_fm_replicate_across_subgroups
END MODULE qs_tddfpt2_subgroups