(git:1155b05)
Loading...
Searching...
No Matches
qs_tddfpt2_subgroups.F
Go to the documentation of this file.
1!--------------------------------------------------------------------------------------------------!
2! CP2K: A general program to perform molecular dynamics simulations !
3! Copyright 2000-2026 CP2K developers group <https://cp2k.org> !
4! !
5! SPDX-License-Identifier: GPL-2.0-or-later !
6!--------------------------------------------------------------------------------------------------!
7
9 USE admm_types, ONLY: admm_type,&
14 USE cell_types, ONLY: cell_type
21 USE cp_dbcsr_api, ONLY: dbcsr_create,&
50 USE kinds, ONLY: default_string_length,&
51 dp
57 USE pw_env_methods, ONLY: pw_env_create,&
59 USE pw_env_types, ONLY: pw_env_release,&
62 USE pw_types, ONLY: pw_r3d_rs_type
65 USE qs_kind_types, ONLY: get_qs_kind,&
79 USE qs_rho0_methods, ONLY: init_rho0
85#include "./base/base_uses.f90"
86
87 IMPLICIT NONE
88
89 PRIVATE
90
91 CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'qs_tddfpt2_subgroups'
92 LOGICAL, PARAMETER, PRIVATE :: debug_this_module = .true.
93
97
98! **************************************************************************************************
99!> \brief Parallel (sub)group environment.
100!> \par History
101!> * 01.2017 created [Sergey Chulkov]
102! **************************************************************************************************
104 !> indicates that the global MPI communicator has been split into subgroups; if it is .FALSE.
105 !> certain components of the structure (blacs_env, para_env, admm_A, and mos_occ)
106 !> can still be accessed; in this case they simply point to the corresponding global variables
107 LOGICAL :: is_split = .false.
108 !> number of parallel groups
109 INTEGER :: ngroups = -1
110 !> group_distribution(0:ngroups-1) : a process with rank 'i' belongs to the parallel group
111 !> with index 'group_distribution(i)'
112 INTEGER, DIMENSION(:), ALLOCATABLE :: group_distribution
113 !> group-specific BLACS parallel environment
114 TYPE(cp_blacs_env_type), POINTER :: blacs_env => null()
115 !> group-specific MPI parallel environment
116 TYPE(mp_para_env_type), POINTER :: para_env => null()
117 !> (active) occupied MOs stored in a matrix form [nao x nmo_occ(spin)] distributed across processes
118 !> in the parallel group
119 TYPE(cp_fm_type), ALLOCATABLE, DIMENSION(:) :: mos_occ
120 TYPE(cp_fm_type), ALLOCATABLE, DIMENSION(:) :: mos_active
121 !> group-specific copy of the ADMM A matrix 'admm_type%A'
122 TYPE(cp_fm_type), POINTER :: admm_a => null()
123 !
124 !> indicates that a set of multi-grids has been allocated; if it is .FALSE. all the components
125 !> below point to the corresponding global variables and can be accessed
126 LOGICAL :: is_mgrid = .false.
127 !> group-specific DBCSR distribution
128 TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist => null()
129 !> group-specific two-dimensional distribution of pairs of particles
130 TYPE(distribution_2d_type), POINTER :: dist_2d => null()
131 !> group-specific plane wave environment
132 TYPE(pw_env_type), POINTER :: pw_env => null()
133 !> integration weights
134 TYPE(pw_r3d_rs_type), POINTER :: xcint_weights => null()
135 !> lists of neighbours in auxiliary and primary basis sets
137 DIMENSION(:), POINTER :: sab_aux_fit => null(), sab_orb => null()
138 !> task lists in auxiliary and primary basis sets
139 TYPE(task_list_type), POINTER :: task_list_aux_fit => null(), task_list_orb => null()
140 !> soft task lists in auxiliary and primary basis sets
141 TYPE(task_list_type), POINTER :: task_list_aux_fit_soft => null(), task_list_orb_soft => null()
142 !> GAPW local atomic grids
143 TYPE(hartree_local_type), POINTER :: hartree_local => null()
144 TYPE(local_rho_type), POINTER :: local_rho_set => null()
145 TYPE(local_rho_type), POINTER :: local_rho_set_admm => null()
147
148! **************************************************************************************************
149!> \brief Structure to save global multi-grid related parameters.
150!> \par History
151!> * 09.2016 created [Sergey Chulkov]
152!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
153! **************************************************************************************************
   !> Snapshot of the global multi-grid parameters taken from qs_control before they are
   !> temporarily overwritten by the TDDFPT/MGRID settings (see init_tddfpt_mgrid()/
   !> restore_qs_mgrid()).
   TYPE mgrid_saved_parameters
      !> create commensurate grids
      LOGICAL :: commensurate_mgrids = .false.
      !> create real-space grids
      LOGICAL :: realspace_mgrids = .false.
      !> do not perform load balancing
      LOGICAL :: skip_load_balance = .false.
      !> cutoff value at the finest grid level
      REAL(kind=dp) :: cutoff = 0.0_dp
      !> inverse scale factor between successive grid levels
      REAL(kind=dp) :: progression_factor = 0.0_dp
      !> relative cutoff
      REAL(kind=dp) :: relative_cutoff = 0.0_dp
      !> list of explicitly given cutoff values; the pointer is handed back to
      !> qs_control on restore, so it is stored (not copied) here
      REAL(kind=dp), DIMENSION(:), POINTER :: e_cutoff => null()
   END TYPE mgrid_saved_parameters
170
171CONTAINS
172
173! **************************************************************************************************
174!> \brief Split MPI communicator to create a set of parallel (sub)groups.
175!> \param sub_env parallel group environment (initialised on exit)
176!> \param qs_env Quickstep environment
177!> \param mos_occ ground state molecular orbitals in primary atomic basis set
178!> \param mos_active active ground state molecular orbitals in primary atomic basis set
179!> \param kernel Type of kernel (full/sTDA) that will be used
180!> \par History
181!> * 01.2017 (sub)group-related code has been moved here from the main subroutine tddfpt()
182!> [Sergey Chulkov]
183! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_init(sub_env, qs_env, mos_occ, mos_active, kernel)
      ! sub_env is INTENT(out): all components start from their default initialisation
      TYPE(tddfpt_subgroup_env_type), INTENT(out)        :: sub_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      TYPE(cp_fm_type), DIMENSION(:), INTENT(in)         :: mos_occ, mos_active
      INTEGER, INTENT(in)                                :: kernel

      CHARACTER(LEN=*), PARAMETER :: routinen = 'tddfpt_sub_env_init'

      INTEGER                                            :: handle, ispin, nao, nao_aux, natom, &
                                                            nmo_active, nmo_occ, nspins
      TYPE(admm_type), POINTER                           :: admm_env
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global
      TYPE(cp_fm_struct_type), POINTER                   :: fm_struct
      TYPE(dft_control_type), POINTER                    :: dft_control
      TYPE(mgrid_saved_parameters)                       :: mgrid_saved
      TYPE(mp_para_env_type), POINTER                    :: para_env_global
      TYPE(pw_env_type), POINTER                         :: pw_env_global
      TYPE(pw_r3d_rs_type), POINTER                      :: weights
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control

      CALL timeset(routinen, handle)

      ! one occupied-MO matrix per spin channel
      nspins = SIZE(mos_occ)

      CALL get_qs_env(qs_env, blacs_env=blacs_env_global, dft_control=dft_control, &
                      para_env=para_env_global, pw_env=pw_env_global)

      tddfpt_control => dft_control%tddfpt2_control
      qs_control => dft_control%qs_control

      ! ++ split mpi communicator if
      ! a) the requested number of processors per group > 0
      ! (means that the split has been requested explicitly), and
      ! b) the number of subgroups is >= 2
      sub_env%is_split = tddfpt_control%nprocs > 0 .AND. tddfpt_control%nprocs*2 <= para_env_global%num_pe

      ALLOCATE (sub_env%mos_occ(nspins))
      ALLOCATE (sub_env%mos_active(nspins))
      NULLIFY (sub_env%admm_A)

      ! externally supplied XC integration weights are kept as a global alias;
      ! they cannot be redistributed over subgroups
      CALL get_qs_env(qs_env, xcint_weights=weights)
      sub_env%xcint_weights => weights
      IF (sub_env%is_split .AND. ASSOCIATED(weights)) THEN
         cpabort('subgroups and integration weights not compatible')
      END IF

      IF (sub_env%is_split) THEN
         ! group_distribution(i) = index of the subgroup that global rank 'i' belongs to
         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))

         ALLOCATE (sub_env%para_env)
         CALL sub_env%para_env%from_split(comm=para_env_global, ngroups=sub_env%ngroups, &
                                          group_distribution=sub_env%group_distribution, subgroup_min_size=tddfpt_control%nprocs)

         ! ++ create a new parallel environment based on the given sub-communicator)
         NULLIFY (sub_env%blacs_env)

         ! use the default (SQUARE) BLACS grid layout and non-repeatable BLACS collective operations
         ! by omitting optional parameters 'blacs_grid_layout' and 'blacs_repeatable'.
         ! Ideally we should take these parameters from the variables globenv%blacs_grid_layout and
         ! globenv%blacs_repeatable, however the global environment is not available
         ! from the subroutine 'qs_energies_properties'.
         CALL cp_blacs_env_create(sub_env%blacs_env, sub_env%para_env)

         NULLIFY (fm_struct)

         ! replicate the globally distributed occupied MOs into each subgroup
         DO ispin = 1, nspins
            CALL cp_fm_get_info(mos_occ(ispin), nrow_global=nao, ncol_global=nmo_occ)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_occ, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_occ(ispin), fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_occ(ispin), &
                                                      fm_dest_sub=sub_env%mos_occ(ispin), sub_env=sub_env)
         END DO

         ! same for the active MOs (may differ from mos_occ in the number of columns)
         DO ispin = 1, nspins
            CALL cp_fm_get_info(mos_active(ispin), nrow_global=nao, ncol_global=nmo_active)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_active, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_active(ispin), fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_active(ispin), &
                                                      fm_dest_sub=sub_env%mos_active(ispin), sub_env=sub_env)
         END DO

         ! group-local copy of the ADMM projection matrix A [nao_aux x nao]
         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            CALL cp_fm_get_info(admm_env%A, nrow_global=nao_aux, ncol_global=nao)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao_aux, ncol_global=nao, context=sub_env%blacs_env)
            ALLOCATE (sub_env%admm_A)
            CALL cp_fm_create(sub_env%admm_A, fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=admm_env%A, fm_dest_sub=sub_env%admm_A, sub_env=sub_env)
         END IF
      ELSE
         ! no split: alias the global environments; retain() bumps the reference
         ! count so that tddfpt_sub_env_release() may release them unconditionally
         CALL para_env_global%retain()
         sub_env%para_env => para_env_global

         CALL blacs_env_global%retain()
         sub_env%blacs_env => blacs_env_global

         ! shallow copies: sub_env%mos_* share the data of the global matrices
         sub_env%mos_occ(:) = mos_occ(:)
         sub_env%mos_active(:) = mos_active(:)

         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            sub_env%admm_A => admm_env%A
         END IF
      END IF

      IF (kernel == tddfpt_kernel_full) THEN
         ! ++ allocate a new plane wave environment
         ! a group-specific multi-grid set is needed either because the communicator
         ! was split or because an explicit TDDFPT/MGRID section was given
         sub_env%is_mgrid = sub_env%is_split .OR. tddfpt_control%mgrid_is_explicit

         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_aux_fit)
         NULLIFY (sub_env%task_list_orb_soft, sub_env%task_list_aux_fit_soft)

         IF (sub_env%is_mgrid) THEN
            ! temporarily override the global grid parameters; restored below
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)

            IF (ASSOCIATED(weights)) THEN
               cpabort('Redefining MGRID and integration weights not compatible')
            END IF

            NULLIFY (sub_env%pw_env)

            CALL pw_env_create(sub_env%pw_env)
            CALL pw_env_rebuild(sub_env%pw_env, qs_env, sub_env%para_env)

            ! group-specific particle distribution and matching DBCSR distribution
            CALL tddfpt_build_distribution_2d(distribution_2d=sub_env%dist_2d, dbcsr_dist=sub_env%dbcsr_dist, &
                                              blacs_env=sub_env%blacs_env, qs_env=qs_env)

            ! neighbour/task lists for the primary basis ...
            CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb, sab=sub_env%sab_orb, basis_type="ORB", &
                                       distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                       skip_load_balance=qs_control%skip_load_balance_distributed, &
                                       reorder_grid_ranks=.true.)

            ! ... its soft counterpart for GAPW(_XC) ...
            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb_soft, sab=sub_env%sab_orb, basis_type="ORB_SOFT", &
                                          distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.true.)
            END IF

            ! ... and for the ADMM auxiliary basis (grid ranks already ordered by the
            ! ORB task list, hence reorder_grid_ranks=.false. here)
            IF (dft_control%do_admm) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit, sab=sub_env%sab_aux_fit, &
                                          basis_type="AUX_FIT", distribution_2d=sub_env%dist_2d, &
                                          pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.false.)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit_soft, sab=sub_env%sab_aux_fit, &
                                             basis_type="AUX_FIT_SOFT", distribution_2d=sub_env%dist_2d, &
                                             pw_env=sub_env%pw_env, qs_env=qs_env, &
                                             skip_load_balance=qs_control%skip_load_balance_distributed, &
                                             reorder_grid_ranks=.false.)
               END IF
            END IF

            ! put the ground-state grid parameters back
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL restore_qs_mgrid(qs_control, mgrid_saved)
         ELSE
            ! reuse the global plane-wave environment and ground-state lists
            CALL pw_env_retain(pw_env_global)
            sub_env%pw_env => pw_env_global

            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, &
                            sab_orb=sub_env%sab_orb, task_list=sub_env%task_list_orb)
            IF (dft_control%do_admm) THEN
               CALL get_admm_env(admm_env, sab_aux_fit=sub_env%sab_aux_fit, &
                                 task_list_aux_fit=sub_env%task_list_aux_fit)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  sub_env%task_list_aux_fit_soft => admm_env%admm_gapw_env%task_list
               END IF
            END IF
            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, task_list_soft=sub_env%task_list_orb_soft)
            END IF
         END IF

         ! GAPW initializations
         IF (dft_control%qs_control%gapw) THEN
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            natom=natom, &
                            qs_kind_set=qs_kind_set)

            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)

            ! NOTE(review): zcore=0.0_dp presumably suppresses the core-charge
            ! contribution for the response density — confirm against init_rho0()
            CALL init_rho0(sub_env%local_rho_set, qs_env, dft_control%qs_control%gapw_control, &
                           zcore=0.0_dp)
            CALL rho0_s_grid_create(sub_env%pw_env, sub_env%local_rho_set%rho0_mpole)
            CALL hartree_local_create(sub_env%hartree_local)
            CALL init_coulomb_local(sub_env%hartree_local, natom)
         ELSEIF (dft_control%qs_control%gapw_xc) THEN
            ! GAPW_XC needs only the atomic densities, no local Hartree terms
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            qs_kind_set=qs_kind_set)
            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)
         END IF

         ! ADMM/GAPW
         IF (dft_control%do_admm) THEN
            IF (dft_control%qs_control%gapw .OR. dft_control%qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set)
               CALL local_rho_set_create(sub_env%local_rho_set_admm)
               CALL allocate_rho_atom_internals(sub_env%local_rho_set_admm%rho_atom_set, atomic_kind_set, &
                                                admm_env%admm_gapw_env%admm_kind_set, &
                                                dft_control, sub_env%para_env)
            END IF
         END IF

      ELSE IF (kernel == tddfpt_kernel_stda) THEN
         ! sTDA kernel: no grids required, only the DBCSR distribution and
         ! the primary-basis neighbour list; subgroups are not supported
         sub_env%is_mgrid = .false.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft)
         NULLIFY (sub_env%task_list_aux_fit, sub_env%task_list_aux_fit_soft)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            cpabort('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE IF (kernel == tddfpt_kernel_none) THEN
         ! kernel-free calculation: same minimal setup as sTDA
         sub_env%is_mgrid = .false.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft)
         NULLIFY (sub_env%task_list_aux_fit, sub_env%task_list_aux_fit_soft)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            cpabort('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      ELSE
         cpabort("Unknown kernel type")
      END IF

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_init
434
435! **************************************************************************************************
436!> \brief Release parallel group environment
437!> \param sub_env parallel group environment (modified on exit)
438!> \par History
439!> * 01.2017 created [Sergey Chulkov]
440! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_release(sub_env)
      TYPE(tddfpt_subgroup_env_type), INTENT(inout)      :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routinen = 'tddfpt_sub_env_release'

      INTEGER                                            :: handle, i

      CALL timeset(routinen, handle)

      ! task/neighbour lists and distributions are only owned by sub_env when a
      ! group-specific multi-grid environment was built (is_mgrid = .TRUE.)
      IF (sub_env%is_mgrid) THEN
         IF (ASSOCIATED(sub_env%task_list_aux_fit)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit)

         IF (ASSOCIATED(sub_env%task_list_aux_fit_soft)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit_soft)

         IF (ASSOCIATED(sub_env%task_list_orb)) &
            CALL deallocate_task_list(sub_env%task_list_orb)

         IF (ASSOCIATED(sub_env%task_list_orb_soft)) &
            CALL deallocate_task_list(sub_env%task_list_orb_soft)

         CALL release_neighbor_list_sets(sub_env%sab_aux_fit)
         CALL release_neighbor_list_sets(sub_env%sab_orb)

         ! the DBCSR distribution object itself is heap-allocated by
         ! tddfpt_build_distribution_2d(), so release and deallocate
         IF (ASSOCIATED(sub_env%dbcsr_dist)) THEN
            CALL dbcsr_distribution_release(sub_env%dbcsr_dist)
            DEALLOCATE (sub_env%dbcsr_dist)
         END IF

         IF (ASSOCIATED(sub_env%dist_2d)) &
            CALL distribution_2d_release(sub_env%dist_2d)
      END IF

      ! GAPW
      IF (ASSOCIATED(sub_env%local_rho_set)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set)
      END IF
      IF (ASSOCIATED(sub_env%hartree_local)) THEN
         CALL hartree_local_release(sub_env%hartree_local)
      END IF
      IF (ASSOCIATED(sub_env%local_rho_set_admm)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set_admm)
      END IF

      ! if TDDFPT-specific plane-wave environment has not been requested,
      ! the pointers sub_env%dbcsr_dist, sub_env%sab_*, and sub_env%task_list_*
      ! point to the corresponding ground-state variables from qs_env
      ! and should not be deallocated

      ! reference-counted: safe to call whether pw_env is owned or aliased
      CALL pw_env_release(sub_env%pw_env)

      sub_env%is_mgrid = .false.

      ! the ADMM A matrix is a group-local copy only when the communicator was
      ! split; otherwise it aliases admm_env%A and must stay alive
      IF (sub_env%is_split .AND. ASSOCIATED(sub_env%admm_A)) THEN
         CALL cp_fm_release(sub_env%admm_A)
         DEALLOCATE (sub_env%admm_A)
         NULLIFY (sub_env%admm_A)
      END IF

      ! likewise, the replicated MO matrices carry their own data only after a split;
      ! without a split they are shallow copies of the global matrices
      IF (sub_env%is_split) THEN
         DO i = SIZE(sub_env%mos_occ), 1, -1
            CALL cp_fm_release(sub_env%mos_occ(i))
         END DO
         DO i = SIZE(sub_env%mos_active), 1, -1
            CALL cp_fm_release(sub_env%mos_active(i))
         END DO
      END IF
      DEALLOCATE (sub_env%mos_occ)
      DEALLOCATE (sub_env%mos_active)

      ! reference-counted releases; balanced by retain()/create in init
      CALL cp_blacs_env_release(sub_env%blacs_env)
      CALL mp_para_env_release(sub_env%para_env)

      IF (ALLOCATED(sub_env%group_distribution)) &
         DEALLOCATE (sub_env%group_distribution)

      sub_env%is_split = .false.

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_release
523
524! **************************************************************************************************
525!> \brief Replace the global multi-grid related parameters in qs_control by the ones given in the
526!> TDDFPT/MGRID subsection. The original parameters are stored into the 'mgrid_saved'
527!> variable.
528!> \param qs_control Quickstep control parameters (modified on exit)
529!> \param tddfpt_control TDDFPT control parameters
530!> \param mgrid_saved structure to hold global MGRID-related parameters (initialised on exit)
531!> \par History
532!> * 09.2016 created [Sergey Chulkov]
533!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
534!> \note the code to build the 'e_cutoff' list was taken from the subroutine read_mgrid_section()
535! **************************************************************************************************
536 SUBROUTINE init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)
537 TYPE(qs_control_type), POINTER :: qs_control
538 TYPE(tddfpt2_control_type), POINTER :: tddfpt_control
539 TYPE(mgrid_saved_parameters), INTENT(out) :: mgrid_saved
540
541 CHARACTER(LEN=*), PARAMETER :: routinen = 'init_tddfpt_mgrid'
542
543 INTEGER :: handle, igrid, ngrids
544
545 CALL timeset(routinen, handle)
546
547 ! ++ save global plane-wave grid parameters to the variable 'mgrid_saved'
548 mgrid_saved%commensurate_mgrids = qs_control%commensurate_mgrids
549 mgrid_saved%realspace_mgrids = qs_control%realspace_mgrids
550 mgrid_saved%skip_load_balance = qs_control%skip_load_balance_distributed
551 mgrid_saved%cutoff = qs_control%cutoff
552 mgrid_saved%progression_factor = qs_control%progression_factor
553 mgrid_saved%relative_cutoff = qs_control%relative_cutoff
554 mgrid_saved%e_cutoff => qs_control%e_cutoff
555
556 ! ++ set parameters from 'tddfpt_control' as default ones for all newly allocated plane-wave grids
557 qs_control%commensurate_mgrids = tddfpt_control%mgrid_commensurate_mgrids
558 qs_control%realspace_mgrids = tddfpt_control%mgrid_realspace_mgrids
559 qs_control%skip_load_balance_distributed = tddfpt_control%mgrid_skip_load_balance
560 qs_control%cutoff = tddfpt_control%mgrid_cutoff
561 qs_control%progression_factor = tddfpt_control%mgrid_progression_factor
562 qs_control%relative_cutoff = tddfpt_control%mgrid_relative_cutoff
563
564 ALLOCATE (qs_control%e_cutoff(tddfpt_control%mgrid_ngrids))
565 ngrids = tddfpt_control%mgrid_ngrids
566 IF (ASSOCIATED(tddfpt_control%mgrid_e_cutoff)) THEN
567 ! following read_mgrid_section() there is a magic scale factor there (0.5_dp)
568 DO igrid = 1, ngrids
569 qs_control%e_cutoff(igrid) = tddfpt_control%mgrid_e_cutoff(igrid)*0.5_dp
570 END DO
571 ! ++ round 'qs_control%cutoff' upward to the nearest sub-grid's cutoff value;
572 ! here we take advantage of the fact that the array 'e_cutoff' has been sorted in descending order
573 DO igrid = ngrids, 1, -1
574 IF (qs_control%cutoff <= qs_control%e_cutoff(igrid)) THEN
575 qs_control%cutoff = qs_control%e_cutoff(igrid)
576 EXIT
577 END IF
578 END DO
579 ! igrid == 0 if qs_control%cutoff is larger than the largest manually provided cutoff value;
580 ! use the largest actual value
581 IF (igrid <= 0) &
582 qs_control%cutoff = qs_control%e_cutoff(1)
583 ELSE
584 qs_control%e_cutoff(1) = qs_control%cutoff
585 DO igrid = 2, ngrids
586 qs_control%e_cutoff(igrid) = qs_control%e_cutoff(igrid - 1)/qs_control%progression_factor
587 END DO
588 END IF
589
590 CALL timestop(handle)
591 END SUBROUTINE init_tddfpt_mgrid
592
593! **************************************************************************************************
594!> \brief Restore the global multi-grid related parameters stored in the 'mgrid_saved' variable.
595!> \param qs_control Quickstep control parameters (modified on exit)
596!> \param mgrid_saved structure that holds global MGRID-related parameters
597!> \par History
598!> * 09.2016 created [Sergey Chulkov]
599! **************************************************************************************************
600 SUBROUTINE restore_qs_mgrid(qs_control, mgrid_saved)
601 TYPE(qs_control_type), POINTER :: qs_control
602 TYPE(mgrid_saved_parameters), INTENT(in) :: mgrid_saved
603
604 CHARACTER(LEN=*), PARAMETER :: routinen = 'restore_qs_mgrid'
605
606 INTEGER :: handle
607
608 CALL timeset(routinen, handle)
609
610 IF (ASSOCIATED(qs_control%e_cutoff)) &
611 DEALLOCATE (qs_control%e_cutoff)
612
613 qs_control%commensurate_mgrids = mgrid_saved%commensurate_mgrids
614 qs_control%realspace_mgrids = mgrid_saved%realspace_mgrids
615 qs_control%skip_load_balance_distributed = mgrid_saved%skip_load_balance
616 qs_control%cutoff = mgrid_saved%cutoff
617 qs_control%progression_factor = mgrid_saved%progression_factor
618 qs_control%relative_cutoff = mgrid_saved%relative_cutoff
619 qs_control%e_cutoff => mgrid_saved%e_cutoff
620
621 CALL timestop(handle)
622 END SUBROUTINE restore_qs_mgrid
623
624! **************************************************************************************************
625!> \brief Distribute atoms across the two-dimensional grid of processors.
626!> \param distribution_2d new two-dimensional distribution of pairs of particles
627!> (allocated and initialised on exit)
628!> \param dbcsr_dist new DBCSR distribution (allocated and initialised on exit)
629!> \param blacs_env BLACS parallel environment
630!> \param qs_env Quickstep environment
631!> \par History
632!> * 09.2016 created [Sergey Chulkov]
633!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
634! **************************************************************************************************
   SUBROUTINE tddfpt_build_distribution_2d(distribution_2d, dbcsr_dist, blacs_env, qs_env)
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(dbcsr_distribution_type), POINTER             :: dbcsr_dist
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env
      TYPE(qs_environment_type), POINTER                 :: qs_env

      CHARACTER(LEN=*), PARAMETER :: routinen = 'tddfpt_build_distribution_2d'

      INTEGER                                            :: handle
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cell_type), POINTER                           :: cell
      TYPE(molecule_kind_type), DIMENSION(:), POINTER    :: molecule_kind_set
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecule_set
      TYPE(particle_type), DIMENSION(:), POINTER         :: particle_set
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(section_vals_type), POINTER                   :: input

      CALL timeset(routinen, handle)

      ! fetch the (global) system description needed to distribute the particles
      CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
                      molecule_kind_set=molecule_kind_set, molecule_set=molecule_set, &
                      particle_set=particle_set, qs_kind_set=qs_kind_set)

      ! distribute_molecules_2d() allocates and initialises distribution_2d;
      ! the distribution is group-specific via the given blacs_env
      NULLIFY (distribution_2d)
      CALL distribute_molecules_2d(cell=cell, &
                                   atomic_kind_set=atomic_kind_set, &
                                   particle_set=particle_set, &
                                   qs_kind_set=qs_kind_set, &
                                   molecule_kind_set=molecule_kind_set, &
                                   molecule_set=molecule_set, &
                                   distribution_2d=distribution_2d, &
                                   blacs_env=blacs_env, &
                                   force_env_section=input)

      ! derive the matching DBCSR distribution; the heap allocation here is
      ! balanced by the DEALLOCATE in tddfpt_sub_env_release()
      ALLOCATE (dbcsr_dist)
      CALL cp_dbcsr_dist2d_to_dist(distribution_2d, dbcsr_dist)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_distribution_2d
674
675! **************************************************************************************************
676!> \brief Build task and neighbour lists for the given plane wave environment and basis set.
677!> \param task_list new task list (allocated and initialised on exit)
678!> \param sab new list of neighbours (allocated and initialised on exit)
679!> \param basis_type type of the basis set
680!> \param distribution_2d two-dimensional distribution of pairs of particles
681!> \param pw_env plane wave environment
682!> \param qs_env Quickstep environment
683!> \param skip_load_balance do not perform load balancing
684!> \param reorder_grid_ranks re-optimise grid ranks and re-create the real-space grid descriptor
685!> as well as grids
686!> \par History
687!> * 09.2016 created [Sergey Chulkov]
688!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
689! **************************************************************************************************
690 SUBROUTINE tddfpt_build_tasklist(task_list, sab, basis_type, distribution_2d, pw_env, qs_env, &
691 skip_load_balance, reorder_grid_ranks)
692 TYPE(task_list_type), POINTER :: task_list
693 TYPE(neighbor_list_set_p_type), DIMENSION(:), &
694 POINTER :: sab
695 CHARACTER(len=*), INTENT(in) :: basis_type
696 TYPE(distribution_2d_type), POINTER :: distribution_2d
697 TYPE(pw_env_type), POINTER :: pw_env
698 TYPE(qs_environment_type), POINTER :: qs_env
699 LOGICAL, INTENT(in) :: skip_load_balance, reorder_grid_ranks
700
701 CHARACTER(LEN=*), PARAMETER :: routinen = 'tddfpt_build_tasklist'
702
703 INTEGER :: handle, ikind, nkinds
704 LOGICAL, ALLOCATABLE, DIMENSION(:) :: orb_present
705 REAL(kind=dp) :: subcells
706 REAL(kind=dp), ALLOCATABLE, DIMENSION(:) :: orb_radius
707 REAL(kind=dp), ALLOCATABLE, DIMENSION(:, :) :: pair_radius
708 TYPE(atomic_kind_type), DIMENSION(:), POINTER :: atomic_kind_set
709 TYPE(cell_type), POINTER :: cell
710 TYPE(distribution_1d_type), POINTER :: local_particles
711 TYPE(gto_basis_set_type), POINTER :: orb_basis_set
712 TYPE(local_atoms_type), ALLOCATABLE, DIMENSION(:) :: atom2d
713 TYPE(molecule_type), DIMENSION(:), POINTER :: molecule_set
714 TYPE(particle_type), DIMENSION(:), POINTER :: particle_set
715 TYPE(qs_kind_type), DIMENSION(:), POINTER :: qs_kind_set
716 TYPE(qs_ks_env_type), POINTER :: ks_env
717 TYPE(section_vals_type), POINTER :: input
718
719 CALL timeset(routinen, handle)
720
721 CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
722 ks_env=ks_env, local_particles=local_particles, molecule_set=molecule_set, &
723 particle_set=particle_set, qs_kind_set=qs_kind_set)
724
725 nkinds = SIZE(atomic_kind_set)
726
727 IF (.NOT. (ASSOCIATED(sab))) THEN
728 ALLOCATE (atom2d(nkinds))
729 CALL atom2d_build(atom2d, local_particles, distribution_2d, atomic_kind_set, &
730 molecule_set, molecule_only=.false., particle_set=particle_set)
731
732 ALLOCATE (orb_present(nkinds))
733 ALLOCATE (orb_radius(nkinds))
734 ALLOCATE (pair_radius(nkinds, nkinds))
735
736 DO ikind = 1, nkinds
737 CALL get_qs_kind(qs_kind_set(ikind), basis_set=orb_basis_set, basis_type=basis_type)
738 IF (ASSOCIATED(orb_basis_set)) THEN
739 orb_present(ikind) = .true.
740 CALL get_gto_basis_set(gto_basis_set=orb_basis_set, kind_radius=orb_radius(ikind))
741 ELSE
742 orb_present(ikind) = .false.
743 orb_radius(ikind) = 0.0_dp
744 END IF
745 END DO
746
747 CALL pair_radius_setup(orb_present, orb_present, orb_radius, orb_radius, pair_radius)
748
749 NULLIFY (sab)
750 CALL section_vals_val_get(input, "DFT%SUBCELLS", r_val=subcells)
751 CALL build_neighbor_lists(sab, particle_set, atom2d, cell, pair_radius, &
752 mic=.false., subcells=subcells, molecular=.false., nlname="sab_orb")
753
754 CALL atom2d_cleanup(atom2d)
755 DEALLOCATE (atom2d, orb_present, orb_radius, pair_radius)
756 END IF
757
758 CALL allocate_task_list(task_list)
759 CALL generate_qs_task_list(ks_env, task_list, basis_type=basis_type, &
760 reorder_rs_grid_ranks=reorder_grid_ranks, &
761 skip_load_balance_distributed=skip_load_balance, &
762 pw_env_external=pw_env, sab_orb_external=sab)
763
764 CALL timestop(handle)
765 END SUBROUTINE tddfpt_build_tasklist
766
767! **************************************************************************************************
768!> \brief Create a DBCSR matrix based on a template matrix, distribution object, and the list of
769!> neighbours.
770!> \param matrix matrix to create
771!> \param template template matrix
772!> \param dbcsr_dist DBCSR distribution
773!> \param sab list of neighbours
774!> \par History
775!> * 09.2016 created [Sergey Chulkov]
776!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
777! **************************************************************************************************
778 SUBROUTINE tddfpt_dbcsr_create_by_dist(matrix, template, dbcsr_dist, sab)
779 TYPE(dbcsr_type), POINTER :: matrix, template
780 TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist
781 TYPE(neighbor_list_set_p_type), DIMENSION(:), &
782 POINTER :: sab
783
784 CHARACTER(LEN=*), PARAMETER :: routinen = 'tddfpt_dbcsr_create_by_dist'
785
786 CHARACTER :: matrix_type
787 CHARACTER(len=default_string_length) :: matrix_name
788 INTEGER :: handle
789 INTEGER, DIMENSION(:), POINTER :: col_blk_sizes, row_blk_sizes
790
791 CALL timeset(routinen, handle)
792
793 cpassert(ASSOCIATED(template))
794 CALL dbcsr_get_info(template, row_blk_size=row_blk_sizes, col_blk_size=col_blk_sizes, &
795 name=matrix_name, matrix_type=matrix_type)
796
797 IF (ASSOCIATED(matrix)) THEN
798 CALL dbcsr_release(matrix)
799 ELSE
800 ALLOCATE (matrix)
801 END IF
802
803 CALL dbcsr_create(matrix, matrix_name, dbcsr_dist, matrix_type, row_blk_sizes, col_blk_sizes)
804 CALL cp_dbcsr_alloc_block_from_nbl(matrix, sab)
805
806 CALL timestop(handle)
807
808 END SUBROUTINE tddfpt_dbcsr_create_by_dist
809
810! **************************************************************************************************
811!> \brief Replicate a globally distributed matrix across all sub-groups. At the end
812!> every sub-group will hold a local copy of the original globally distributed matrix.
813!>
814!> |--------------------|
815!> fm_src | 0 1 2 3 |
816!> |--------------------|
817!> / MPI ranks \
818!> |/_ _\|
819!> |--------------------| |--------------------|
820!> fm_dest_subgroup0 | 0 1 | | 2 3 | fm_dest_subgroup1
821!> |--------------------| |--------------------|
822!> subgroup 0 subgroup 1
823!>
824!> \param fm_src globally distributed matrix to replicate
825!> \param fm_dest_sub subgroup-specific copy of the replicated matrix
826!> \param sub_env subgroup environment
827!> \par History
828!> * 09.2016 created [Sergey Chulkov]
829!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
830! **************************************************************************************************
831 SUBROUTINE tddfpt_fm_replicate_across_subgroups(fm_src, fm_dest_sub, sub_env)
832 TYPE(cp_fm_type), INTENT(IN) :: fm_src, fm_dest_sub
833 TYPE(tddfpt_subgroup_env_type), INTENT(in) :: sub_env
834
835 CHARACTER(LEN=*), PARAMETER :: routinen = 'tddfpt_fm_replicate_across_subgroups'
836
837 INTEGER :: handle, igroup, igroup_local, ncols_global_dest, ncols_global_src, ngroups, &
838 nrows_global_dest, nrows_global_src
839 TYPE(cp_blacs_env_type), POINTER :: blacs_env_global
840 TYPE(cp_fm_type) :: fm_null
841 TYPE(mp_para_env_type), POINTER :: para_env_global
842
843 IF (sub_env%is_split) THEN
844 CALL timeset(routinen, handle)
845
846 CALL cp_fm_get_info(fm_src, nrow_global=nrows_global_src, ncol_global=ncols_global_src, &
847 context=blacs_env_global, para_env=para_env_global)
848 CALL cp_fm_get_info(fm_dest_sub, nrow_global=nrows_global_dest, ncol_global=ncols_global_dest)
849
850 IF (debug_this_module) THEN
851 cpassert(nrows_global_src == nrows_global_dest)
852 cpassert(ncols_global_src == ncols_global_dest)
853 END IF
854
855 igroup_local = sub_env%group_distribution(para_env_global%mepos)
856 ngroups = sub_env%ngroups
857
858 DO igroup = 0, ngroups - 1
859 IF (igroup == igroup_local) THEN
860 CALL cp_fm_copy_general(fm_src, fm_dest_sub, para_env_global)
861 ELSE
862 CALL cp_fm_copy_general(fm_src, fm_null, para_env_global)
863 END IF
864 END DO
865
866 CALL timestop(handle)
867 END IF
869END MODULE qs_tddfpt2_subgroups
870
Types and set/get functions for auxiliary density matrix methods.
Definition admm_types.F:15
subroutine, public get_admm_env(admm_env, mo_derivs_aux_fit, mos_aux_fit, sab_aux_fit, sab_aux_fit_asymm, sab_aux_fit_vs_orb, matrix_s_aux_fit, matrix_s_aux_fit_kp, matrix_s_aux_fit_vs_orb, matrix_s_aux_fit_vs_orb_kp, task_list_aux_fit, matrix_ks_aux_fit, matrix_ks_aux_fit_kp, matrix_ks_aux_fit_im, matrix_ks_aux_fit_dft, matrix_ks_aux_fit_hfx, matrix_ks_aux_fit_dft_kp, matrix_ks_aux_fit_hfx_kp, rho_aux_fit, rho_aux_fit_buffer, admm_dm)
Get routine for the ADMM env.
Definition admm_types.F:593
Define the atomic kind types and their sub types.
subroutine, public get_gto_basis_set(gto_basis_set, name, aliases, norm_type, kind_radius, ncgf, nset, nsgf, cgf_symbol, sgf_symbol, norm_cgf, set_radius, lmax, lmin, lx, ly, lz, m, ncgf_set, npgf, nsgf_set, nshell, cphi, pgf_radius, sphi, scon, zet, first_cgf, first_sgf, l, last_cgf, last_sgf, n, gcc, maxco, maxl, maxpgf, maxsgf_set, maxshell, maxso, nco_sum, npgf_sum, nshell_sum, maxder, short_kind_radius, npgf_seg_sum)
...
Handles all functions related to the CELL.
Definition cell_types.F:15
methods related to the blacs parallel environment
subroutine, public cp_blacs_env_release(blacs_env)
releases the given blacs_env
subroutine, public cp_blacs_env_create(blacs_env, para_env, blacs_grid_layout, blacs_repeatable, row_major, grid_2d)
allocates and initializes a type that represent a blacs context
Defines control structures, which contain the parameters and the settings for the DFT-based calculati...
subroutine, public dbcsr_distribution_release(dist)
...
subroutine, public dbcsr_get_info(matrix, nblkrows_total, nblkcols_total, nfullrows_total, nfullcols_total, nblkrows_local, nblkcols_local, nfullrows_local, nfullcols_local, my_prow, my_pcol, local_rows, local_cols, proc_row_dist, proc_col_dist, row_blk_size, col_blk_size, row_blk_offset, col_blk_offset, distribution, name, matrix_type, group)
...
subroutine, public dbcsr_release(matrix)
...
DBCSR operations in CP2K.
subroutine, public cp_dbcsr_dist2d_to_dist(dist2d, dist)
Creates a DBCSR distribution from a distribution_2d.
represent the structure of a full matrix
subroutine, public cp_fm_struct_create(fmstruct, para_env, context, nrow_global, ncol_global, nrow_block, ncol_block, descriptor, first_p_pos, local_leading_dimension, template_fmstruct, square_blocks, force_block)
allocates and initializes a full matrix structure
subroutine, public cp_fm_struct_release(fmstruct)
releases a full matrix structure
represent a full matrix distributed on many processors
Definition cp_fm_types.F:15
subroutine, public cp_fm_copy_general(source, destination, para_env)
General copy of a fm matrix to another fm matrix. Uses non-blocking MPI rather than ScaLAPACK.
subroutine, public cp_fm_get_info(matrix, name, nrow_global, ncol_global, nrow_block, ncol_block, nrow_local, ncol_local, row_indices, col_indices, local_data, context, nrow_locals, ncol_locals, matrix_struct, para_env)
returns all kind of information about the full matrix
subroutine, public cp_fm_create(matrix, matrix_struct, name, use_sp, nrow, ncol, set_zero)
creates a new full matrix with the given structure
stores a lists of integer that are local to a processor. The idea is that these integers represent ob...
stores a mapping of 2D info (e.g. matrix) on a 2D processor distribution (i.e. blacs grid) where cpus...
subroutine, public distribution_2d_release(distribution_2d)
...
Distribution methods for atoms, particles, or molecules.
subroutine, public distribute_molecules_2d(cell, atomic_kind_set, particle_set, qs_kind_set, molecule_kind_set, molecule_set, distribution_2d, blacs_env, force_env_section)
Distributes the particle pairs creating a 2d distribution optimally suited for quickstep.
subroutine, public init_coulomb_local(hartree_local, natom)
...
subroutine, public hartree_local_release(hartree_local)
...
subroutine, public hartree_local_create(hartree_local)
...
collects all constants needed in input so that they can be used without circular dependencies
integer, parameter, public tddfpt_kernel_none
integer, parameter, public tddfpt_kernel_full
integer, parameter, public tddfpt_kernel_stda
objects that represent the structure of input sections and the data contained in an input section
subroutine, public section_vals_val_get(section_vals, keyword_name, i_rep_section, i_rep_val, n_rep_val, val, l_val, i_val, r_val, c_val, l_vals, i_vals, r_vals, c_vals, explicit)
returns the requested value
Defines the basic variable types.
Definition kinds.F:23
integer, parameter, public dp
Definition kinds.F:34
integer, parameter, public default_string_length
Definition kinds.F:57
Interface to the message passing library MPI.
subroutine, public mp_para_env_release(para_env)
releases the para object (to be called when you don't want anymore the shared copy of this object)
Define the molecule kind structure types and the corresponding functionality.
Define the data structure for the molecule information.
Define the data structure for the particle information.
methods of pw_env that have dependence on qs_env
subroutine, public pw_env_rebuild(pw_env, qs_env, external_para_env)
rebuilds the pw_env data (necessary if cell or cutoffs change)
subroutine, public pw_env_create(pw_env)
creates a pw_env, if qs_env is given calls pw_env_rebuild
container for various plainwaves related things
subroutine, public pw_env_retain(pw_env)
retains the pw_env (see doc/ReferenceCounting.html)
subroutine, public pw_env_release(pw_env, para_env)
releases the given pw_env (see doc/ReferenceCounting.html)
subroutine, public get_qs_env(qs_env, atomic_kind_set, qs_kind_set, cell, super_cell, cell_ref, use_ref_cell, kpoints, dft_control, mos, sab_orb, sab_all, qmmm, qmmm_periodic, mimic, sac_ae, sac_ppl, sac_lri, sap_ppnl, sab_vdw, sab_scp, sap_oce, sab_lrc, sab_se, sab_xtbe, sab_tbe, sab_core, sab_xb, sab_xtb_pp, sab_xtb_nonbond, sab_almo, sab_kp, sab_kp_nosym, sab_cneo, particle_set, energy, force, matrix_h, matrix_h_im, matrix_ks, matrix_ks_im, matrix_vxc, run_rtp, rtp, matrix_h_kp, matrix_h_im_kp, matrix_ks_kp, matrix_ks_im_kp, matrix_vxc_kp, kinetic_kp, matrix_s_kp, matrix_w_kp, matrix_s_ri_aux_kp, matrix_s, matrix_s_ri_aux, matrix_w, matrix_p_mp2, matrix_p_mp2_admm, rho, rho_xc, pw_env, ewald_env, ewald_pw, active_space, mpools, input, para_env, blacs_env, scf_control, rel_control, kinetic, qs_charges, vppl, xcint_weights, rho_core, rho_nlcc, rho_nlcc_g, ks_env, ks_qmmm_env, wf_history, scf_env, local_particles, local_molecules, distribution_2d, dbcsr_dist, molecule_kind_set, molecule_set, subsys, cp_subsys, oce, local_rho_set, rho_atom_set, task_list, task_list_soft, rho0_atom_set, rho0_mpole, rhoz_set, rhoz_cneo_set, ecoul_1c, rho0_s_rs, rho0_s_gs, rhoz_cneo_s_rs, rhoz_cneo_s_gs, do_kpoints, has_unit_metric, requires_mo_derivs, mo_derivs, mo_loc_history, nkind, natom, nelectron_total, nelectron_spin, efield, neighbor_list_id, linres_control, xas_env, virial, cp_ddapc_env, cp_ddapc_ewald, outer_scf_history, outer_scf_ihistory, x_data, et_coupling, dftb_potential, results, se_taper, se_store_int_env, se_nddo_mpole, se_nonbond_env, admm_env, lri_env, lri_density, exstate_env, ec_env, harris_env, dispersion_env, gcp_env, vee, rho_external, external_vxc, mask, mp2_env, bs_env, kg_env, wanniercentres, atprop, ls_scf_env, do_transport, transport_env, v_hartree_rspace, s_mstruct_changed, rho_changed, potential_changed, forces_up_to_date, mscfg_env, almo_scf_env, gradient_history, variable_history, embed_pot, spin_embed_pot, polar_env, mos_last_converged, eeq, rhs, 
do_rixs, tb_tblite)
Get the QUICKSTEP environment.
Define the quickstep kind type and their sub types.
subroutine, public get_qs_kind(qs_kind, basis_set, basis_type, ncgf, nsgf, all_potential, tnadd_potential, gth_potential, sgp_potential, upf_potential, cneo_potential, se_parameter, dftb_parameter, xtb_parameter, dftb3_param, zatom, zeff, elec_conf, mao, lmax_dftb, alpha_core_charge, ccore_charge, core_charge, core_charge_radius, paw_proj_set, paw_atom, hard_radius, hard0_radius, max_rad_local, covalent_radius, vdw_radius, gpw_type_forced, harmonics, max_iso_not0, max_s_harm, grid_atom, ngrid_ang, ngrid_rad, lmax_rho0, dft_plus_u_atom, l_of_dft_plus_u, n_of_dft_plus_u, u_minus_j, u_of_dft_plus_u, j_of_dft_plus_u, alpha_of_dft_plus_u, beta_of_dft_plus_u, j0_of_dft_plus_u, occupation_of_dft_plus_u, dispersion, bs_occupation, magnetization, no_optimize, addel, laddel, naddel, orbitals, max_scf, eps_scf, smear, u_ramping, u_minus_j_target, eps_u_ramping, init_u_ramping_each_scf, reltmat, ghost, monovalent, floating, name, element_symbol, pao_basis_size, pao_model_file, pao_potentials, pao_descriptors, nelec)
Get attributes of an atomic kind.
subroutine, public local_rho_set_create(local_rho_set)
...
subroutine, public local_rho_set_release(local_rho_set)
...
Define the neighbor list data types and the corresponding functionality.
subroutine, public release_neighbor_list_sets(nlists)
releases an array of neighbor_list_sets
Generate the atomic neighbor lists.
subroutine, public atom2d_cleanup(atom2d)
free the internals of atom2d
subroutine, public pair_radius_setup(present_a, present_b, radius_a, radius_b, pair_radius, prmin)
...
subroutine, public build_neighbor_lists(ab_list, particle_set, atom, cell, pair_radius, subcells, mic, symmetric, molecular, subset_of_mol, current_subset, operator_type, nlname, atomb_to_keep)
Build simple pair neighbor lists.
subroutine, public atom2d_build(atom2d, distribution_1d, distribution_2d, atomic_kind_set, molecule_set, molecule_only, particle_set)
Build some distribution structure of atoms, refactored from build_qs_neighbor_lists.
subroutine, public rho0_s_grid_create(pw_env, rho0_mpole)
...
subroutine, public init_rho0(local_rho_set, qs_env, gapw_control, zcore)
...
subroutine, public allocate_rho_atom_internals(rho_atom_set, atomic_kind_set, qs_kind_set, dft_control, para_env)
...
subroutine, public tddfpt_sub_env_init(sub_env, qs_env, mos_occ, mos_active, kernel)
Split MPI communicator to create a set of parallel (sub)groups.
subroutine, public tddfpt_sub_env_release(sub_env)
Release parallel group environment.
subroutine, public tddfpt_fm_replicate_across_subgroups(fm_src, fm_dest_sub, sub_env)
Replicate a globally distributed matrix across all sub-groups. At the end every sub-group will hold a...
subroutine, public tddfpt_dbcsr_create_by_dist(matrix, template, dbcsr_dist, sab)
Create a DBCSR matrix based on a template matrix, distribution object, and the list of neighbours.
generate the tasks lists used by collocate and integrate routines
subroutine, public generate_qs_task_list(ks_env, task_list, basis_type, reorder_rs_grid_ranks, skip_load_balance_distributed, pw_env_external, sab_orb_external)
...
types for task lists
subroutine, public deallocate_task_list(task_list)
deallocates the components and the object itself
subroutine, public allocate_task_list(task_list)
allocates and initialised the components of the task_list_type
stores some data used in wavefunction fitting
Definition admm_types.F:120
Provides all information about an atomic kind.
Type defining parameters related to the simulation cell.
Definition cell_types.F:60
represent a blacs multidimensional parallel environment (for the mpi corrispective see cp_paratypes/m...
keeps the information about the structure of a full matrix
represent a full matrix
structure to store local (to a processor) ordered lists of integers.
distributes pairs on a 2d grid of processors
stores all the informations relevant to an mpi environment
contained for different pw related things
Provides all information about a quickstep kind.
calculation environment to calculate the ks matrix, holds all the needed vars. assumes that the core ...