(git:ed6f26b)
Loading...
Searching...
No Matches
qs_tddfpt2_subgroups.F
Go to the documentation of this file.
1!--------------------------------------------------------------------------------------------------!
2! CP2K: A general program to perform molecular dynamics simulations !
3! Copyright 2000-2025 CP2K developers group <https://cp2k.org> !
4! !
5! SPDX-License-Identifier: GPL-2.0-or-later !
6!--------------------------------------------------------------------------------------------------!
7
9 USE admm_types, ONLY: admm_type,&
14 USE cell_types, ONLY: cell_type
21 USE cp_dbcsr_api, ONLY: dbcsr_create,&
50 USE kinds, ONLY: default_string_length,&
51 dp
57 USE pw_env_methods, ONLY: pw_env_create,&
59 USE pw_env_types, ONLY: pw_env_release,&
64 USE qs_kind_types, ONLY: get_qs_kind,&
78 USE qs_rho0_methods, ONLY: init_rho0
84#include "./base/base_uses.f90"
85
86 IMPLICIT NONE
87
88 PRIVATE
89
90 CHARACTER(len=*), PARAMETER, PRIVATE :: moduleN = 'qs_tddfpt2_subgroups'
91 LOGICAL, PARAMETER, PRIVATE :: debug_this_module = .true.
92
96
97! **************************************************************************************************
98!> \brief Parallel (sub)group environment.
99!> \par History
100!> * 01.2017 created [Sergey Chulkov]
101! **************************************************************************************************
103 !> indicates that the global MPI communicator has been split into subgroups; if it is .FALSE.
104 !> certain components of the structure (blacs_env, para_env, admm_A, and mos_occ)
105 !> can still be accessed; in this case they simply point to the corresponding global variables
106 LOGICAL :: is_split = .false.
107 !> number of parallel groups
108 INTEGER :: ngroups = -1
109 !> group_distribution(0:ngroups-1) : a process with rank 'i' belongs to the parallel group
110 !> with index 'group_distribution(i)'
111 INTEGER, DIMENSION(:), ALLOCATABLE :: group_distribution
112 !> group-specific BLACS parallel environment
113 TYPE(cp_blacs_env_type), POINTER :: blacs_env => null()
114 !> group-specific MPI parallel environment
115 TYPE(mp_para_env_type), POINTER :: para_env => null()
116 !> occupied MOs stored in a matrix form [nao x nmo_occ(spin)] distributed across processes
117 !> in the parallel group
118 TYPE(cp_fm_type), ALLOCATABLE, DIMENSION(:) :: mos_occ
119 !> group-specific copy of the ADMM A matrix 'admm_type%A'
120 TYPE(cp_fm_type), POINTER :: admm_a => null()
121 !
122 !> indicates that a set of multi-grids has been allocated; if it is .FALSE. all the components
123 !> below point to the corresponding global variables and can be accessed
124 LOGICAL :: is_mgrid = .false.
125 !> group-specific DBCSR distribution
126 TYPE(dbcsr_distribution_type), POINTER :: dbcsr_dist => null()
127 !> group-specific two-dimensional distribution of pairs of particles
128 TYPE(distribution_2d_type), POINTER :: dist_2d => null()
129 !> group-specific plane wave environment
130 TYPE(pw_env_type), POINTER :: pw_env => null()
131 !> lists of neighbours in auxiliary and primary basis sets
133 DIMENSION(:), POINTER :: sab_aux_fit => null(), sab_orb => null()
134 !> task lists in auxiliary and primary basis sets
135 TYPE(task_list_type), POINTER :: task_list_aux_fit => null(), task_list_orb => null()
136 !> soft task lists in auxiliary and primary basis sets
137 TYPE(task_list_type), POINTER :: task_list_aux_fit_soft => null(), task_list_orb_soft => null()
138 !> GAPW local atomic grids
139 TYPE(hartree_local_type), POINTER :: hartree_local => null()
140 TYPE(local_rho_type), POINTER :: local_rho_set => null()
141 TYPE(local_rho_type), POINTER :: local_rho_set_admm => null()
143
144! **************************************************************************************************
145!> \brief Structure to save global multi-grid related parameters.
146!> \par History
147!> * 09.2016 created [Sergey Chulkov]
148!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
149! **************************************************************************************************
! **************************************************************************************************
!> \brief Snapshot of the global multi-grid parameters from qs_control, taken before they are
!>        temporarily overridden by the TDDFPT/MGRID input section and restored afterwards.
!> \par History
!>    * 09.2016 created [Sergey Chulkov]
!>    * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   TYPE mgrid_saved_parameters
      !> request commensurate multi-grids
      LOGICAL :: commensurate_mgrids = .FALSE.
      !> request real-space multi-grids
      LOGICAL :: realspace_mgrids = .FALSE.
      !> skip load balancing of distributed grids
      LOGICAL :: skip_load_balance = .FALSE.
      !> plane-wave cutoff of the finest grid level
      REAL(kind=dp) :: cutoff = 0.0_dp
      !> inverse scaling factor between successive grid levels
      REAL(kind=dp) :: progression_factor = 0.0_dp
      !> relative (Gaussian-mapping) cutoff
      REAL(kind=dp) :: relative_cutoff = 0.0_dp
      !> explicitly given per-level cutoff values (if any)
      REAL(kind=dp), DIMENSION(:), POINTER :: e_cutoff => null()
   END TYPE mgrid_saved_parameters
166
167CONTAINS
168
169! **************************************************************************************************
170!> \brief Split MPI communicator to create a set of parallel (sub)groups.
171!> \param sub_env parallel group environment (initialised on exit)
172!> \param qs_env Quickstep environment
173!> \param mos_occ ground state molecular orbitals in primary atomic basis set
174!> \param kernel Type of kernel (full/sTDA) that will be used
175!> \par History
176!> * 01.2017 (sub)group-related code has been moved here from the main subroutine tddfpt()
177!> [Sergey Chulkov]
178! **************************************************************************************************
! **************************************************************************************************
!> \brief Split the global MPI communicator into a set of parallel (sub)groups and set up all
!>        group-local objects (BLACS grids, replicated occupied MOs, ADMM A matrix, plane-wave
!>        environment, distributions, neighbour lists and task lists) needed by TDDFPT.
!> \param sub_env  parallel group environment (initialised on exit)
!> \param qs_env   Quickstep environment
!> \param mos_occ  ground-state occupied molecular orbitals in the primary atomic basis set
!> \param kernel   type of kernel (full / sTDA / none) that will be used
!> \par History
!>    * 01.2017 (sub)group-related code has been moved here from the main subroutine tddfpt()
!>              [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_init(sub_env, qs_env, mos_occ, kernel)
      TYPE(tddfpt_subgroup_env_type), INTENT(out)        :: sub_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      TYPE(cp_fm_type), DIMENSION(:), INTENT(in)         :: mos_occ
      INTEGER, INTENT(in)                                :: kernel

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_init'

      INTEGER                                            :: handle, ispin, nao, nao_aux, natom, &
                                                            nmo_occ, nspins
      TYPE(admm_type), POINTER                           :: admm_env
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env_global
      TYPE(cp_fm_struct_type), POINTER                   :: fm_struct
      TYPE(dft_control_type), POINTER                    :: dft_control
      TYPE(mgrid_saved_parameters)                       :: mgrid_saved
      TYPE(mp_para_env_type), POINTER                    :: para_env_global
      TYPE(pw_env_type), POINTER                         :: pw_env_global
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control

      CALL timeset(routineN, handle)

      nspins = SIZE(mos_occ)

      CALL get_qs_env(qs_env, blacs_env=blacs_env_global, dft_control=dft_control, &
                      para_env=para_env_global, pw_env=pw_env_global)

      tddfpt_control => dft_control%tddfpt2_control
      qs_control => dft_control%qs_control

      ! split the global MPI communicator only when
      !  a) the number of processes per group has been requested explicitly (> 0), and
      !  b) the split would yield at least two subgroups
      sub_env%is_split = tddfpt_control%nprocs > 0 .AND. 2*tddfpt_control%nprocs <= para_env_global%num_pe

      ALLOCATE (sub_env%mos_occ(nspins))
      NULLIFY (sub_env%admm_A)

      IF (sub_env%is_split) THEN
         ! group-local MPI communicator; group_distribution(i) holds the group index of rank 'i'
         ALLOCATE (sub_env%group_distribution(0:para_env_global%num_pe - 1))

         ALLOCATE (sub_env%para_env)
         CALL sub_env%para_env%from_split(comm=para_env_global, ngroups=sub_env%ngroups, &
                                          group_distribution=sub_env%group_distribution, &
                                          subgroup_min_size=tddfpt_control%nprocs)

         ! create a new BLACS environment on top of the sub-communicator
         NULLIFY (sub_env%blacs_env)

         ! use the default (SQUARE) BLACS grid layout and non-repeatable BLACS collective operations
         ! by omitting optional parameters 'blacs_grid_layout' and 'blacs_repeatable'.
         ! Ideally we should take these parameters from the variables globenv%blacs_grid_layout and
         ! globenv%blacs_repeatable, however the global environment is not available
         ! from the subroutine 'qs_energies_properties'.
         CALL cp_blacs_env_create(sub_env%blacs_env, sub_env%para_env)

         NULLIFY (fm_struct)

         ! replicate the occupied MOs so that every subgroup holds its own copy
         DO ispin = 1, nspins
            CALL cp_fm_get_info(mos_occ(ispin), nrow_global=nao, ncol_global=nmo_occ)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao, ncol_global=nmo_occ, context=sub_env%blacs_env)
            CALL cp_fm_create(sub_env%mos_occ(ispin), fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=mos_occ(ispin), &
                                                      fm_dest_sub=sub_env%mos_occ(ispin), sub_env=sub_env)
         END DO

         ! group-local copy of the ADMM projection matrix A
         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            CALL cp_fm_get_info(admm_env%A, nrow_global=nao_aux, ncol_global=nao)
            CALL cp_fm_struct_create(fm_struct, nrow_global=nao_aux, ncol_global=nao, context=sub_env%blacs_env)
            ALLOCATE (sub_env%admm_A)
            CALL cp_fm_create(sub_env%admm_A, fm_struct)
            CALL cp_fm_struct_release(fm_struct)
            CALL tddfpt_fm_replicate_across_subgroups(fm_src=admm_env%A, fm_dest_sub=sub_env%admm_A, sub_env=sub_env)
         END IF
      ELSE
         ! no split: alias the global objects (retain to balance the final release)
         CALL para_env_global%retain()
         sub_env%para_env => para_env_global

         CALL blacs_env_global%retain()
         sub_env%blacs_env => blacs_env_global

         sub_env%mos_occ(:) = mos_occ(:)

         IF (dft_control%do_admm) THEN
            CALL get_qs_env(qs_env, admm_env=admm_env)
            sub_env%admm_A => admm_env%A
         END IF
      END IF

      SELECT CASE (kernel)
      CASE (tddfpt_kernel_full)
         ! a dedicated plane-wave environment is needed when the communicator has been split
         ! or when TDDFPT-specific multi-grids were requested explicitly
         sub_env%is_mgrid = sub_env%is_split .OR. tddfpt_control%mgrid_is_explicit

         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_aux_fit)
         NULLIFY (sub_env%task_list_orb_soft, sub_env%task_list_aux_fit_soft)

         IF (sub_env%is_mgrid) THEN
            ! temporarily override the global multi-grid parameters with the TDDFPT ones
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)

            NULLIFY (sub_env%pw_env)

            CALL pw_env_create(sub_env%pw_env)
            CALL pw_env_rebuild(sub_env%pw_env, qs_env, sub_env%para_env)

            CALL tddfpt_build_distribution_2d(distribution_2d=sub_env%dist_2d, dbcsr_dist=sub_env%dbcsr_dist, &
                                              blacs_env=sub_env%blacs_env, qs_env=qs_env)

            CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb, sab=sub_env%sab_orb, basis_type="ORB", &
                                       distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                       skip_load_balance=qs_control%skip_load_balance_distributed, &
                                       reorder_grid_ranks=.TRUE.)

            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_orb_soft, sab=sub_env%sab_orb, basis_type="ORB_SOFT", &
                                          distribution_2d=sub_env%dist_2d, pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.TRUE.)
            END IF

            IF (dft_control%do_admm) THEN
               CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit, sab=sub_env%sab_aux_fit, &
                                          basis_type="AUX_FIT", distribution_2d=sub_env%dist_2d, &
                                          pw_env=sub_env%pw_env, qs_env=qs_env, &
                                          skip_load_balance=qs_control%skip_load_balance_distributed, &
                                          reorder_grid_ranks=.FALSE.)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  CALL tddfpt_build_tasklist(task_list=sub_env%task_list_aux_fit_soft, sab=sub_env%sab_aux_fit, &
                                             basis_type="AUX_FIT_SOFT", distribution_2d=sub_env%dist_2d, &
                                             pw_env=sub_env%pw_env, qs_env=qs_env, &
                                             skip_load_balance=qs_control%skip_load_balance_distributed, &
                                             reorder_grid_ranks=.FALSE.)
               END IF
            END IF

            ! put back the saved global multi-grid parameters
            IF (tddfpt_control%mgrid_is_explicit) &
               CALL restore_qs_mgrid(qs_control, mgrid_saved)
         ELSE
            ! alias the ground-state objects instead of rebuilding them
            CALL pw_env_retain(pw_env_global)
            sub_env%pw_env => pw_env_global

            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, &
                            sab_orb=sub_env%sab_orb, task_list=sub_env%task_list_orb)
            IF (dft_control%do_admm) THEN
               CALL get_admm_env(admm_env, sab_aux_fit=sub_env%sab_aux_fit, &
                                 task_list_aux_fit=sub_env%task_list_aux_fit)
               IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
                  sub_env%task_list_aux_fit_soft => admm_env%admm_gapw_env%task_list
               END IF
            END IF
            IF (qs_control%gapw .OR. qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, task_list_soft=sub_env%task_list_orb_soft)
            END IF
         END IF

         ! GAPW initializations: local atomic grids and local Coulomb terms
         IF (dft_control%qs_control%gapw) THEN
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            natom=natom, &
                            qs_kind_set=qs_kind_set)

            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)

            CALL init_rho0(sub_env%local_rho_set, qs_env, dft_control%qs_control%gapw_control, &
                           zcore=0.0_dp)
            CALL rho0_s_grid_create(sub_env%pw_env, sub_env%local_rho_set%rho0_mpole)
            CALL hartree_local_create(sub_env%hartree_local)
            CALL init_coulomb_local(sub_env%hartree_local, natom)
         ELSEIF (dft_control%qs_control%gapw_xc) THEN
            CALL get_qs_env(qs_env, &
                            atomic_kind_set=atomic_kind_set, &
                            qs_kind_set=qs_kind_set)
            CALL local_rho_set_create(sub_env%local_rho_set)
            CALL allocate_rho_atom_internals(sub_env%local_rho_set%rho_atom_set, atomic_kind_set, &
                                             qs_kind_set, dft_control, sub_env%para_env)
         END IF

         ! combined ADMM/GAPW setup
         IF (dft_control%do_admm) THEN
            IF (dft_control%qs_control%gapw .OR. dft_control%qs_control%gapw_xc) THEN
               CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set)
               CALL local_rho_set_create(sub_env%local_rho_set_admm)
               CALL allocate_rho_atom_internals(sub_env%local_rho_set_admm%rho_atom_set, atomic_kind_set, &
                                                admm_env%admm_gapw_env%admm_kind_set, &
                                                dft_control, sub_env%para_env)
            END IF
         END IF

      CASE (tddfpt_kernel_stda, tddfpt_kernel_none)
         ! neither kernel needs a plane-wave environment; subgroups are not supported here
         sub_env%is_mgrid = .FALSE.
         NULLIFY (sub_env%dbcsr_dist, sub_env%dist_2d)
         NULLIFY (sub_env%sab_orb, sub_env%sab_aux_fit)
         NULLIFY (sub_env%task_list_orb, sub_env%task_list_orb_soft)
         NULLIFY (sub_env%task_list_aux_fit, sub_env%task_list_aux_fit_soft)
         NULLIFY (sub_env%pw_env)
         IF (sub_env%is_split) THEN
            CPABORT('Subsys option not available')
         ELSE
            CALL get_qs_env(qs_env, dbcsr_dist=sub_env%dbcsr_dist, sab_orb=sub_env%sab_orb)
         END IF
      CASE DEFAULT
         CPABORT("Unknown kernel type")
      END SELECT

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_init
407
408! **************************************************************************************************
409!> \brief Release parallel group environment
410!> \param sub_env parallel group environment (modified on exit)
411!> \par History
412!> * 01.2017 created [Sergey Chulkov]
413! **************************************************************************************************
! **************************************************************************************************
!> \brief Release the parallel group environment.
!> \param sub_env parallel group environment (modified on exit)
!> \par History
!>    * 01.2017 created [Sergey Chulkov]
!> \note Objects owned by the subgroup (is_mgrid / is_split) are deallocated; objects that merely
!>       alias ground-state data from qs_env are only un-referenced. The routine is now safe to
!>       call on an environment whose 'mos_occ' was never allocated (e.g. a second release).
! **************************************************************************************************
   SUBROUTINE tddfpt_sub_env_release(sub_env)
      TYPE(tddfpt_subgroup_env_type), INTENT(inout)      :: sub_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_sub_env_release'

      INTEGER                                            :: handle, i

      CALL timeset(routineN, handle)

      ! multi-grid related objects exist only if a TDDFPT-specific pw_env was built
      IF (sub_env%is_mgrid) THEN
         IF (ASSOCIATED(sub_env%task_list_aux_fit)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit)

         IF (ASSOCIATED(sub_env%task_list_aux_fit_soft)) &
            CALL deallocate_task_list(sub_env%task_list_aux_fit_soft)

         IF (ASSOCIATED(sub_env%task_list_orb)) &
            CALL deallocate_task_list(sub_env%task_list_orb)

         IF (ASSOCIATED(sub_env%task_list_orb_soft)) &
            CALL deallocate_task_list(sub_env%task_list_orb_soft)

         CALL release_neighbor_list_sets(sub_env%sab_aux_fit)
         CALL release_neighbor_list_sets(sub_env%sab_orb)

         IF (ASSOCIATED(sub_env%dbcsr_dist)) THEN
            CALL dbcsr_distribution_release(sub_env%dbcsr_dist)
            DEALLOCATE (sub_env%dbcsr_dist)
         END IF

         IF (ASSOCIATED(sub_env%dist_2d)) &
            CALL distribution_2d_release(sub_env%dist_2d)
      END IF

      ! GAPW local objects (allocated for both gapw and gapw_xc)
      IF (ASSOCIATED(sub_env%local_rho_set)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set)
      END IF
      IF (ASSOCIATED(sub_env%hartree_local)) THEN
         CALL hartree_local_release(sub_env%hartree_local)
      END IF
      IF (ASSOCIATED(sub_env%local_rho_set_admm)) THEN
         CALL local_rho_set_release(sub_env%local_rho_set_admm)
      END IF

      ! if TDDFPT-specific plane-wave environment has not been requested,
      ! the pointers sub_env%dbcsr_dist, sub_env%sab_*, and sub_env%task_list_*
      ! point to the corresponding ground-state variables from qs_env
      ! and should not be deallocated

      CALL pw_env_release(sub_env%pw_env)

      sub_env%is_mgrid = .FALSE.

      ! the subgroup owns its own copy of the ADMM A matrix only in split mode
      IF (sub_env%is_split .AND. ASSOCIATED(sub_env%admm_A)) THEN
         CALL cp_fm_release(sub_env%admm_A)
         DEALLOCATE (sub_env%admm_A)
         NULLIFY (sub_env%admm_A)
      END IF

      ! guard against releasing an environment that was never initialised (or already released):
      ! deallocating an unallocated component would abort the program
      IF (ALLOCATED(sub_env%mos_occ)) THEN
         ! in split mode the MO copies are owned by the subgroup and must be released;
         ! otherwise the array elements are shallow aliases of the global matrices
         IF (sub_env%is_split) THEN
            DO i = SIZE(sub_env%mos_occ), 1, -1
               CALL cp_fm_release(sub_env%mos_occ(i))
            END DO
         END IF
         DEALLOCATE (sub_env%mos_occ)
      END IF

      CALL cp_blacs_env_release(sub_env%blacs_env)
      CALL mp_para_env_release(sub_env%para_env)

      IF (ALLOCATED(sub_env%group_distribution)) &
         DEALLOCATE (sub_env%group_distribution)

      sub_env%is_split = .FALSE.

      CALL timestop(handle)

   END SUBROUTINE tddfpt_sub_env_release
492
493! **************************************************************************************************
494!> \brief Replace the global multi-grid related parameters in qs_control by the ones given in the
495!> TDDFPT/MGRID subsection. The original parameters are stored into the 'mgrid_saved'
496!> variable.
497!> \param qs_control Quickstep control parameters (modified on exit)
498!> \param tddfpt_control TDDFPT control parameters
499!> \param mgrid_saved structure to hold global MGRID-related parameters (initialised on exit)
500!> \par History
501!> * 09.2016 created [Sergey Chulkov]
502!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
503!> \note the code to build the 'e_cutoff' list was taken from the subroutine read_mgrid_section()
504! **************************************************************************************************
! **************************************************************************************************
!> \brief Replace the global multi-grid related parameters in qs_control by the ones given in the
!>        TDDFPT/MGRID subsection. The original parameters are stored into the 'mgrid_saved'
!>        variable.
!> \param qs_control     Quickstep control parameters (modified on exit)
!> \param tddfpt_control TDDFPT control parameters
!> \param mgrid_saved    structure to hold global MGRID-related parameters (initialised on exit)
!> \par History
!>    * 09.2016 created [Sergey Chulkov]
!>    * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
!> \note the code to build the 'e_cutoff' list was taken from the subroutine read_mgrid_section()
! **************************************************************************************************
   SUBROUTINE init_tddfpt_mgrid(qs_control, tddfpt_control, mgrid_saved)
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(tddfpt2_control_type), POINTER                :: tddfpt_control
      TYPE(mgrid_saved_parameters), INTENT(out)          :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'init_tddfpt_mgrid'

      INTEGER                                            :: handle, ilevel, match, ngrids

      CALL timeset(routineN, handle)

      ! take a snapshot of the global plane-wave grid parameters
      mgrid_saved%commensurate_mgrids = qs_control%commensurate_mgrids
      mgrid_saved%realspace_mgrids = qs_control%realspace_mgrids
      mgrid_saved%skip_load_balance = qs_control%skip_load_balance_distributed
      mgrid_saved%cutoff = qs_control%cutoff
      mgrid_saved%progression_factor = qs_control%progression_factor
      mgrid_saved%relative_cutoff = qs_control%relative_cutoff
      mgrid_saved%e_cutoff => qs_control%e_cutoff

      ! install the TDDFPT-specific values as the defaults for newly allocated plane-wave grids
      qs_control%commensurate_mgrids = tddfpt_control%mgrid_commensurate_mgrids
      qs_control%realspace_mgrids = tddfpt_control%mgrid_realspace_mgrids
      qs_control%skip_load_balance_distributed = tddfpt_control%mgrid_skip_load_balance
      qs_control%cutoff = tddfpt_control%mgrid_cutoff
      qs_control%progression_factor = tddfpt_control%mgrid_progression_factor
      qs_control%relative_cutoff = tddfpt_control%mgrid_relative_cutoff

      ngrids = tddfpt_control%mgrid_ngrids
      ALLOCATE (qs_control%e_cutoff(ngrids))

      IF (ASSOCIATED(tddfpt_control%mgrid_e_cutoff)) THEN
         ! following read_mgrid_section() there is a magic scale factor there (0.5_dp)
         qs_control%e_cutoff(1:ngrids) = 0.5_dp*tddfpt_control%mgrid_e_cutoff(1:ngrids)

         ! round 'qs_control%cutoff' upward to the nearest sub-grid's cutoff value;
         ! 'e_cutoff' is sorted in descending order, so scan it from the coarsest level up
         match = 0
         DO ilevel = ngrids, 1, -1
            IF (qs_control%cutoff <= qs_control%e_cutoff(ilevel)) THEN
               match = ilevel
               EXIT
            END IF
         END DO

         IF (match > 0) THEN
            qs_control%cutoff = qs_control%e_cutoff(match)
         ELSE
            ! the requested cutoff exceeds the largest manually provided value;
            ! fall back to the largest actual one
            qs_control%cutoff = qs_control%e_cutoff(1)
         END IF
      ELSE
         ! no explicit list: derive the per-level cutoffs from the progression factor
         qs_control%e_cutoff(1) = qs_control%cutoff
         DO ilevel = 2, ngrids
            qs_control%e_cutoff(ilevel) = qs_control%e_cutoff(ilevel - 1)/qs_control%progression_factor
         END DO
      END IF

      CALL timestop(handle)
   END SUBROUTINE init_tddfpt_mgrid
561
562! **************************************************************************************************
563!> \brief Restore the global multi-grid related parameters stored in the 'mgrid_saved' variable.
564!> \param qs_control Quickstep control parameters (modified on exit)
565!> \param mgrid_saved structure that holds global MGRID-related parameters
566!> \par History
567!> * 09.2016 created [Sergey Chulkov]
568! **************************************************************************************************
! **************************************************************************************************
!> \brief Restore the global multi-grid related parameters stored in the 'mgrid_saved' variable.
!> \param qs_control  Quickstep control parameters (modified on exit)
!> \param mgrid_saved structure that holds global MGRID-related parameters
!> \par History
!>    * 09.2016 created [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE restore_qs_mgrid(qs_control, mgrid_saved)
      TYPE(qs_control_type), POINTER                     :: qs_control
      TYPE(mgrid_saved_parameters), INTENT(in)           :: mgrid_saved

      CHARACTER(LEN=*), PARAMETER :: routineN = 'restore_qs_mgrid'

      INTEGER                                            :: handle

      CALL timeset(routineN, handle)

      ! drop the TDDFPT-specific cutoff list before restoring the saved pointer
      IF (ASSOCIATED(qs_control%e_cutoff)) &
         DEALLOCATE (qs_control%e_cutoff)

      ! put every saved global parameter back in place
      qs_control%cutoff = mgrid_saved%cutoff
      qs_control%relative_cutoff = mgrid_saved%relative_cutoff
      qs_control%progression_factor = mgrid_saved%progression_factor
      qs_control%commensurate_mgrids = mgrid_saved%commensurate_mgrids
      qs_control%realspace_mgrids = mgrid_saved%realspace_mgrids
      qs_control%skip_load_balance_distributed = mgrid_saved%skip_load_balance
      qs_control%e_cutoff => mgrid_saved%e_cutoff

      CALL timestop(handle)
   END SUBROUTINE restore_qs_mgrid
592
593! **************************************************************************************************
594!> \brief Distribute atoms across the two-dimensional grid of processors.
595!> \param distribution_2d new two-dimensional distribution of pairs of particles
596!> (allocated and initialised on exit)
597!> \param dbcsr_dist new DBCSR distribution (allocated and initialised on exit)
598!> \param blacs_env BLACS parallel environment
599!> \param qs_env Quickstep environment
600!> \par History
601!> * 09.2016 created [Sergey Chulkov]
602!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
603! **************************************************************************************************
! **************************************************************************************************
!> \brief Distribute atoms across the two-dimensional grid of processors.
!> \param distribution_2d new two-dimensional distribution of pairs of particles
!>                        (allocated and initialised on exit)
!> \param dbcsr_dist      new DBCSR distribution (allocated and initialised on exit)
!> \param blacs_env       BLACS parallel environment
!> \param qs_env          Quickstep environment
!> \par History
!>    * 09.2016 created [Sergey Chulkov]
!>    * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_distribution_2d(distribution_2d, dbcsr_dist, blacs_env, qs_env)
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(dbcsr_distribution_type), POINTER             :: dbcsr_dist
      TYPE(cp_blacs_env_type), POINTER                   :: blacs_env
      TYPE(qs_environment_type), POINTER                 :: qs_env

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_distribution_2d'

      INTEGER                                            :: handle
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cell_type), POINTER                           :: cell
      TYPE(molecule_kind_type), DIMENSION(:), POINTER    :: molecule_kind_set
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecule_set
      TYPE(particle_type), DIMENSION(:), POINTER         :: particle_set
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(section_vals_type), POINTER                   :: input

      CALL timeset(routineN, handle)

      ! gather everything distribute_molecules_2d() needs from the Quickstep environment
      CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
                      molecule_kind_set=molecule_kind_set, molecule_set=molecule_set, &
                      particle_set=particle_set, qs_kind_set=qs_kind_set)

      ! build a fresh 2-D particle-pair distribution on the given BLACS grid
      NULLIFY (distribution_2d)
      CALL distribute_molecules_2d(cell=cell, &
                                   atomic_kind_set=atomic_kind_set, &
                                   particle_set=particle_set, &
                                   qs_kind_set=qs_kind_set, &
                                   molecule_kind_set=molecule_kind_set, &
                                   molecule_set=molecule_set, &
                                   distribution_2d=distribution_2d, &
                                   blacs_env=blacs_env, &
                                   force_env_section=input)

      ! derive the matching DBCSR distribution from it
      ALLOCATE (dbcsr_dist)
      CALL cp_dbcsr_dist2d_to_dist(distribution_2d, dbcsr_dist)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_distribution_2d
643
644! **************************************************************************************************
645!> \brief Build task and neighbour lists for the given plane wave environment and basis set.
646!> \param task_list new task list (allocated and initialised on exit)
647!> \param sab new list of neighbours (allocated and initialised on exit)
648!> \param basis_type type of the basis set
649!> \param distribution_2d two-dimensional distribution of pairs of particles
650!> \param pw_env plane wave environment
651!> \param qs_env Quickstep environment
652!> \param skip_load_balance do not perform load balancing
653!> \param reorder_grid_ranks re-optimise grid ranks and re-create the real-space grid descriptor
654!> as well as grids
655!> \par History
656!> * 09.2016 created [Sergey Chulkov]
657!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
658! **************************************************************************************************
! **************************************************************************************************
!> \brief Build task and neighbour lists for the given plane wave environment and basis set.
!> \param task_list          new task list (allocated and initialised on exit)
!> \param sab                new list of neighbours (allocated and initialised on exit;
!>                           an already-associated list is reused as-is)
!> \param basis_type         type of the basis set
!> \param distribution_2d    two-dimensional distribution of pairs of particles
!> \param pw_env             plane wave environment
!> \param qs_env             Quickstep environment
!> \param skip_load_balance  do not perform load balancing
!> \param reorder_grid_ranks re-optimise grid ranks and re-create the real-space grid descriptor
!>                           as well as grids
!> \par History
!>    * 09.2016 created [Sergey Chulkov]
!>    * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_build_tasklist(task_list, sab, basis_type, distribution_2d, pw_env, qs_env, &
                                    skip_load_balance, reorder_grid_ranks)
      TYPE(task_list_type), POINTER                      :: task_list
      TYPE(neighbor_list_set_p_type), DIMENSION(:), &
         POINTER                                         :: sab
      CHARACTER(len=*), INTENT(in)                       :: basis_type
      TYPE(distribution_2d_type), POINTER                :: distribution_2d
      TYPE(pw_env_type), POINTER                         :: pw_env
      TYPE(qs_environment_type), POINTER                 :: qs_env
      LOGICAL, INTENT(in)                                :: skip_load_balance, reorder_grid_ranks

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_build_tasklist'

      INTEGER                                            :: handle, ik, nkinds
      LOGICAL, ALLOCATABLE, DIMENSION(:)                 :: have_basis
      REAL(kind=dp)                                      :: subcells
      REAL(kind=dp), ALLOCATABLE, DIMENSION(:)           :: kind_radius
      REAL(kind=dp), ALLOCATABLE, DIMENSION(:, :)        :: pair_radius
      TYPE(atomic_kind_type), DIMENSION(:), POINTER      :: atomic_kind_set
      TYPE(cell_type), POINTER                           :: cell
      TYPE(distribution_1d_type), POINTER                :: local_particles
      TYPE(gto_basis_set_type), POINTER                  :: orb_basis_set
      TYPE(local_atoms_type), ALLOCATABLE, DIMENSION(:)  :: atom2d
      TYPE(molecule_type), DIMENSION(:), POINTER         :: molecule_set
      TYPE(particle_type), DIMENSION(:), POINTER         :: particle_set
      TYPE(qs_kind_type), DIMENSION(:), POINTER          :: qs_kind_set
      TYPE(qs_ks_env_type), POINTER                      :: ks_env
      TYPE(section_vals_type), POINTER                   :: input

      CALL timeset(routineN, handle)

      CALL get_qs_env(qs_env, atomic_kind_set=atomic_kind_set, cell=cell, input=input, &
                      ks_env=ks_env, local_particles=local_particles, molecule_set=molecule_set, &
                      particle_set=particle_set, qs_kind_set=qs_kind_set)

      nkinds = SIZE(atomic_kind_set)

      ! build a fresh neighbour list only when the caller did not supply one
      IF (.NOT. ASSOCIATED(sab)) THEN
         ALLOCATE (atom2d(nkinds))
         CALL atom2d_build(atom2d, local_particles, distribution_2d, atomic_kind_set, &
                           molecule_set, molecule_only=.FALSE., particle_set=particle_set)

         ALLOCATE (have_basis(nkinds), kind_radius(nkinds), pair_radius(nkinds, nkinds))

         ! collect the interaction radius of each kind's basis of the requested type
         DO ik = 1, nkinds
            CALL get_qs_kind(qs_kind_set(ik), basis_set=orb_basis_set, basis_type=basis_type)
            have_basis(ik) = ASSOCIATED(orb_basis_set)
            IF (have_basis(ik)) THEN
               CALL get_gto_basis_set(gto_basis_set=orb_basis_set, kind_radius=kind_radius(ik))
            ELSE
               kind_radius(ik) = 0.0_dp
            END IF
         END DO

         CALL pair_radius_setup(have_basis, have_basis, kind_radius, kind_radius, pair_radius)

         NULLIFY (sab)
         CALL section_vals_val_get(input, "DFT%SUBCELLS", r_val=subcells)
         CALL build_neighbor_lists(sab, particle_set, atom2d, cell, pair_radius, &
                                   mic=.FALSE., subcells=subcells, molecular=.FALSE., nlname="sab_orb")

         CALL atom2d_cleanup(atom2d)
         DEALLOCATE (atom2d, have_basis, kind_radius, pair_radius)
      END IF

      ! generate the task list on top of the (new or supplied) neighbour list
      CALL allocate_task_list(task_list)
      CALL generate_qs_task_list(ks_env, task_list, &
                                 reorder_rs_grid_ranks=reorder_grid_ranks, basis_type=basis_type, &
                                 skip_load_balance_distributed=skip_load_balance, &
                                 pw_env_external=pw_env, sab_orb_external=sab)

      CALL timestop(handle)
   END SUBROUTINE tddfpt_build_tasklist
735
736! **************************************************************************************************
737!> \brief Create a DBCSR matrix based on a template matrix, distribution object, and the list of
738!> neighbours.
739!> \param matrix matrix to create
740!> \param template template matrix
741!> \param dbcsr_dist DBCSR distribution
742!> \param sab list of neighbours
743!> \par History
744!> * 09.2016 created [Sergey Chulkov]
745!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
746! **************************************************************************************************
! **************************************************************************************************
!> \brief Create a DBCSR matrix based on a template matrix, distribution object, and the list of
!>        neighbours.
!> \param matrix     matrix to create (re-created in place if already associated)
!> \param template   template matrix providing name, type and block sizes
!> \param dbcsr_dist DBCSR distribution
!> \param sab        list of neighbours used to pre-allocate the blocks
!> \par History
!>    * 09.2016 created [Sergey Chulkov]
!>    * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
! **************************************************************************************************
   SUBROUTINE tddfpt_dbcsr_create_by_dist(matrix, template, dbcsr_dist, sab)
      TYPE(dbcsr_type), POINTER                          :: matrix, template
      TYPE(dbcsr_distribution_type), POINTER             :: dbcsr_dist
      TYPE(neighbor_list_set_p_type), DIMENSION(:), &
         POINTER                                         :: sab

      CHARACTER(LEN=*), PARAMETER :: routineN = 'tddfpt_dbcsr_create_by_dist'

      CHARACTER                                          :: mtype
      CHARACTER(len=default_string_length)               :: mname
      INTEGER                                            :: handle
      INTEGER, DIMENSION(:), POINTER                     :: col_blk_sizes, row_blk_sizes

      CALL timeset(routineN, handle)

      ! the template must exist: it provides name, matrix type and block structure
      CPASSERT(ASSOCIATED(template))
      CALL dbcsr_get_info(template, row_blk_size=row_blk_sizes, col_blk_size=col_blk_sizes, &
                          name=mname, matrix_type=mtype)

      ! reuse the caller's storage when present, otherwise allocate a new matrix
      IF (.NOT. ASSOCIATED(matrix)) THEN
         ALLOCATE (matrix)
      ELSE
         CALL dbcsr_release(matrix)
      END IF

      CALL dbcsr_create(matrix, mname, dbcsr_dist, mtype, row_blk_sizes, col_blk_sizes)
      ! pre-allocate the blocks according to the neighbour list
      CALL cp_dbcsr_alloc_block_from_nbl(matrix, sab)

      CALL timestop(handle)

   END SUBROUTINE tddfpt_dbcsr_create_by_dist
778
779! **************************************************************************************************
780!> \brief Replicate a globally distributed matrix across all sub-groups. At the end
781!> every sub-group will hold a local copy of the original globally distributed matrix.
782!>
783!> |--------------------|
784!> fm_src | 0 1 2 3 |
785!> |--------------------|
786!> / MPI ranks \
787!> |/_ _\|
788!> |--------------------| |--------------------|
789!> fm_dest_subgroup0 | 0 1 | | 2 3 | fm_dest_subgroup1
790!> |--------------------| |--------------------|
791!> subgroup 0 subgroup 1
792!>
793!> \param fm_src globally distributed matrix to replicate
794!> \param fm_dest_sub subgroup-specific copy of the replicated matrix
795!> \param sub_env subgroup environment
796!> \par History
797!> * 09.2016 created [Sergey Chulkov]
798!> * 01.2017 moved from qs_tddfpt2_methods [Sergey Chulkov]
799! **************************************************************************************************
 800 SUBROUTINE tddfpt_fm_replicate_across_subgroups(fm_src, fm_dest_sub, sub_env)
 801 TYPE(cp_fm_type), INTENT(IN) :: fm_src, fm_dest_sub
 802 TYPE(tddfpt_subgroup_env_type), INTENT(in) :: sub_env
 803
 804 CHARACTER(LEN=*), PARAMETER :: routinen = 'tddfpt_fm_replicate_across_subgroups'
 805
 806 INTEGER :: handle, igroup, igroup_local, ncols_global_dest, ncols_global_src, ngroups, &
 807 nrows_global_dest, nrows_global_src
 808 TYPE(cp_blacs_env_type), POINTER :: blacs_env_global
! fm_null is a default-initialised cp_fm_type used as a dummy destination for ranks
! that are not part of the subgroup currently being addressed (see loop below)
 809 TYPE(cp_fm_type) :: fm_null
 810 TYPE(mp_para_env_type), POINTER :: para_env_global
 811
! nothing to do unless the global communicator has actually been split into subgroups
 812 IF (sub_env%is_split) THEN
 813 CALL timeset(routinen, handle)
 814
! query global dimensions of source and destination, plus the *global* parallel
! environment of the source matrix (needed for the cross-group copies below)
 815 CALL cp_fm_get_info(fm_src, nrow_global=nrows_global_src, ncol_global=ncols_global_src, &
 816 context=blacs_env_global, para_env=para_env_global)
 817 CALL cp_fm_get_info(fm_dest_sub, nrow_global=nrows_global_dest, ncol_global=ncols_global_dest)
 818
! sanity check: source and per-subgroup destination must have identical global shape
 819 IF (debug_this_module) THEN
 820 cpassert(nrows_global_src == nrows_global_dest)
 821 cpassert(ncols_global_src == ncols_global_dest)
 822 END IF
 823
! index of the subgroup this MPI rank belongs to
 824 igroup_local = sub_env%group_distribution(para_env_global%mepos)
 825 ngroups = sub_env%ngroups
 826
! one collective copy per subgroup: ranks of the subgroup addressed in this
! iteration receive into fm_dest_sub, while all other ranks pass fm_null so
! that every rank of the global communicator still participates in the copy.
! NOTE(review): this relies on cp_fm_copy_general tolerating an uninitialised
! destination matrix on non-receiving ranks -- confirm against cp_fm_types
 827 DO igroup = 0, ngroups - 1
 828 IF (igroup == igroup_local) THEN
 829 CALL cp_fm_copy_general(fm_src, fm_dest_sub, para_env_global)
 830 ELSE
 831 CALL cp_fm_copy_general(fm_src, fm_null, para_env_global)
 832 END IF
 833 END DO
 834
 835 CALL timestop(handle)
 836 END IF
838END MODULE qs_tddfpt2_subgroups
839
Types and set/get functions for auxiliary density matrix methods.
Definition admm_types.F:15
subroutine, public get_admm_env(admm_env, mo_derivs_aux_fit, mos_aux_fit, sab_aux_fit, sab_aux_fit_asymm, sab_aux_fit_vs_orb, matrix_s_aux_fit, matrix_s_aux_fit_kp, matrix_s_aux_fit_vs_orb, matrix_s_aux_fit_vs_orb_kp, task_list_aux_fit, matrix_ks_aux_fit, matrix_ks_aux_fit_kp, matrix_ks_aux_fit_im, matrix_ks_aux_fit_dft, matrix_ks_aux_fit_hfx, matrix_ks_aux_fit_dft_kp, matrix_ks_aux_fit_hfx_kp, rho_aux_fit, rho_aux_fit_buffer, admm_dm)
Get routine for the ADMM env.
Definition admm_types.F:593
Define the atomic kind types and their sub types.
subroutine, public get_gto_basis_set(gto_basis_set, name, aliases, norm_type, kind_radius, ncgf, nset, nsgf, cgf_symbol, sgf_symbol, norm_cgf, set_radius, lmax, lmin, lx, ly, lz, m, ncgf_set, npgf, nsgf_set, nshell, cphi, pgf_radius, sphi, scon, zet, first_cgf, first_sgf, l, last_cgf, last_sgf, n, gcc, maxco, maxl, maxpgf, maxsgf_set, maxshell, maxso, nco_sum, npgf_sum, nshell_sum, maxder, short_kind_radius, npgf_seg_sum)
...
Handles all functions related to the CELL.
Definition cell_types.F:15
methods related to the blacs parallel environment
subroutine, public cp_blacs_env_release(blacs_env)
releases the given blacs_env
subroutine, public cp_blacs_env_create(blacs_env, para_env, blacs_grid_layout, blacs_repeatable, row_major, grid_2d)
allocates and initializes a type that represent a blacs context
Defines control structures, which contain the parameters and the settings for the DFT-based calculati...
subroutine, public dbcsr_distribution_release(dist)
...
subroutine, public dbcsr_get_info(matrix, nblkrows_total, nblkcols_total, nfullrows_total, nfullcols_total, nblkrows_local, nblkcols_local, nfullrows_local, nfullcols_local, my_prow, my_pcol, local_rows, local_cols, proc_row_dist, proc_col_dist, row_blk_size, col_blk_size, row_blk_offset, col_blk_offset, distribution, name, matrix_type, group)
...
subroutine, public dbcsr_release(matrix)
...
DBCSR operations in CP2K.
subroutine, public cp_dbcsr_dist2d_to_dist(dist2d, dist)
Creates a DBCSR distribution from a distribution_2d.
represent the structure of a full matrix
subroutine, public cp_fm_struct_create(fmstruct, para_env, context, nrow_global, ncol_global, nrow_block, ncol_block, descriptor, first_p_pos, local_leading_dimension, template_fmstruct, square_blocks, force_block)
allocates and initializes a full matrix structure
subroutine, public cp_fm_struct_release(fmstruct)
releases a full matrix structure
represent a full matrix distributed on many processors
Definition cp_fm_types.F:15
subroutine, public cp_fm_copy_general(source, destination, para_env)
General copy of a fm matrix to another fm matrix. Uses non-blocking MPI rather than ScaLAPACK.
subroutine, public cp_fm_get_info(matrix, name, nrow_global, ncol_global, nrow_block, ncol_block, nrow_local, ncol_local, row_indices, col_indices, local_data, context, nrow_locals, ncol_locals, matrix_struct, para_env)
returns all kind of information about the full matrix
subroutine, public cp_fm_create(matrix, matrix_struct, name, use_sp)
creates a new full matrix with the given structure
stores a lists of integer that are local to a processor. The idea is that these integers represent ob...
stores a mapping of 2D info (e.g. matrix) on a 2D processor distribution (i.e. blacs grid) where cpus...
subroutine, public distribution_2d_release(distribution_2d)
...
Distribution methods for atoms, particles, or molecules.
subroutine, public distribute_molecules_2d(cell, atomic_kind_set, particle_set, qs_kind_set, molecule_kind_set, molecule_set, distribution_2d, blacs_env, force_env_section)
Distributes the particle pairs creating a 2d distribution optimally suited for quickstep.
subroutine, public init_coulomb_local(hartree_local, natom)
...
subroutine, public hartree_local_release(hartree_local)
...
subroutine, public hartree_local_create(hartree_local)
...
collects all constants needed in input so that they can be used without circular dependencies
integer, parameter, public tddfpt_kernel_none
integer, parameter, public tddfpt_kernel_full
integer, parameter, public tddfpt_kernel_stda
objects that represent the structure of input sections and the data contained in an input section
subroutine, public section_vals_val_get(section_vals, keyword_name, i_rep_section, i_rep_val, n_rep_val, val, l_val, i_val, r_val, c_val, l_vals, i_vals, r_vals, c_vals, explicit)
returns the requested value
Defines the basic variable types.
Definition kinds.F:23
integer, parameter, public dp
Definition kinds.F:34
integer, parameter, public default_string_length
Definition kinds.F:57
Interface to the message passing library MPI.
subroutine, public mp_para_env_release(para_env)
releases the para object (to be called when you don't want anymore the shared copy of this object)
Define the molecule kind structure types and the corresponding functionality.
Define the data structure for the molecule information.
Define the data structure for the particle information.
methods of pw_env that have dependence on qs_env
subroutine, public pw_env_rebuild(pw_env, qs_env, external_para_env)
rebuilds the pw_env data (necessary if cell or cutoffs change)
subroutine, public pw_env_create(pw_env)
creates a pw_env, if qs_env is given calls pw_env_rebuild
container for various plainwaves related things
subroutine, public pw_env_retain(pw_env)
retains the pw_env (see doc/ReferenceCounting.html)
subroutine, public pw_env_release(pw_env, para_env)
releases the given pw_env (see doc/ReferenceCounting.html)
subroutine, public get_qs_env(qs_env, atomic_kind_set, qs_kind_set, cell, super_cell, cell_ref, use_ref_cell, kpoints, dft_control, mos, sab_orb, sab_all, qmmm, qmmm_periodic, sac_ae, sac_ppl, sac_lri, sap_ppnl, sab_vdw, sab_scp, sap_oce, sab_lrc, sab_se, sab_xtbe, sab_tbe, sab_core, sab_xb, sab_xtb_pp, sab_xtb_nonbond, sab_almo, sab_kp, sab_kp_nosym, particle_set, energy, force, matrix_h, matrix_h_im, matrix_ks, matrix_ks_im, matrix_vxc, run_rtp, rtp, matrix_h_kp, matrix_h_im_kp, matrix_ks_kp, matrix_ks_im_kp, matrix_vxc_kp, kinetic_kp, matrix_s_kp, matrix_w_kp, matrix_s_ri_aux_kp, matrix_s, matrix_s_ri_aux, matrix_w, matrix_p_mp2, matrix_p_mp2_admm, rho, rho_xc, pw_env, ewald_env, ewald_pw, active_space, mpools, input, para_env, blacs_env, scf_control, rel_control, kinetic, qs_charges, vppl, rho_core, rho_nlcc, rho_nlcc_g, ks_env, ks_qmmm_env, wf_history, scf_env, local_particles, local_molecules, distribution_2d, dbcsr_dist, molecule_kind_set, molecule_set, subsys, cp_subsys, oce, local_rho_set, rho_atom_set, task_list, task_list_soft, rho0_atom_set, rho0_mpole, rhoz_set, ecoul_1c, rho0_s_rs, rho0_s_gs, do_kpoints, has_unit_metric, requires_mo_derivs, mo_derivs, mo_loc_history, nkind, natom, nelectron_total, nelectron_spin, efield, neighbor_list_id, linres_control, xas_env, virial, cp_ddapc_env, cp_ddapc_ewald, outer_scf_history, outer_scf_ihistory, x_data, et_coupling, dftb_potential, results, se_taper, se_store_int_env, se_nddo_mpole, se_nonbond_env, admm_env, lri_env, lri_density, exstate_env, ec_env, harris_env, dispersion_env, gcp_env, vee, rho_external, external_vxc, mask, mp2_env, bs_env, kg_env, wanniercentres, atprop, ls_scf_env, do_transport, transport_env, v_hartree_rspace, s_mstruct_changed, rho_changed, potential_changed, forces_up_to_date, mscfg_env, almo_scf_env, gradient_history, variable_history, embed_pot, spin_embed_pot, polar_env, mos_last_converged, eeq, rhs)
Get the QUICKSTEP environment.
Define the quickstep kind type and their sub types.
subroutine, public get_qs_kind(qs_kind, basis_set, basis_type, ncgf, nsgf, all_potential, tnadd_potential, gth_potential, sgp_potential, upf_potential, se_parameter, dftb_parameter, xtb_parameter, dftb3_param, zatom, zeff, elec_conf, mao, lmax_dftb, alpha_core_charge, ccore_charge, core_charge, core_charge_radius, paw_proj_set, paw_atom, hard_radius, hard0_radius, max_rad_local, covalent_radius, vdw_radius, gpw_type_forced, harmonics, max_iso_not0, max_s_harm, grid_atom, ngrid_ang, ngrid_rad, lmax_rho0, dft_plus_u_atom, l_of_dft_plus_u, n_of_dft_plus_u, u_minus_j, u_of_dft_plus_u, j_of_dft_plus_u, alpha_of_dft_plus_u, beta_of_dft_plus_u, j0_of_dft_plus_u, occupation_of_dft_plus_u, dispersion, bs_occupation, magnetization, no_optimize, addel, laddel, naddel, orbitals, max_scf, eps_scf, smear, u_ramping, u_minus_j_target, eps_u_ramping, init_u_ramping_each_scf, reltmat, ghost, floating, name, element_symbol, pao_basis_size, pao_model_file, pao_potentials, pao_descriptors, nelec)
Get attributes of an atomic kind.
subroutine, public local_rho_set_create(local_rho_set)
...
subroutine, public local_rho_set_release(local_rho_set)
...
Define the neighbor list data types and the corresponding functionality.
subroutine, public release_neighbor_list_sets(nlists)
releases an array of neighbor_list_sets
Generate the atomic neighbor lists.
subroutine, public atom2d_cleanup(atom2d)
free the internals of atom2d
subroutine, public pair_radius_setup(present_a, present_b, radius_a, radius_b, pair_radius, prmin)
...
subroutine, public build_neighbor_lists(ab_list, particle_set, atom, cell, pair_radius, subcells, mic, symmetric, molecular, subset_of_mol, current_subset, operator_type, nlname, atomb_to_keep)
Build simple pair neighbor lists.
subroutine, public atom2d_build(atom2d, distribution_1d, distribution_2d, atomic_kind_set, molecule_set, molecule_only, particle_set)
Build some distribution structure of atoms, refactored from build_qs_neighbor_lists.
subroutine, public rho0_s_grid_create(pw_env, rho0_mpole)
...
subroutine, public init_rho0(local_rho_set, qs_env, gapw_control, zcore)
...
subroutine, public allocate_rho_atom_internals(rho_atom_set, atomic_kind_set, qs_kind_set, dft_control, para_env)
...
subroutine, public tddfpt_sub_env_init(sub_env, qs_env, mos_occ, kernel)
Split MPI communicator to create a set of parallel (sub)groups.
subroutine, public tddfpt_sub_env_release(sub_env)
Release parallel group environment.
subroutine, public tddfpt_fm_replicate_across_subgroups(fm_src, fm_dest_sub, sub_env)
Replicate a globally distributed matrix across all sub-groups. At the end every sub-group will hold a...
subroutine, public tddfpt_dbcsr_create_by_dist(matrix, template, dbcsr_dist, sab)
Create a DBCSR matrix based on a template matrix, distribution object, and the list of neighbours.
generate the tasks lists used by collocate and integrate routines
subroutine, public generate_qs_task_list(ks_env, task_list, reorder_rs_grid_ranks, skip_load_balance_distributed, soft_valid, basis_type, pw_env_external, sab_orb_external)
...
types for task lists
subroutine, public deallocate_task_list(task_list)
deallocates the components and the object itself
subroutine, public allocate_task_list(task_list)
allocates and initialised the components of the task_list_type
stores some data used in wavefunction fitting
Definition admm_types.F:120
Provides all information about an atomic kind.
Type defining parameters related to the simulation cell.
Definition cell_types.F:55
represent a blacs multidimensional parallel environment (for the MPI counterpart see cp_paratypes/m...
keeps the information about the structure of a full matrix
represent a full matrix
structure to store local (to a processor) ordered lists of integers.
distributes pairs on a 2d grid of processors
stores all the information relevant to an MPI environment
contained for different pw related things
Provides all information about a quickstep kind.
calculation environment to calculate the ks matrix, holds all the needed vars. assumes that the core ...