DMRGBlockContainer.hpp
1 #ifndef __DMRG_BLOCK_CONTAINER_HPP__
2 #define __DMRG_BLOCK_CONTAINER_HPP__
3 
11 #include <petsctime.h>
12 #include <slepceps.h>
13 #include <petscmat.h>
14 #include <vector>
15 #include <map>
16 #include <set>
17 #include <iostream>
18 #include <sstream>
19 #include <fstream>
20 #include <iomanip>
21 #include <string>
22 #include "DMRGKron.hpp"
23 #include "MiscTools.hpp"
24 
26 PETSC_EXTERN PetscErrorCode Makedir(
27  const std::string& dir_name
28  );
29 
31 #define CheckInitialization(init,mpi_comm) \
32 {\
33  if(!init) SETERRQ(mpi_comm,1,"DMRGBlockContainer object not initialized. "\
34  "Call Initialize() first.");\
35 }
36 
38 #define PrintLines() \
39 { \
40  printf("-----------------------------------------\n"); \
41 }
42 
44 #define PRINTLINES() \
45 { \
46  printf("=========================================\n"); \
47 }
48 
50 #define PrintBlocks(LEFT,RIGHT) \
51 { \
52  printf(" [%lld]-* *-[%lld]\n", LLD(LEFT), LLD(RIGHT)); \
53 }
54 
56 #define ASSIGN_VAR(MAP,VAR) \
57 { \
58  const auto it = MAP.find(#VAR); \
59  if(it!=MAP.end()) { \
60  VAR = it->second; \
61  } \
62  else \
63  SETERRQ2(PETSC_COMM_SELF,1,"Key %s not found in %s",#VAR,#MAP); \
64 }
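/* Usage sketch (illustrative): after RetrieveInfoFile() has filled a
   std::map<std::string,PetscInt> named restart_sweep_dict, the call
       ASSIGN_VAR(restart_sweep_dict, GlobIdx);
   looks up the key "GlobIdx" and copies its value into the member variable
   GlobIdx, or raises an error if the key is missing from the map. */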
65 
67 #define PRINT_VAR(VAR) \
68  std::cout << " " << std::setw(20) << #VAR << " = " << VAR << std::endl;
69 
71 #define OpIdxToStr(OPTYPE,IDX) \
72  (OpToStr(OPTYPE)+std::to_string(IDX))
73 
75 #define GetOpMats(OPMATS,CORRSIDEOPS,OPID) \
76  (OPMATS.at(OpIdxToStr(CORRSIDEOPS.at(OPID).OpType,CORRSIDEOPS.at(OPID).idx)))
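/* Illustrative example (operator names hedged): if OpToStr() maps an Sz-type
   operator to the string "Sz", then OpIdxToStr(<Sz-type>, 3) evaluates to
   "Sz3", and GetOpMats(OpMats, SysOps, k) fetches the matrix stored under
   that key for the k-th operator on the given correlator side. */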
77 
79 #define MAX_SWEEP_IDX 10000
80 
82 struct Eigen_t
83 {
84  PetscScalar eigval;
85  PetscInt seqIdx;
86  PetscInt epsIdx;
87  PetscInt blkIdx;
88 };
89 
91 bool greater_eigval(const Eigen_t &e1, const Eigen_t &e2) { return e1.eigval > e2.eigval; }
92 
94 bool less_blkIdx(const Eigen_t &e1, const Eigen_t &e2) { return e1.blkIdx < e2.blkIdx; }
95 
97 struct Op {
98  Op_t OpType;
99  PetscInt idx;
102  PetscErrorCode PrintInfo() const {
103  std::cout << " Op" << OpIdxToStr(OpType, idx) << std::endl;
104  return(0);
105  }
106 };
107 
166 template<class Block, class Hamiltonian> class DMRGBlockContainer
167 {
168 
169 private:
170 
172  typedef enum
173  {
174  BlockSys = 0,
175  BlockEnv = 1
176  }
177  Block_t;
178 
180  typedef enum
181  {
182  WarmupStep,
183  SweepStep,
184  NullStep=-1
185  }
186  Step_t;
187 
189  struct StepData
190  {
191  PetscInt NumSites_Sys;
192  PetscInt NumSites_Env;
193  PetscInt NumSites_SysEnl;
194  PetscInt NumSites_EnvEnl;
195  PetscInt NumStates_Sys;
196  PetscInt NumStates_Env;
197  PetscInt NumStates_SysEnl;
198  PetscInt NumStates_EnvEnl;
199  PetscInt NumStates_SysRot;
200  PetscInt NumStates_EnvRot;
201  PetscInt NumStates_H;
202  PetscScalar GSEnergy;
203  PetscReal TruncErr_Sys;
204  PetscReal TruncErr_Env;
205  };
206 
208  struct TimingsData
209  {
210  PetscLogDouble tEnlr;
211  PetscLogDouble tKron;
212  PetscLogDouble tDiag;
213  PetscLogDouble tRdms;
214  PetscLogDouble tRotb;
215  PetscLogDouble Total;
216  };
217 
220  struct Correlator
221  {
222  PetscInt idx;
223  std::vector< Op > SysOps;
224  std::vector< Op > EnvOps;
225  std::string name;
226  std::string desc1;
227  std::string desc2;
228  std::string desc3;
231  PetscErrorCode PrintInfo() const {
232  std::cout << " Correlator " << idx << ": " << name << std::endl;
233  std::cout << " " << desc1 << std::endl;
234  std::cout << " " << desc2 << std::endl;
235  std::cout << " " << desc3 << std::endl;
236  return(0);
237  }
238  };
239 
241  struct BasisTransformation
242  {
243  std::map< PetscInt, Mat > rdmd_list;
244  Mat RotMatT;
246  PetscReal TruncErr;
249  ~BasisTransformation()
250  {
251  PetscErrorCode ierr = 0;
252  for(auto& rdmd: rdmd_list){
253  ierr = MatDestroy(&(rdmd.second)); CPP_CHKERR(ierr);
254  }
255  ierr = MatDestroy(&RotMatT); CPP_CHKERR(ierr);
256  }
257  };
258 
259 public:
260 
262  explicit DMRGBlockContainer(const MPI_Comm& mpi_comm): mpi_comm(mpi_comm){}
263 
265  ~DMRGBlockContainer()
266  {
267  PetscErrorCode ierr = Destroy(); CPP_CHKERR(ierr);
268  }
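 /* Usage sketch (illustrative; the Block and Hamiltonian template arguments
    must come from the rest of the library -- the names MyBlock and
    MyHamiltonian below are placeholders):

        DMRGBlockContainer<MyBlock, MyHamiltonian> dmrg(PETSC_COMM_WORLD);
        ierr = dmrg.Initialize(); CHKERRQ(ierr);   // read options, open output files
        ierr = dmrg.Warmup(); CHKERRQ(ierr);       // grow blocks to half the lattice
        ierr = dmrg.Sweeps(); CHKERRQ(ierr);       // perform the sweep stage
        ierr = dmrg.Destroy(); CHKERRQ(ierr);      // flush and close output files
 */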
269 
272  PetscErrorCode Initialize()
273  {
274  if(init) SETERRQ(mpi_comm,1,"DMRG object has already been initialized.");
275  PetscErrorCode ierr = 0;
276  char path[PETSC_MAX_PATH_LEN];
277 
278  /* Get MPI attributes */
279  ierr = MPI_Comm_size(mpi_comm, &mpi_size); CHKERRQ(ierr);
280  ierr = MPI_Comm_rank(mpi_comm, &mpi_rank); CHKERRQ(ierr);
281 
282  /* Whether to do a checkpoint restart.
283  The path to be provided should point to the directory of the
284  previous run. */
285  ierr = PetscOptionsGetString(NULL,NULL,"-restart_dir",path,PETSC_MAX_PATH_LEN,&restart); CHKERRQ(ierr);
286  ierr = PetscOptionsGetBool(NULL,NULL,"-restart_options",&restart_options,NULL); CHKERRQ(ierr);
287  if(restart)
288  {
289  restart_dir = std::string(path);
290  if(restart_dir.back()!='/') restart_dir += "/";
291 
292  /* Verify the existence of restart_sweep_file */
293  PetscInt ridx; /* Index of the sweep containing the restart */
294  if(!mpi_rank) {
295  PetscBool flg;
296  std::string file;
297 
298  /* Find the first Sweep folder */
299  ridx = 0;
300  flg = PETSC_TRUE;
301  while(ridx < MAX_SWEEP_IDX) {
302  file = restart_dir + SweepDir(ridx);
303  ierr = PetscTestDirectory(file.c_str(), 'r', &flg); CHKERRQ(ierr);
304  if(flg) break;
305  else ridx++;
306  if(ridx == MAX_SWEEP_IDX) break;
307  }
308 
309  if(ridx==MAX_SWEEP_IDX)
310  SETERRQ1(PETSC_COMM_SELF,1,"No Sweep directory was found in %s",
311  restart_dir.c_str());
312 
313  /* Find the last Sweep folder with a valid Sweep.dat file */
314  while(ridx < MAX_SWEEP_IDX) {
315  file = restart_dir + SweepDir(ridx+1) + "Sweep.dat";
316  ierr = PetscTestFile(file.c_str(), 'r', &flg); CHKERRQ(ierr);
317  if(flg) ridx++;
318  else break;
319  }
320  }
321  ierr = MPI_Bcast(&ridx, 1, MPIU_INT, 0, mpi_comm); CHKERRQ(ierr);
322 
323  /* Append the correct sweep directory to restart dir */
324  restart_dir = restart_dir + SweepDir(ridx);
325 
326  /* Restart the Hamiltonian */
327  ierr = SetOptionsFromFile(mpi_comm, restart_dir + "Hamiltonian.dat"); CHKERRQ(ierr);
328 
329  /* Read Sweep.dat */
330  std::string filename = restart_dir + "Sweep.dat";
331  ierr = RetrieveInfoFile<PetscInt>(mpi_comm,filename,restart_sweep_dict); CHKERRQ(ierr);
332 
333  /* Assign the contents of Sweep.dat */
334  ASSIGN_VAR(restart_sweep_dict, GlobIdx ); /* 1 */
335  ASSIGN_VAR(restart_sweep_dict, LoopIdx ); /* 2 */
336  ASSIGN_VAR(restart_sweep_dict, num_sys_blocks ); /* 3 */
337  ASSIGN_VAR(restart_sweep_dict, num_env_blocks ); /* 4 */
338  ASSIGN_VAR(restart_sweep_dict, sys_ninit ); /* 5 */
339  ASSIGN_VAR(restart_sweep_dict, env_ninit ); /* 6 */
340 
341  /* Since the value was saved before getting incremented: */
342  ++LoopIdx;
343 
344  if(!mpi_rank){
345  PrintLines();
346  std::cout << "WARNING:\nThe following variables have been"
347  << " forcefully set:" << std::endl;
348  PRINT_VAR(GlobIdx ); /* 1 */
349  PRINT_VAR(LoopIdx ); /* 2 */
350  PRINT_VAR(num_sys_blocks ); /* 3 */
351  PRINT_VAR(num_env_blocks ); /* 4 */
352  PRINT_VAR(sys_ninit ); /* 5 */
353  PRINT_VAR(env_ninit ); /* 6 */
354  }
355  }
356 
357  if(restart && restart_options) {
358  ierr = SetOptionsFromFile(mpi_comm, restart_dir + "PetscOptions.dat"); CHKERRQ(ierr);
359  if(!mpi_rank) std::cout << "NOTE: Options were also read from PetscOptions.dat since the -restart_options flag was enabled." << std::endl;
360 
361  }
362 
363  /* Initialize Hamiltonian object */
364  ierr = Ham.SetFromOptions(); CHKERRQ(ierr);
365 
366  /* Initialize SingleSite which is used as added site */
367  ierr = SingleSite.Initialize(mpi_comm, 1, PETSC_DEFAULT); CHKERRQ(ierr);
368 
369  num_sites = Ham.NumSites();
370 
371  if((num_sites) < 2) SETERRQ1(mpi_comm,1,"There must be at least two total sites. Got %lld.", LLD(num_sites));
372  if((num_sites) % 2) SETERRQ1(mpi_comm,1,"Total number of sites must be even. Got %lld.", LLD(num_sites));
373 
374  /* Get some info from command line */
375  ierr = PetscOptionsGetBool(NULL,NULL,"-verbose",&verbose,NULL); CHKERRQ(ierr);
376  ierr = PetscOptionsGetBool(NULL,NULL,"-no_symm",&no_symm,NULL); CHKERRQ(ierr);
377  ierr = PetscOptionsGetBool(NULL,NULL,"-do_shell",&do_shell,NULL); CHKERRQ(ierr);
378  ierr = PetscOptionsGetBool(NULL,NULL,"-dry_run",&dry_run,NULL); CHKERRQ(ierr);
379  ierr = PetscOptionsGetReal(NULL,NULL,"-qn_sector",&qn_sector,NULL); CHKERRQ(ierr);
380 
381  /* Setup the scratch space for saving temporary data */
382  PetscBool opt_do_scratch_dir = PETSC_FALSE;
383  ierr = PetscOptionsGetString(NULL,NULL,"-scratch_dir",path,PETSC_MAX_PATH_LEN,&opt_do_scratch_dir); CHKERRQ(ierr);
384  if(opt_do_scratch_dir){
385  scratch_dir = std::string(path);
386  if(scratch_dir.back()!='/') scratch_dir += '/';
387  } else {
388  scratch_dir = "./scratch_dir/";
389  if(!mpi_rank){
390  ierr = Makedir(scratch_dir); CHKERRQ(ierr);
391  }
392  }
393  /* NOTE: Scratch dir is mandatory */
394  do_scratch_dir = PETSC_TRUE;
395  // ierr = PetscOptionsGetBool(NULL,NULL,"-do_scratch_dir",&do_scratch_dir,NULL); CHKERRQ(ierr);
396 
397  /* Setup the location to save basic data */
398  std::string data_dir;
399  PetscBool opt_data_dir = PETSC_FALSE;
400  ierr = PetscOptionsGetString(NULL,NULL,"-data_dir",path,PETSC_MAX_PATH_LEN,&opt_data_dir); CHKERRQ(ierr);
401  if(opt_data_dir){
402  data_dir = std::string(path);
403  if(data_dir.back()!='/') data_dir += '/';
404  } else {
405  data_dir = "./data_dir/";
406  if(!mpi_rank){
407  ierr = Makedir(data_dir); CHKERRQ(ierr);
408  }
409  }
410  ierr = PetscFOpen(mpi_comm, (data_dir+std::string("DMRGSteps.json")).c_str(), "w", &fp_step); CHKERRQ(ierr);
411  ierr = SaveStepHeaders(); CHKERRQ(ierr);
412  if(!mpi_rank) fprintf(fp_step,"[\n");
413 
414  ierr = PetscFOpen(mpi_comm, (data_dir+std::string("Timings.json")).c_str(), "w", &fp_timings); CHKERRQ(ierr);
415  ierr = SaveTimingsHeaders(); CHKERRQ(ierr);
416  if(!mpi_rank) fprintf(fp_timings,"[\n");
417 
418  ierr = PetscFOpen(mpi_comm, (data_dir+std::string("EntanglementSpectra.json")).c_str(), "w", &fp_entanglement); CHKERRQ(ierr);
419  if(!mpi_rank) fprintf(fp_entanglement,"[\n");
420 
421  ierr = PetscFOpen(mpi_comm, (data_dir+std::string("DMRGRun.json")).c_str(), "w", &fp_data); CHKERRQ(ierr);
422  if(!mpi_rank){
423  fprintf(fp_data,"{\n");
424  Ham.SaveOut(fp_data);
425  fprintf(fp_data,",\n");
426  fprintf(fp_data," \"QNSector\": %g", qn_sector);
427  fflush(fp_data);
428  }
429 
430  ierr = PetscFOpen(mpi_comm, (data_dir+std::string("Correlations.json")).c_str(), "w", &fp_corr); CHKERRQ(ierr);
431 
432  /* do_save_prealloc: default value is FALSE */
433  ierr = PetscOptionsGetBool(NULL,NULL,"-do_save_prealloc",&do_save_prealloc,NULL); CHKERRQ(ierr);
434  if(do_save_prealloc){
435  ierr = PetscFOpen(mpi_comm, (data_dir+std::string("HamiltonianPrealloc.json")).c_str(), "w", &fp_prealloc); CHKERRQ(ierr);
436  if(!mpi_rank) fprintf(fp_prealloc,"[\n");
437  }
438 
439  /* Print some info to stdout */
440  if(!mpi_rank){
441  printf( "=========================================\n"
442  "DENSITY MATRIX RENORMALIZATION GROUP\n"
443  "-----------------------------------------\n");
444  Ham.PrintOut();
445  printf( "-----------------------------------------\n");
446  printf( "DIRECTORIES\n");
447  if(do_scratch_dir) printf(
448  " Scratch: %s\n", scratch_dir.c_str());
449  printf( " Data: %s\n", data_dir.c_str());
450  if(restart) printf(
451  " Restart: %s\n", restart_dir.c_str());
452  printf( "=========================================\n");
453  }
454 
455  /* Setup the modes for performing the warmup and sweeps */
456  {
457  PetscBool opt_mstates = PETSC_FALSE,
458  opt_mwarmup = PETSC_FALSE,
459  opt_nsweeps = PETSC_FALSE,
460  opt_msweeps = PETSC_FALSE,
461  opt_maxnsweeps = PETSC_FALSE;
462  PetscInt mstates, num_msweeps=1000;
463  msweeps.resize(num_msweeps);
464 
465  ierr = PetscOptionsGetInt(NULL,NULL,"-mstates",&mstates,&opt_mstates); CHKERRQ(ierr);
466  ierr = PetscOptionsGetInt(NULL,NULL,"-mwarmup",&mwarmup,&opt_mwarmup); CHKERRQ(ierr);
467  ierr = PetscOptionsGetInt(NULL,NULL,"-nsweeps",&nsweeps,&opt_nsweeps); CHKERRQ(ierr);
468  ierr = PetscOptionsGetIntArray(NULL,NULL,"-msweeps",&msweeps.at(0),&num_msweeps,&opt_msweeps); CHKERRQ(ierr);
469  msweeps.resize(num_msweeps);
470 
471  PetscInt num_maxnsweeps = 1000;
472  maxnsweeps.resize(num_maxnsweeps);
473  ierr = PetscOptionsGetIntArray(NULL,NULL,"-maxnsweeps",&maxnsweeps.at(0),&num_maxnsweeps,&opt_maxnsweeps); CHKERRQ(ierr);
474  maxnsweeps.resize(num_maxnsweeps);
475 
481  // if(!opt_mstates && !opt_mwarmup)
482  // SETERRQ(mpi_comm,1,"Either -mstates or -mwarmup has to be specified.");
483 
487  if(opt_mstates && !opt_mwarmup)
488  mwarmup = mstates;
489 
493  if(opt_nsweeps && opt_msweeps)
494  SETERRQ(mpi_comm,1,"-msweeps and -nsweeps cannot both be specified at the same time.");
495 
499  if(opt_nsweeps && opt_maxnsweeps)
500  SETERRQ(mpi_comm,1,"-nsweeps and -maxnsweeps cannot both be specified at the same time.");
501 
505  if(opt_maxnsweeps && (num_maxnsweeps != num_msweeps))
506  SETERRQ2(mpi_comm,1,"-msweeps and -maxnsweeps must have the same number of items. "
507  "Got %lld and %lld, respectively.", LLD(num_msweeps), LLD(num_maxnsweeps));
508 
514  if(opt_nsweeps && !opt_msweeps)
515  {
516  sweep_mode = SWEEP_MODE_NSWEEPS;
517  }
518  else if(opt_msweeps && !opt_nsweeps)
519  {
520  if(opt_maxnsweeps)
521  {
525  sweep_mode = SWEEP_MODE_TOLERANCE_TEST;
526  }
527  else
528  {
532  sweep_mode = SWEEP_MODE_MSWEEPS;
533  }
534  }
535  else if(!opt_msweeps && !opt_nsweeps)
536  {
537  sweep_mode = SWEEP_MODE_NULL;
538  }
539  else
540  {
541  SETERRQ(mpi_comm,1,"Invalid parameters specified for choosing sweep mode.");
542  }
543 
544  /* printout some info on warmup */
545  if(!mpi_rank && !restart){
546  std::cout
547  << "WARMUP\n"
548  << " NumStates to keep: " << mwarmup << "\n";
549  }
550 
551  /* print some info on sweeps */
552  if(!mpi_rank)
553  {
554  std::cout
555  << "SWEEP\n"
556  << " Sweep mode: " << SweepModeToString.at(sweep_mode)
557  << std::endl;
558 
559  if(sweep_mode==SWEEP_MODE_NSWEEPS)
560  {
561  std::cout << " Number of sweeps: " << nsweeps << std::endl;
562  }
563  else if(sweep_mode==SWEEP_MODE_MSWEEPS)
564  {
565  std::cout << " NumStates to keep: ";
566  for(const PetscInt& mstates: msweeps) std::cout << " " << mstates;
567  std::cout << std::endl;
568  }
569  else if(sweep_mode==SWEEP_MODE_TOLERANCE_TEST)
570  {
571  std::cout << " NumStates to keep, maxiter: ";
572  for(size_t i = 0; i < msweeps.size(); ++i)
573  std::cout << " (" << msweeps.at(i) << "," << maxnsweeps.at(i) << ")";
574  std::cout << std::endl;
575  }
576  else if(sweep_mode==SWEEP_MODE_NULL)
577  {
578 
579  }
580  PRINTLINES();
581  }
582  }
583 
584  LoopType = WarmupStep;
585  init = PETSC_TRUE;
586  return(0);
587  }
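 /* Illustrative command-line sketches for the three sweep modes selected above
    (option names are the ones parsed in Initialize(); the values are arbitrary):

        # fixed number of sweeps, keeping the same number of states as the warmup
        -mwarmup 20 -nsweeps 4

        # one sweep per list entry, each with its own number of kept states
        -mwarmup 20 -msweeps 20,30,40

        # tolerance test: repeat each kept-states value up to the paired maximum
        -mwarmup 20 -msweeps 20,40 -maxnsweeps 4,8
 */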
588 
590  PetscErrorCode Destroy()
591  {
592  if(!init) return(0);
593  PetscErrorCode ierr = 0;
594  ierr = SingleSite.Destroy(); CHKERRQ(ierr);
595  for(Block& blk: sys_blocks) { ierr = blk.Destroy(); CHKERRQ(ierr); }
596  for(Block& blk: env_blocks) { ierr = blk.Destroy(); CHKERRQ(ierr); }
597 
598  if(!mpi_rank && !data_tabular) fprintf(fp_step,"\n]\n");
599  if(!mpi_rank && data_tabular) fprintf(fp_step,"\n ]\n}\n");
600  ierr = PetscFClose(mpi_comm, fp_step); CHKERRQ(ierr);
601 
602  if(!mpi_rank && !data_tabular) fprintf(fp_timings,"\n]\n");
603  if(!mpi_rank && data_tabular) fprintf(fp_timings,"\n ]\n}\n");
604  ierr = PetscFClose(mpi_comm, fp_timings); CHKERRQ(ierr);
605 
606  if(!mpi_rank) fprintf(fp_entanglement,"\n]\n");
607  ierr = PetscFClose(mpi_comm, fp_entanglement); CHKERRQ(ierr);
608 
609  ierr = SaveLoopsData(); CHKERRQ(ierr);
610  if(!mpi_rank) fprintf(fp_data,"\n}\n");
611  ierr = PetscFClose(mpi_comm, fp_data); CHKERRQ(ierr);
612 
613  if(!mpi_rank) fprintf(fp_corr,"\n ]\n}\n");
614  ierr = PetscFClose(mpi_comm, fp_corr); CHKERRQ(ierr);
615 
616  if(do_save_prealloc){
617  if(!mpi_rank) fprintf(fp_prealloc,"\n]\n");
618  ierr = PetscFClose(mpi_comm, fp_prealloc); CHKERRQ(ierr);
619  }
620 
621  init = PETSC_FALSE;
622  return(0);
623  }
624 
627  PetscErrorCode SetUpCorrelation(
628  const std::vector< Op >& OpList,
629  const std::string& name,
630  const std::string& desc
631  )
632  {
633  CheckInitialization(init,mpi_comm);
634 
635  /* Verify that the function is called after initialization and before the start of the sweeps */
636  if(LoopType==SweepStep)
637  SETERRQ(mpi_comm,1,"SetUpCorrelation() must be called before starting the sweeps.");
638 
639  /* Generate the measurement object */
640  Correlator m;
641  m.idx = measurements.size();
642  m.name = name;
643  m.desc1 = desc;
644  m.desc2 += "< ";
645  for(const Op& op: OpList) m.desc2 += OpToStr(op.OpType) + "_{" + std::to_string(op.idx) + "} ";
646  m.desc2 += ">";
647 
648  /* Convert the input operators list into a measurement struct with site numbering according to blocks */
649  for(const Op& op: OpList){
650  if(0 <= op.idx && op.idx < num_sites/2){
651  m.SysOps.push_back(op);
652  }
653  else if(num_sites/2 <= op.idx && op.idx < num_sites ){
654  m.EnvOps.push_back({op.OpType, num_sites - 1 - op.idx});
655  }
656  else {
657  SETERRQ2(mpi_comm,1,"Operator index must be in the range [0,%D). Got %D.", num_sites, op.idx);
658  }
659  }
660 
661  /* Reflection symmetry: if the resulting SysOps is empty, swap with EnvOps */
662  if(m.SysOps.empty())
663  {
664  m.SysOps = m.EnvOps;
665  m.EnvOps.clear();
666  }
667 
668  /* Also printout the description in terms of local block indices */
669  m.desc3 += "< ( ";
670  for(const Op& op: m.SysOps) m.desc3 += OpToStr(op.OpType) + "_{" + std::to_string(op.idx) + "} ";
671  if(m.SysOps.size()==0) m.desc3 += "1 ";
672  m.desc3 += ") ⊗ ( ";
673  for(const Op& op: m.EnvOps) m.desc3 += OpToStr(op.OpType) + "_{" + std::to_string(op.idx) + "} ";
674  if(m.EnvOps.size()==0) m.desc3 += "1 ";
675  m.desc3 += ") >";
676 
677  /* Printout some information */
678  // if(!mpi_rank) m.PrintInfo();
679 
680  measurements.push_back(m);
681  return(0);
682  }
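 /* Usage sketch (illustrative): to request the two-point function < Sz_i Sz_j >
    one would pass a list of two operators, e.g.

        std::vector<Op> OpList = { {OpSz, i}, {OpSz, j} };
        ierr = dmrg.SetUpCorrelation(OpList, "SzSz", "<Sz_i Sz_j>"); CHKERRQ(ierr);

    assuming the operator-type enum value (OpSz here) is the one provided by the
    block headers; the site indices i,j refer to the full lattice and are mapped
    to system/environment block indices internally, as done above. */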
683 
687  PetscErrorCode Warmup()
688  {
689  CheckInitialization(init,mpi_comm);
690  if(dry_run) return(0);
691 
692  if(mwarmup==0 && !restart) {
693  if(!mpi_rank) std::cout
694  << "WARNING: Nothing left to do since mwarmup is zero." << std::endl;
695  return(0);
696  }
697 
698  PetscErrorCode ierr = 0;
699 
700  /* Initialize timings */
701  ierr = PetscTime(&t0abs); CHKERRQ(ierr);
702 
703  if(warmed_up) SETERRQ(mpi_comm,1,"Warmup has already been called, and it can only be called once.");
704  if(!mpi_rank) printf("WARMUP\n");
705 
706  /* Initialize array of blocks */
707  num_sys_blocks = num_sites - 1;
708  sys_blocks.resize(num_sys_blocks);
709 
710  /* Initialize directories for saving the block operators */
711  if(do_scratch_dir)
712  {
713  PetscBool flg;
714  ierr = PetscTestDirectory(scratch_dir.c_str(), 'r', &flg); CHKERRQ(ierr);
715  if(!flg) SETERRQ1(mpi_comm,1,"Directory %s does not exist. "
716  "Please verify that -scratch_dir is specified correctly.",scratch_dir.c_str());
717 
718  /* Create the subdirectories from root process */
719  if(!mpi_rank)
720  {
721  /* Subdirectories for the enlarged blocks */
722  ierr = Makedir(scratch_dir + "EnlSys/"); CHKERRQ(ierr);
723  ierr = Makedir(scratch_dir + "EnlEnv/"); CHKERRQ(ierr);
724 
725  /* Subdirectories for the blocks of the current sweep */
726  ierr = Makedir(scratch_dir + SweepDir(LoopIdx)); CHKERRQ(ierr);
727  for(PetscInt iblock = 0; iblock < num_sys_blocks; ++iblock)
728  {
729  std::string path = scratch_dir + SweepDir(LoopIdx) + BlockDir("Sys",iblock);
730  ierr = Makedir(path); CHKERRQ(ierr);
731  }
732  }
733  }
734 
737  if(restart)
738  {
739  if(!mpi_rank) std::cout << "Loading blocks from file..." << std::endl;
740  for(PetscInt iblock = 0; iblock < sys_ninit; ++iblock){
741  std::string path_read = restart_dir + BlockDir("Sys",iblock);
742  std::string path_write = scratch_dir + SweepDir(LoopIdx) + BlockDir("Sys",iblock);
743  if(!mpi_rank)
744  std::cout << " Reading Block " << iblock << " from: "
745  << path_read << std::endl;
746  ierr = sys_blocks[iblock].InitializeFromDisk(mpi_comm, path_read); CHKERRQ(ierr);
747  ierr = sys_blocks[iblock].SetDiskStorage(path_read, path_write); CHKERRQ(ierr);
748  }
749  for(PetscInt iblock = sys_ninit; iblock < num_sys_blocks; ++iblock){
750  std::string path = scratch_dir + SweepDir(LoopIdx) + BlockDir("Sys",iblock);
751  ierr = sys_blocks[iblock].Initialize(mpi_comm); CHKERRQ(ierr);
752  ierr = sys_blocks[iblock].SetDiskStorage(path, path); CHKERRQ(ierr);
753  }
754  if(!mpi_rank) PRINTLINES();
755  warmed_up = PETSC_TRUE;
756  return(0);
757  }
758 
759  /* Assign the directories for saving the blocks */
760  if(do_scratch_dir)
761  {
762  for(PetscInt iblock = 0; iblock < num_sys_blocks; ++iblock){
763  std::string path = scratch_dir + SweepDir(LoopIdx) + BlockDir("Sys",iblock);
764  ierr = sys_blocks[iblock].Initialize(mpi_comm); CHKERRQ(ierr);
765  ierr = sys_blocks[iblock].SetDiskStorage(path, path); CHKERRQ(ierr);
766  }
767  }
768 
769  /* Initialize the 0th system block with one site */
770  ierr = sys_blocks[sys_ninit++].Initialize(mpi_comm, 1, PETSC_DEFAULT); CHKERRQ(ierr);
771 
772  /* Create a set of small but exact initial blocks */
773  {
774  if(num_sites % 2) SETERRQ1(mpi_comm,1,"Total number of sites must be even. Got %lld.", LLD(num_sites));
775  if(AddSite.NumSites() != 1) SETERRQ1(mpi_comm,1,"This routine assumes that the added block has exactly one site. Got %lld.", LLD(AddSite.NumSites()));
776 
777  /* Number of sites in a single cluster, whose multiples form a full lattice ensuring that the total size is even */
778  PetscInt nsites_cluster = Ham.NumEnvSites();
779  if (nsites_cluster % 2) nsites_cluster *= 2;
780 
781  /* Prepare an exact representation of blocks of sites incremented up to the cluster size */
782  if(!mpi_rank){
783  if(verbose) PrintLines();
784  printf(" Preparing initial blocks.\n");
785  }
786  while(sys_ninit < nsites_cluster){
787  PetscInt NumSitesTotal = sys_blocks[sys_ninit-1].NumSites() + AddSite.NumSites();
788  ierr = KronEye_Explicit(sys_blocks[sys_ninit-1], AddSite, Ham.H(NumSitesTotal), sys_blocks[sys_ninit]); CHKERRQ(ierr);
789  ++sys_ninit;
790  }
791 
792  {
793  if(!mpi_rank && verbose) printf(" sys_ninit: %lld\n", LLD(sys_ninit));
794  for(PetscInt isys = 0; isys < sys_ninit; ++isys){
795  if(!mpi_rank && verbose) printf(" block %lld, num_sites %lld\n", LLD(isys), LLD(sys_blocks[isys].NumSites()));
796  }
797  }
798 
799  if(sys_ninit >= num_sites/2)
800  {
801  SETERRQ(mpi_comm,1,"No DMRG steps were performed since all site operators were created exactly. "
802  "Please change the system dimensions.");
803  }
804 
805  /* Continuously enlarge the system block until it reaches half the total system size and use the largest
806  available environment block that forms a full lattice (multiple of nsites_cluster) */
807  LoopType = WarmupStep;
808  StepIdx = 0;
809  while(sys_ninit < num_sites/2)
810  {
811  PetscInt full_cluster = (((sys_ninit+2) / nsites_cluster)+1) * nsites_cluster;
812  PetscInt env_numsites = full_cluster - sys_ninit - 2;
813 
814  /* Increment env_numsites up to the highest number of env_blocks available */
815  PetscInt env_add = ((sys_ninit - env_numsites) / nsites_cluster) * nsites_cluster;
816  env_numsites += env_add;
817  full_cluster += env_add;
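 /* Worked example (illustrative): for num_sites = 12 and nsites_cluster = 2,
    at sys_ninit = 5 the formulas give full_cluster = ((7/2)+1)*2 = 8 and
    env_numsites = 8-5-2 = 1; then env_add = ((5-1)/2)*2 = 4, so the environment
    is enlarged to env_numsites = 5 and full_cluster = 12, i.e. the largest
    environment block that still completes a full lattice. */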
818 
819  if(env_numsites < 1 || env_numsites > sys_ninit)
820  SETERRQ1(mpi_comm,1,"Incorrect number of sites. Got %lld.", LLD(env_numsites));
821 
822  if(!mpi_rank){
823  if(verbose) PrintLines();
824  printf(" %s %lld/%lld/%lld\n", "WARMUP", LLD(LoopIdx), LLD(StepIdx), LLD(GlobIdx));
825  PrintBlocks(sys_ninit,env_numsites);
826  }
827  if(do_scratch_dir){
828  std::set< PetscInt > SysIdx = {sys_ninit-1, env_numsites-1};
829  ierr = SysBlocksActive(SysIdx); CHKERRQ(ierr);
830  }
831  ierr = SingleDMRGStep(
832  sys_blocks[sys_ninit-1], sys_blocks[env_numsites-1], mwarmup,
833  sys_blocks[sys_ninit], sys_blocks[env_numsites], PetscBool(sys_ninit+1==num_sites/2)); CHKERRQ(ierr);
834 
835  ++sys_ninit;
836 
837  #if defined(PETSC_USE_DEBUG)
838  if(!mpi_rank && verbose) printf(" Number of system blocks: %lld\n", LLD(sys_ninit));
839  #endif
840  }
841  }
842 
843  if(sys_ninit != num_sites/2)
844  SETERRQ2(mpi_comm,1,"Expected sys_ninit = num_sites/2 = %lld. Got %lld.",LLD(num_sites/2), LLD(sys_ninit));
845  /* Destroy environment blocks (if any) */
846  for(PetscInt ienv = 0; ienv < env_ninit; ++ienv){
847  ierr = env_blocks[ienv].Destroy(); CHKERRQ(ierr);
848  }
849  env_ninit = 0;
850  warmed_up = PETSC_TRUE;
851 
852  if(verbose){
853  PetscPrintf(mpi_comm,
854  " Initialized system blocks: %lld\n"
855  " Target number of sites: %lld\n\n", LLD(sys_ninit), LLD(num_sites));
856  }
857  ierr = SaveSweepsData(); CHKERRQ(ierr);
858  if(!mpi_rank) PRINTLINES();
859  ++LoopIdx;
860  return(0);
861  }
862 
864  PetscErrorCode Sweeps()
865  {
866  if(dry_run || (mwarmup==0 && !restart)) return(0);
867  PetscErrorCode ierr;
868  /* Restart: The iteration variable msweep_idx is a class member so that
869  it can be recorded at the end of each sweep.
870  The starting value is zero by default but may be obtained from
871  file if both restart and restart_options are enabled. */
872  PetscInt start_idx = 0;
873 
874  if(restart && restart_options)
875  {
876  /* TODO: Set the value of start_idx from recorded msweep_idx */
877  /* OR: Do it from inside the if statements??? */
878  const auto it = restart_sweep_dict.find("msweep_idx");
879  if(it!=restart_sweep_dict.end())
880  start_idx = it->second;
881  else
882  SETERRQ(PETSC_COMM_SELF,1,"msweep_idx not found.");
883 
884  /* Determine whether to increment start_idx */
885  if(sweep_mode==SWEEP_MODE_NSWEEPS || sweep_mode==SWEEP_MODE_MSWEEPS) {
886  /* Always increment start_idx with these two modes */
887  ++start_idx;
888  }
889  else if(sweep_mode==SWEEP_MODE_TOLERANCE_TEST) {
890  /* Increment the start_idx only if the previous SWEEP
891  ended in a break */
892  const auto it = restart_sweep_dict.find("cont");
893  PetscBool cont_prev;
894  if(it!=restart_sweep_dict.end())
895  cont_prev = PetscBool(it->second);
896  else if(start_idx==-1)
897  /* If the previous step was a WARMUP step then cont==FALSE so that
898  start_idx is incremented to zero */
899  cont_prev = PETSC_FALSE;
900  else
901  SETERRQ(PETSC_COMM_SELF,1,"cont not found.");
902  if(!cont_prev) start_idx++;
903  }
904  if(!mpi_rank) {
905  std::cout << "WARNING: Sweeps will start at idx = "
906  << start_idx << std::endl;
907  if(((sweep_mode==SWEEP_MODE_NSWEEPS &&
908  start_idx>=nsweeps) ||
909  (sweep_mode==SWEEP_MODE_MSWEEPS &&
910  start_idx>=PetscInt(msweeps.size())) ||
911  (sweep_mode==SWEEP_MODE_TOLERANCE_TEST &&
912  start_idx>=PetscInt(msweeps.size())) ))
913  std::cout << "WARNING: Nothing left to do with restart." << std::endl;
914  }
915  }
916 
917  if(sweep_mode==SWEEP_MODE_NSWEEPS)
918  {
919  for(msweep_idx=start_idx; msweep_idx < nsweeps; ++msweep_idx)
920  {
921  ierr = SingleSweep(mwarmup); CHKERRQ(ierr);
922  }
923  }
924  else if(sweep_mode==SWEEP_MODE_MSWEEPS)
925  {
926  for(msweep_idx=start_idx; msweep_idx < PetscInt(msweeps.size()); ++msweep_idx)
927  {
928  ierr = SingleSweep(msweeps.at(msweep_idx)); CHKERRQ(ierr);
929  }
930  }
931  else if(sweep_mode==SWEEP_MODE_TOLERANCE_TEST)
932  {
933  for(msweep_idx=start_idx; msweep_idx < PetscInt(msweeps.size()); ++msweep_idx)
934  {
935  PetscInt mstates = msweeps.at(msweep_idx);
936  PetscInt max_iter = maxnsweeps.at(msweep_idx);
937  PetscInt iter = 0;
938  if(max_iter==0) continue;
939 
940  PetscScalar prev_gse;
941  PetscReal max_trn;
942  PetscReal diff_gse;
943  bool cont;
944 
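 /* Repeat the sweep at this fixed number of kept states until the change in
    the ground-state energy falls below the largest truncation error recorded
    during the sweep, or until max_iter sweeps have been performed. */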
945  do
946  {
947  prev_gse = gse;
948  ierr = SingleSweep(mstates); CHKERRQ(ierr);
949  diff_gse = PetscAbsScalar(gse-prev_gse);
950  max_trn = *std::max_element(trunc_err.begin(), trunc_err.end());
951  max_trn = std::max(max_trn,0.0);
952  iter++;
953  cont = (iter<max_iter) && (diff_gse > max_trn);
954 
955  if(!mpi_rank)
956  {
957  std::cout
958  << "SWEEP_MODE_TOLERANCE_TEST\n"
959  << " Iterations / Max Iterations: "
960  << iter << "/" << max_iter << "\n"
961  << " Difference in ground state energy: "
962  << diff_gse << "\n"
963  << " Largest truncation error: "
964  << max_trn << "\n"
965  << " " << (cont?"CONTINUE":"BREAK")
966  << std::endl;
967  PRINTLINES();
968  }
969 
970  /* Also append the result to Sweep.dat. The file is at LoopIdx-1 since
971  we have already incremented LoopIdx at the end of SingleSweep() */
972  if(!mpi_rank)
973  {
974  std::string sweep_data_fn = scratch_dir + SweepDir(LoopIdx-1) + "Sweep.dat";
975  std::ofstream sweep_data_file(sweep_data_fn, std::ios_base::app);
976  sweep_data_file << std::setw(20) << "cont" << " " << int(cont) << "\n";
977  sweep_data_file.close();
978  }
979  }
980  while(cont);
981  }
982  }
983  else if(sweep_mode==SWEEP_MODE_NULL)
984  {
985 
986  }
987  else
988  {
989  SETERRQ(mpi_comm,1,"Invalid sweep mode.");
990  }
991 
992  return(0);
993  }
994 
996  PetscErrorCode SingleSweep(
997  const PetscInt& MStates,
998  const PetscInt& MinBlock = PETSC_DEFAULT
999  )
1000  {
1001  CheckInitialization(init,mpi_comm);
1002 
1003  PetscErrorCode ierr;
1004  if(!warmed_up) SETERRQ(mpi_comm,1,"Warmup must be called first before performing sweeps.");
1005  if(!mpi_rank) printf("SWEEP MStates=%lld\n", LLD(MStates));
1006 
1007  /* Setup the attributes that will contain some information about this sweep */
1008  trunc_err.clear();
1009 
1010  /* Set a minimum number of blocks (min_block). Decide whether to set it statically or let
1011  the number correspond to the least number of sites needed to exactly build MStates. */
1012  PetscInt min_block = MinBlock==PETSC_DEFAULT ? 1 : MinBlock;
1013  if(min_block < 1) SETERRQ1(mpi_comm,1,"MinBlock must be at least 1. Got %lld.", LLD(min_block));
1014 
1015  /* Update the directories for saving the block operators */
1016  if(do_scratch_dir)
1017  {
1018  /* Create the subdirectories from root process */
1019  if(!mpi_rank)
1020  {
1021  /* Subdirectories for the blocks of the current sweep */
1022  ierr = Makedir(scratch_dir + SweepDir(LoopIdx)); CHKERRQ(ierr);
1023  for(PetscInt iblock = 0; iblock < num_sys_blocks; ++iblock)
1024  {
1025  std::string path = scratch_dir + SweepDir(LoopIdx) + BlockDir("Sys",iblock);
1026  ierr = Makedir(path); CHKERRQ(ierr);
1027  }
1028  }
1029  for(PetscInt iblock = 0; iblock < num_sys_blocks; ++iblock)
1030  {
1031  std::string read_path = scratch_dir + SweepDir(LoopIdx-1) + BlockDir("Sys",iblock);
1032  std::string write_path = scratch_dir + SweepDir(LoopIdx) + BlockDir("Sys",iblock);
1033  ierr = sys_blocks[iblock].SetDiskStorage(read_path, write_path); CHKERRQ(ierr);
1034  }
1035  }
1036 
1037  /* Starting from the midpoint, perform a center to right sweep */
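 /* Index bookkeeping (illustrative): block index i holds i+1 sites, so for
    num_sites = 12 and iblock = 6 this step combines sys_blocks[5] (6 sites)
    and sys_blocks[3] (4 sites) with the two added single sites to recover
    the full 12-site superblock. */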
1038  LoopType = SweepStep;
1039  StepIdx = 0;
1040  for(PetscInt iblock = num_sites/2; iblock < num_sites - min_block - 2; ++iblock)
1041  {
1042  const PetscInt insys = iblock-1, inenv = num_sites - iblock - 3;
1043  const PetscInt outsys = iblock, outenv = num_sites - iblock - 2;
1044  if(!mpi_rank){
1045  if(verbose) PrintLines();
1046  printf(" %s %lld/%lld/%lld\n", "SWEEP", LLD(LoopIdx), LLD(StepIdx), LLD(GlobIdx));
1047  PrintBlocks(insys+1,inenv+1);
1048  }
1049  if(do_scratch_dir){
1050  std::set< PetscInt > SysIdx = {insys, inenv};
1051  ierr = SysBlocksActive(SysIdx); CHKERRQ(ierr);
1052  }
1053  ierr = SingleDMRGStep(sys_blocks[insys], sys_blocks[inenv], MStates,
1054  sys_blocks[outsys], sys_blocks[outenv]); CHKERRQ(ierr);
1055  }
1056 
1057  /* Since we ASSUME REFLECTION SYMMETRY, the remainder of the sweep can be done as follows:
1058  Starting from the right-most min_block, perform a right to left sweep up to the MIDPOINT */
1059  for(PetscInt iblock = min_block; iblock < num_sites/2; ++iblock)
1060  {
1061  const PetscInt insys = num_sites - iblock - 3, inenv = iblock-1;
1062  const PetscInt outsys = num_sites - iblock - 2, outenv = iblock;
1063  if(!mpi_rank){
1064  if(verbose) PrintLines();
1065  printf(" %s %lld/%lld/%lld\n", "SWEEP", LLD(LoopIdx), LLD(StepIdx), LLD(GlobIdx));
1066  PrintBlocks(insys+1,inenv+1);
1067  }
1068  if(do_scratch_dir){
1069  std::set< PetscInt > SysIdx = {insys, inenv};
1070  ierr = SysBlocksActive(SysIdx); CHKERRQ(ierr);
1071  }
1072  ierr = SingleDMRGStep(sys_blocks[insys], sys_blocks[inenv], MStates,
1073  sys_blocks[outsys], sys_blocks[outenv], PetscBool(outsys==outenv)); CHKERRQ(ierr);
1074  }
1075  sweeps_mstates.push_back(MStates);
1076 
1080  for(PetscInt iblock = 0; iblock < num_sys_blocks; ++iblock)
1081  {
1082  ierr = sys_blocks[iblock].EnsureSaved(); CHKERRQ(ierr);
1083  }
1084  ierr = SaveSweepsData(); CHKERRQ(ierr);
1085  ++LoopIdx;
1086  if(!mpi_rank) PRINTLINES();
1087  return(0);
1088  }
1089 
1091  const Block& SysBlock(const PetscInt& BlockIdx) const {
1092  if(BlockIdx >= sys_ninit) throw std::runtime_error("Attempted to access uninitialized system block.");
1093  return sys_blocks[BlockIdx];
1094  }
1095 
1097  const Block& EnvBlock(const PetscInt& BlockIdx) const {
1098  if(BlockIdx >= env_ninit) throw std::runtime_error("Attempted to access uninitialized environment block.");
1099  return env_blocks[BlockIdx];
1100  }
1101 
1103  const Block& EnvBlock() const{ return env_blocks[0]; }
1104 
1106  PetscInt NumSites() const { return num_sites; }
1107 
1109  PetscBool Verbose() const { return verbose; }
1110 
1112  const Hamiltonian& HamiltonianRef() const { return Ham; }
1113 
1114 private:
1115 
1117  typedef enum
1118  {
1119  SWEEP_MODE_NULL,
1120  SWEEP_MODE_NSWEEPS,
1121  SWEEP_MODE_MSWEEPS,
1122  SWEEP_MODE_TOLERANCE_TEST
1124  } SweepMode_t;
1125 
1127  const std::map< SweepMode_t, std::string > SweepModeToString = {
1128  {SWEEP_MODE_NULL, "SWEEP_MODE_NULL"},
1129  {SWEEP_MODE_NSWEEPS, "SWEEP_MODE_NSWEEPS"},
1130  {SWEEP_MODE_MSWEEPS, "SWEEP_MODE_MSWEEPS"},
1131  {SWEEP_MODE_TOLERANCE_TEST, "SWEEP_MODE_TOLERANCE_TEST"}
1132  };
1133 
1135  MPI_Comm mpi_comm = PETSC_COMM_SELF;
1136 
1138  PetscMPIInt mpi_rank;
1139 
1141  PetscMPIInt mpi_size;
1142 
1144  PetscBool init = PETSC_FALSE;
1145 
1147  PetscBool verbose = PETSC_FALSE;
1148 
1150  PetscBool restart = PETSC_FALSE;
1151 
1157  PetscBool restart_options = PETSC_FALSE;
1158 
1161  std::string restart_dir;
1162 
1164  std::map<std::string, PetscInt> restart_sweep_dict;
1165 
1167  PetscBool dry_run = PETSC_FALSE;
1168 
1170  PetscBool warmed_up = PETSC_FALSE;
1171 
1173  PetscBool no_symm = PETSC_FALSE;
1174 
1176  PetscReal qn_sector = 0.0;
1177 
1179  SweepMode_t sweep_mode = SWEEP_MODE_NULL;
1180 
1182  PetscInt mwarmup = 0;
1183 
1185  PetscInt nsweeps = 0;
1186 
1188  std::vector< PetscInt > msweeps;
1189 
1191  std::vector< PetscInt > maxnsweeps;
1192 
1195  std::vector<PetscInt> sweeps_mstates;
1196 
1200  PetscInt msweep_idx = -1;
1201 
1203  PetscInt num_sites;
1204 
1207  PetscInt num_sys_blocks;
1208 
1211  PetscInt num_env_blocks = 1;
1212 
1215  std::vector< Block > sys_blocks;
1216 
1218  PetscInt sys_ninit = 0;
1219 
1223  std::vector< Block > env_blocks;
1224 
1226  PetscInt env_ninit = 0;
1227 
1229  Hamiltonian Ham;
1230 
1233  Block SingleSite;
1234 
1236  Block& AddSite = SingleSite;
1237 
1239  std::string scratch_dir = ".";
1240 
1243  PetscBool do_scratch_dir = PETSC_TRUE;
1244 
1246  PetscBool data_tabular = PETSC_TRUE;
1247 
1249  PetscBool do_save_prealloc = PETSC_FALSE;
1250 
1252  PetscBool do_shell = PETSC_TRUE;
1253 
1255  FILE *fp_step = NULL;
1256 
1258  FILE *fp_timings = NULL;
1259 
1261  FILE *fp_entanglement = NULL;
1262 
1264  FILE *fp_data = NULL;
1265 
1267  FILE *fp_prealloc = NULL;
1268 
1270  FILE *fp_corr = NULL;
1271 
1273  PetscInt GlobIdx = 0;
1274 
1276  Step_t LoopType = NullStep;
1277 
1279  PetscInt LoopIdx = 0;
1280 
1282  PetscInt StepIdx = 0;
1283 
1285  std::vector< Correlator > measurements;
1286 
1288  PetscBool corr_headers_printed = PETSC_FALSE;
1289 
1291  PetscBool corr_printed_first = PETSC_FALSE;
1292 
1294  PetscScalar gse = 0.0;
1295 
1297  std::vector<PetscReal> trunc_err;
1298 
1300  PetscLogDouble t0abs = 0.0;
1301 
1304  PetscErrorCode SingleDMRGStep(
1305  Block& SysBlock,
1306  Block& EnvBlock,
1307  const PetscInt& MStates,
1308  Block& SysBlockOut,
1309  Block& EnvBlockOut,
1310  PetscBool do_measurements=PETSC_FALSE
1311  )
1312  {
1313  PetscErrorCode ierr;
1314  PetscLogDouble t0, tenlr, tkron, tdiag, trdms, trotb;
1315  TimingsData timings_data;
1316  t0 = t0abs;
1317 
1318  /* Fill-in data from input blocks */
1319  StepData step_data;
1320  step_data.NumSites_Sys = SysBlock.NumSites();
1321  step_data.NumSites_Env = EnvBlock.NumSites();
1322  step_data.NumStates_Sys = SysBlock.NumStates();
1323  step_data.NumStates_Env = EnvBlock.NumStates();
1324 
1325  /* Check whether the system and environment blocks are the same */
1326  Mat H = nullptr; /* Hamiltonian matrix */
1327  const PetscBool flg = PetscBool(&SysBlock==&EnvBlock);
1328 
1329  /* (Block) Add one site to each block */
1330  Block SysBlockEnl, EnvBlockEnl;
1331  PetscInt NumSitesSysEnl = SysBlock.NumSites() + AddSite.NumSites();
1332  const std::vector< Hamiltonians::Term > TermsSys = Ham.H(NumSitesSysEnl);
1333  ierr = KronEye_Explicit(SysBlock, AddSite, TermsSys, SysBlockEnl); CHKERRQ(ierr);
1334  ierr = SysBlock.EnsureSaved(); CHKERRQ(ierr);
1335  if(!flg){
1336  PetscInt NumSitesEnvEnl = EnvBlock.NumSites() + AddSite.NumSites();
1337  const std::vector< Hamiltonians::Term > TermsEnv = Ham.H(NumSitesEnvEnl);
1338  ierr = KronEye_Explicit(EnvBlock, AddSite, TermsEnv, EnvBlockEnl); CHKERRQ(ierr);
1339  ierr = EnvBlock.EnsureSaved(); CHKERRQ(ierr);
1340  ierr = EnvBlockEnl.InitializeSave(scratch_dir + "EnlEnv/"); CHKERRQ(ierr);
1341  } else {
1342  EnvBlockEnl = SysBlockEnl;
1343  }
1344  ierr = SysBlockEnl.InitializeSave(scratch_dir + "EnlSys/"); CHKERRQ(ierr);
1345 
1346  ierr = PetscTime(&tenlr); CHKERRQ(ierr);
1347  timings_data.tEnlr = tenlr-t0;
1348  if(!mpi_rank && verbose) printf("* Add One Site: %12.6f s\n", timings_data.tEnlr);
1349 
1350  step_data.NumSites_SysEnl = SysBlockEnl.NumSites();
1351  step_data.NumSites_EnvEnl = EnvBlockEnl.NumSites();
1352  step_data.NumStates_SysEnl = SysBlockEnl.NumStates();
1353  step_data.NumStates_EnvEnl = EnvBlockEnl.NumStates();
1354 
1355  #if defined(PETSC_USE_DEBUG)
1356  {
1357  PetscBool flg = PETSC_FALSE;
1358  ierr = PetscOptionsGetBool(NULL,NULL,"-print_qn",&flg,NULL); CHKERRQ(ierr);
1359  if(flg){
1360  /* Print the enlarged system block's quantum numbers for each state */
1361  ierr = PetscPrintf(mpi_comm," SysBlockEnl "); CHKERRQ(ierr);
1362  ierr = SysBlockEnl.Magnetization.PrintQNs(); CHKERRQ(ierr);
1363  ierr = PetscPrintf(mpi_comm," EnvBlockEnl "); CHKERRQ(ierr);
1364  ierr = EnvBlockEnl.Magnetization.PrintQNs(); CHKERRQ(ierr);
1365  }
1366  }
1367  #endif
1368 
1369  /* Prepare the Hamiltonian taking both enlarged blocks together */
1370  PetscInt NumSitesTotal = SysBlockEnl.NumSites() + EnvBlockEnl.NumSites();
1371  const std::vector< Hamiltonians::Term > Terms = Ham.H(NumSitesTotal);
1372 
1373  std::vector<PetscReal> QNSectors = {qn_sector};
1374  if(no_symm) {
1375  QNSectors = {};
1376  }
1377  KronBlocks_t KronBlocks(SysBlockEnl, EnvBlockEnl, QNSectors, fp_prealloc, GlobIdx);
1378  step_data.NumStates_H = KronBlocks.NumStates();
1379  #if defined(PETSC_USE_DEBUG)
1380  {
1381  PetscBool flg = PETSC_FALSE;
1382  ierr = PetscOptionsGetBool(NULL,NULL,"-print_H_kron",&flg,NULL); CHKERRQ(ierr);
1383  if(flg && !mpi_rank){
1384  std::cout << "***** Kron_Explicit *****" << std::endl;
1385  std::cout << "SysBlockEnl qn_list: ";
1386  for(auto i: SysBlockEnl.Magnetization.List()) std::cout << i << " ";
1387  std::cout << std::endl;
1388 
1389  std::cout << "SysBlockEnl qn_size: ";
1390  for(auto i: SysBlockEnl.Magnetization.Sizes()) std::cout << i << " ";
1391  std::cout << std::endl;
1392 
1393  std::cout << "SysBlockEnl qn_offset: ";
1394  for(auto i: SysBlockEnl.Magnetization.Offsets()) std::cout << i << " ";
1395  std::cout << std::endl;
1396 
1397  std::cout << std::endl;
1398 
1399  std::cout << "EnvBlockEnl qn_list: ";
1400  for(auto i: EnvBlockEnl.Magnetization.List()) std::cout << i << " ";
1401  std::cout << std::endl;
1402 
1403  std::cout << "EnvBlockEnl qn_size: ";
1404  for(auto i: EnvBlockEnl.Magnetization.Sizes()) std::cout << i << " ";
1405  std::cout << std::endl;
1406 
1407  std::cout << "EnvBlockEnl qn_offset: ";
1408  for(auto i: EnvBlockEnl.Magnetization.Offsets()) std::cout << i << " ";
1409  std::cout << std::endl;
1410 
1411  PetscInt i = 0;
1412  std::cout << "KronBlocks: \n";
1413  for(KronBlock_t kb: KronBlocks.data())
1414  {
1415  std::cout << "( "
1416  << std::get<0>(kb) << ", "
1417  << std::get<1>(kb) << ", "
1418  << std::get<2>(kb) << ", "
1419  << std::get<3>(kb) << ", "
1420  << KronBlocks.Offsets()[i++] <<" )\n";
1421  }
1422  std::cout << "*************************" << std::endl;
1423  }
1424  if(flg){
1425  if(!mpi_rank){std::cout << "***** SysBlockEnl *****" << std::endl;}
1426  for(const Mat& mat: SysBlockEnl.Sz())
1427  {
1428  MatPeek(mat,"Sz");
1429  }
1430  for(const Mat& mat: SysBlockEnl.Sp())
1431  {
1432  MatPeek(mat,"Sp");
1433  }
1434  if(!mpi_rank){std::cout << "***** EnvBlockEnl *****" << std::endl;}
1435  for(const Mat& mat: EnvBlockEnl.Sz())
1436  {
1437  MatPeek(mat,"Sz");
1438  }
1439  for(const Mat& mat: EnvBlockEnl.Sp())
1440  {
1441  MatPeek(mat,"Sp");
1442  }
1443  if(!mpi_rank){std::cout << "***********************" << std::endl;}
1444  }
1445  }
1446  #endif
1447 
1448  ierr = KronBlocks.KronSumSetRedistribute(PETSC_TRUE); CHKERRQ(ierr);
1449  ierr = KronBlocks.KronSumSetToleranceFromOptions(); CHKERRQ(ierr);
1450  ierr = KronBlocks.KronSumSetShellMatrix(do_shell); CHKERRQ(ierr);
1451  ierr = KronBlocks.KronSumConstruct(Terms, H); CHKERRQ(ierr);
1452  if(!H) SETERRQ(mpi_comm,1,"H is null.");
1453 
1454  ierr = PetscTime(&tkron); CHKERRQ(ierr);
1455  timings_data.tKron = tkron-tenlr;
1456  if(!mpi_rank && verbose) printf("* Build Superblock H: %12.6f s\n", timings_data.tKron);
1457 
1458  #if defined(PETSC_USE_DEBUG)
1459  {
1460  PetscBool flg = PETSC_FALSE;
1461  ierr = PetscOptionsGetBool(NULL,NULL,"-print_H",&flg,NULL); CHKERRQ(ierr);
1462  if(flg){ ierr = MatPeek(H,"H"); CHKERRQ(ierr); }
1463  flg = PETSC_FALSE;
1464  ierr = PetscOptionsGetBool(NULL,NULL,"-print_H_terms",&flg,NULL); CHKERRQ(ierr);
1465  if(flg){
1466  if(!mpi_rank) printf(" H(%lld)\n", LLD(NumSitesTotal));
1467  for(const Hamiltonians::Term& term: Terms)
1468  {
1469  if(!mpi_rank) printf("%.2f %2s(%2lld) %2s(%2lld)\n", term.a, (OpString.find(term.Iop)->second).c_str(), LLD(term.Isite),
1470  (OpString.find(term.Jop)->second).c_str(), LLD(term.Jsite) );
1471  }
1472  }
1473  ierr = MPI_Barrier(mpi_comm); CHKERRQ(ierr);
1474  }
1475  #endif
1476 
1477  /* Solve for the ground state */
1478 
1479  #if defined(PETSC_USE_COMPLEX)
1480  SETERRQ(mpi_comm,PETSC_ERR_SUP,"This function is only implemented for scalar-type=real.");
1481  /* Using both gsv_r and gsv_i but assuming that gsv_i = 0 */
1482  #endif
1483 
1484  Vec gsv_r, gsv_i;
1485  PetscScalar gse_r, gse_i;
1486  ierr = MatCreateVecs(H, &gsv_r, nullptr); CHKERRQ(ierr);
1487  ierr = MatCreateVecs(H, &gsv_i, nullptr); CHKERRQ(ierr);
1488  {
1489  EPS eps;
1490  ierr = EPSCreate(mpi_comm, &eps); CHKERRQ(ierr);
1491  ierr = EPSSetOperators(eps, H, nullptr); CHKERRQ(ierr);
1492  ierr = EPSSetProblemType(eps, EPS_HEP); CHKERRQ(ierr);
1493  ierr = EPSSetWhichEigenpairs(eps, EPS_SMALLEST_REAL); CHKERRQ(ierr);
1494  ierr = EPSSetOptionsPrefix(eps,"H_"); CHKERRQ(ierr);
1495  ierr = EPSSetFromOptions(eps); CHKERRQ(ierr);
1496  ierr = EPSSolve(eps); CHKERRQ(ierr);
1497  ierr = EPSGetEigenpair(eps, 0, &gse_r, &gse_i, gsv_r, gsv_i); CHKERRQ(ierr);
1498  ierr = EPSDestroy(&eps); CHKERRQ(ierr);
1499  }
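 /* Because of the "H_" options prefix set above, the SLEPc eigensolver for the
    superblock Hamiltonian can be tuned at runtime with prefixed options, e.g.
    "-H_eps_tol 1e-9" or "-H_eps_nev 1" (illustrative values). */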
1500  step_data.GSEnergy = gse_r;
1501 
1502  if(do_shell){
1503  ierr = MatDestroy_KronSumShell(&H); CHKERRQ(ierr);
1504  }
1505  ierr = MatDestroy(&H); CHKERRQ(ierr);
1506 
1507  ierr = PetscTime(&tdiag); CHKERRQ(ierr);
1508  timings_data.tDiag = tdiag-tkron;
1509  if(!mpi_rank && verbose) printf("* Solve Ground State: %12.6f s\n", timings_data.tDiag);
1510 
1511  #if defined(PETSC_USE_DEBUG)
1512  {
1513  PetscBool flg = PETSC_FALSE;
1514  ierr = PetscOptionsGetBool(NULL,NULL,"-print_H_gs",&flg,NULL); CHKERRQ(ierr);
1515  if(flg){
1516  ierr = PetscPrintf(mpi_comm, "\n Ground State Energy: %g + %gj\n", gse_r, gse_i); CHKERRQ(ierr);
1517  ierr = VecPeek(gsv_r, " gsv_r"); CHKERRQ(ierr);
1518  }
1519  }
1520  #endif
1521 
1522  if(no_symm){
1523  ierr = MPI_Barrier(mpi_comm); CHKERRQ(ierr);
1524  SETERRQ(mpi_comm,PETSC_ERR_SUP,"Unsupported option: no_symm.");
1525  }
1526 
1527  /* Calculate the reduced density matrices in block-diagonal form, and from this we can calculate the
1528  (transposed) rotation matrix */
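 /* In equations: with the ground state reshaped into a matrix Psi whose row
    (column) index labels system (environment) states, the reduced density
    matrices are rho_L = Psi Psi^dag and rho_R = Psi^dag Psi; the MStates
    eigenvectors of rho_L / rho_R with the largest eigenvalues form the rows
    of the (transposed) rotation matrices computed in GetTruncation(). */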
1529  BasisTransformation *BT_L, *BT_R;
1530  BT_L = new BasisTransformation;
1531  BT_R = new BasisTransformation;
1532  ierr = GetTruncation(KronBlocks, gsv_r, MStates, *BT_L, *BT_R); CHKERRQ(ierr);
1533 
1534  /* TODO: Add an option to accept flg for redundant blocks */
1535  /* TODO: Retrieve reduced density matrices from this function */
1536 
1537  /* TODO: Perform evaluation of inter-block correlation functions here */
1538  /* TODO: Perform evaluation of intra-block correlation functions */
1539 
1540  PetscLogDouble tcorr0,tcorr1;
1541  ierr = PetscTime(&tcorr0); CHKERRQ(ierr);
1542 
1543  ierr = CalculateCorrelations_BlockDiag(KronBlocks, gsv_r, *BT_L, do_measurements); CHKERRQ(ierr);
1544 
1545  ierr = PetscTime(&tcorr1); CHKERRQ(ierr);
1546  if(do_measurements && !mpi_rank && verbose)
1547  printf(" * Calc. of Correlators %12.6f s\n", tcorr1-tcorr0);
1548 
1549  ierr = VecDestroy(&gsv_r); CHKERRQ(ierr);
1550  ierr = VecDestroy(&gsv_i); CHKERRQ(ierr);
1551 
1552  /* (Block) Initialize the new blocks
1553  copy enlarged blocks to out blocks but overwrite the matrices */
1554  ierr = SysBlockOut.Destroy(); CHKERRQ(ierr);
1555  ierr = EnvBlockOut.Destroy(); CHKERRQ(ierr);
1556 
1557  ierr = PetscTime(&trdms); CHKERRQ(ierr);
1558  timings_data.tRdms = trdms-tdiag;
1559  if(!mpi_rank && verbose) printf("* Eigendec. of RDMs: %12.6f s\n", timings_data.tRdms);
1560 
1561  ierr = SysBlockOut.Initialize(SysBlockEnl.NumSites(), BT_L->QN); CHKERRQ(ierr);
1562  ierr = SysBlockOut.RotateOperators(SysBlockEnl, BT_L->RotMatT); CHKERRQ(ierr);
1563  ierr = SysBlockEnl.Destroy(); CHKERRQ(ierr);
1564  if(!flg){
1565  ierr = EnvBlockOut.Initialize(EnvBlockEnl.NumSites(), BT_R->QN); CHKERRQ(ierr);
1566  ierr = EnvBlockOut.RotateOperators(EnvBlockEnl, BT_R->RotMatT); CHKERRQ(ierr);
1567  ierr = EnvBlockEnl.Destroy(); CHKERRQ(ierr);
1568  }
1569 
1570  step_data.NumStates_SysRot = SysBlockOut.NumStates();
1571  step_data.NumStates_EnvRot = EnvBlockOut.NumStates();
1572  step_data.TruncErr_Sys = BT_L->TruncErr;
1573  step_data.TruncErr_Env = BT_R->TruncErr;
1574 
1575  #if defined(PETSC_USE_DEBUG)
1576  {
1577  PetscBool flg = PETSC_FALSE;
1578  ierr = PetscOptionsGetBool(NULL,NULL,"-print_qn",&flg,NULL); CHKERRQ(ierr);
1579  if(flg){
1580  /* Print the enlarged system block's quantum numbers for each state */
1581  ierr = PetscPrintf(mpi_comm," SysBlockOut "); CHKERRQ(ierr);
1582  ierr = SysBlockOut.Magnetization.PrintQNs(); CHKERRQ(ierr);
1583  ierr = PetscPrintf(mpi_comm," EnvBlockOut "); CHKERRQ(ierr);
1584  ierr = EnvBlockOut.Magnetization.PrintQNs(); CHKERRQ(ierr);
1585  }
1586  }
1587  #endif
1588 
1589  ierr = PetscTime(&trotb); CHKERRQ(ierr);
1590  timings_data.tRotb = trotb-trdms;
1591  if(!mpi_rank && verbose) printf("* Rotation of Operators: %12.6f s\n", timings_data.tRotb);
1592 
1593  timings_data.Total = trotb - t0;
1594  ierr = PetscTime(&t0abs); CHKERRQ(ierr);
1595 
1596  if(!mpi_rank && verbose){
1597  const PetscReal pEnlr = 100*(timings_data.tEnlr)/timings_data.Total;
1598  const PetscReal pKron = 100*(timings_data.tKron)/timings_data.Total;
1599  const PetscReal pDiag = 100*(timings_data.tDiag)/timings_data.Total;
1600  const PetscReal pRdms = 100*(timings_data.tRdms)/timings_data.Total;
1601  const PetscReal pRotb = 100*(timings_data.tRotb)/timings_data.Total;
1602  printf("\n");
1603  printf(" Sys Block In:\n");
1604  printf(" NumStates: %lld\n", LLD(SysBlock.Magnetization.NumStates()));
1605  printf(" NumSites: %lld\n", LLD(SysBlock.NumSites()));
1606  printf(" Env Block In:\n");
1607  printf(" NumStates: %lld\n", LLD(EnvBlock.Magnetization.NumStates()));
1608  printf(" NumSites: %lld\n", LLD(EnvBlock.NumSites()));
1609  printf(" Sys Block Enl:\n");
1610  printf(" NumStates: %lld\n", LLD(SysBlockEnl.Magnetization.NumStates()));
1611  printf(" NumSites: %lld\n", LLD(SysBlockEnl.NumSites()));
1612  printf(" Env Block Enl:\n");
1613  printf(" NumStates: %lld\n", LLD(EnvBlockEnl.Magnetization.NumStates()));
1614  printf(" NumSites: %lld\n", LLD(EnvBlockEnl.NumSites()));
1615  printf(" Superblock:\n");
1616  printf(" NumStates: %lld\n", LLD(KronBlocks.NumStates()));
1617  printf(" NumSites: %lld\n", LLD(NumSitesTotal));
1618  printf(" QNSector: %-10.10g\n", qn_sector);
1619  printf(" Energy: %-10.10g\n", gse_r);
1620  printf(" Energy/site: %-10.10g\n", gse_r/PetscReal(NumSitesTotal));
1621  printf(" Sys Block Out\n"
1622  " NumStates: %lld\n"
1623  " TrunError: %g\n", LLD(BT_L->QN.NumStates()), BT_L->TruncErr);
1624  printf(" Env Block Out\n"
1625  " NumStates: %lld\n"
1626  " TrunError: %g\n", LLD(BT_R->QN.NumStates()), BT_R->TruncErr);
1627  printf("\n");
1628  printf(" Total Time: %12.6f s\n", timings_data.Total);
1629  printf(" Add One Site: %12.6f s \t%6.2f %%\n", timings_data.tEnlr, pEnlr);
1630  printf(" Build Superblock H: %12.6f s \t%6.2f %%\n", timings_data.tKron, pKron);
1631  printf(" Solve Ground State: %12.6f s \t%6.2f %%\n", timings_data.tDiag, pDiag);
1632  printf(" Eigendec. of RDMs: %12.6f s \t%6.2f %%\n", timings_data.tRdms, pRdms);
1633  printf(" Rotation of Operators: %12.6f s \t%6.2f %%\n", timings_data.tRotb, pRotb);
1634  printf("\n");
1635  }
1636 
1637  /* Store some results to class attributes */
1638  gse = gse_r;
1639  trunc_err.push_back(BT_L->TruncErr);
1640 
1641  /* Save data */
1642  ierr = SaveStepData(step_data); CHKERRQ(ierr);
1643  ierr = SaveTimingsData(timings_data); CHKERRQ(ierr);
1644 
1645  /* Delete context */
1646  delete BT_L;
1647  delete BT_R;
1648 
1649  /* Increment counters */
1650  ++GlobIdx;
1651  ++StepIdx;
1652  return(0);
1653  }
1654 
1656  PetscErrorCode GetTruncation(
1657  const KronBlocks_t& KronBlocks,
1658  const Vec& gsv_r,
1659  const PetscInt& MStates,
1660  BasisTransformation& BT_L,
1661  BasisTransformation& BT_R
1662  )
1663  {
1664  PetscErrorCode ierr;
1665 
1666  if(no_symm) SETERRQ(mpi_comm,PETSC_ERR_SUP,"Unsupported option: no_symm.");
1667  #if defined(PETSC_USE_COMPLEX)
1668  SETERRQ(mpi_comm,PETSC_ERR_SUP,"This function is only implemented for scalar-type=real.");
1669  /* Error due to re-use of *v buffer for *vT */
1670  #endif
1671 
1672  /* Send the whole vector to the root process */
1673  Vec gsv_r_loc;
1674  VecScatter ctx;
1675  ierr = VecScatterCreateToZero(gsv_r, &ctx, &gsv_r_loc); CHKERRQ(ierr);
1676  ierr = VecScatterBegin(ctx, gsv_r, gsv_r_loc, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1677  ierr = VecScatterEnd(ctx, gsv_r, gsv_r_loc, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(ierr);
1678 
1679  #if defined(PETSC_USE_DEBUG)
1680  PetscBool flg = PETSC_FALSE;
1681  ierr = PetscOptionsGetBool(NULL,NULL,"-print_trunc",&flg,NULL); CHKERRQ(ierr);
1682  if(false){
1683  for(PetscMPIInt irank = 0; irank < mpi_size; ++irank){
1684  if(irank==mpi_rank){std::cout << "[" << mpi_rank << "]<<" << std::endl;
1685 
1686  ierr = PetscPrintf(PETSC_COMM_SELF, "gsv_r_loc\n"); CHKERRQ(ierr);
1687  ierr = VecView(gsv_r_loc, PETSC_VIEWER_STDOUT_SELF); CHKERRQ(ierr);
1688 
1689  std::cout << ">>[" << mpi_rank << "]" << std::endl;}\
1690  ierr = MPI_Barrier(mpi_comm); CHKERRQ(ierr);}
1691  }
1692  #endif
1693 
1694  std::vector< Eigen_t > eigen_L, eigen_R; /* Container for eigenvalues of the RDMs */
1695  std::vector< EPS > eps_list_L, eps_list_R; /* Container for EPS objects */
1696  std::vector< Vec > rdmd_vecs_L, rdmd_vecs_R; /* Container for the corresponding vectors */
1697 
1698  /* Do eigendecomposition on root process */
1699  if(!mpi_rank)
1700  {
1701  /* Verify the vector length */
1702  PetscInt size;
1703  ierr = VecGetSize(gsv_r_loc, &size); CHKERRQ(ierr);
1704  if(KronBlocks.NumStates() != size) SETERRQ2(PETSC_COMM_SELF,1,"Incorrect vector length. "
1705  "Expected %d. Got %d.", KronBlocks.NumStates(), size);
1706 
1707  #if defined(PETSC_USE_DEBUG)
1708  if(flg) printf("\n\n");
1709  #endif
1710 
1711  PetscScalar *v;
1712  ierr = VecGetArray(gsv_r_loc, &v); CHKERRQ(ierr);
1713 
1714  /* Loop through the L-R pairs forming the target sector in KronBlocks */
1715  for(PetscInt idx = 0; idx < KronBlocks.size(); ++idx)
1716  {
1717  const PetscInt Istart = KronBlocks.Offsets(idx);
1718  const PetscInt Iend = KronBlocks.Offsets(idx+1);
1719  const PetscInt Idx_L = KronBlocks.LeftIdx(idx);
1720  const PetscInt Idx_R = KronBlocks.RightIdx(idx);
1721  const PetscInt N_L = KronBlocks.LeftBlockRef().Magnetization.Sizes(Idx_L);
1722  const PetscInt N_R = KronBlocks.RightBlockRef().Magnetization.Sizes(Idx_R);
1723 
1724  /* Verify the segment length */
1725  if(Iend - Istart != N_L * N_R) SETERRQ2(PETSC_COMM_SELF,1, "Incorrect segment length. "
1726  "Expected %d. Got %d.", N_L * N_R, Iend - Istart);
1727 
1728  /* Initialize and fill sequential dense matrices containing the diagonal blocks of the
1729  reduced density matrices */
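 /* The segment v[Istart..Iend) of the ground state belonging to this
    quantum-number sector is viewed as the N_R x N_L dense matrix PsiT (and
    its transpose Psi), so that rdmd_L = Psi*PsiT is the N_L x N_L diagonal
    block of the left RDM and rdmd_R = PsiT*Psi is the N_R x N_R block of
    the right RDM. */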
1730  Mat Psi, PsiT, rdmd_L, rdmd_R;
1731  ierr = MatCreateSeqDense(PETSC_COMM_SELF, N_R, N_L, &v[Istart], &PsiT); CHKERRQ(ierr);
1732  ierr = MatHermitianTranspose(PsiT, MAT_INITIAL_MATRIX, &Psi); CHKERRQ(ierr);
1733  ierr = MatMatMult(Psi, PsiT, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &rdmd_L); CHKERRQ(ierr);
1734  ierr = MatMatMult(PsiT, Psi, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &rdmd_R); CHKERRQ(ierr);
1735  /* TODO: Check for possible bleeding of memory due to ownership of *v */
1736  ierr = MatDestroy(&Psi); CHKERRQ(ierr);
1737  ierr = MatDestroy(&PsiT); CHKERRQ(ierr);
1738 
1739  /* Verify the sizes of the reduced density matrices */
1740  {
1741  PetscInt Nrows, Ncols;
1742  ierr = MatGetSize(rdmd_L, &Nrows, &Ncols); CHKERRQ(ierr);
1743  if(Nrows != N_L) SETERRQ2(PETSC_COMM_SELF,1,"Incorrect Nrows in L. Expected %d. Got %d.", N_L, Nrows);
1744  if(Ncols != N_L) SETERRQ2(PETSC_COMM_SELF,1,"Incorrect Ncols in L. Expected %d. Got %d.", N_L, Ncols);
1745  ierr = MatGetSize(rdmd_R, &Nrows, &Ncols); CHKERRQ(ierr);
1746  if(Nrows != N_R) SETERRQ2(PETSC_COMM_SELF,1,"Incorrect Nrows in R. Expected %d. Got %d.", N_R, Nrows);
1747  if(Ncols != N_R) SETERRQ2(PETSC_COMM_SELF,1,"Incorrect Ncols in R. Expected %d. Got %d.", N_R, Ncols);
1748  }
1749 
1750  /* Solve the full eigenspectrum of the reduced density matrices */
1751  EPS eps_L, eps_R;
1752  ierr = EigRDM_BlockDiag(rdmd_L, idx, Idx_L, eigen_L, eps_L); CHKERRQ(ierr);
1753  ierr = EigRDM_BlockDiag(rdmd_R, idx, Idx_R, eigen_R, eps_R); CHKERRQ(ierr);
1754 
1755  #if defined(PETSC_USE_DEBUG)
1756  if(flg){
1757  printf(" KB QN: %-6g Left :%3lld Right: %3lld\n", KronBlocks.QN(idx), LLD(Idx_L), LLD(Idx_R)) ;
1758  ierr = MatPeek(rdmd_L, "rdmd_L"); CHKERRQ(ierr);
1759  ierr = MatPeek(rdmd_R, "rdmd_R"); CHKERRQ(ierr);
1760  printf("\n");
1761  }
1762  #endif
1763 
1764  eps_list_L.push_back(eps_L);
1765  eps_list_R.push_back(eps_R);
1766  BT_L.rdmd_list[Idx_L] = rdmd_L;
1767  BT_R.rdmd_list[Idx_R] = rdmd_R;
1768 
1769  /* Prepare the vectors for getting the eigenvectors */
1770  Vec v_L, v_R;
1771  ierr = MatCreateVecs(rdmd_L, NULL, &v_L); CHKERRQ(ierr);
1772  rdmd_vecs_L.push_back(v_L);
1773  ierr = MatCreateVecs(rdmd_R, NULL, &v_R); CHKERRQ(ierr);
1774  rdmd_vecs_R.push_back(v_R);
1775  }
1776 
1777  #if defined(PETSC_USE_DEBUG)
1778  if(flg){
1779  printf("\nBefore sorting\n");
1780  for(const Eigen_t& eig: eigen_L) printf(" L: %-16.10g seq: %-5lld eps: %-5lld blk: %-5lld\n",
1781  eig.eigval, LLD(eig.seqIdx), LLD(eig.epsIdx), LLD(eig.blkIdx));
1782  printf("\n");
1783  for(const Eigen_t& eig: eigen_R) printf(" R: %-16.10g seq: %-5lld eps: %-5lld blk: %-5lld\n",
1784  eig.eigval, LLD(eig.seqIdx), LLD(eig.epsIdx), LLD(eig.blkIdx));
1785  printf("\n\n");
1786  }
1787  #endif
1788 
1789  /* Dump unsorted (grouped) entanglement spectra to file */
1790  ierr = SaveEntanglementSpectra(
1791  eigen_L, KronBlocks.LeftBlockRef().Magnetization.ListRef(),
1792  eigen_R, KronBlocks.RightBlockRef().Magnetization.ListRef()); CHKERRQ(ierr);
1793 
1794  /* Sort the eigenvalue lists in descending order */
1795  std::stable_sort(eigen_L.begin(), eigen_L.end(), greater_eigval);
1796  std::stable_sort(eigen_R.begin(), eigen_R.end(), greater_eigval);
1797 
1798  #if defined(PETSC_USE_DEBUG)
1799  if(flg){
1800  printf("\nAfter sorting\n");
1801  for(const Eigen_t& eig: eigen_L) printf(" L: %-16.10g seq: %-5lld eps: %-5lld blk: %-5lld\n",
1802  eig.eigval, LLD(eig.seqIdx), LLD(eig.epsIdx), LLD(eig.blkIdx));
1803  printf("\n");
1804  for(const Eigen_t& eig: eigen_R) printf(" R: %-16.10g seq: %-5lld eps: %-5lld blk: %-5lld\n",
1805  eig.eigval, LLD(eig.seqIdx), LLD(eig.epsIdx), LLD(eig.blkIdx));
1806  printf("\n\n");
1807  }
1808  #endif
1809  ierr = VecRestoreArray(gsv_r_loc, &v); CHKERRQ(ierr);
1810 
1811  }
1812  /* Broadcast the number of eigenstates from 0 to all processes */
1813  PetscInt NEigenStates_L = eigen_L.size(); /* Number of eigenstates in the left block reduced density matrix */
1814  PetscInt NEigenStates_R = eigen_R.size(); /* Number of eigenstates in the right block reduced density matrix */
1815  ierr = MPI_Bcast(&NEigenStates_L, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1816  ierr = MPI_Bcast(&NEigenStates_R, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1817 
1818  /* Decide how many states to retain */
1819  const PetscInt m_L = PetscMin(MStates, NEigenStates_L);
1820  const PetscInt m_R = PetscMin(MStates, NEigenStates_R);
1821 
1822  /* The number of states present in the enlarged blocks */
1823  const PetscInt NStates_L = KronBlocks.LeftBlockRef().Magnetization.NumStates();
1824  const PetscInt NStates_R = KronBlocks.RightBlockRef().Magnetization.NumStates();
1825 
 1826  /* The rotation matrices have the dimension m x NStates */
1827  ierr = MatCreate(mpi_comm, &BT_L.RotMatT); CHKERRQ(ierr);
1828  ierr = MatCreate(mpi_comm, &BT_R.RotMatT); CHKERRQ(ierr);
1829  Mat& RotMatT_L = BT_L.RotMatT;
1830  Mat& RotMatT_R = BT_R.RotMatT;
1831  ierr = MatSetSizes(RotMatT_L, PETSC_DECIDE, PETSC_DECIDE, m_L, NStates_L); CHKERRQ(ierr);
1832  ierr = MatSetSizes(RotMatT_R, PETSC_DECIDE, PETSC_DECIDE, m_R, NStates_R); CHKERRQ(ierr);
1833  ierr = MatSetFromOptions(RotMatT_L); CHKERRQ(ierr);
1834  ierr = MatSetFromOptions(RotMatT_R); CHKERRQ(ierr);
1835  ierr = MatSetUp(RotMatT_L); CHKERRQ(ierr);
1836  ierr = MatSetUp(RotMatT_R); CHKERRQ(ierr);
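        /* Note: the rotation matrices are stored in transposed form: each of the m rows of
           RotMatT holds one kept eigenvector expressed in the NStates-dimensional basis of the
           enlarged block (see FillRotation_BlockDiag below). */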
1837 
1838  #if defined(PETSC_USE_DEBUG)
1839  if(flg && !mpi_rank) printf(" m_L: %-lld m_R: %-lld\n\n", LLD(m_L), LLD(m_R));
1840  #endif
1841 
1842  std::vector< PetscReal > qn_list_L, qn_list_R;
1843  std::vector< PetscInt > qn_size_L, qn_size_R;
1844  PetscInt numBlocks_L, numBlocks_R;
1845  PetscReal& TruncErr_L = BT_L.TruncErr;
1846  PetscReal& TruncErr_R = BT_R.TruncErr;
1847  if(!mpi_rank)
1848  {
1849  /* Take only the first m states and sort in ascending order of blkIdx */
1850  eigen_L.resize(m_L);
1851  eigen_R.resize(m_R);
1852  std::stable_sort(eigen_L.begin(), eigen_L.end(), less_blkIdx);
1853  std::stable_sort(eigen_R.begin(), eigen_R.end(), less_blkIdx);
1854 
1855  #if defined(PETSC_USE_DEBUG)
1856  if(flg) {
1857  printf("\n\n");
1858  for(const Eigen_t& eig: eigen_L) printf(" L: %-16.10g seq: %-5lld eps: %-5lld blk: %-5lld\n",
1859  eig.eigval, LLD(eig.seqIdx), LLD(eig.epsIdx), LLD(eig.blkIdx));
1860  printf("\n");
1861  for(const Eigen_t& eig: eigen_R) printf(" R: %-16.10g seq: %-5lld eps: %-5lld blk: %-5lld\n",
1862  eig.eigval, LLD(eig.seqIdx), LLD(eig.epsIdx), LLD(eig.blkIdx));
1863  printf("\n\n");
1864  }
1865  #endif
1866 
1867  /* Calculate the elements of the rotation matrices and the QN object */
1868  ierr = FillRotation_BlockDiag(eigen_L, eps_list_L, rdmd_vecs_L, KronBlocks.LeftBlockRef(), RotMatT_L); CHKERRQ(ierr);
1869  ierr = FillRotation_BlockDiag(eigen_R, eps_list_R, rdmd_vecs_R, KronBlocks.RightBlockRef(), RotMatT_R); CHKERRQ(ierr);
1870 
1871  /* Calculate the truncation error */
1872  TruncErr_L = 1.0;
1873  for(const Eigen_t &eig: eigen_L) TruncErr_L -= (eig.eigval > 0) * eig.eigval;
1874  TruncErr_R = 1.0;
1875  for(const Eigen_t &eig: eigen_R) TruncErr_R -= (eig.eigval > 0) * eig.eigval;
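        /* Note: the eigenvalues of each reduced density matrix sum to one, so the truncation
           error is one minus the total weight of the kept (positive) eigenvalues. */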
1876 
 1877  /* Calculate the quantum number lists */
 1878  {
 1879  std::map< PetscInt, PetscInt > BlockIdxs;
1880  for(const Eigen_t &eig: eigen_L) BlockIdxs[ eig.blkIdx ] += 1;
1881  for(const auto& idx: BlockIdxs) qn_list_L.push_back( KronBlocks.LeftBlockRef().Magnetization.List(idx.first) );
1882  for(const auto& idx: BlockIdxs) qn_size_L.push_back( idx.second );
1883  numBlocks_L = qn_list_L.size();
1884  }
1885 
1886  {
 1887  std::map< PetscInt, PetscInt > BlockIdxs;
1888  for(const Eigen_t &eig: eigen_R) BlockIdxs[ eig.blkIdx ] += 1;
1889  for(const auto& idx: BlockIdxs) qn_list_R.push_back( KronBlocks.RightBlockRef().Magnetization.List(idx.first) );
1890  for(const auto& idx: BlockIdxs) qn_size_R.push_back( idx.second );
1891  numBlocks_R = qn_list_R.size();
1892  }
1893 
1894  #if defined(PETSC_USE_DEBUG)
1895  if(flg){
1896  for(PetscInt i = 0; i < numBlocks_L; ++i) printf(" %g %lld\n", qn_list_L[i], LLD(qn_size_L[i]));
1897  printf("\n");
1898  for(PetscInt i = 0; i < numBlocks_R; ++i) printf(" %g %lld\n", qn_list_R[i], LLD(qn_size_R[i]));
1899  }
1900  #endif
1901  }
1902 
1903  /* Broadcast the truncation errors to all processes */
 1904  ierr = MPI_Bcast(&TruncErr_L, 1, MPIU_REAL, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
 1905  ierr = MPI_Bcast(&TruncErr_R, 1, MPIU_REAL, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1906 
1907  /* Broadcast the number of quantum blocks */
1908  ierr = MPI_Bcast(&numBlocks_L, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1909  ierr = MPI_Bcast(&numBlocks_R, 1, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1910 
1911  /* Broadcast the information on quantum number blocks */
1912  if(mpi_rank) qn_list_L.resize(numBlocks_L);
1913  if(mpi_rank) qn_size_L.resize(numBlocks_L);
1914  ierr = MPI_Bcast(qn_list_L.data(), numBlocks_L, MPIU_REAL, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1915  ierr = MPI_Bcast(qn_size_L.data(), numBlocks_L, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1916  if(mpi_rank) qn_list_R.resize(numBlocks_R);
1917  if(mpi_rank) qn_size_R.resize(numBlocks_R);
1918  ierr = MPI_Bcast(qn_list_R.data(), numBlocks_R, MPIU_REAL, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1919  ierr = MPI_Bcast(qn_size_R.data(), numBlocks_R, MPIU_INT, 0, PETSC_COMM_WORLD); CHKERRQ(ierr);
1920 
1921  /* Assemble the rotation matrix */
1922  ierr = MatAssemblyBegin(RotMatT_L, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1923  ierr = MatAssemblyBegin(RotMatT_R, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1924  ierr = MatAssemblyEnd(RotMatT_L, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1925  ierr = MatAssemblyEnd(RotMatT_R, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1926 
1927  #if defined(PETSC_USE_DEBUG)
1928  if(flg){
1929  ierr = MatPeek(RotMatT_L, "RotMatT_L"); CHKERRQ(ierr);
1930  ierr = MatPeek(RotMatT_R, "RotMatT_R"); CHKERRQ(ierr);
1931  }
1932  #endif
1933 
1934  ierr = BT_L.QN.Initialize(mpi_comm, qn_list_L, qn_size_L); CHKERRQ(ierr);
1935  ierr = BT_R.QN.Initialize(mpi_comm, qn_list_R, qn_size_R); CHKERRQ(ierr);
1936 
1937  for(EPS& eps: eps_list_L){
1938  ierr = EPSDestroy(&eps); CHKERRQ(ierr);
1939  }
1940  for(EPS& eps: eps_list_R){
1941  ierr = EPSDestroy(&eps); CHKERRQ(ierr);
1942  }
1943  // for(Mat& mat: rdmd_list_L){
1944  // ierr = MatDestroy(&mat); CHKERRQ(ierr);
1945  // }
1946  // for(Mat& mat: rdmd_list_R){
1947  // ierr = MatDestroy(&mat); CHKERRQ(ierr);
1948  // }
1949  for(Vec& vec: rdmd_vecs_L){
1950  ierr = VecDestroy(&vec); CHKERRQ(ierr);
1951  }
1952  for(Vec& vec: rdmd_vecs_R){
1953  ierr = VecDestroy(&vec); CHKERRQ(ierr);
1954  }
1955  ierr = VecScatterDestroy(&ctx); CHKERRQ(ierr);
1956  ierr = VecDestroy(&gsv_r_loc); CHKERRQ(ierr);
1957 
1958  return(0);
1959  }
1960 
1962  PetscErrorCode EigRDM_BlockDiag(
1963  const Mat& matin,
1964  const PetscInt& seqIdx,
1965  const PetscInt& blkIdx,
1966  std::vector< Eigen_t >& eigList,
1967  EPS& eps
1968  )
1969  {
1970  PetscErrorCode ierr;
1971  /* Require that input matrix be square */
1972  PetscInt Nrows, Ncols;
1973  ierr = MatGetSize(matin, &Nrows, &Ncols); CHKERRQ(ierr);
 1974  if(Nrows!=Ncols) SETERRQ2(PETSC_COMM_SELF,1,"Input must be a square matrix. Got size %D x %D.", Nrows, Ncols);
1975 
1976  ierr = EPSCreate(PETSC_COMM_SELF, &eps); CHKERRQ(ierr);
1977  ierr = EPSSetOperators(eps, matin, nullptr); CHKERRQ(ierr);
1978  ierr = EPSSetProblemType(eps, EPS_HEP); CHKERRQ(ierr);
1979  ierr = EPSSetWhichEigenpairs(eps, EPS_LARGEST_REAL); CHKERRQ(ierr);
 1980  ierr = EPSSetType(eps, EPSLAPACK); CHKERRQ(ierr);
1981  ierr = EPSSetTolerances(eps, 1.0e-16, PETSC_DEFAULT); CHKERRQ(ierr);
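        /* The LAPACK solver performs a dense direct diagonalization and returns the full
           spectrum, so all Nrows eigenpairs are expected to converge (checked below). */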
1982  ierr = EPSSolve(eps); CHKERRQ(ierr);
1983 
1984  /* Verify convergence */
1985  PetscInt nconv;
1986  ierr = EPSGetConverged(eps, &nconv); CHKERRQ(ierr);
 1987  if(nconv != Nrows) SETERRQ2(PETSC_COMM_SELF,1,"Incorrect number of converged eigenpairs. "
 1988  "Expected %D. Got %D.", Nrows, nconv);
1989 
1990  /* Get the converged eigenvalue */
1991  for(PetscInt epsIdx = 0; epsIdx < nconv; ++epsIdx)
1992  {
1993  PetscScalar eigr=0.0, eigi=0.0;
1994  ierr = EPSGetEigenvalue(eps, epsIdx, &eigr, &eigi); CHKERRQ(ierr);
1995 
1996  /* Verify that the eigenvalue is real */
1997  if(eigi != 0.0) SETERRQ1(PETSC_COMM_SELF,1,"Imaginary part of eigenvalue must be zero. "
1998  "Got %g\n", eigi);
1999 
2000  eigList.push_back({ eigr, seqIdx, epsIdx, blkIdx });
2001  }
2002  return(0);
2003  }
2004 
2006  PetscErrorCode FillRotation_BlockDiag(
2007  const std::vector< Eigen_t >& eigen_list,
2008  const std::vector< EPS >& eps_list,
2009  const std::vector< Vec >& rdmd_vecs,
2010  const Block& BlockRef,
2011  Mat& RotMatT
2012  )
2013  {
2014  PetscErrorCode ierr;
2015 
2016  #if defined(PETSC_USE_COMPLEX)
2017  SETERRQ(mpi_comm,PETSC_ERR_SUP,"This function is only implemented for scalar-type=real.");
2018  #endif
2019 
2020  /* Allocate space for column indices using the maximum size */
2021  std::vector< PetscInt> qnsize = BlockRef.Magnetization.Sizes();
2022  std::vector< PetscInt>::iterator it = std::max_element(qnsize.begin(), qnsize.end());
2023  PetscInt max_qnsize = PetscInt(*it);
2024  PetscInt *idx;
2025  ierr = PetscCalloc1(max_qnsize+1, &idx); CHKERRQ(ierr);
2026 
2027  PetscScalar eigr, eigi, *vals;
2028  PetscInt prev_blkIdx = -1;
2029  PetscInt startIdx = 0;
2030  PetscInt numStates = 0;
2031  PetscInt rowCtr = 0;
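        /* Since the reduced density matrix is block diagonal, each eigenvector has support only
           within its own quantum-number block. For every kept eigenvector (one row of RotMatT)
           only the numStates columns starting at that block's offset need to be filled. */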
2032  for(const Eigen_t &eig: eigen_list)
2033  {
2034  /* Retrieve the eigenpair from the Eigen_t object */
2035  const PetscInt seqIdx = eig.seqIdx;
2036  ierr = EPSGetEigenpair(eps_list[seqIdx], eig.epsIdx, &eigr, &eigi, rdmd_vecs[seqIdx], nullptr); CHKERRQ(ierr);
2037  ierr = VecGetArray(rdmd_vecs[seqIdx], &vals); CHKERRQ(ierr);
2038  /* Verify that eigr is the same eigenvalue as eig.eigval */
2039  if(eigr != eig.eigval) SETERRQ2(PETSC_COMM_SELF,1,"Incorrect eigenvalue. Expected %g. Got %g.", eig.eigval, eigr);
2040 
2041  /* Determine the block indices, updating only when the block index changes */
2042  if(prev_blkIdx != eig.blkIdx){
2043  startIdx = BlockRef.Magnetization.Offsets(eig.blkIdx); assert(startIdx!=-1);
2044  numStates = BlockRef.Magnetization.Sizes(eig.blkIdx); assert(numStates!=-1);
2045  for(PetscInt i = 0; i < numStates+1; ++i) idx[i] = startIdx + i;
2046  prev_blkIdx = eig.blkIdx;
2047  }
2048 
 2049  /* Set this row of the rotation matrix to the entries of the eigenvector (on the root process) */
2050  ierr = MatSetValues(RotMatT, 1, &rowCtr, numStates, idx, vals, INSERT_VALUES); CHKERRQ(ierr);
2051 
2052  ierr = VecRestoreArray(rdmd_vecs[seqIdx], &vals); CHKERRQ(ierr);
2053  ++rowCtr;
2054  }
2055  ierr = PetscFree(idx); CHKERRQ(ierr);
2056  return(0);
2057  }
2058 
 2062  PetscErrorCode CalculateCorrelations_BlockDiag(
 2063  KronBlocks_t& KronBlocks,
2064  const Vec& gsv_r,
2065  const BasisTransformation& BT,
2066  const PetscBool flg=PETSC_TRUE
2067  )
2068  {
2069  if(!mpi_rank)
2070  {
2071  if(!corr_headers_printed)
2072  {
2073  fprintf(fp_corr, "{\n");
2074  fprintf(fp_corr, " \"info\" :\n");
2075  fprintf(fp_corr, " [\n");
2076 
2077  for(size_t icorr=0; icorr<measurements.size(); ++icorr){
2078  if(icorr) fprintf(fp_corr, ",\n");
2079  Correlator& c = measurements[icorr];
2080  fprintf(fp_corr, " {\n");
2081  fprintf(fp_corr, " \"corrIdx\" : %lld,\n", LLD(c.idx));
2082  fprintf(fp_corr, " \"name\" : \"%s\",\n", c.name.c_str());
2083  fprintf(fp_corr, " \"desc1\" : \"%s\",\n", c.desc1.c_str());
2084  fprintf(fp_corr, " \"desc2\" : \"%s\",\n", c.desc2.c_str());
2085  fprintf(fp_corr, " \"desc3\" : \"%s\"\n", c.desc3.c_str());
2086  fprintf(fp_corr, " }");
2087  }
2088 
2089  fprintf(fp_corr, "\n");
2090  fprintf(fp_corr, " ],\n");
2091  fprintf(fp_corr, " \"values\" :\n");
2092  fprintf(fp_corr, " [\n");
2093  fflush(fp_corr);
2094 
2095  corr_headers_printed = PETSC_TRUE;
2096  }
2097  }
2098 
2099  if(!flg) return(0);
2100  PetscErrorCode ierr;
2101  std::vector< PetscScalar > CorrValues(measurements.size());
2102 
2103  PetscBool debug = PETSC_FALSE; /* FIXME: Remove later */
2104  if(debug && !mpi_rank) std::cout << "\n\n====" << __FUNCTION__ << "====" << std::endl;
2105 
2106  #if 1
2107  /* Explicitly build the operators in the Kronecker product space */
2108  std::vector< Correlator > CorrSysEnv = measurements;
2109  #else
2110  /* Separately handle the case of a correlator of operators living only on the system block */
2111  /* Classify the correlators accordingly */
2112  std::vector< Correlator > CorrSys; /* For operators residing in the system block */
2113  std::vector< Correlator > CorrSysEnv; /* For operators residing in the system and environment blocks */
2114 
2115  for(const Correlator& m: measurements){
2116  if(m.SysOps.size()!=0 && m.EnvOps.size()==0){
2117  CorrSys.push_back(m);
2118  }
2119  else if (m.SysOps.size()==0 && m.EnvOps.size()!=0){
2120  SETERRQ(mpi_comm,1,"Reflection symmetry is imposed. Empty SysOps and non-empty EnvOps not permitted.");
2121  }
2122  else if (m.SysOps.size()!=0 && m.EnvOps.size()!=0){
2123  CorrSysEnv.push_back(m);
2124  }
2125  else {
2126  /* TODO: Skip correlators with no operators and simply return the */
2127  }
2128  }
2129 
2130  /*---- For correlators in the system block ----*/
 2131  /* Send the elements of the reduced density matrix (diagonals) to their corresponding processors.
2132  Since each processor has a complete copy of KronBlocks, each one is aware of how much data
2133  it will receive. */
2134  if(CorrSys.size() > 0)
2135  {
2136 
2137  std::vector< PetscInt > Sizes = KronBlocks.LeftBlockRef().Magnetization.Sizes();
2138  std::vector< PetscInt > Offsets = KronBlocks.LeftBlockRef().Magnetization.Offsets();
2139  PetscInt NumSectors = KronBlocks.LeftBlockRef().Magnetization.NumSectors();
2140  PetscInt NumStates = KronBlocks.LeftBlockRef().Magnetization.NumStates();
2141 
2142  /* Verify the sizes of the reduced density matrix diagonals */
2143  if(!mpi_rank){
2144  for(PetscInt i=0; i<NumSectors; ++i){
2145  PetscInt M,N;
2146  if(BT.rdmd_list.find(i)==BT.rdmd_list.end()) continue;
2147  ierr = MatGetSize(BT.rdmd_list.at(i), &M, &N); CHKERRQ(ierr);
2148  if(M!=N)
2149  SETERRQ2(PETSC_COMM_SELF,1,"Matrix must be square. Got %D x %D instead.",M,N);
2150  if(M!=Sizes.at(i))
 2151  SETERRQ2(mpi_comm,1,"Incorrect size. Expected %D. Got %D.", Sizes.at(i), M);
2152  }
2153  }
2154 
2155  /* Load only needed operator matrices manually */
2156  ierr = KronBlocks.LeftBlockRefMod().EnsureSaved(); CHKERRQ(ierr);
2157 
2158  /* TODO: Implement in a subcommunicator */
2159  MPI_Comm& sub_comm = mpi_comm;
2160  {
2161  PetscMPIInt sub_rank, sub_size;
2162  ierr = MPI_Comm_rank(sub_comm, &sub_rank); CHKERRQ(ierr);
2163  ierr = MPI_Comm_size(sub_comm, &sub_size); CHKERRQ(ierr);
2164 
2165  /* Prepare an MPI dense matrix for the reduced density matrix */
2166  Mat rho;
2167  ierr = MatCreateDense(sub_comm,PETSC_DECIDE,PETSC_DECIDE,NumStates,NumStates,NULL,&rho); CHKERRQ(ierr);
2168  ierr = MatSetOption(rho,MAT_ROW_ORIENTED,PETSC_TRUE); CHKERRQ(ierr);
2169  if(!mpi_rank)
2170  {
2171  PetscScalar *v;
2172  PetscInt *idx, max_size = 0;
2173  for(const PetscInt& s: Sizes) max_size = (max_size < s) ? s : max_size;
2174  ierr = PetscCalloc1(max_size,&idx); CHKERRQ(ierr);
2175  for(PetscInt iblk=0; iblk<NumSectors; ++iblk)
2176  {
 2177  /* Manually handle the possibility that a block was not included in the sectors used,
 2178  although this is unlikely when symmetry is imposed and the target magnetization is zero */
2179  if(BT.rdmd_list.find(iblk) == BT.rdmd_list.end()) continue;
2180  PetscInt size = Sizes[iblk];
2181  PetscInt shift = Offsets[iblk];
2182  for(PetscInt is=0; is<size; ++is)
2183  {
2184  idx[is] = is + shift;
2185  }
2186  ierr = MatDenseGetArray(BT.rdmd_list.at(iblk), &v); CHKERRQ(ierr);
2187  for(PetscInt is=0; is<size; ++is)
2188  {
2189  PetscInt row = is + shift;
2190  ierr = MatSetValues(rho,1,&row,size,idx,v+is*size,INSERT_VALUES); CHKERRQ(ierr);
2191  }
2192  ierr = MatDenseRestoreArray(BT.rdmd_list.at(iblk), &v); CHKERRQ(ierr);
2193  }
2194  ierr = PetscFree(idx); CHKERRQ(ierr);
2195  }
2196  ierr = MatAssemblyBegin(rho, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
2197  ierr = MatAssemblyEnd(rho, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
2198 
2199  std::map< std::string, Mat > OpMats;
2200  std::vector< Mat > AllOpProds; /* Store only the created operator products to avoid duplication */
2201  std::vector< Mat > OpProds; /* Points to a matrix in AllOpProds or OpMats */
2202 
2203  ierr = CalculateOperatorProducts(sub_comm,CorrSys,BlockSys,
2204  KronBlocks.LeftBlockRefMod(),OpMats,AllOpProds,OpProds); CHKERRQ(ierr);
2205 
2206  /* Allocate space to store the local trace of each correlator */
2207  PetscScalar *tr_rho;
2208  ierr = PetscCalloc1(CorrSys.size(),&tr_rho); CHKERRQ(ierr);
2209 
2210  /* Obtain the trace of the matrix products with the reduced density matrix */
2211  PetscInt ncols_rho, ncols_corr, rstart, rend;
2212  const PetscScalar *vals_rho, *vals_corr;
2213  const PetscInt *cols_corr;
2214  ierr = MatGetOwnershipRange(rho,&rstart,&rend); CHKERRQ(ierr);
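        /* Accumulate sum_ij O_ij * rho_ij over the locally owned rows. Since rho is symmetric
           here (it is assembled from real Psi blocks; cf. the real-scalar restriction in
           FillRotation_BlockDiag), this equals the expectation value Tr(rho * O) once the
           contributions are summed over all processes below. */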
2215  for(PetscInt irow=rstart; irow<rend; ++irow)
2216  {
2217  ierr = MatGetRow(rho,irow,&ncols_rho,NULL,&vals_rho); CHKERRQ(ierr);
2218  for(size_t icorr=0; icorr<CorrSys.size(); ++icorr)
2219  {
2220  ierr = MatGetRow(OpProds.at(icorr),irow,&ncols_corr,&cols_corr,&vals_corr); CHKERRQ(ierr);
2221  PetscScalar y = 0;
2222  for(PetscInt icol=0; icol<ncols_corr; ++icol)
2223  {
2224  y += vals_corr[icol]*vals_rho[cols_corr[icol]];
2225  }
2226  tr_rho[icorr] += y;
2227  ierr = MatRestoreRow(OpProds.at(icorr),irow,&ncols_corr,&cols_corr,&vals_corr); CHKERRQ(ierr);
2228  }
2229  ierr = MatRestoreRow(rho,irow,&ncols_rho,NULL,&vals_rho); CHKERRQ(ierr);
2230  }
2231 
2232  /* Do an MPI_Allreduce to sum all values */
2233  ierr = MPI_Allreduce(MPI_IN_PLACE, tr_rho, PetscMPIInt(CorrSys.size()), MPIU_SCALAR, MPI_SUM, sub_comm); CHKERRQ(ierr);
2234 
2235  for(size_t icorr=0; icorr<CorrSys.size(); ++icorr)
2236  {
2237  CorrValues[CorrSys[icorr].idx] = tr_rho[icorr];
2238  }
2239 
2240  ierr = PetscFree(tr_rho); CHKERRQ(ierr);
2241  for(Mat& op_prod: AllOpProds){
2242  ierr = MatDestroy(&op_prod); CHKERRQ(ierr);
2243  }
2244  for(auto& op_mat: OpMats){
2245  ierr = MatDestroy(&(op_mat.second)); CHKERRQ(ierr);
2246  }
2247 
2248  ierr = MatDestroy(&rho); CHKERRQ(ierr);
2249  }
2250  }
2251  #endif
2252  /* TODO: Also calculate CorrSys Quantities using the CorrSysEnv routine */
2253 
2254  /*---- For correlators in the system and environment block ----*/
2255  if(CorrSysEnv.size() > 0)
2256  {
2257  if(debug && !mpi_rank) std::cout << "\nCorrSysEnv" << std::endl;
2258  if(debug && !mpi_rank) for(const Correlator& m: CorrSysEnv) m.PrintInfo();
2259 
2260  /* Prepare products of the system and environment block operators */
2261  std::map< std::string, Mat > OpMats;
2262  std::vector< Mat > AllOpProds; /* Store only the created operator products to avoid duplication */
2263  std::vector< Mat > OpProdsSys, OpProdsEnv; /* Points to a matrix in AllOpProds or OpMats */
2264 
2265  ierr = KronBlocks.LeftBlockRefMod().EnsureSaved(); CHKERRQ(ierr);
2266  ierr = KronBlocks.RightBlockRefMod().EnsureSaved(); CHKERRQ(ierr);
2267 
2268  ierr = CalculateOperatorProducts(MPI_COMM_NULL, CorrSysEnv, BlockSys,
2269  KronBlocks.LeftBlockRefMod(), OpMats, AllOpProds, OpProdsSys); CHKERRQ(ierr);
2270 
2271  ierr = CalculateOperatorProducts(MPI_COMM_NULL, CorrSysEnv, BlockEnv,
2272  KronBlocks.RightBlockRefMod(), OpMats, AllOpProds, OpProdsEnv); CHKERRQ(ierr);
2273 
2274  for(size_t icorr=0; icorr < CorrSysEnv.size(); ++icorr)
2275  {
2276  /* FIXME: Assume Sz-type inputs.
2277  TODO: implement a guesser based on the constituent operator types in the correlator */
2278 
2279  /* Prepare the kron shell matrix */
2280  Mat KronOp = NULL;
2281  Vec Op_Vec;
2282  PetscScalar Vec_Op_Vec;
2283  int OpTypeSys = OpSz;
2284  int OpTypeEnv = OpSz;
2285  for(const Op& op : CorrSysEnv[icorr].SysOps) OpTypeSys += int(op.OpType);
2286  for(const Op& op : CorrSysEnv[icorr].EnvOps) OpTypeEnv += int(op.OpType);
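        /* The Op_t values also encode the shift that each operator induces on the magnetization
           sector, so the accumulated values above should give the net sector shift of the
           system- and environment-side operator products handed to KronConstruct(). */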
2287  ierr = KronBlocks.KronConstruct(OpProdsSys.at(icorr), Op_t(OpTypeSys),
2288  OpProdsEnv.at(icorr), Op_t(OpTypeEnv), KronOp); CHKERRQ(ierr);
2289  ierr = MatCreateVecs(KronOp, NULL, &Op_Vec); CHKERRQ(ierr);
2290  ierr = MatMult(KronOp, gsv_r, Op_Vec); CHKERRQ(ierr);
2291  ierr = VecDot(Op_Vec, gsv_r, &Vec_Op_Vec); CHKERRQ(ierr);
2292  ierr = MatDestroy_KronSumShell(&KronOp); CHKERRQ(ierr);
2293  ierr = MatDestroy(&KronOp); CHKERRQ(ierr);
2294  ierr = VecDestroy(&Op_Vec); CHKERRQ(ierr);
2295 
2296  CorrValues[CorrSysEnv[icorr].idx] = Vec_Op_Vec;
2297  }
2298 
2299  for(Mat& op_prod: AllOpProds){
2300  ierr = MatDestroy(&op_prod); CHKERRQ(ierr);
2301  }
2302  for(auto& op_mat: OpMats){
2303  ierr = MatDestroy(&(op_mat.second)); CHKERRQ(ierr);
2304  }
2305  }
2306 
2307  if(debug && !mpi_rank){
2308  std::cout << "\nValues" << std::endl;
2309  for(size_t icorr=0; icorr<measurements.size(); ++icorr){
2310  measurements[icorr].PrintInfo();
2311  std::cout << " = " << CorrValues[icorr] << std::endl << std::endl;
2312  }
2313  }
2314 
2315  /* Print results to file */
2316  if(!mpi_rank)
2317  {
2318  if(!corr_printed_first)
2319  {
2320  corr_printed_first = PETSC_TRUE;
2321  }
2322  else
2323  {
2324  fprintf(fp_corr, ",\n");
2325  }
2326  fprintf(fp_corr, " [");
2327  for(size_t icorr=0; icorr<measurements.size(); ++icorr){
2328  if(icorr) fprintf(fp_corr, ",");
2329  fprintf(fp_corr, " %g", CorrValues[icorr]);
2330  }
2331  fprintf(fp_corr, " ]");
2332  fflush(fp_corr);
2333  }
2334 
2335  if(debug && !mpi_rank) std::cout << "\n\n" << std::endl;
2336  return(0);
2337  }
2338 
 2340  PetscErrorCode CalculateOperatorProducts(
 2341  const MPI_Comm& comm_in,
2342  const std::vector< Correlator >& Corr,
2343  const Block_t& BlockType,
2344  Block& BlockRef,
2346  std::map< std::string, Mat >& OpMats,
2347  std::vector< Mat >& AllOpProds,
2348  std::vector< Mat >& OpProds
2349  )
2350  {
2351  PetscErrorCode ierr;
2352  MPI_Comm comm = (comm_in==MPI_COMM_NULL) ? mpi_comm : comm_in;
2353 
2354  for(size_t icorr=0; icorr<Corr.size(); ++icorr)
2355  {
2356  const Correlator& c = Corr.at(icorr);
2357  const std::vector< Op >& OpsList = (BlockType == BlockSys)?c.SysOps:c.EnvOps;
2358  for(const Op& o : OpsList)
2359  {
2360  std::string key = OpIdxToStr(o.OpType,o.idx);
2361  if(OpMats.find(key)==OpMats.end())
2362  {
2363  OpMats[key] = NULL;
2364  ierr = BlockRef.RetrieveOperator(
2365  OpToStr(o.OpType), o.idx, OpMats[key], comm); CHKERRQ(ierr);
2366  }
2367  }
2368  if(OpsList.empty())
2369  {
2370  std::string key = "eye";
2371  if(OpMats.find(key)==OpMats.end())
2372  {
2373  Mat eye = NULL;
2374  PetscInt nstates = BlockRef.NumStates();
2375  ierr = MatEyeCreate(comm, eye, nstates); CHKERRQ(ierr);
2376  OpMats[key] = eye;
2377  }
2378  }
2379  }
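        /* OpMats caches each retrieved operator matrix under its OpIdxToStr key, so an operator
           shared by several correlators is loaded from storage only once. */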
2380 
2381  /* Prepare the operator products for each correlator */
2382  OpProds.resize(Corr.size());
2383  for(size_t icorr=0; icorr<Corr.size(); ++icorr)
2384  {
2385  const Correlator& c = Corr.at(icorr);
2386  const std::vector< Op >& OpsList = (BlockType == BlockSys)?c.SysOps:c.EnvOps;
2387  if(OpsList.size()==1)
2388  {
2389  /* Point to a matrix in OpMats */
2390  OpProds.at(icorr) = GetOpMats(OpMats,OpsList,0);
2391  }
2392  else if(OpsList.size()>1)
2393  {
2394  /* Generate the operator products */
2395  Mat Prod0=NULL, Prod1=NULL;
2396  /* Perform the first multiplication */
2397  ierr = MatMatMult(GetOpMats(OpMats,OpsList,0),GetOpMats(OpMats,OpsList,1),
2398  MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Prod1); CHKERRQ(ierr);
2399 
 2400  /* Iterate: swap the running product and multiply by the next operator */
2401  PetscInt nmults = OpsList.size()-1;
2402  for(PetscInt imult=1; imult<nmults; ++imult)
2403  {
2404  ierr = MatDestroy(&Prod0); CHKERRQ(ierr);
2405  Prod0 = Prod1;
2406  Prod1 = NULL;
2407  ierr = MatMatMult(Prod0,GetOpMats(OpMats,OpsList,imult+1),MAT_INITIAL_MATRIX,PETSC_DEFAULT,&Prod1); CHKERRQ(ierr);
2408  }
2409  ierr = MatDestroy(&Prod0); CHKERRQ(ierr);
2410  AllOpProds.push_back(Prod1);
2411  OpProds.at(icorr) = Prod1;
2412  }
2413  else if(OpsList.empty())
2414  {
2415  /* Populate with an identity instead of throwing an error */
2416  OpProds.at(icorr) = OpMats["eye"];
2417  }
2418  else
2419  {
2420  SETERRQ1(mpi_comm,1,"%s should be non-empty.",(BlockType == BlockSys)?"SysOps":"EnvOps");
2421  }
2422  }
2423 
2424  return(0);
2425  }
2426 
2428  PetscErrorCode SysBlocksActive(const std::set< PetscInt >& SysIdx)
2429  {
2430  PetscErrorCode ierr;
2431  PetscInt sys_idx = 0;
2432  std::set< PetscInt >::iterator act_it;
2433  for(act_it = SysIdx.begin(); act_it != SysIdx.end(); ++act_it){
2434  for(PetscInt idx = sys_idx; idx < *act_it; ++idx){
2435  ierr = sys_blocks[idx].EnsureSaved(); CHKERRQ(ierr);
2436  }
2437  ierr = sys_blocks[*act_it].EnsureRetrieved(); CHKERRQ(ierr);
2438  sys_idx = *act_it+1;
2439  }
2440  for(PetscInt idx = sys_idx; idx < sys_ninit; ++idx){
2441  ierr = sys_blocks[idx].EnsureSaved(); CHKERRQ(ierr);
2442  }
2443  return(0);
2444  }
2445 
2456  std::string BlockDir(
2457  const std::string& BlockType,
2458  const PetscInt& iblock
2459  )
2460  {
2461  std::ostringstream oss;
2462  oss << BlockType << "_" << std::setfill('0') << std::setw(9) << iblock << "/";
2463  return oss.str();
2464  }
2474  std::string SweepDir(
2475  const PetscInt& isweep
2476  )
2477  {
2478  std::ostringstream oss;
2479  oss << "Sweep_" << std::setfill('0') << std::setw(9) << isweep << "/";
2480  return oss.str();
2481  }
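        /* e.g. BlockDir("Sys", 7) gives "Sys_000000007/" and SweepDir(3) gives "Sweep_000000003/". */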
2482 
2483  /* Save headers for tabular step */
2484  PetscErrorCode SaveStepHeaders()
2485  {
2486  if(mpi_rank || !data_tabular) return(0);
2487  fprintf(fp_step,"{\n");
2488  fprintf(fp_step," \"headers\" : [");
2489  fprintf(fp_step, "\"GlobIdx\", "); /* 01 */
2490  fprintf(fp_step, "\"LoopType\", "); /* 02 */
2491  fprintf(fp_step, "\"LoopIdx\", "); /* 03 */
2492  fprintf(fp_step, "\"StepIdx\", "); /* 04 */
2493  fprintf(fp_step, "\"NSites_Sys\", "); /* 05 */
2494  fprintf(fp_step, "\"NSites_Env\", "); /* 06 */
2495  fprintf(fp_step, "\"NSites_SysEnl\", "); /* 07 */
2496  fprintf(fp_step, "\"NSites_EnvEnl\", "); /* 08 */
2497  fprintf(fp_step, "\"NStates_Sys\", "); /* 09 */
2498  fprintf(fp_step, "\"NStates_Env\", "); /* 10 */
2499  fprintf(fp_step, "\"NStates_SysEnl\", "); /* 11 */
2500  fprintf(fp_step, "\"NStates_EnvEnl\", "); /* 12 */
2501  fprintf(fp_step, "\"NStates_SysRot\", "); /* 13 */
2502  fprintf(fp_step, "\"NStates_EnvRot\", "); /* 14 */
2503  fprintf(fp_step, "\"NumStates_H\", "); /* 15 */
2504  fprintf(fp_step, "\"TruncErr_Sys\", "); /* 16 */
2505  fprintf(fp_step, "\"TruncErr_Env\", "); /* 17 */
2506  fprintf(fp_step, "\"GSEnergy\""); /* 18 */
2507  fprintf(fp_step," ],\n");
2508  fprintf(fp_step," \"table\" : ");
2509  fflush(fp_step);
2510  return(0);
2511  }
2512 
2514  PetscErrorCode SaveStepData(
2515  const StepData& data
2516  )
2517  {
2518  if(mpi_rank) return(0);
2519  fprintf(fp_step,"%s", GlobIdx ? ",\n" : "");
2520  if(data_tabular){
2521  fprintf(fp_step," [ ");
2522  fprintf(fp_step,"%lld, ", LLD(GlobIdx)); /* 01 */
2523  fprintf(fp_step,"%s, ", LoopType ? "\"Sweep\"" : "\"Warmup\""); /* 02 */
2524  fprintf(fp_step,"%lld, ", LLD(LoopIdx)); /* 03 */
2525  fprintf(fp_step,"%lld, ", LLD(StepIdx)); /* 04 */
2526  fprintf(fp_step,"%lld, ", LLD(data.NumSites_Sys)); /* 05 */
2527  fprintf(fp_step,"%lld, ", LLD(data.NumSites_Env)); /* 06 */
2528  fprintf(fp_step,"%lld, ", LLD(data.NumSites_SysEnl)); /* 07 */
2529  fprintf(fp_step,"%lld, ", LLD(data.NumSites_EnvEnl)); /* 08 */
2530  fprintf(fp_step,"%lld, ", LLD(data.NumStates_Sys)); /* 09 */
2531  fprintf(fp_step,"%lld, ", LLD(data.NumStates_Env)); /* 10 */
2532  fprintf(fp_step,"%lld, ", LLD(data.NumStates_SysEnl)); /* 11 */
2533  fprintf(fp_step,"%lld, ", LLD(data.NumStates_EnvEnl)); /* 12 */
2534  fprintf(fp_step,"%lld, ", LLD(data.NumStates_SysRot)); /* 13 */
2535  fprintf(fp_step,"%lld, ", LLD(data.NumStates_EnvRot)); /* 14 */
2536  fprintf(fp_step,"%lld, ", LLD(data.NumStates_H)); /* 15 */
2537  fprintf(fp_step,"%.12g, ", data.TruncErr_Sys); /* 16 */
2538  fprintf(fp_step,"%.12g, ", data.TruncErr_Env); /* 17 */
2539  fprintf(fp_step,"%.12g", data.GSEnergy); /* 18 */
2540  fprintf(fp_step,"]");
2541  fflush(fp_step);
2542  return(0);
2543  }
2544  fprintf(fp_step," {\n");
2545  fprintf(fp_step," \"GlobIdx\": %lld,\n", LLD(GlobIdx));
2546  fprintf(fp_step," \"LoopType\": \"%s\",\n", LoopType ? "Sweep" : "Warmup");
2547  fprintf(fp_step," \"LoopIdx\": %lld,\n", LLD(LoopIdx));
2548  fprintf(fp_step," \"StepIdx\": %lld,\n", LLD(StepIdx));
2549  fprintf(fp_step," \"NSites_Sys\": %lld,\n", LLD(data.NumSites_Sys));
2550  fprintf(fp_step," \"NSites_Env\": %lld,\n", LLD(data.NumSites_Env));
2551  fprintf(fp_step," \"NSites_SysEnl\": %lld,\n", LLD(data.NumSites_SysEnl));
2552  fprintf(fp_step," \"NSites_EnvEnl\": %lld,\n", LLD(data.NumSites_EnvEnl));
2553  fprintf(fp_step," \"NStates_Sys\": %lld,\n", LLD(data.NumStates_Sys));
2554  fprintf(fp_step," \"NStates_Env\": %lld,\n", LLD(data.NumStates_Env));
2555  fprintf(fp_step," \"NStates_SysEnl\": %lld,\n", LLD(data.NumStates_SysEnl));
2556  fprintf(fp_step," \"NStates_EnvEnl\": %lld,\n", LLD(data.NumStates_EnvEnl));
2557  fprintf(fp_step," \"NStates_SysRot\": %lld,\n", LLD(data.NumStates_SysRot));
 2558  fprintf(fp_step," \"NStates_EnvRot\": %lld,\n", LLD(data.NumStates_EnvRot));
  fprintf(fp_step," \"NStates_H\": %lld,\n", LLD(data.NumStates_H));
2559  fprintf(fp_step," \"TruncErr_Sys\": %.20g,\n", data.TruncErr_Sys);
2560  fprintf(fp_step," \"TruncErr_Env\": %.20g,\n", data.TruncErr_Env);
2561  fprintf(fp_step," \"GSEnergy\": %.20g\n", data.GSEnergy);
2562  fprintf(fp_step," }");
2563  fflush(fp_step);
2564  return(0);
2565  }
2566 
 2567  /* Save headers for tabular timings */
2568  PetscErrorCode SaveTimingsHeaders()
2569  {
2570  if(mpi_rank || !data_tabular) return(0);
2571  fprintf(fp_timings,"{\n");
2572  fprintf(fp_timings," \"headers\" : [");
2573  fprintf(fp_timings,"\"GlobIdx\", ");
2574  fprintf(fp_timings,"\"Total\", ");
2575  fprintf(fp_timings,"\"Enlr\", ");
2576  fprintf(fp_timings,"\"Kron\", ");
2577  fprintf(fp_timings,"\"Diag\", ");
2578  fprintf(fp_timings,"\"Rdms\", ");
2579  fprintf(fp_timings,"\"Rotb\" ");
2580  fprintf(fp_timings,"],\n");
2581  fprintf(fp_timings," \"table\" : ");
2582  fflush(fp_timings);
2583  return(0);
2584  }
2585 
2587  PetscErrorCode SaveTimingsData(
2588  const TimingsData& data
2589  )
2590  {
2591  if(mpi_rank) return(0);
2592  fprintf(fp_timings,"%s", GlobIdx ? ",\n" : "");
2593  if(data_tabular){
2594  fprintf(fp_timings," [ ");
2595  fprintf(fp_timings,"%lld, ", LLD(GlobIdx));
2596  fprintf(fp_timings,"%.9g, ", data.Total);
2597  fprintf(fp_timings,"%.9g, ", data.tEnlr);
2598  fprintf(fp_timings,"%.9g, ", data.tKron);
2599  fprintf(fp_timings,"%.9g, ", data.tDiag);
2600  fprintf(fp_timings,"%.9g, ", data.tRdms);
2601  fprintf(fp_timings,"%.9g ", data.tRotb);
2602  fprintf(fp_timings,"]");
2603  fflush(fp_timings);
2604  return(0);
2605  }
2606  fprintf(fp_timings," {\n");
2607  fprintf(fp_timings," \"Total\": %.9g,\n", data.Total);
2608  fprintf(fp_timings," \"Enlr\": %.9g,\n", data.tEnlr);
2609  fprintf(fp_timings," \"Kron\": %.9g,\n", data.tKron);
2610  fprintf(fp_timings," \"Diag\": %.9g,\n", data.tDiag);
2611  fprintf(fp_timings," \"Rdms\": %.9g,\n", data.tRdms);
2612  fprintf(fp_timings," \"Rotb\": %.9g\n", data.tRotb);
2613  fprintf(fp_timings," }");
2614  fflush(fp_timings);
2615  return(0);
2616  }
2617 
2619  PetscErrorCode SaveEntanglementSpectra(
2620  const std::vector< Eigen_t >& eigen_L,
2621  const std::vector< PetscReal >& qn_L,
2622  const std::vector< Eigen_t >& eigen_R,
2623  const std::vector< PetscReal >& qn_R
2624  )
2625  {
2626  if(mpi_rank) return(0);
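        /* Each call appends one record of the form
             { "GlobIdx": n,
               "Sys": [ { "sector": qn, "vals": [ w1, w2, ... ] }, ... ],
               "Env": [ ... ] }
           with the eigenvalues grouped by magnetization sector. */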
2627  fprintf(fp_entanglement, "%s", GlobIdx ? ",\n" : "");
2628  fprintf(fp_entanglement, " {\n");
2629  fprintf(fp_entanglement, " \"GlobIdx\": %lld,\n", LLD(GlobIdx));
2630  fprintf(fp_entanglement, " \"Sys\": [\n");
2631  {
2632  PetscInt idx_prev = 999999999;
2633  for(const Eigen_t &eig: eigen_L){
2634  if(idx_prev!=eig.blkIdx){
2635  if(idx_prev != 999999999) fprintf(fp_entanglement," ]},\n");
2636  fprintf(fp_entanglement," {");
2637  fprintf(fp_entanglement,"\"sector\": %g, \"vals\": [ %g",qn_L[eig.blkIdx], eig.eigval);
2638  } else {
2639  fprintf(fp_entanglement,", %g", eig.eigval);
2640  }
2641  idx_prev = eig.blkIdx;
2642  }
2643  fprintf(fp_entanglement," ]}\n");
2644  }
2645  fprintf(fp_entanglement," ],\n");
2646  fprintf(fp_entanglement," \"Env\": [\n");
2647  {
2648  PetscInt idx_prev = 999999999;
2649  for(const Eigen_t &eig: eigen_R){
2650  if(idx_prev!=eig.blkIdx){
2651  if(idx_prev != 999999999) fprintf(fp_entanglement," ]},\n");
2652  fprintf(fp_entanglement," {");
2653  fprintf(fp_entanglement,"\"sector\": %g, \"vals\": [ %g",qn_R[eig.blkIdx], eig.eigval);
2654  } else {
2655  fprintf(fp_entanglement,", %g", eig.eigval);
2656  }
2657  idx_prev = eig.blkIdx;
2658  }
2659  fprintf(fp_entanglement," ]}\n");
2660  }
2661  fprintf(fp_entanglement," ]\n");
2662  fprintf(fp_entanglement," }");
2663  fflush(fp_entanglement);
2664  return(0);
2665  }
2666 
2668  PetscErrorCode SaveLoopsData()
2669  {
2670  if(mpi_rank) return(0);
2671  fprintf(fp_data,",\n");
2672  fprintf(fp_data," \"Warmup\": {\n");
2673  fprintf(fp_data," \"MStates\": %lld\n", LLD(mwarmup));
2674  fprintf(fp_data," },\n");
2675  fprintf(fp_data," \"Sweeps\": {\n");
2676  fprintf(fp_data," \"MStates\": [");
2677 
2678  PetscInt nsweeps = sweeps_mstates.size();
2679  if(nsweeps>0) fprintf(fp_data," %lld", LLD(sweeps_mstates[0]));
2680  for(PetscInt i=1;i<nsweeps;++i) fprintf(fp_data,", %lld", LLD(sweeps_mstates[i]));
2681 
2682  fprintf(fp_data," ]\n");
2683  fprintf(fp_data," }");
2684  fflush(fp_data);
2685  return(0);
2686  }
2687 
2689  PetscErrorCode SaveAsOptions(const std::string& filename)
2690  {
2691  if(mpi_rank) return(0);
2692  PetscErrorCode ierr;
2693  char val[4096];
2694  PetscBool set;
2695  std::vector< std::string > keys = {
2696  // "-no_symm",
2697  // "-do_shell",
2698  // "-dry_run",
2699  // "-do_save_prealloc",
2700  "-spin",
2701  "-mstates",
2702  "-mwarmup",
2703  "-nsweeps",
2704  "-msweeps",
2705  "-maxnsweeps"
2706  };
2707 
2708  std::ostringstream oss;
2709  for(std::string& key: keys) {
2710  ierr = PetscOptionsGetString(NULL,NULL,key.c_str(),val,4096,&set); CHKERRQ(ierr);
2711  if(set) {
2712  std::string val_str(val);
2713  if(val_str.empty()) val_str = "yes";
2714  oss << key << " " << val_str << std::endl;
2715  }
2716  }
2717 
2718  FILE *fp;
2719  ierr = PetscFOpen(PETSC_COMM_SELF,filename.c_str(),"w", &fp); CHKERRQ(ierr);
2720  ierr = PetscFPrintf(PETSC_COMM_SELF,fp,"%s",oss.str().c_str()); CHKERRQ(ierr);
2721  ierr = PetscFClose(PETSC_COMM_SELF,fp); CHKERRQ(ierr);
2722  return(0);
2723  }
2724 
2727  PetscErrorCode SaveSweepsData()
2728  {
 2729  /* Save data only on the root process */
2730  if(mpi_rank) return(0);
2731  PetscErrorCode ierr;
2732 
2733  /* Save Hamiltonian.dat */
2734  std::string ham_data_fn = scratch_dir + SweepDir(LoopIdx) + "Hamiltonian.dat";
2735  ierr = Ham.SaveAsOptions(ham_data_fn); CHKERRQ(ierr);
2736 
2737  /* Save DMRGInfo.dat */
2738  std::string dmrg_data_fn = scratch_dir + SweepDir(LoopIdx) + "PetscOptions.dat";
2739  ierr = SaveAsOptions(dmrg_data_fn); CHKERRQ(ierr);
2740 
2741  /* Save Sweep.dat */
2742  std::string sweep_data_fn = scratch_dir + SweepDir(LoopIdx) + "Sweep.dat";
2743  std::ofstream sweep_data_file(sweep_data_fn);
2744 
2745  #define SWEEP_DUMP(VAR) \
2746  sweep_data_file << std::setw(20) << (#VAR) << " " << (VAR) << "\n";
2747 
2748  /* >********************< 20 char */
2749  SWEEP_DUMP(GlobIdx ); /* 1 */
2750  SWEEP_DUMP(LoopIdx ); /* 2 */
2751  SWEEP_DUMP(num_sys_blocks ); /* 3 */
2752  SWEEP_DUMP(num_env_blocks ); /* 4 */
2753  SWEEP_DUMP(sys_ninit ); /* 5 */
2754  SWEEP_DUMP(env_ninit ); /* 6 */
2755  /* To be read-in and compared with the generated value */
2756  SWEEP_DUMP(num_sites ); /* 7 */
2757  /* To be read-in only with -restart_options enabled: */
2758  SWEEP_DUMP(sweep_mode ); /* 8 */
2759  SWEEP_DUMP(msweep_idx ); /* 9 */
2760 
2761  #undef SWEEP_DUMP
2762  sweep_data_file.close();
2763  return(0);
2764  }
2765 };
2766 
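/* Illustrative usage (a minimal sketch; the Block and Hamiltonian template arguments, the
   variable names, and the error handling shown here are placeholders, not prescribed by this
   header):

       DMRGBlockContainer< SomeBlock, SomeHamiltonian > dmrg(PETSC_COMM_WORLD);
       ierr = dmrg.Initialize();   CHKERRQ(ierr);  // one site on each of sys and env
       ierr = dmrg.Warmup();       CHKERRQ(ierr);  // warmup (or restart) stage
       ierr = dmrg.Sweeps();       CHKERRQ(ierr);  // sweep stage
       ierr = dmrg.Destroy();      CHKERRQ(ierr);
 */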
2767 #undef OpIdxToStr
2768 #undef GetOpMats
2769 #undef ASSIGN_VAR
2770 #undef PRINT_VAR
2771 #undef MAX_SWEEP_IDX
2772 
2777 #endif // __DMRG_BLOCK_CONTAINER_HPP__