DMRGBlock.cpp
1 #include "DMRGBlock.hpp"
2 #include <numeric> // partial_sum
3 #include <iostream>
4 #include <sstream>
5 #include <fstream>
6 #include <iomanip>
7 
8 #include <../src/mat/impls/aij/seq/aij.h> /* Mat_SeqAIJ */
9 #include <../src/mat/impls/aij/mpi/mpiaij.h> /* Mat_MPIAIJ */
10 
11 /* External functions taken from MiscTools.cpp */
12 PETSC_EXTERN int64_t ipow(int64_t base, uint8_t exp);
13 PETSC_EXTERN PetscErrorCode InitSingleSiteOperator(const MPI_Comm& comm, const PetscInt dim, Mat* mat);
14 PETSC_EXTERN PetscErrorCode MatEnsureAssembled(const Mat& matin);
15 PETSC_EXTERN PetscErrorCode MatEnsureAssembled_MultipleMats(const std::vector<Mat>& matrices);
16 PETSC_EXTERN PetscErrorCode MatEyeCreate(const MPI_Comm& comm, const PetscInt& dim, Mat& eye);
17 PETSC_EXTERN PetscErrorCode Makedir(const std::string& dir_name);
18 
20 #define CheckInit(func) if (PetscUnlikely(!init))\
21  SETERRQ1(mpi_comm, PETSC_ERR_ARG_CORRUPT, "%s was called but block was not yet initialized.",func);
22 
23 #define CheckInitOnce(func) if (PetscUnlikely(!init_once))\
24  SETERRQ1(mpi_comm, PETSC_ERR_ARG_CORRUPT, "%s was called but Initialize was never called before.",func);
25 
27 #define CheckIndex(row, col, cstart, cend) if((col) < (cstart) || (col) >= (cend))\
28  SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "On row %d, index %d out of bounds [%d,%d) ",\
29  (row), (col), (cstart), (cend));
30 
31 PetscErrorCode Block::SpinBase::Initialize(
32  const MPI_Comm& comm_in)
33 {
34  PetscErrorCode ierr;
35  if(mpi_init) SETERRQ(mpi_comm,1,"This initializer should only be called once.");
36  mpi_comm = comm_in;
37  ierr = MPI_Comm_rank(mpi_comm, &mpi_rank); CHKERRQ(ierr);
38  ierr = MPI_Comm_size(mpi_comm, &mpi_size); CHKERRQ(ierr);
39  mpi_init = PETSC_TRUE;
40  return(0);
41 }
42 
43 
44 PetscErrorCode Block::SpinBase::Initialize(
45  const MPI_Comm& comm_in,
46  const PetscInt& num_sites_in,
47  const PetscInt& num_states_in,
48  const PetscBool& init_ops
49  )
50 {
51  PetscErrorCode ierr = 0;
52 
53  /* Check whether to do verbose logging */
54  ierr = PetscOptionsGetBool(NULL,NULL,"-verbose",&verbose,NULL); CHKERRQ(ierr);
55  ierr = PetscOptionsGetBool(NULL,NULL,"-log_io",&log_io,NULL); CHKERRQ(ierr);
56  /* Initialize attributes */
57  if(!mpi_init){
58  ierr = Initialize(comm_in); CHKERRQ(ierr);
59  } else if (comm_in!=mpi_comm) {
60  SETERRQ(PETSC_COMM_SELF,1,"Mismatch in MPI communicators.");
61  }
62  /* Determine the spin-type from command line and set the attributes */
63  {
64  char spin[10];
65  PetscBool set;
66  ierr = PetscOptionsGetString(NULL,NULL,"-spin",spin,10,&set); CHKERRQ(ierr);
67  if(set)
68  {
69  spin_type_str = std::string(spin);
70  auto it = SpinTypes.find(spin_type_str);
71  if(it != SpinTypes.end())
72  {
73  spin_type = it->second;
74  }
75  else
76  {
77  SETERRQ1(mpi_comm,1,"Given -spin %s not valid/implemented.", spin);
78  }
79  switch(spin_type)
80  {
81  case SpinOneHalf :
82  _loc_dim = 2;
83  _loc_qn_list = {+0.5, -0.5};
84  _loc_qn_size = {1, 1};
85  break;
86 
87  case SpinOne :
88  _loc_dim = 3;
89  _loc_qn_list = {+1.0, 0.0, -1.0};
90  _loc_qn_size = {1, 1, 1};
91  break;
92 
93  default:
94  SETERRQ(mpi_comm,1,"Given spin_type not valid/implemented.");
95  }
96  }
97  }
98 
99  /* Initial number of sites and number of states */
100  num_sites = num_sites_in;
101  if(num_states_in == PETSC_DEFAULT){
102  num_states = ipow(loc_dim(), num_sites);
103  } else{
104  num_states = num_states_in;
105  }
108  /* Initialize array of operator matrices */
109  SzData.resize(num_sites);
110  SpData.resize(num_sites);
111  SmData.resize(num_sites);
112 
113  /* Initialize switch */
114  init = PETSC_TRUE;
115  init_once = PETSC_TRUE;
116 
117  if ((!init_ops) && (num_sites > 0))
118  {
119  /* Do nothing */
120  }
123  else if (init_ops && (num_sites == 1))
124  {
125  /* Create the spin operators for the single site */
126  ierr = MatSpinSzCreate(SzData[0]); CHKERRQ(ierr);
127  ierr = MatSpinSpCreate(SpData[0]); CHKERRQ(ierr);
128 
129  /* Also initialize the single-site Hamiltonian, which defaults to zero */
130  ierr = InitSingleSiteOperator(mpi_comm, num_states, &H); CHKERRQ(ierr);
131  ierr = MatEnsureAssembled(H); CHKERRQ(ierr);
132  /* Initialize the magnetization sectors using the defaults for one site */
133  ierr = Magnetization.Initialize(mpi_comm, loc_qn_list(), loc_qn_size()); CHKERRQ(ierr);
134 
135  /* Check whether sector initialization was done right */
136  ierr = CheckSectors(); CHKERRQ(ierr);
137  }
140  else if(init_ops && (num_sites > 1))
141  {
142  for(PetscInt isite = 0; isite < num_sites; ++isite)
143  {
144  ierr = InitSingleSiteOperator(mpi_comm, num_states, &SzData[isite]); CHKERRQ(ierr);
145  ierr = InitSingleSiteOperator(mpi_comm, num_states, &SpData[isite]); CHKERRQ(ierr);
146  }
147  }
148  else
149  SETERRQ1(mpi_comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid input num_sites_in. Must be > 0. Given %d.", num_sites_in);
150 
151  /* Initialize options for operator rotation */
152  ierr = PetscOptionsGetInt(NULL,NULL,"-rot_nsubcomm",&nsubcomm,NULL); CHKERRQ(ierr);
153  if(nsubcomm < 1 || nsubcomm > mpi_size)
154  SETERRQ1(mpi_comm, 1, "-rot_nsubcomm must be in the range [1, mpi_size]. Got %D.", nsubcomm);
155 
156  /* Require that the size of comm_world be a multiple of the number of subcommunicators */
157  if(mpi_size % nsubcomm)
158  SETERRQ2(mpi_comm, 1, "The size of mpi_comm (%d) must be a multiple of nsubcomm (%D).", mpi_size, nsubcomm);
159 
160  /* Determine the color of each lattice site. The Hamiltonian is processed at isite=num_sites */
161  site_color.resize((PetscMPIInt)(num_sites+1));
162  for(PetscMPIInt isite = 0; isite < num_sites+1; ++isite){
163  site_color[isite] = isite % nsubcomm;
164  }
165 
166  #if defined(PETSC_USE_COMPLEX)
167  if(rot_method == matptap) SETERRQ(mpi_comm,1,"The method matptap cannot be used for complex scalars.");
168  #endif
169 
170  return ierr;
171 }
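/* A minimal usage sketch (illustrative only; the variable name `block` is an
   assumption, not part of this file). A single site is typically created with its
   operators initialized, taking the local basis from the -spin option:

       Block::SpinBase block;
       ierr = block.Initialize(PETSC_COMM_WORLD, 1, PETSC_DEFAULT, PETSC_TRUE); CHKERRQ(ierr);
       // block now holds Sz[0], Sp[0], a zero single-site Hamiltonian H, and the
       // default single-site sectors (e.g. {+0.5, -0.5} for spin one-half)
*/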
172 
173 
174 PetscErrorCode Block::SpinBase::Initialize(
175  const MPI_Comm& comm_in,
176  const PetscInt& num_sites_in,
177  const std::vector<PetscReal>& qn_list_in,
178  const std::vector<PetscInt>& qn_size_in,
179  const PetscBool& init_ops
180  )
181 {
182  PetscErrorCode ierr = 0;
183 
184  /* This is most likely useless */
185  // if(PetscUnlikely(num_sites_in == 1))
186  // SETERRQ(mpi_comm, PETSC_ERR_ARG_OUTOFRANGE,
187  // "Invalid input num_sites_in cannot be equal to 1. Call a different Initialize() function.");
188 
189  QuantumNumbers Magnetization_temp;
190  ierr = Magnetization_temp.Initialize(comm_in, qn_list_in, qn_size_in); CHKERRQ(ierr);
191  PetscInt num_states_in = Magnetization_temp.NumStates();
192 
193  ierr = Initialize(comm_in, num_sites_in, num_states_in, init_ops); CHKERRQ(ierr);
194  Magnetization = Magnetization_temp;
195 
196  return ierr;
197 }
198 
199 
200 PetscErrorCode Block::SpinBase::Initialize(
201  const PetscInt& num_sites_in,
202  const QuantumNumbers& qn_in)
203 {
204  PetscErrorCode ierr = 0;
205 
206  ierr = qn_in.CheckInitialized(); CHKERRQ(ierr);
207  ierr = Initialize(qn_in.MPIComm(), num_sites_in, qn_in.NumStates()); CHKERRQ(ierr);
208  Magnetization = qn_in;
209 
210  return ierr;
211 }
212 
213 
214 PetscErrorCode Block::SpinBase::InitializeFromDisk(
215  const MPI_Comm& comm_in,
216  const std::string& block_path_in)
217 {
218  PetscErrorCode ierr;
219  PetscInt num_sites_in, num_states_in, num_sectors_in;
220  std::vector<PetscReal> qn_list_in;
221  std::vector<PetscInt> qn_size_in;
222  int spin_type_in;
223 
224  PetscMPIInt mpi_rank_in;
225  ierr = MPI_Comm_rank(comm_in,&mpi_rank_in); CHKERRQ(ierr);
226 
227  std::string block_path = block_path_in;
228  if(block_path.empty()) block_path = '/';
229  else if(block_path.back()!='/') block_path += '/';
230 
231  if(!mpi_rank_in)
232  {
233  /* Read data for Block */
234  std::string block_file = block_path + "BlockInfo.dat";
235 
236  PetscBool flg;
237  ierr = PetscTestFile(block_file.c_str(), 'r', &flg); CHKERRQ(ierr);
238  if(!flg) SETERRQ1(PETSC_COMM_SELF,1,"Error in reading %s", block_file.c_str());
239 
240  std::ifstream infofile(block_file.c_str());
241  std::map< std::string, PetscInt > infomap;
242  std::string line;
243  if (!infofile.is_open() || infofile.fail())
244  perror("error while opening file");
245  while (infofile && std::getline(infofile, line)) {
246  std::string key;
247  PetscInt val;
248  std::istringstream iss(line);
249  iss >> key >> val;
250  infomap[key] = val;
251  }
252  if (infofile.bad())
253  perror("error while reading file");
254 
255  infofile.close();
256 
257  /* Verify compatibility */
258  {
259  if(infomap.at("NumBytesPetscInt") != sizeof(PetscInt))
260  SETERRQ2(PETSC_COMM_SELF,1,"Incompatible NumBytesPetscInt. Expected %lld. Got %lld.",
261  PetscInt(sizeof(PetscInt)), infomap.at("NumBytesPetscInt"));
262 
263  if(infomap.at("NumBytesPetscScalar") != sizeof(PetscScalar))
264  SETERRQ2(PETSC_COMM_SELF,1,"Incompatible NumBytesPetscScalar. Expected %lld. Got %lld.",
265  PetscInt(sizeof(PetscScalar)), infomap.at("NumBytesPetscScalar"));
266 
267  #if defined(PETSC_USE_COMPLEX)
268  PetscInt PetscUseComplex = 1;
269  #else
270  PetscInt PetscUseComplex = 0;
271  #endif
272 
273  if(infomap.at("PetscUseComplex") != PetscUseComplex)
274  SETERRQ2(PETSC_COMM_SELF,1,"Incompatible PetscUseComplex. Expected %lld. Got %lld.",
275  PetscInt(PetscUseComplex), infomap.at("PetscUseComplex"));
276  }
277 
278  /* Get the spin type indicated in the file */
279  {
280  auto it = infomap.find(std::string("SpinTypeKey"));
281  if(it == infomap.end()) SETERRQ(PETSC_COMM_SELF,1,"SpinTypeKey not found.");
282  spin_type_in = int(it->second);
283  }
284 
285  /* Assign variables */
286  num_sites_in = infomap.at("NumSites");
287  num_states_in = infomap.at("NumStates");
288  num_sectors_in = infomap.at("NumSectors");
289  }
290 
291  ierr = MPI_Bcast(&num_sites_in, 1, MPIU_INT, 0, comm_in); CHKERRQ(ierr);
292  ierr = MPI_Bcast(&num_states_in, 1, MPIU_INT, 0, comm_in); CHKERRQ(ierr);
293  ierr = MPI_Bcast(&num_sectors_in, 1, MPIU_INT, 0, comm_in); CHKERRQ(ierr);
294  ierr = MPI_Bcast(&spin_type_in, 1, MPI_INT, 0, comm_in); CHKERRQ(ierr);
295 
296  /* Get the spin type from command line */
297  Spin_t spin_type_opt;
298  char spin[10];
299  PetscBool set;
300  ierr = PetscOptionsGetString(NULL,NULL,"-spin",spin,10,&set); CHKERRQ(ierr);
301  if(set){
302  auto it = SpinTypes.find(std::string(spin));
303  if(it == SpinTypes.end()) SETERRQ1(PETSC_COMM_SELF,1,"Given -spin %s not valid/implemented.", spin);
304  spin_type_opt = it->second;
305  if(spin_type_in != int(spin_type_opt))
306  SETERRQ2(comm_in,1,"Given spin types from file (%d) "
307  "and command line (%d) do not match", spin_type_in, spin_type_opt);
308  } else {
309  /* Impose the given spin_type_in as a command line argument */
310  auto it = SpinTypesString.find(Spin_t(spin_type_in));
311  if(it!=SpinTypesString.end()){
312  ierr = PetscOptionsSetValue(NULL,"-spin",(it->second).c_str()); CHKERRQ(ierr);
313  } else {
314  SETERRQ1(PETSC_COMM_SELF,1,"Input SpinTypeKey %d not valid/implemented.", spin_type_in);
315  }
316  }
317 
318  qn_list_in.resize(num_sectors_in);
319  qn_size_in.resize(num_sectors_in);
320 
321  if(num_sectors_in > 0)
322  {
323  if(!mpi_rank_in)
324  {
325  /* Read data for quantum numbers */
326  std::string qn_file = block_path + "QuantumNumbers.dat";
327 
328  PetscBool flg;
329  ierr = PetscTestFile(qn_file.c_str(), 'r', &flg); CHKERRQ(ierr);
330  if(!flg) SETERRQ1(PETSC_COMM_SELF,1,"Error in reading %s", qn_file.c_str());
331 
332  std::ifstream qnfile(qn_file.c_str());
333  std::string line;
334  if (!qnfile.is_open())
335  perror("error while opening file");
336  PetscInt ctr = 0;
337  while (qnfile && std::getline(qnfile, line)) {
338  std::istringstream iss(line);
339  iss >> qn_size_in.at(ctr) >> qn_list_in.at(ctr);
340  ++ctr;
341  }
342  if (qnfile.bad())
343  perror("error while reading file");
344  if(ctr!=num_sectors_in)
345  SETERRQ3(PETSC_COMM_SELF,1,"Incorrect number of data points in %s. "
346  "Expected %lld. Got %lld.", qn_file.c_str(), num_sectors_in, ctr);
347 
348  qnfile.close();
349  }
350 
351  ierr = MPI_Bcast(qn_size_in.data(), num_sectors_in, MPIU_INT, 0, comm_in); CHKERRQ(ierr);
352  ierr = MPI_Bcast(qn_list_in.data(), num_sectors_in, MPIU_REAL, 0, comm_in); CHKERRQ(ierr);
353  }
354  else
355  {
356  SETERRQ(comm_in,1,"NumSectors cannot be zero.");
357  }
358 
359  /* Initialize block object but do not initialize operators */
360  ierr = Initialize(comm_in, num_sites_in, qn_list_in, qn_size_in, PETSC_FALSE); CHKERRQ(ierr);
361 
362  /* Read-in the operators */
363  std::string save_dir_temp = save_dir;
364  save_dir = block_path;
365  ierr = Retrieve_NoChecks(); CHKERRQ(ierr);
366  save_dir = save_dir_temp;
367 
368  /* Check operator validity */
369  ierr = CheckOperatorBlocks(); CHKERRQ(ierr);
370 
371  return(0);
372 }
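/* For reference, both files parsed above are plain whitespace-separated text with
   one entry per line. The keys are the ones written out by SaveBlockInfo(); the
   sample values below (a single spin-one-half site) are illustrative only:

       BlockInfo.dat                        QuantumNumbers.dat
       NumBytesPetscInt              8      1 0.5
       NumBytesPetscScalar           8      1 -0.5    (one "<size> <Sz value>" pair per sector)
       PetscUseComplex               0
       SpinTypeKey                   0
       NumSites                      1
       NumStates                     2
       NumSectors                    2
*/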
373 
374 
375 PetscErrorCode Block::SpinBase::CheckOperatorArray(const Op_t& OpType) const
376 {
377  PetscErrorCode ierr = 0;
378 
379  const char *label = "";
380  const Mat *Op;
381  switch(OpType) {
382  case OpSm: Op = SmData.data(); label = "Sm"; break;
383  case OpSz: Op = SzData.data(); label = "Sz"; break;
384  case OpSp: Op = SpData.data(); label = "Sp"; break;
385  default: SETERRQ(mpi_comm, PETSC_ERR_ARG_WRONG, "Incorrect operator type.");
387  }
388 
389  /* Check the size of each matrix and make sure that it
390  matches the number of basis states */
391  PetscInt M, N;
392  for(PetscInt isite = 0; isite < num_sites; ++isite)
393  {
394  if(!Op[isite])
396  SETERRQ2(mpi_comm, PETSC_ERR_ARG_CORRUPT, "%s[%d] matrix not yet created.", label, isite);
397 
398  ierr = MatGetSize(Op[isite], &M, &N); CHKERRQ(ierr);
399  if (M != N)
401  SETERRQ2(mpi_comm, PETSC_ERR_ARG_WRONG, "%s[%d] matrix not square.", label, isite);
402 
403  if (M != num_states)
405  SETERRQ4(mpi_comm, PETSC_ERR_ARG_WRONG, "%s[%d] matrix dimension does not match "
406  "the number of states. Expected %d. Got %d.", label, isite, num_states, M);
407  }
408 
409  return ierr;
410 }
411 
412 
413 PetscErrorCode Block::SpinBase::CheckOperators() const
414 {
415  PetscErrorCode ierr = 0;
416  CheckInit(__FUNCTION__);
418  ierr = CheckOperatorArray(OpSz); CHKERRQ(ierr);
419  ierr = CheckOperatorArray(OpSp); CHKERRQ(ierr);
420 
421  if (init_Sm){
422  ierr = CheckOperatorArray(OpSm); CHKERRQ(ierr);
423  }
424 
425  return ierr;
426 }
427 
428 
429 PetscErrorCode Block::SpinBase::CheckSectors() const
430 {
431  PetscErrorCode ierr = 0;
432  CheckInitOnce(__FUNCTION__);
434  /* Check whether the Magnetization object has been initialized correctly */
435  ierr = Magnetization.CheckInitialized(); CHKERRQ(ierr);
436 
437  /* The last element of qn_offset must match the total number of states */
438  PetscInt magNumStates = Magnetization.NumStates();
439 
440  if(num_states != magNumStates)
443  SETERRQ2(mpi_comm, PETSC_ERR_ARG_WRONG, "The number of states in the Magnetization object "
444  "and the internal value do not match. " "Expected %d. Got %d.", num_states, magNumStates);
445 
446  return ierr;
447 }
448 
449 
450 PetscErrorCode Block::SpinBase::MatOpGetNNZs(
451  const Op_t& OpType,
452  const PetscInt& isite,
453  std::vector<PetscInt>& nnzs
454  ) const
455 {
456  PetscErrorCode ierr = 0;
457 
458  /* TODO: Generalize this piece of redundant code */
459 
460  /* Decipher inputs */
461  Mat matin;
462  if(isite >= num_sites)
463  SETERRQ2(mpi_comm, PETSC_ERR_ARG_OUTOFRANGE, "Input isite (%d) out of bounds [0,%d).", isite, num_sites);
464  switch(OpType) {
465  case OpSm: matin = SmData[isite]; break;
466  case OpSz: matin = SzData[isite]; break;
467  case OpSp: matin = SpData[isite]; break;
468  default: SETERRQ(mpi_comm, PETSC_ERR_ARG_WRONG, "Incorrect operator type.");
470  }
471 
472  ierr = MatGetNNZs(matin, nnzs); CHKERRQ(ierr);
473  return(0);
474 }
475 
476 
477 PetscErrorCode Block::SpinBase::MatGetNNZs(
478  const Mat& matin,
479  std::vector<PetscInt>& nnzs
480  ) const
481 {
482  nnzs.clear();
483  PetscInt rstart, rend;
484  PetscErrorCode ierr = MatGetOwnershipRange(matin, &rstart, &rend); CHKERRQ(ierr);
485  PetscInt lrows = rend - rstart;
486  nnzs.resize(lrows);
487  PetscInt ncols;
488  for(PetscInt irow=rstart; irow<rend; ++irow)
489  {
490  ierr = MatGetRow(matin, irow, &ncols, NULL, NULL); CHKERRQ(ierr);
491  nnzs[irow-rstart] = ncols;
492  ierr = MatRestoreRow(matin, irow, &ncols, NULL, NULL); CHKERRQ(ierr);
493  }
494  return(0);
495 }
496 
497 
498 PetscErrorCode Block::SpinBase::MatOpCheckOperatorBlocks(const Op_t& OpType, const PetscInt& isite) const
499 {
500  PetscErrorCode ierr = 0;
501 
502  /* Decipher inputs */
503  Mat matin;
504  if(isite >= num_sites)
505  SETERRQ2(mpi_comm, PETSC_ERR_ARG_OUTOFRANGE, "Input isite (%d) out of bounds [0,%d).", isite, num_sites);
506  switch(OpType) {
507  case OpSm: matin = SmData[isite]; break;
508  case OpSz: matin = SzData[isite]; break;
509  case OpSp: matin = SpData[isite]; break;
510  default: SETERRQ(mpi_comm, PETSC_ERR_ARG_WRONG, "Incorrect operator type.");
512  }
513 
514  ierr = MatCheckOperatorBlocks(OpType, matin); CHKERRQ(ierr);
515 
516  return ierr;
517 }
518 
519 
520 PetscErrorCode Block::SpinBase::MatCheckOperatorBlocks(const Op_t& OpType, const Mat& matin) const
521 {
522  PetscErrorCode ierr = 0;
523 
524  /* Ensure that the matrix is assembled */
525  ierr = MatEnsureAssembled(matin); CHKERRQ(ierr);
526 
527  /* Check whether sector initialization was done right */
528  ierr = CheckSectors(); CHKERRQ(ierr);
529 
530  /* Get row and column layout */
531  PetscInt rstart = matin->rmap->rstart;
532  PetscInt lrows = matin->rmap->n;
533  PetscInt cstart = matin->cmap->rstart;
534  PetscInt nrows = matin->rmap->N;
535  PetscInt ncols = matin->cmap->N;
536 
537  /* Check the matrix size */
538  const PetscInt magNumStates = Magnetization.NumStates();
539  if(magNumStates != nrows) SETERRQ2(mpi_comm,1,"Incorrect number of rows. Expected %d. Got %d.", magNumStates, nrows);
540  if(magNumStates != ncols) SETERRQ2(mpi_comm,1,"Incorrect number of cols. Expected %d. Got %d.", magNumStates, ncols);
541 
542  /* Ensure that empty processes do nothing */
543  if(!(0 <= rstart && rstart < nrows)) return ierr;
544 
545  /* Check the matrix type */
546  PetscBool matin_is_mpiaij, matin_is_mpiaijmkl;
547  ierr = PetscObjectTypeCompare((PetscObject)matin, MATMPIAIJ, &matin_is_mpiaij); CHKERRQ(ierr);
548  ierr = PetscObjectTypeCompare((PetscObject)matin, MATMPIAIJMKL, &matin_is_mpiaijmkl); CHKERRQ(ierr);
549 
550  /* Do specific tasks for MATMPIAIJ or MATMPIAIJMKL using the diagonal structure */
551  if(matin_is_mpiaij || matin_is_mpiaijmkl){
552  /* Extract diagonal (A) and off-diagonal (B) sequential matrices */
553  Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data;
554  PetscInt *cmap = mat->garray;
555 
556  PetscInt nzA, nzB, *cA=nullptr, *cB=nullptr;
557 
558  /* Determine the starting block */
559  PetscBool flg;
560  PetscInt col_GlobIdxStart, col_GlobIdxEnd;
561 
562  for(QuantumNumbersIterator Iter(Magnetization, rstart, rstart + lrows); Iter.Loop(); ++Iter)
563  {
564  const PetscInt lrow = Iter.Steps();
565  ierr = Iter.OpBlockToGlobalRange(OpType, col_GlobIdxStart, col_GlobIdxEnd, flg); CHKERRQ(ierr);
566 
567  ierr = (*mat->A->ops->getrow)(mat->A, lrow, &nzA, &cA, nullptr);CHKERRQ(ierr);
568  ierr = (*mat->B->ops->getrow)(mat->B, lrow, &nzB, &cB, nullptr);CHKERRQ(ierr);
569 
570  if(!flg && (nzA!=0 || nzB!=0))
573  SETERRQ1(PETSC_COMM_SELF, 1, "Row %d should have no entries.", lrow+rstart);
574 
575  /* Check first and last element assuming entries are sorted */
576  if(nzA){
577  CheckIndex(lrow+rstart, cA[0] + cstart, col_GlobIdxStart, col_GlobIdxEnd);
578  CheckIndex(lrow+rstart, cA[nzA-1] + cstart, col_GlobIdxStart, col_GlobIdxEnd);
579  }
580 
581  if(nzB){
582  CheckIndex(lrow+rstart, cmap[cB[0]], col_GlobIdxStart, col_GlobIdxEnd);
583  CheckIndex(lrow+rstart, cmap[cB[nzB-1]], col_GlobIdxStart, col_GlobIdxEnd);
584  }
585  }
586  }
587  else
588  {
589  MatType type;
590  ierr = MatGetType(matin, &type); CHKERRQ(ierr);
591 
593  SETERRQ1(mpi_comm, PETSC_ERR_SUP, "Implemented only for MATMPIAIJ. Got %s.", type);
594  }
595 
596  ierr = PetscInfo(0, "Operator matrix check satisfied.\n"); CHKERRQ(ierr);
597  return ierr;
598 }
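/* Informal summary of the check above: every local row lies in some magnetization
   sector, and the operator may only connect that sector to the single target
   sector implied by its type (Sz preserves the sector, Sp/Sm shift it), so all
   column indices of the row must fall inside the global range
   [col_GlobIdxStart, col_GlobIdxEnd) returned by OpBlockToGlobalRange(). Because
   AIJ rows store their column indices in ascending order, testing only the first
   and last entry of the diagonal (A) and off-diagonal (B) parts suffices. */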
599 
600 
601 PetscErrorCode Block::SpinBase::CheckOperatorBlocks() const
602 {
603  PetscErrorCode ierr = 0;
604  CheckInit(__FUNCTION__);
606  /* Check all operator matrices */
607  ierr = CheckOperators(); CHKERRQ(ierr);
608 
609  /* Check operator blocks of Sz matrices */
610  for(PetscInt isite = 0; isite < num_sites; ++isite){
611  ierr = MatOpCheckOperatorBlocks(OpSz, isite); CHKERRQ(ierr);
612  }
613 
614  /* Check operator blocks of Sp matrices */
615  for(PetscInt isite = 0; isite < num_sites; ++isite){
616  ierr = MatOpCheckOperatorBlocks(OpSp, isite); CHKERRQ(ierr);
617  }
618 
619  return ierr;
620 }
621 
622 
623 PetscErrorCode Block::SpinBase::CreateSm()
624 {
625  PetscErrorCode ierr = 0;
626 
627  if(init_Sm) SETERRQ(mpi_comm, 1, "Sm was previously initialized. Call DestroySm() first.");
628 
629  ierr = CheckOperatorArray(OpSp); CHKERRQ(ierr);
630  for(PetscInt isite = 0; isite < num_sites; ++isite){
631  ierr = MatHermitianTranspose(SpData[isite], MAT_INITIAL_MATRIX, &SmData[isite]); CHKERRQ(ierr);
632  }
633  init_Sm = PETSC_TRUE;
634 
635  return ierr;
636 }
637 
638 
639 PetscErrorCode Block::SpinBase::DestroySm()
640 {
641  PetscErrorCode ierr = 0;
642  if(!init_Sm && !init) return(0);
643  if(!init_Sm) SETERRQ1(mpi_comm, 1, "%s was called but Sm was not yet initialized. ",__FUNCTION__);
644 
645  for(PetscInt isite = 0; isite < num_sites; ++isite){
646  ierr = MatDestroy(&SmData[isite]); CHKERRQ(ierr);
647  SmData[isite] = NULL;
648  }
649  init_Sm = PETSC_FALSE;
650 
651  return ierr;
652 }
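/* Typical pairing (illustrative; `block` is a hypothetical instance): the Sm
   matrices are built only for the duration in which S- terms are needed,

       ierr = block.CreateSm(); CHKERRQ(ierr);
       // ... use the lowering operators in SmData ...
       ierr = block.DestroySm(); CHKERRQ(ierr);

   since Sm is always reconstructible as the Hermitian transpose of Sp. */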
653 
654 
655 PetscErrorCode Block::SpinBase::Destroy()
656 {
657  PetscErrorCode ierr = 0;
658  if (PetscUnlikely(!init)) return 0;
659 
660  /* Destroy operator matrices */
661  for(PetscInt isite = 0; isite < num_sites; ++isite){
662  ierr = MatDestroy(&SzData[isite]); CHKERRQ(ierr);
663  ierr = MatDestroy(&SpData[isite]); CHKERRQ(ierr);
664  SzData[isite] = NULL;
665  SpData[isite] = NULL;
666  }
667  ierr = MatDestroy(&H); CHKERRQ(ierr);
668  H = NULL;
669  if (init_Sm){
670  ierr = DestroySm(); CHKERRQ(ierr);
671  }
672  init = PETSC_FALSE;
673  return ierr;
674 }
675 
676 /* Note: May modify save state of Source */
677 PetscErrorCode Block::SpinBase::RotateOperators(SpinBase& Source, const Mat& RotMatT_in)
678 {
679  PetscErrorCode ierr = 0;
680  Mat RotMatT, RotMat, *SpData_loc, *SzData_loc, H_loc;
681  MPI_Comm subcomm;
682  PetscMPIInt sub_rank, sub_size, sub_color=0;
683 
684  CheckInit(__FUNCTION__);
686  /* Destroy the Sm operators first since we do not want to rotate them separately */
687  if(init_Sm){ ierr = DestroySm(); CHKERRQ(ierr); }
688 
689  /* Verify the sizes */
690  const PetscInt NumStatesOrig = Source.NumStates();
691  const PetscInt NumSitesOrig = Source.NumSites();
692  PetscInt NRows_RT, NCols_RT;
693  ierr = MatGetSize(RotMatT_in, &NRows_RT, &NCols_RT); CHKERRQ(ierr);
694  if(NCols_RT != NumStatesOrig)
695  SETERRQ2(mpi_comm, 1, "RotMatT_in incorrect number of cols. Expected %d. Got %d.", NumStatesOrig, NCols_RT);
696  if(NRows_RT != num_states)
697  SETERRQ2(mpi_comm, 1, "RotMatT_in incorrect number of rows. Expected %d. Got %d.", num_states, NRows_RT);
698  if(NumSitesOrig != num_sites)
699  SETERRQ2(mpi_comm, 1, "Source block has incorrect number of sites. Expected %d. Got %d.", num_sites, NumSitesOrig);
700 
701  if(nsubcomm > 1)
702  {
703  /* Require that InitializeSave() or SetDiskStorage() has been called first, since this operation
704  requires reading the matrices from disk into the subcommunicators */
705  if(!(init_save || disk_set)) SETERRQ(mpi_comm,1,"InitializeSave() or SetDiskStorage() "
706  "must be called first before using this feature.");
707  ierr = MatCreateRedundantMatrix(RotMatT_in, nsubcomm, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &RotMatT); CHKERRQ(ierr);
708  ierr = PetscObjectGetComm((PetscObject)RotMatT, &subcomm); CHKERRQ(ierr);
709  ierr = MPI_Comm_rank(subcomm, &sub_rank); CHKERRQ(ierr);
710  ierr = MPI_Comm_size(subcomm, &sub_size); CHKERRQ(ierr);
711  sub_color = mpi_rank / sub_size;
712 
713  /* Require source and destination matrices to be saved to disk so that each subcommunicator can retrieve this data */
714  ierr = Source.EnsureSaved(); CHKERRQ(ierr);
715  }
716  else
717  {
718  RotMatT = RotMatT_in;
719  ierr = Source.EnsureRetrieved(); CHKERRQ(ierr);
720  }
721 
722  if(rot_method==mmmmult){
723  ierr = MatHermitianTranspose(RotMatT, MAT_INITIAL_MATRIX, &RotMat); CHKERRQ(ierr);
724  } else {
725  SETERRQ(mpi_comm,1,"Not implemented.");
726  }
727 
728  /* Destroy previously created operators */
729  for(PetscInt isite = 0; isite < num_sites; ++isite)
730  {
731  ierr = MatDestroy(&SpData[isite]); CHKERRQ(ierr);
732  ierr = MatDestroy(&SzData[isite]); CHKERRQ(ierr);
733  }
734  ierr = MatDestroy(&H); CHKERRQ(ierr);
735 
736  /* Load source matrices to an array of Mat's */
737  ierr = PetscCalloc1(num_sites, &SpData_loc); CHKERRQ(ierr);
738  ierr = PetscCalloc1(num_sites, &SzData_loc); CHKERRQ(ierr);
739 
740  if(nsubcomm > 1)
741  {
742  /* Retrieve source matrices manually */
743  for(PetscInt isite = 0; isite < num_sites; ++isite){
744  if(site_color[isite]!=sub_color) continue;
745  ierr = Source.RetrieveOperator("Sp", isite, SpData_loc[isite], subcomm); CHKERRQ(ierr);
746  ierr = Source.RetrieveOperator("Sz", isite, SzData_loc[isite], subcomm); CHKERRQ(ierr);
747  }
748  if(site_color[num_sites]==sub_color){
749  ierr = Source.RetrieveOperator("H", 0, H_loc, subcomm); CHKERRQ(ierr);
750  }
751  }
752  else
753  {
754  /* Copy source matrices (pointers) into local data */
755  for(PetscInt isite = 0; isite < num_sites; ++isite) SpData_loc[isite] = Source.Sp(isite);
756  for(PetscInt isite = 0; isite < num_sites; ++isite) SzData_loc[isite] = Source.Sz(isite);
757  H_loc = Source.H;
758  }
759 
760  /* Perform the rotation on all operators */
761  if(rot_method==mmmmult)
762  {
763  for(PetscInt isite = 0; isite < num_sites; ++isite)
764  {
765  if(site_color[isite]!=sub_color) continue;
766  ierr = MatMatMatMult(RotMatT, SpData_loc[isite], RotMat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &SpData[isite]); CHKERRQ(ierr);
767  ierr = MatMatMatMult(RotMatT, SzData_loc[isite], RotMat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &SzData[isite]); CHKERRQ(ierr);
768  }
769  if(site_color[num_sites]==sub_color)
770  {
771  ierr = MatMatMatMult(RotMatT, H_loc, RotMat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &H); CHKERRQ(ierr);
772  }
773  }
774  else
775  {
776  SETERRQ(mpi_comm,1,"Not implemented.");
777  }
778 
779  if(nsubcomm > 1)
780  {
781  /* Save data back to disk */
782  for(PetscInt isite = 0; isite < num_sites; ++isite)
783  {
784  if(site_color[isite]!=sub_color) continue;
785  ierr = SaveOperator("Sz", isite, SzData[isite], subcomm); CHKERRQ(ierr);
786  ierr = SaveOperator("Sp", isite, SpData[isite], subcomm); CHKERRQ(ierr);
787  }
788  if(site_color[num_sites]==sub_color)
789  {
790  ierr = SaveOperator("H", 0, H, subcomm); CHKERRQ(ierr);
791  ierr = MatDestroy(&H_loc); CHKERRQ(ierr);
792  }
793  /* Ensure that the current block is returned to a saved state */
794  for(PetscInt isite = 0; isite < num_sites; ++isite)
795  {
796  ierr = MatDestroy(&SpData[isite]); CHKERRQ(ierr);
797  ierr = MatDestroy(&SzData[isite]); CHKERRQ(ierr);
798  ierr = MatDestroy(&SpData_loc[isite]); CHKERRQ(ierr);
799  ierr = MatDestroy(&SzData_loc[isite]); CHKERRQ(ierr);
800  }
801  ierr = MatDestroy(&RotMatT); CHKERRQ(ierr);
802  ierr = MatDestroy(&H); CHKERRQ(ierr);
803  ierr = SaveBlockInfo(); CHKERRQ(ierr);
804  ierr = Destroy(); CHKERRQ(ierr);
805  init = PETSC_FALSE;
806  saved = PETSC_TRUE;
807  retrieved = PETSC_FALSE;
808 
811  }
812  else
813  {
814  ierr = CheckOperatorBlocks(); CHKERRQ(ierr);
815  ierr = SaveAndDestroy(); CHKERRQ(ierr);
816  }
817 
818  ierr = PetscFree(SpData_loc); CHKERRQ(ierr);
819  ierr = PetscFree(SzData_loc); CHKERRQ(ierr);
820  ierr = MatDestroy(&RotMat); CHKERRQ(ierr);
821 
822  return(0);
823 }
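/* Illustrative call (names such as `sys`, `newblock`, and `RotMatT` are
   assumptions): with `newblock` already initialized to the truncated number of
   states m and `RotMatT` of size m x sys.NumStates(),

       ierr = newblock.RotateOperators(sys, RotMatT); CHKERRQ(ierr);

   fills newblock's Sz/Sp operators (and H) with the rotated operators of sys.
   With -rot_nsubcomm > 1 each subcommunicator retrieves its share of the source
   operators from disk, rotates them, and writes the results back, leaving the
   block in a saved (out-of-memory) state. */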
824 
825 PetscErrorCode Block::SpinBase::AssembleOperators()
826 {
827  PetscErrorCode ierr;
828  ierr = MatEnsureAssembled_MultipleMats(SzData); CHKERRQ(ierr);
829  ierr = MatEnsureAssembled_MultipleMats(SpData); CHKERRQ(ierr);
830  if(H){ ierr = MatEnsureAssembled(H); CHKERRQ(ierr);}
831  return(0);
832 }
833 
834 
835 PetscErrorCode Block::SpinBase::InitializeSave(
836  const std::string& save_dir_in
837  )
838 {
839  PetscErrorCode ierr = 0;
840  PetscBool flg = PETSC_FALSE;
841  if(save_dir_in.empty()) SETERRQ(mpi_comm,1,"Save dir cannot be empty.");
842  if(!mpi_init) SETERRQ(mpi_comm,1,"MPI Initialization must be completed first.");
843  if(!mpi_rank){
844  ierr = PetscTestDirectory(save_dir_in.c_str(), 'r', &flg); CHKERRQ(ierr);
845  }
846  ierr = MPI_Bcast(&flg, 1, MPI_INT, 0, mpi_comm); CHKERRQ(ierr);
847  if(!flg) SETERRQ1(mpi_comm,1,"Directory %s does not exist. Please verify that -scratch_dir is specified correctly.",save_dir_in.c_str());
848  save_dir = save_dir_in;
849  /* If the last character is not a slash then add one */
850  if(save_dir.back()!='/') save_dir += '/';
852  init_save = PETSC_TRUE;
853  return(0);
854 }
855 
856 
857 PetscErrorCode Block::SpinBase::SetDiskStorage(
858  const std::string& read_dir_in,
859  const std::string& write_dir_in
860  )
861 {
862  if(init_save == PETSC_TRUE)
863  SETERRQ(mpi_comm,1,"InitializeSave() and SetDiskStorage() cannot both be used "
864  "on the same block.");
865 
866  read_dir = read_dir_in;
867  if(read_dir.back()!='/') read_dir += '/';
868 
869  write_dir = write_dir_in;
870  if(write_dir.back()!='/') write_dir += '/';
871 
879  save_dir = read_dir;
880 
883  num_reads = 0;
884  disk_set = PETSC_TRUE;
885  return(0);
886 }
887 
888 
889 std::string OpFilename(const std::string& RootDir, const std::string& OpName, const size_t& isite = 0){
890  std::ostringstream oss;
891  oss << RootDir << OpName << "_" << std::setfill('0') << std::setw(9) << isite << ".mat";
892  return oss.str();
893 }
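/* Example: OpFilename("/tmp/block_00/", "Sz", 3) returns
   "/tmp/block_00/Sz_000000003.mat" (the site index is zero-padded to 9 digits;
   the directory name here is only illustrative). */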
894 
895 
896 PetscErrorCode Block::SpinBase::SaveOperator(
897  const std::string& OpName,
898  const size_t& isite,
899  Mat& Op,
900  const MPI_Comm& comm_in)
901 {
902  PetscErrorCode ierr;
903  PetscViewer binv;
904  if(!Op) SETERRQ(PETSC_COMM_SELF,1,"Input matrix is null.");
905  ierr = PetscViewerBinaryOpen(comm_in, OpFilename(write_dir,OpName,isite).c_str(), FILE_MODE_WRITE, &binv); CHKERRQ(ierr);
906  ierr = MatView(Op, binv); CHKERRQ(ierr);
907  ierr = PetscViewerDestroy(&binv); CHKERRQ(ierr);
908  ierr = MatDestroy(&Op); CHKERRQ(ierr);
909  Op = NULL;
910 
911  if(log_io){
912  PetscMPIInt subcomm_rank;
913  ierr = MPI_Comm_rank(comm_in, &subcomm_rank); CHKERRQ(ierr);
914  if(!subcomm_rank)
915  std::cout
916  << " IO: [" << mpi_rank << "] "
917  << "Saved: " << OpFilename(write_dir,OpName,isite) << std::endl;
918  }
919 
920  return(0);
921 }
922 
923 
924 PetscErrorCode Block::SpinBase::SaveBlockInfo()
925 {
926  CheckInit(__FUNCTION__);
927  if(!init_save && !disk_set)
928  SETERRQ(mpi_comm,1,"InitializeSave() or SetDiskStorage() must be called first.");
929 
930  if(mpi_rank) return(0);
931 
932  /* Save block information */
933  {
934  std::ofstream infofile;
935  infofile.open((write_dir + "BlockInfo.dat").c_str());
936 
937  #if defined(PETSC_USE_COMPLEX)
938  PetscInt PetscUseComplex = 1;
939  #else
940  PetscInt PetscUseComplex = 0;
941  #endif
942 
943  #define SaveInfo(KEY,VALUE) infofile << std::left << std::setfill(' ') \
944  << std::setw(30) << KEY << " " << VALUE << std::endl;
945  SaveInfo("NumBytesPetscInt", sizeof(PetscInt));
946  SaveInfo("NumBytesPetscScalar", sizeof(PetscScalar));
947  SaveInfo("PetscUseComplex", PetscUseComplex);
948  SaveInfo("SpinTypeKey", spin_type);
949  SaveInfo("NumSites", num_sites);
950  SaveInfo("NumStates", num_states);
951  SaveInfo("NumSectors", Magnetization.NumSectors());
952  #undef SaveInfo
953 
954  infofile.close();
955  }
956 
957  /* Save quantum numbers information */
958  {
959  std::ofstream qnfile;
960  qnfile.open((write_dir + "QuantumNumbers.dat").c_str());
961  std::vector<PetscInt> qn_size = Magnetization.Sizes();
962  std::vector<PetscReal> qn_list = Magnetization.List();
963  PetscInt num_sectors = Magnetization.NumSectors();
964  for(PetscInt idx=0; idx<num_sectors; idx++)
965  qnfile << qn_size.at(idx) << " "
966  << qn_list.at(idx) << std::endl;
967  qnfile.close();
968  }
969 
970  return(0);
971 }
972 
973 
974 PetscErrorCode Block::SpinBase::RetrieveOperator(
975  const std::string& OpName,
976  const size_t& isite,
977  Mat& Op,
978  const MPI_Comm& comm_in)
979 {
980  PetscErrorCode ierr;
981  /* Separately handle the case of Sm operators since they are never saved */
982  if(OpName=="Sm")
983  {
984  Mat OpSp;
985  ierr = RetrieveOperator("Sp",isite,OpSp,comm_in); CHKERRQ(ierr);
986  ierr = MatHermitianTranspose(OpSp,MAT_INITIAL_MATRIX,&Op); CHKERRQ(ierr);
987  ierr = MatDestroy(&OpSp); CHKERRQ(ierr);
988  return(0);
989  }
990  PetscViewer binv;
991  MPI_Comm comm = (comm_in==MPI_COMM_NULL) ? mpi_comm : comm_in;
992  ierr = PetscViewerBinaryOpen(comm, OpFilename(save_dir,OpName,isite).c_str(), FILE_MODE_READ, &binv); CHKERRQ(ierr);
993  ierr = MatCreate(comm, &Op); CHKERRQ(ierr);
994  /* NOTE: Added this fix for the conflicting cases of mpiaij operators and seqaij rotation matrices */
995  if(comm_in==MPI_COMM_NULL){
996  ierr = MatSetFromOptions(Op); CHKERRQ(ierr);
997  }
998  ierr = MatLoad(Op, binv); CHKERRQ(ierr);
999  ierr = PetscViewerDestroy(&binv); CHKERRQ(ierr);
1000 
1001  if(log_io){
1002  PetscMPIInt subcomm_rank;
1003  ierr = MPI_Comm_rank(comm, &subcomm_rank); CHKERRQ(ierr);
1004  if(!subcomm_rank)
1005  std::cout
1006  << " IO: [" << mpi_rank << "] "
1007  << "Retrieved: " << OpFilename(save_dir,OpName,isite) << std::endl;
1008  }
1009 
1010  return(0);
1011 }
1012 
1013 
1014 PetscErrorCode Block::SpinBase::SaveAndDestroy()
1015 {
1016  CheckInit(__FUNCTION__);
1017  if(!init_save && !disk_set)
1018  SETERRQ(mpi_comm,1,"InitializeSave() or SetDiskStorage() must be called first.");
1019 
1020  PetscErrorCode ierr;
1021  for(PetscInt isite = 0; isite < num_sites; ++isite){
1022  ierr = SaveOperator("Sz",isite,SzData[isite],mpi_comm); CHKERRQ(ierr);
1023  }
1024  for(PetscInt isite = 0; isite < num_sites; ++isite){
1025  ierr = SaveOperator("Sp",isite,SpData[isite],mpi_comm); CHKERRQ(ierr);
1026  }
1027  if(H){
1028  ierr = SaveOperator("H",0,H,mpi_comm); CHKERRQ(ierr);
1029  }
1030  ierr = SaveBlockInfo(); CHKERRQ(ierr);
1031  ierr = Destroy(); CHKERRQ(ierr);
1032  init = PETSC_FALSE;
1033  saved = PETSC_TRUE;
1034  retrieved = PETSC_FALSE;
1035 
1040  if(disk_set)
1041  {
1042  save_dir = write_dir;
1043  }
1044  return(0);
1045 }
1046 
1047 
1048 PetscErrorCode Block::SpinBase::Retrieve()
1049 {
1050  if(!init_save && !disk_set)
1051  SETERRQ(mpi_comm,1,"InitializeSave() or SetDiskStorage() must be called first.");
1052 
1053  if(init) SETERRQ(mpi_comm,1,"Destroy() must be called first.");
1054  PetscErrorCode ierr;
1055  ierr = Retrieve_NoChecks(); CHKERRQ(ierr);
1056  init = PETSC_TRUE;
1057  saved = PETSC_FALSE;
1058  retrieved = PETSC_TRUE;
1059  return(0);
1060 }
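/* Illustrative lifecycle (the directory name and `block` are assumptions): once a
   scratch directory has been registered, a block can be pushed to disk between
   DMRG steps and pulled back on demand,

       ierr = block.InitializeSave("/scratch/block_00/"); CHKERRQ(ierr);
       ierr = block.SaveAndDestroy(); CHKERRQ(ierr);   // matrices now live only on disk
       ierr = block.Retrieve();       CHKERRQ(ierr);   // matrices back in memory

   while EnsureSaved()/EnsureRetrieved() below are the idempotent variants that do
   nothing if the block is already in the requested state. */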
1061 
1062 
1063 PetscErrorCode Block::SpinBase::Retrieve_NoChecks()
1064 {
1065  PetscErrorCode ierr;
1066  PetscBool flg = PETSC_FALSE;
1067  for(PetscInt isite = 0; isite < num_sites; ++isite){
1068  ierr = RetrieveOperator("Sz",isite,SzData[isite],mpi_comm); CHKERRQ(ierr);
1069  }
1070  for(PetscInt isite = 0; isite < num_sites; ++isite){
1071  ierr = RetrieveOperator("Sp",isite,SpData[isite],mpi_comm); CHKERRQ(ierr);
1072  }
1073  ierr = PetscTestFile(OpFilename(save_dir,"H",0).c_str(), 'r', &flg); CHKERRQ(ierr);
1074  if(flg){
1075  ierr = RetrieveOperator("H",0,H,mpi_comm); CHKERRQ(ierr);
1076  }
1077 
1081  if(num_reads==0 && disk_set)
1082  {
1083  save_dir = write_dir;
1084  }
1085  ++num_reads;
1086  return(0);
1087 }
1088 
1089 
1090 PetscErrorCode Block::SpinBase::EnsureSaved()
1091 {
1092  if(!init || !(init_save || disk_set) || saved) return(0);
1093  PetscErrorCode ierr = SaveAndDestroy(); CHKERRQ(ierr);
1094  return(0);
1095 }
1096 
1097 
1098 PetscErrorCode Block::SpinBase::EnsureRetrieved()
1099 {
1100  if(init || !(init_save || disk_set) || retrieved) return(0);
1101  PetscErrorCode ierr = Retrieve(); CHKERRQ(ierr);
1102  return(0);
1103 }
1104 
1105 
1107 {
1108  if(!MPIInitialized()) SETERRQ(PETSC_COMM_SELF,1,"Block's MPI communicator not initialized.");
1109  PetscErrorCode ierr;
1110 
1111  ierr = InitSingleSiteOperator(MPIComm(), loc_dim(), &Sz); CHKERRQ(ierr);
1112 
1113  PetscInt locrows, Istart;
1114  ierr = PreSplitOwnership(MPIComm(), loc_dim(), locrows, Istart); CHKERRQ(ierr);
1115  PetscInt Iend = Istart + locrows;
1116 
1117  switch(spin_type)
1118  {
1119  case SpinOneHalf :
1131  if (Istart <= 0 && 0 < Iend){
1132  ierr = MatSetValue(Sz, 0, 0, +0.5, INSERT_VALUES); CHKERRQ(ierr);
1133  }
1134  if (Istart <= 1 && 1 < Iend){
1135  ierr = MatSetValue(Sz, 1, 1, -0.5, INSERT_VALUES); CHKERRQ(ierr);
1136  }
1137  break;
1138 
1139  case SpinOne :
1151  if (Istart <= 0 && 0 < Iend){
1152  ierr = MatSetValue(Sz, 0, 0, +1.0, INSERT_VALUES); CHKERRQ(ierr);
1153  }
1154  if (Istart <= 2 && 2 < Iend){
1155  ierr = MatSetValue(Sz, 2, 2, -1.0, INSERT_VALUES); CHKERRQ(ierr);
1156  }
1157  break;
1158 
1159  default:
1160  SETERRQ(mpi_comm,1,"Given spin_type not valid/implemented.");
1161  }
1162 
1163  ierr = MatAssemblyBegin(Sz, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1164  ierr = MatAssemblyEnd(Sz, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1165  return(0);
1166 }
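/* The matrix assembled above is diagonal in the local basis; only the nonzero
   entries are stored explicitly:

       spin one-half:  Sz = [ +1/2    0  ]      spin one:  Sz = [ +1  0   0 ]
                            [   0   -1/2 ]                      [  0  0   0 ]
                                                                [  0  0  -1 ]
*/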
1167 
1168 
1169 PetscErrorCode Block::SpinBase::MatSpinSpCreate(Mat& Sp)
1170 {
1171  if(!MPIInitialized()) SETERRQ(PETSC_COMM_SELF,1,"Block's MPI communicator not initialized.");
1172  PetscErrorCode ierr;
1173 
1174  ierr = InitSingleSiteOperator(MPIComm(), loc_dim(), &Sp); CHKERRQ(ierr);
1175 
1176  PetscInt locrows, Istart;
1177  ierr = PreSplitOwnership(MPIComm(), loc_dim(), locrows, Istart); CHKERRQ(ierr);
1178  PetscInt Iend = Istart + locrows;
1179  const PetscScalar Sqrt2 = PetscSqrtScalar(2.0);
1180  switch(spin_type)
1181  {
1182  case SpinOneHalf :
1193  if (Istart <= 0 && 0 < Iend){
1194  ierr = MatSetValue(Sp, 0, 1, +1.0, INSERT_VALUES); CHKERRQ(ierr);
1195  }
1196  break;
1197 
1198  case SpinOne :
1210  if (Istart <= 0 && 0 < Iend){
1211  ierr = MatSetValue(Sp, 0, 1, Sqrt2, INSERT_VALUES); CHKERRQ(ierr);
1212  }
1213  if (Istart <= 1 && 1 < Iend){
1214  ierr = MatSetValue(Sp, 1, 2, Sqrt2, INSERT_VALUES); CHKERRQ(ierr);
1215  }
1216  break;
1217 
1218  default:
1219  SETERRQ(mpi_comm,1,"Given spin_type not valid/implemented.");
1220  }
1221 
1222  ierr = MatAssemblyBegin(Sp, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1223  ierr = MatAssemblyEnd(Sp, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1224  return(0);
1225 }