1 #include "DMRGBlock.hpp" 8 #include <../src/mat/impls/aij/seq/aij.h> 9 #include <../src/mat/impls/aij/mpi/mpiaij.h> 12 PETSC_EXTERN int64_t ipow(int64_t base, uint8_t exp);
13 PETSC_EXTERN PetscErrorCode InitSingleSiteOperator(
const MPI_Comm& comm,
const PetscInt dim, Mat* mat);
14 PETSC_EXTERN PetscErrorCode MatEnsureAssembled(
const Mat& matin);
15 PETSC_EXTERN PetscErrorCode MatEnsureAssembled_MultipleMats(
const std::vector<Mat>& matrices);
16 PETSC_EXTERN PetscErrorCode MatEyeCreate(
const MPI_Comm& comm,
const PetscInt& dim, Mat& eye);
17 PETSC_EXTERN PetscErrorCode
Makedir(
const std::string& dir_name);
20 #define CheckInit(func) if (PetscUnlikely(!init))\ 21 SETERRQ1(mpi_comm, PETSC_ERR_ARG_CORRUPT, "%s was called but block was not yet initialized.",func); 23 #define CheckInitOnce(func) if (PetscUnlikely(!init_once))\ 24 SETERRQ1(mpi_comm, PETSC_ERR_ARG_CORRUPT, "%s was called but Initialize was never called before.",func); 27 #define CheckIndex(row, col, cstart, cend) if((col) < (cstart) || (col) >= (cend))\ 28 SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "On row %d, index %d out of bounds [%d,%d) ",\ 29 (row), (col), (cstart), (cend)); 32 const MPI_Comm& comm_in)
35 if(
mpi_init) SETERRQ(
mpi_comm,1,
"This initializer should only be called once.");
45 const MPI_Comm& comm_in,
46 const PetscInt& num_sites_in,
47 const PetscInt& num_states_in,
48 const PetscBool& init_ops
51 PetscErrorCode ierr = 0;
54 ierr = PetscOptionsGetBool(NULL,NULL,
"-verbose",&
verbose,NULL); CHKERRQ(ierr);
55 ierr = PetscOptionsGetBool(NULL,NULL,
"-log_io",&
log_io,NULL); CHKERRQ(ierr);
60 SETERRQ(PETSC_COMM_SELF,1,
"Mismatch in MPI communicators.");
66 ierr = PetscOptionsGetString(NULL,NULL,
"-spin",spin,10,&
set); CHKERRQ(ierr);
71 if(it != SpinTypes.end())
77 SETERRQ1(
mpi_comm,1,
"Given -spin %s not valid/implemented.", spin);
94 SETERRQ(
mpi_comm,1,
"Given spin_type not valid/implemented.");
101 if(num_states_in == PETSC_DEFAULT){
131 ierr = MatEnsureAssembled(
H); CHKERRQ(ierr);
142 for(PetscInt isite = 0; isite <
num_sites; ++isite)
149 SETERRQ1(
mpi_comm, PETSC_ERR_ARG_OUTOFRANGE,
"Invalid input num_sites_in > 0. Given %d.", num_sites_in);
152 ierr = PetscOptionsGetInt(NULL,NULL,
"-rot_nsubcomm",&nsubcomm,NULL); CHKERRQ(ierr);
153 if(nsubcomm < 1 || nsubcomm >
mpi_size)
154 SETERRQ1(
mpi_comm, 1,
"-rot_nsubcomm must be in the range [1, mpi_rank]. Got %D.", nsubcomm);
157 if(mpi_size % nsubcomm)
158 SETERRQ2(
mpi_comm, 1,
"The size of mpi_comm (%d) must be a multiple of nsubcomm (%D).", mpi_size, nsubcomm);
161 site_color.resize((PetscMPIInt)(
num_sites+1));
162 for(PetscMPIInt isite = 0; isite <
num_sites+1; ++isite){
163 site_color[isite] = isite % nsubcomm;
166 #if defined(PETSC_USE_COMPLEX) 167 if(
rot_method == matptap) SETERRQ(
mpi_comm,1,
"The method matptap cannot be used for complex scalars.");
175 const MPI_Comm& comm_in,
176 const PetscInt& num_sites_in,
177 const std::vector<PetscReal>& qn_list_in,
178 const std::vector<PetscInt>& qn_size_in,
179 const PetscBool& init_ops
182 PetscErrorCode ierr = 0;
190 ierr = Magnetization_temp.
Initialize(comm_in, qn_list_in, qn_size_in); CHKERRQ(ierr);
191 PetscInt num_states_in = Magnetization_temp.
NumStates();
193 ierr =
Initialize(comm_in, num_sites_in, num_states_in, init_ops); CHKERRQ(ierr);
201 const PetscInt& num_sites_in,
204 PetscErrorCode ierr = 0;
215 const MPI_Comm& comm_in,
216 const std::string& block_path_in)
219 PetscInt num_sites_in, num_states_in, num_sectors_in;
220 std::vector<PetscReal> qn_list_in;
221 std::vector<PetscInt> qn_size_in;
224 PetscMPIInt mpi_rank_in;
225 ierr = MPI_Comm_rank(comm_in,&mpi_rank_in); CHKERRQ(ierr);
227 std::string block_path = block_path_in;
228 if(block_path.empty()) block_path =
'/';
229 else if(block_path.back()!=
'/') block_path +=
'/';
234 std::string block_file = block_path +
"BlockInfo.dat";
237 ierr = PetscTestFile(block_file.c_str(),
'r', &flg); CHKERRQ(ierr);
238 if(!flg) SETERRQ1(PETSC_COMM_SELF,1,
"Error in reading %s", block_file.c_str());
240 std::ifstream infofile(block_file.c_str());
241 std::map< std::string, PetscInt > infomap;
243 if (!infofile.is_open() || infofile.fail())
244 perror(
"error while opening file");
245 while (infofile && std::getline(infofile, line)) {
248 std::istringstream iss(line);
253 perror(
"error while reading file");
259 if(infomap.at(
"NumBytesPetscInt") !=
sizeof(PetscInt))
260 SETERRQ2(PETSC_COMM_SELF,1,
"Incompatible NumBytesPetscInt. Expected %lld. Got %lld.",
261 PetscInt(
sizeof(PetscInt)), infomap.at(
"NumBytesPetscInt"));
263 if(infomap.at(
"NumBytesPetscScalar") !=
sizeof(PetscScalar))
264 SETERRQ2(PETSC_COMM_SELF,1,
"Incompatible NumBytesPetscScalar. Expected %lld. Got %lld.",
265 PetscInt(
sizeof(PetscScalar)), infomap.at(
"NumBytesPetscScalar"));
267 #if defined(PETSC_USE_COMPLEX) 268 PetscInt PetscUseComplex = 1;
270 PetscInt PetscUseComplex = 0;
273 if(infomap.at(
"PetscUseComplex") != PetscUseComplex)
274 SETERRQ2(PETSC_COMM_SELF,1,
"Incompatible PetscUseComplex. Expected %lld. Got %lld.",
275 PetscInt(PetscUseComplex), infomap.at(
"PetscUseComplex"));
280 auto it = infomap.find(std::string(
"SpinTypeKey"));
281 if(it == infomap.end()) SETERRQ(PETSC_COMM_SELF,1,
"SpinTypeKey not found.");
282 spin_type_in = int(it->second);
286 num_sites_in = infomap.at(
"NumSites");
287 num_states_in = infomap.at(
"NumStates");
288 num_sectors_in = infomap.at(
"NumSectors");
291 ierr = MPI_Bcast(&num_sites_in, 1, MPIU_INT, 0, comm_in); CHKERRQ(ierr);
292 ierr = MPI_Bcast(&num_states_in, 1, MPIU_INT, 0, comm_in); CHKERRQ(ierr);
293 ierr = MPI_Bcast(&num_sectors_in, 1, MPIU_INT, 0, comm_in); CHKERRQ(ierr);
294 ierr = MPI_Bcast(&spin_type_in, 1, MPI_INT, 0, comm_in); CHKERRQ(ierr);
300 ierr = PetscOptionsGetString(NULL,NULL,
"-spin",spin,10,&
set); CHKERRQ(ierr);
302 auto it = SpinTypes.find(std::string(spin));
303 if(it == SpinTypes.end()) SETERRQ1(PETSC_COMM_SELF,1,
"Given -spin %s not valid/implemented.", spin);
304 spin_type_opt = it->second;
305 if(spin_type_in !=
int(spin_type_opt))
306 SETERRQ2(comm_in,1,
"Given spin types from file (%d) " 307 "and command line (%d) do not match", spin_type_in, spin_type_opt);
310 auto it = SpinTypesString.find(
Spin_t(spin_type_in));
311 if(it!=SpinTypesString.end()){
312 ierr = PetscOptionsSetValue(NULL,
"-spin",(it->second).c_str()); CHKERRQ(ierr);
314 SETERRQ1(PETSC_COMM_SELF,1,
"Input SpinTypeKey %d not valid/implemented.", spin_type_in);
318 qn_list_in.resize(num_sectors_in);
319 qn_size_in.resize(num_sectors_in);
321 if(num_sectors_in > 0)
326 std::string qn_file = block_path +
"QuantumNumbers.dat";
329 ierr = PetscTestFile(qn_file.c_str(),
'r', &flg); CHKERRQ(ierr);
330 if(!flg) SETERRQ1(PETSC_COMM_SELF,1,
"Error in reading %s", qn_file.c_str());
332 std::ifstream qnfile(qn_file.c_str());
334 if (!qnfile.is_open())
335 perror(
"error while opening file");
337 while (qnfile && std::getline(qnfile, line)) {
338 std::istringstream iss(line);
339 iss >> qn_size_in.at(ctr) >> qn_list_in.at(ctr);
343 perror(
"error while reading file");
344 if(ctr!=num_sectors_in)
345 SETERRQ3(PETSC_COMM_SELF,1,
"Incorrect number of data points in %s. " 346 "Expected %lld. Got %lld.", qn_file.c_str(), num_sectors_in, ctr);
351 ierr = MPI_Bcast(qn_size_in.data(), num_sectors_in, MPIU_INT, 0, comm_in); CHKERRQ(ierr);
352 ierr = MPI_Bcast(qn_list_in.data(), num_sectors_in, MPIU_REAL, 0, comm_in); CHKERRQ(ierr);
356 SETERRQ(comm_in,1,
"NumSectors cannot be zero.");
360 ierr =
Initialize(comm_in, num_sites_in, qn_list_in, qn_size_in, PETSC_FALSE); CHKERRQ(ierr);
363 std::string save_dir_temp =
save_dir;
377 PetscErrorCode ierr = 0;
385 default: SETERRQ(
mpi_comm, PETSC_ERR_ARG_WRONG,
"Incorrect operator type.");
392 for(PetscInt isite = 0; isite <
num_sites; ++isite)
396 SETERRQ2(
mpi_comm, PETSC_ERR_ARG_CORRUPT,
"%s[%d] matrix not yet created.", label, isite);
398 ierr = MatGetSize(Op[isite], &M, &N); CHKERRQ(ierr);
401 SETERRQ2(
mpi_comm, PETSC_ERR_ARG_WRONG,
"%s[%d] matrix not square.", label, isite);
405 SETERRQ4(
mpi_comm, PETSC_ERR_ARG_WRONG,
"%s[%d] matrix dimension does not match " 406 "the number of states. Expected %d. Got %d.", label, isite,
num_states, M);
415 PetscErrorCode ierr = 0;
416 CheckInit(__FUNCTION__);
431 PetscErrorCode ierr = 0;
432 CheckInitOnce(__FUNCTION__);
443 SETERRQ2(
mpi_comm, PETSC_ERR_ARG_WRONG,
"The number of states in the Magnetization object " 444 "and the internal value do not match. " "Expected %d. Got %d.",
num_states, magNumStates);
452 const PetscInt& isite,
453 std::vector<PetscInt>& nnzs
456 PetscErrorCode ierr = 0;
463 SETERRQ2(
mpi_comm, PETSC_ERR_ARG_OUTOFRANGE,
"Input isite (%d) out of bounds [0,%d).", isite,
num_sites);
468 default: SETERRQ(
mpi_comm, PETSC_ERR_ARG_WRONG,
"Incorrect operator type.");
472 ierr =
MatGetNNZs(matin, nnzs); CHKERRQ(ierr);
479 std::vector<PetscInt>& nnzs
483 PetscInt rstart, rend;
484 PetscErrorCode ierr = MatGetOwnershipRange(matin, &rstart, &rend); CHKERRQ(ierr);
485 PetscInt lrows = rend - rstart;
488 for(PetscInt irow=rstart; irow<rend; ++irow)
490 ierr = MatGetRow(matin, irow, &ncols, NULL, NULL); CHKERRQ(ierr);
491 nnzs[irow-rstart] = ncols;
492 ierr = MatRestoreRow(matin, irow, &ncols, NULL, NULL); CHKERRQ(ierr);
500 PetscErrorCode ierr = 0;
505 SETERRQ2(
mpi_comm, PETSC_ERR_ARG_OUTOFRANGE,
"Input isite (%d) out of bounds [0,%d).", isite,
num_sites);
510 default: SETERRQ(
mpi_comm, PETSC_ERR_ARG_WRONG,
"Incorrect operator type.");
522 PetscErrorCode ierr = 0;
525 ierr = MatEnsureAssembled(matin); CHKERRQ(ierr);
531 PetscInt rstart = matin->rmap->rstart;
532 PetscInt lrows = matin->rmap->n;
533 PetscInt cstart = matin->cmap->rstart;
534 PetscInt nrows = matin->rmap->N;
535 PetscInt ncols = matin->cmap->N;
539 if(magNumStates != nrows) SETERRQ2(
mpi_comm,1,
"Incorrect number of rows. Expected %d. Got %d.", magNumStates, nrows);
540 if(magNumStates != nrows) SETERRQ2(
mpi_comm,1,
"Incorrect number of cols. Expected %d. Got %d.", magNumStates, ncols);
543 if(!(0 <= rstart && rstart < nrows))
return ierr;
546 PetscBool matin_is_mpiaij, matin_is_mpiaijmkl;
547 ierr = PetscObjectTypeCompare((PetscObject)matin, MATMPIAIJ, &matin_is_mpiaij); CHKERRQ(ierr);
548 ierr = PetscObjectTypeCompare((PetscObject)matin, MATMPIAIJMKL, &matin_is_mpiaijmkl); CHKERRQ(ierr);
551 if(matin_is_mpiaij || matin_is_mpiaijmkl){
553 Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data;
554 PetscInt *cmap = mat->garray;
556 PetscInt nzA, nzB, *cA=
nullptr, *cB=
nullptr;
560 PetscInt col_GlobIdxStart, col_GlobIdxEnd;
564 const PetscInt lrow = Iter.Steps();
565 ierr = Iter.OpBlockToGlobalRange(OpType, col_GlobIdxStart, col_GlobIdxEnd, flg); CHKERRQ(ierr);
567 ierr = (*mat->A->ops->getrow)(mat->A, lrow, &nzA, &cA,
nullptr);CHKERRQ(ierr);
568 ierr = (*mat->B->ops->getrow)(mat->B, lrow, &nzB, &cB,
nullptr);CHKERRQ(ierr);
570 if(!flg && nzA!=0 && nzB!=0)
573 SETERRQ1(PETSC_COMM_SELF, 1,
"Row %d should have no entries.", lrow+rstart);
577 CheckIndex(lrow+rstart, cA[0] + cstart, col_GlobIdxStart, col_GlobIdxEnd);
578 CheckIndex(lrow+rstart, cA[nzA-1] + cstart, col_GlobIdxStart, col_GlobIdxEnd);
582 CheckIndex(lrow+rstart, cmap[cB[0]], col_GlobIdxStart, col_GlobIdxEnd);
583 CheckIndex(lrow+rstart, cmap[cB[nzB-1]], col_GlobIdxStart, col_GlobIdxEnd);
590 ierr = MatGetType(matin, &type);
593 SETERRQ1(
mpi_comm, PETSC_ERR_SUP,
"Implemented only for MATMPIAIJ. Got %s.", type);
596 ierr = PetscInfo(0,
"Operator matrix check satisfied.\n"); CHKERRQ(ierr);
603 PetscErrorCode ierr = 0;
604 CheckInit(__FUNCTION__);
610 for(PetscInt isite = 0; isite <
num_sites; ++isite){
615 for(PetscInt isite = 0; isite <
num_sites; ++isite){
625 PetscErrorCode ierr = 0;
627 if(
init_Sm) SETERRQ(
mpi_comm, 1,
"Sm was previously initialized. Call DestroySm() first.");
630 for(PetscInt isite = 0; isite <
num_sites; ++isite){
631 ierr = MatHermitianTranspose(
SpData[isite], MAT_INITIAL_MATRIX, &
SmData[isite]); CHKERRQ(ierr);
641 PetscErrorCode ierr = 0;
643 if(!
init_Sm) SETERRQ1(
mpi_comm, 1,
"%s was called but Sm was not yet initialized. ",__FUNCTION__);
645 for(PetscInt isite = 0; isite <
num_sites; ++isite){
646 ierr = MatDestroy(&
SmData[isite]); CHKERRQ(ierr);
657 PetscErrorCode ierr = 0;
658 if (PetscUnlikely(!
init))
return 0;
661 for(PetscInt isite = 0; isite <
num_sites; ++isite){
662 ierr = MatDestroy(&
SzData[isite]); CHKERRQ(ierr);
663 ierr = MatDestroy(&
SpData[isite]); CHKERRQ(ierr);
667 ierr = MatDestroy(&
H); CHKERRQ(ierr);
679 PetscErrorCode ierr = 0;
680 Mat RotMatT, RotMat, *SpData_loc, *SzData_loc, H_loc;
682 PetscMPIInt sub_rank, sub_size, sub_color=0;
684 CheckInit(__FUNCTION__);
690 const PetscInt NumStatesOrig = Source.
NumStates();
691 const PetscInt NumSitesOrig = Source.
NumSites();
692 PetscInt NRows_RT, NCols_RT;
693 ierr = MatGetSize(RotMatT_in, &NRows_RT, &NCols_RT); CHKERRQ(ierr);
694 if(NCols_RT != NumStatesOrig)
695 SETERRQ2(
mpi_comm, 1,
"RotMatT_in incorrect number of cols. Expected %d. Got %d.", NCols_RT, NumStatesOrig);
697 SETERRQ2(
mpi_comm, 1,
"RotMatT_in incorrect number of rows. Expected %d. Got %d.", NRows_RT,
num_states);
699 SETERRQ2(
mpi_comm, 1,
"RotMatT_in incorrect number of sites. Expected %d. Got %d.", NumSitesOrig,
num_sites);
706 "must be called first before using this feature.");
707 ierr = MatCreateRedundantMatrix(RotMatT_in, nsubcomm, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &RotMatT); CHKERRQ(ierr);
708 ierr = PetscObjectGetComm((PetscObject)RotMatT, &subcomm); CHKERRQ(ierr);
709 ierr = MPI_Comm_rank(subcomm, &sub_rank); CHKERRQ(ierr);
710 ierr = MPI_Comm_size(subcomm, &sub_size); CHKERRQ(ierr);
718 RotMatT = RotMatT_in;
723 ierr = MatHermitianTranspose(RotMatT, MAT_INITIAL_MATRIX, &RotMat); CHKERRQ(ierr);
725 SETERRQ(
mpi_comm,1,
"Not implemented.");
729 for(PetscInt isite = 0; isite <
num_sites; ++isite)
731 ierr = MatDestroy(&
SpData[isite]); CHKERRQ(ierr);
732 ierr = MatDestroy(&
SzData[isite]); CHKERRQ(ierr);
734 ierr = MatDestroy(&
H); CHKERRQ(ierr);
737 ierr = PetscCalloc1(num_sites, &SpData_loc); CHKERRQ(ierr);
738 ierr = PetscCalloc1(num_sites, &SzData_loc); CHKERRQ(ierr);
743 for(PetscInt isite = 0; isite <
num_sites; ++isite){
744 if(site_color[isite]!=sub_color)
continue;
745 ierr = Source.
RetrieveOperator(
"Sp", isite, SpData_loc[isite], subcomm); CHKERRQ(ierr);
746 ierr = Source.
RetrieveOperator(
"Sz", isite, SzData_loc[isite], subcomm); CHKERRQ(ierr);
748 if(site_color[num_sites]==sub_color){
755 for(PetscInt isite = 0; isite <
num_sites; ++isite) SpData_loc[isite] = Source.
Sp(isite);
756 for(PetscInt isite = 0; isite <
num_sites; ++isite) SzData_loc[isite] = Source.
Sz(isite);
763 for(PetscInt isite = 0; isite <
num_sites; ++isite)
765 if(site_color[isite]!=sub_color)
continue;
766 ierr = MatMatMatMult(RotMatT, SpData_loc[isite], RotMat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &
SpData[isite]); CHKERRQ(ierr);
767 ierr = MatMatMatMult(RotMatT, SzData_loc[isite], RotMat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &
SzData[isite]); CHKERRQ(ierr);
769 if(site_color[num_sites]==sub_color)
771 ierr = MatMatMatMult(RotMatT, H_loc, RotMat, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &
H); CHKERRQ(ierr);
776 SETERRQ(
mpi_comm,1,
"Not implemented.");
782 for(PetscInt isite = 0; isite <
num_sites; ++isite)
784 if(site_color[isite]!=sub_color)
continue;
788 if(site_color[num_sites]==sub_color)
791 ierr = MatDestroy(&H_loc); CHKERRQ(ierr);
794 for(PetscInt isite = 0; isite <
num_sites; ++isite)
796 ierr = MatDestroy(&
SpData[isite]); CHKERRQ(ierr);
797 ierr = MatDestroy(&
SzData[isite]); CHKERRQ(ierr);
798 ierr = MatDestroy(&SpData_loc[isite]); CHKERRQ(ierr);
799 ierr = MatDestroy(&SzData_loc[isite]); CHKERRQ(ierr);
801 ierr = MatDestroy(&RotMatT); CHKERRQ(ierr);
802 ierr = MatDestroy(&
H); CHKERRQ(ierr);
804 ierr =
Destroy(); CHKERRQ(ierr);
818 ierr = PetscFree(SpData_loc); CHKERRQ(ierr);
819 ierr = PetscFree(SzData_loc); CHKERRQ(ierr);
820 ierr = MatDestroy(&RotMat); CHKERRQ(ierr);
828 ierr = MatEnsureAssembled_MultipleMats(
SzData); CHKERRQ(ierr);
829 ierr = MatEnsureAssembled_MultipleMats(
SpData); CHKERRQ(ierr);
830 if(
H){ ierr = MatEnsureAssembled(
H); CHKERRQ(ierr);}
836 const std::string& save_dir_in
839 PetscErrorCode ierr = 0;
840 PetscBool flg = PETSC_FALSE;
841 if(save_dir_in.empty()) SETERRQ(
mpi_comm,1,
"Save dir cannot be empty.");
842 if(!
mpi_init) SETERRQ(
mpi_comm,1,
"MPI Initialization must be completed first.");
844 ierr = PetscTestDirectory(save_dir_in.c_str(),
'r', &flg); CHKERRQ(ierr);
846 ierr = MPI_Bcast(&flg, 1, MPI_INT, 0,
mpi_comm); CHKERRQ(ierr);
847 if(!flg) SETERRQ1(
mpi_comm,1,
"Directory %s does not exist. Please verify that -scratch_dir is specified correctly.",save_dir_in.c_str());
850 if(save_dir.back()!=
'/') save_dir +=
'/';
858 const std::string& read_dir_in,
859 const std::string& write_dir_in
863 SETERRQ(
mpi_comm,1,
"InitializeSave() and SetDiskStorage() cannot both be used " 864 "on the same block.");
889 std::string OpFilename(
const std::string& RootDir,
const std::string& OpName,
const size_t& isite = 0){
890 std::ostringstream oss;
891 oss << RootDir << OpName <<
"_" << std::setfill(
'0') << std::setw(9) << isite <<
".mat";
897 const std::string& OpName,
900 const MPI_Comm& comm_in)
904 if(!Op) SETERRQ(PETSC_COMM_SELF,1,
"Input matrix is null.");
905 ierr = PetscViewerBinaryOpen(comm_in, OpFilename(
write_dir,OpName,isite).c_str(), FILE_MODE_WRITE, &binv); CHKERRQ(ierr);
906 ierr = MatView(Op, binv); CHKERRQ(ierr);
907 ierr = PetscViewerDestroy(&binv); CHKERRQ(ierr);
908 ierr = MatDestroy(&Op); CHKERRQ(ierr);
912 PetscMPIInt subcomm_rank;
913 ierr = MPI_Comm_rank(comm_in, &subcomm_rank); CHKERRQ(ierr);
917 <<
"Saved: " << OpFilename(
write_dir,OpName,isite) << std::endl;
926 CheckInit(__FUNCTION__);
928 SETERRQ(
mpi_comm,1,
"InitializeSave() or SetDiskStorage() must be called first.");
934 std::ofstream infofile;
935 infofile.open((
write_dir +
"BlockInfo.dat").c_str());
937 #if defined(PETSC_USE_COMPLEX) 938 PetscInt PetscUseComplex = 1;
940 PetscInt PetscUseComplex = 0;
943 #define SaveInfo(KEY,VALUE) infofile << std::left << std::setfill(' ') \ 944 << std::setw(30) << KEY << " " << VALUE << std::endl; 945 SaveInfo(
"NumBytesPetscInt",
sizeof(PetscInt));
946 SaveInfo(
"NumBytesPetscScalar",
sizeof(PetscScalar));
947 SaveInfo(
"PetscUseComplex", PetscUseComplex);
959 std::ofstream qnfile;
960 qnfile.open((
write_dir +
"QuantumNumbers.dat").c_str());
964 for(PetscInt idx=0; idx<num_sectors; idx++)
965 qnfile << qn_size.at(idx) <<
" " 966 << qn_list.at(idx) << std::endl;
975 const std::string& OpName,
978 const MPI_Comm& comm_in)
986 ierr = MatHermitianTranspose(OpSp,MAT_INITIAL_MATRIX,&Op); CHKERRQ(ierr);
987 ierr = MatDestroy(&OpSp); CHKERRQ(ierr);
991 MPI_Comm comm = (comm_in==MPI_COMM_NULL) ?
mpi_comm : comm_in;
992 ierr = PetscViewerBinaryOpen(comm, OpFilename(
save_dir,OpName,isite).c_str(), FILE_MODE_READ, &binv); CHKERRQ(ierr);
993 ierr = MatCreate(comm, &Op); CHKERRQ(ierr);
995 if(comm_in==MPI_COMM_NULL){
996 ierr = MatSetFromOptions(Op); CHKERRQ(ierr);
998 ierr = MatLoad(Op, binv); CHKERRQ(ierr);
999 ierr = PetscViewerDestroy(&binv); CHKERRQ(ierr);
1002 PetscMPIInt subcomm_rank;
1003 ierr = MPI_Comm_rank(comm_in, &subcomm_rank); CHKERRQ(ierr);
1007 <<
"Retrieved: " << OpFilename(
save_dir,OpName,isite) << std::endl;
1016 CheckInit(__FUNCTION__);
1018 SETERRQ(
mpi_comm,1,
"InitializeSave() or SetDiskStorage() must be called first.");
1020 PetscErrorCode ierr;
1021 for(PetscInt isite = 0; isite <
num_sites; ++isite){
1024 for(PetscInt isite = 0; isite <
num_sites; ++isite){
1031 ierr =
Destroy(); CHKERRQ(ierr);
1051 SETERRQ(
mpi_comm,1,
"InitializeSave() or SetDiskStorage() must be called first.");
1053 if(
init) SETERRQ(
mpi_comm,1,
"Destroy() must be called first.");
1054 PetscErrorCode ierr;
1057 saved = PETSC_FALSE;
1065 PetscErrorCode ierr;
1066 PetscBool flg = PETSC_FALSE;
1067 for(PetscInt isite = 0; isite <
num_sites; ++isite){
1070 for(PetscInt isite = 0; isite <
num_sites; ++isite){
1073 ierr = PetscTestFile(OpFilename(
save_dir,
"H",0).c_str(),
'r', &flg); CHKERRQ(ierr);
1101 PetscErrorCode ierr =
Retrieve(); CHKERRQ(ierr);
1108 if(!
MPIInitialized()) SETERRQ(PETSC_COMM_SELF,1,
"Block's MPI communicator not initialized.");
1109 PetscErrorCode ierr;
1111 ierr = InitSingleSiteOperator(
MPIComm(),
loc_dim(), &Sz); CHKERRQ(ierr);
1113 PetscInt locrows, Istart;
1114 ierr = PreSplitOwnership(
MPIComm(),
loc_dim(), locrows, Istart); CHKERRQ(ierr);
1115 PetscInt Iend = Istart + locrows;
1131 if (Istart <= 0 && 0 < Iend){
1132 ierr = MatSetValue(Sz, 0, 0, +0.5, INSERT_VALUES); CHKERRQ(ierr);
1134 if (Istart <= 1 && 1 < Iend){
1135 ierr = MatSetValue(Sz, 1, 1, -0.5, INSERT_VALUES); CHKERRQ(ierr);
1151 if (Istart <= 0 && 0 < Iend){
1152 ierr = MatSetValue(Sz, 0, 0, +1.0, INSERT_VALUES); CHKERRQ(ierr);
1154 if (Istart <= 2 && 2 < Iend){
1155 ierr = MatSetValue(Sz, 2, 2, -1.0, INSERT_VALUES); CHKERRQ(ierr);
1160 SETERRQ(
mpi_comm,1,
"Given spin_type not valid/implemented.");
1163 ierr = MatAssemblyBegin(Sz, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1164 ierr = MatAssemblyEnd(Sz, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1171 if(!
MPIInitialized()) SETERRQ(PETSC_COMM_SELF,1,
"Block's MPI communicator not initialized.");
1172 PetscErrorCode ierr;
1174 ierr = InitSingleSiteOperator(
MPIComm(),
loc_dim(), &Sp); CHKERRQ(ierr);
1176 PetscInt locrows, Istart;
1177 ierr = PreSplitOwnership(
MPIComm(),
loc_dim(), locrows, Istart); CHKERRQ(ierr);
1178 PetscInt Iend = Istart + locrows;
1179 const PetscScalar Sqrt2 = PetscSqrtScalar(2.0);
1193 if (Istart <= 0 && 0 < Iend){
1194 ierr = MatSetValue(Sp, 0, 1, +1.0, INSERT_VALUES); CHKERRQ(ierr);
1210 if (Istart <= 0 && 0 < Iend){
1211 ierr = MatSetValue(Sp, 0, 1, Sqrt2, INSERT_VALUES); CHKERRQ(ierr);
1213 if (Istart <= 1 && 1 < Iend){
1214 ierr = MatSetValue(Sp, 1, 2, Sqrt2, INSERT_VALUES); CHKERRQ(ierr);
1219 SETERRQ(
mpi_comm,1,
"Given spin_type not valid/implemented.");
1222 ierr = MatAssemblyBegin(Sp, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
1223 ierr = MatAssemblyEnd(Sp, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
PetscErrorCode EnsureRetrieved()
Ensures that the block matrices have been retrieved if the block is initialized, otherwise does nothing.
std::vector< Mat > SzData
Array of matrices representing the single-site Sz operators, one per site in the block.
Contains information on quantum numbers and associated index ranges.
PetscErrorCode InitializeSave(const std::string &save_dir_in)
Initializes the writing of the block matrices to file.
PetscBool retrieved
Whether the block matrices have been retrieved.
RotMethod rot_method
Method to use for performing basis transformation.
Spin_t
Identifies the spin types implemented for this block.
PetscBool init_save
Whether saving the block matrices to file has been initialized correctly.
MPI_Comm mpi_comm
MPI Communicator.
PETSC_EXTERN PetscErrorCode Makedir(const std::string &dir_name)
Creates a directory named dir_name.
virtual std::vector< PetscScalar > loc_qn_list() const
Sz sectors of a single site.
std::vector< Mat > SpData
Array of matrices representing the single-site Sp (raising) operators, one per site in the block.
Base class for the implementation of a block of spin sites.
const std::vector< Mat > & Sp() const
Returns the list of matrix pointer to the operators.
PetscInt num_reads
Number of reads from file made for this block.
std::string read_dir
Root directory to read the matrix blocks from during the first access.
std::string write_dir
Root directory to write the matrix blocks into.
PetscErrorCode AssembleOperators()
Ensures that all operators are assembled.
PetscErrorCode EnsureSaved()
Ensures that the block matrices have been saved if the block is initialized, otherwise does nothing.
bool Loop() const
Determines whether the end of the range has not yet been reached.
MPI_Comm MPIComm() const
Gets the communicator associated to the block.
PetscBool log_io
Tells whether to printout IO functions.
PetscErrorCode SetDiskStorage(const std::string &read_dir_in, const std::string &write_dir_in)
Tells where to read from and save the operators and data about the block.
Op_t
Identifies the three possible spin operators (Sz, Sp, Sm) and also represents the shift associated to each operator's action on the magnetization quantum number.
PetscBool MPIInitialized() const
Returns PETSC_TRUE if the MPI communicator was initialized properly.
PetscErrorCode CheckOperatorBlocks() const
Checks whether all matrix blocks follow the correct sector indices using MatCheckOperatorBlocks() ...
std::vector< Mat > SmData
Array of matrices representing the single-site Sm (lowering) operators, one per site in the block.
PetscBool verbose
Tells whether to printout info during certain function calls.
Mat H
Matrix representation of the Hamiltonian operator.
virtual std::vector< PetscInt > loc_qn_size() const
Number of states in each sector in a single site.
std::vector< PetscReal > List() const
Returns the list of quantum numbers.
PetscErrorCode RotateOperators(SpinBase &Source, const Mat &RotMatT)
Rotates all operators from a source block using the given transposed rotation matrix.
PetscBool init_Sm
Tells whether the Sm matrices have been initialized.
PetscErrorCode InitializeFromDisk(const MPI_Comm &comm_in, const std::string &block_path)
Initializes block object from data located in the directory block_path.
PetscErrorCode MatOpGetNNZs(const Op_t &OpType, const PetscInt &isite, std::vector< PetscInt > &nnzs) const
Returns the number of non-zeros in each row for an operator acting on a given site.
PetscErrorCode DestroySm()
Destroys the Sm matrices on the fly.
Spin_t spin_type
Type of spin contained in the block.
virtual PetscErrorCode MatSpinSzCreate(Mat &Sz)
Creates the single-site Sz operator.
PetscErrorCode CheckOperators() const
Checks whether all operators have been initialized and have correct dimensions.
PetscErrorCode CheckSectors() const
Checks whether sector indexing was done properly.
PetscInt num_sites
Number of sites in the block.
std::vector< PetscInt > Sizes() const
Returns the number of basis states in each quantum number block.
PetscErrorCode SaveOperator(const std::string &OpName, const size_t &isite, Mat &Op, const MPI_Comm &comm_in)
Saves a single operator.
std::string spin_type_str
Type of spin contained in the block expressed as a string.
const std::vector< Mat > & Sz() const
Returns the list of matrix pointer to the operators.
Iterates over a range of basis indices with information on each of its quantum number.
PetscMPIInt mpi_size
MPI size of mpi_comm.
PetscBool init_once
Tells whether the block was initialized at least once before.
std::vector< PetscInt > _loc_qn_size
Stores the number of states in each sector in a single site.
PetscErrorCode Retrieve()
Retrieve all the matrix operators that were written to file by SaveAndDestroy().
virtual PetscInt loc_dim() const
Local dimension of a single site.
PetscErrorCode RetrieveOperator(const std::string &OpName, const size_t &isite, Mat &Op, const MPI_Comm &comm_in=MPI_COMM_NULL)
Retrieves a single operator.
virtual PetscErrorCode MatSpinSpCreate(Mat &Sp)
Creates the single-site raising operator Sp, from which we can define the lowering operator Sm as its Hermitian transpose.
PetscErrorCode CheckOperatorArray(const Op_t &OpType) const
Determines whether the operator arrays have been successfully filled with matrices.
Mat Sz(const PetscInt &Isite) const
Returns the matrix pointer to the operator at site Isite.
PetscBool mpi_init
Tells whether the block's MPI attributes were initialized.
PetscMPIInt mpi_rank
MPI rank in mpi_comm.
PetscErrorCode SaveAndDestroy()
Save all the matrix operators to file and destroy the current storage.
PetscErrorCode SaveBlockInfo()
Save some information about the block that could be used to reconstruct it later. ...
PetscErrorCode CreateSm()
Creates the Sm matrices on the fly.
PetscErrorCode Destroy()
Destroys all operator matrices and frees memory.
PetscInt NumSectors() const
Returns the number of quantum number sectors.
PetscErrorCode MatOpCheckOperatorBlocks(const Op_t &op_t, const PetscInt &isite) const
Checks the block indexing in the matrix operator op_t on site isite.
PetscErrorCode Initialize(const MPI_Comm &comm_in)
Initializes block object's MPI attributes.
PetscBool disk_set
Whether SetDiskStorage() has properly set the block storage locations.
std::string save_dir
Root directory to read from and write the matrix blocks into.
PetscBool saved
Whether the block matrices have been saved.
QuantumNumbers Magnetization
Stores the information on the magnetization Sectors.
PetscInt _loc_dim
Stores the local dimension of a single site.
PetscErrorCode Initialize(const MPI_Comm &mpi_comm_in, const std::vector< PetscReal > &qn_list_in, const std::vector< PetscInt > &qn_size_in)
Initializes the quantum number object.
PetscInt num_states
Number of basis states in the Hilbert space.
PetscErrorCode MatGetNNZs(const Mat &matin, std::vector< PetscInt > &nnzs) const
Returns the number of non-zeros in each row for a given matrix.
PetscInt NumSites() const
Gets the number of sites that are currently initialized.
PetscErrorCode MatCheckOperatorBlocks(const Op_t &op_t, const Mat &matin) const
Checks the block indexing in the matrix operator op_t on specified matrix matin.
PetscErrorCode CheckInitialized() const
Checks whether quantum number object was properly initialized.
Mat Sp(const PetscInt &Isite) const
Returns the matrix pointer to the operator at site Isite.
MPI_Comm MPIComm() const
Gets the communicator associated to the block.
Defines a single operator to be used for performing measurements.
PetscBool init
Tells whether the block was initialized.
std::vector< PetscScalar > _loc_qn_list
Stores the Sz sectors of a single site.
PetscInt NumStates() const
Gets the number of states that are currently used.
PetscErrorCode Retrieve_NoChecks()
Private function to retrieve operators without checking for save initialization.
PetscInt NumStates() const
Returns the total number of states.