Actual source code: aijhdf5.c

#include <../src/mat/impls/aij/seq/aij.h>
#include <petsc/private/isimpl.h>
#include <petsc/private/vecimpl.h>
#include <petsclayouthdf5.h>

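/*
  MatLoad_AIJ_HDF5 - loads an AIJ matrix in parallel from an HDF5 file.

  The datasets are read using the names of MATLAB's -v7.3 sparse format
  ("jc", "ir", "data" and the attribute "MATLAB_sparse"), and the group to
  read is selected by the matrix's object name. This routine is invoked by
  MatLoad() when the viewer is a PETSCVIEWERHDF5.
*/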
PetscErrorCode MatLoad_AIJ_HDF5(Mat mat, PetscViewer viewer)
{
  PetscViewerFormat  format;
  const PetscInt    *i_glob = NULL;
  PetscInt          *i      = NULL;
  const PetscInt    *j      = NULL;
  const PetscScalar *a      = NULL;
  char              *a_name = NULL, *i_name = NULL, *j_name = NULL, *c_name = NULL;
  const char        *mat_name = NULL;
  PetscInt           p, m, M, N;
  PetscInt           bs = mat->rmap->bs;
  PetscInt          *range;
  PetscBool          flg;
  IS                 is_i = NULL, is_j = NULL;
  Vec                vec_a = NULL;
  PetscLayout        jmap  = NULL;
  MPI_Comm           comm;
  PetscMPIInt        rank, size;

  PetscFunctionBegin;
  PetscCall(PetscViewerGetFormat(viewer, &format));
  switch (format) {
  case PETSC_VIEWER_HDF5_PETSC:
  case PETSC_VIEWER_DEFAULT:
  case PETSC_VIEWER_NATIVE:
  case PETSC_VIEWER_HDF5_MAT:
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "PetscViewerFormat %s not supported for HDF5 input.", PetscViewerFormats[format]);
  }

  PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscObjectGetName((PetscObject)mat, &mat_name));
  if (format == PETSC_VIEWER_HDF5_MAT) {
    PetscCall(PetscStrallocpy("jc", &i_name));
    PetscCall(PetscStrallocpy("ir", &j_name));
    PetscCall(PetscStrallocpy("data", &a_name));
    PetscCall(PetscStrallocpy("MATLAB_sparse", &c_name));
  } else {
    /* TODO Once the corresponding MatView is implemented, change the names to i,j,a */
    /* TODO Maybe both namings could coexist in the file, using the "symbolic link" feature of HDF5. */
    PetscCall(PetscStrallocpy("jc", &i_name));
    PetscCall(PetscStrallocpy("ir", &j_name));
    PetscCall(PetscStrallocpy("data", &a_name));
    PetscCall(PetscStrallocpy("MATLAB_sparse", &c_name));
  }

  PetscOptionsBegin(comm, NULL, "Options for loading matrix from HDF5", "Mat");
  PetscCall(PetscOptionsInt("-matload_block_size", "Set the blocksize used to store the matrix", "MatLoad", bs, &bs, &flg));
  PetscOptionsEnd();
  if (flg) PetscCall(MatSetBlockSize(mat, bs));

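  /* Note: the object name of the matrix selects the HDF5 group to read from.
     In the MATLAB format, the MATLAB_sparse attribute holds the number of rows
     of the original MATLAB matrix, which becomes the column count N of the
     matrix as read here (the final transpose below restores the original shape). */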
  PetscCall(PetscViewerHDF5PushGroup(viewer, mat_name));
  PetscCall(PetscViewerHDF5ReadAttribute(viewer, NULL, c_name, PETSC_INT, NULL, &N));
  PetscCall(PetscViewerHDF5ReadSizes(viewer, i_name, NULL, &M));
  --M; /* i has size M+1 since the global number of nonzeros is stored at the end */

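  /* MATLAB (-v7.3) stores sparse matrices in compressed sparse *column* (CSC)
     order, so interpreting jc/ir/data as CSR yields the transpose of the stored
     matrix. Unless the matrix is known to be symmetric, the layouts are swapped
     here and the matrix is transposed in place after assembly (see below). */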
  if (format == PETSC_VIEWER_HDF5_MAT && mat->symmetric != PETSC_BOOL3_TRUE) {
    /* Swap the row and column layouts of the unallocated matrix; this avoids calling MatTranspose() just to transpose the sparsity pattern and layout. */
    if (!mat->preallocated) {
      PetscLayout tmp;
      tmp       = mat->rmap;
      mat->rmap = mat->cmap;
      mat->cmap = tmp;
    } else SETERRQ(comm, PETSC_ERR_SUP, "Not for a preallocated matrix - we would need to transpose it here, which we want to avoid");
  }

  /* If the global sizes are already set, check that they are consistent with those given in the file */
  PetscCheck(mat->rmap->N < 0 || mat->rmap->N == M, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows: Matrix in file has (%" PetscInt_FMT ") and input matrix has (%" PetscInt_FMT ")", mat->rmap->N, M);
  PetscCheck(mat->cmap->N < 0 || mat->cmap->N == N, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols: Matrix in file has (%" PetscInt_FMT ") and input matrix has (%" PetscInt_FMT ")", mat->cmap->N, N);

  /* Determine ownership of all (block) rows and columns */
  mat->rmap->N = M;
  mat->cmap->N = N;
  PetscCall(PetscLayoutSetUp(mat->rmap));
  PetscCall(PetscLayoutSetUp(mat->cmap));
  m = mat->rmap->n;

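  /* The row pointer array is loaded through the IS machinery so that each rank
     reads only its own slice of the dataset in parallel. */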
  /* Read array i (array of row indices) */
  PetscCall(PetscMalloc1(m + 1, &i)); /* allocate i with one extra position for the local number of nonzeros on each rank */
  i[0] = i[m] = 0;                    /* make the last entry always defined - the code block below overwrites it only on the last rank */
  if (rank == size - 1) m++;          /* in the loaded array i_glob, only the last rank has one extra position holding the global number of nonzeros */
  M++;
  PetscCall(ISCreate(comm, &is_i));
  PetscCall(PetscObjectSetName((PetscObject)is_i, i_name));
  PetscCall(PetscLayoutSetLocalSize(is_i->map, m));
  PetscCall(PetscLayoutSetSize(is_i->map, M));
  PetscCall(ISLoad(is_i, viewer));
  PetscCall(ISGetIndices(is_i, &i_glob));
  PetscCall(PetscArraycpy(i, i_glob, m));

  /* Reset m and M to the matrix sizes */
  m = mat->rmap->n;
  M--;

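  /* Illustrative example (not from the file): take i_glob = {0,2,5,7,9} for
     M = 4 rows on 2 ranks with 2 rows each. Rank 0 loads i = {0,2}, rank 1
     loads {5,7,9}. The Allgather of i[0] gives range = {0,5,.}, the broadcast
     fills range[2] = 9, and i[m] = range[rank+1] completes each local row
     pointer: {0,2,5} on rank 0 and {5,7,9} on rank 1 (made zero-based further
     below). The layout jmap built from these ranges then assigns j/a entries
     0..4 to rank 0 and 5..8 to rank 1. */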
  /* Create a PetscLayout for the j and a vectors; construct the ranges first */
  PetscCall(PetscMalloc1(size + 1, &range));
  PetscCallMPI(MPI_Allgather(i, 1, MPIU_INT, range, 1, MPIU_INT, comm));
  /* The last rank has the global number of nonzeros (= the length of the j and a arrays) in i[m] (its last i entry), so broadcast it */
  range[size] = i[m];
  PetscCallMPI(MPI_Bcast(&range[size], 1, MPIU_INT, size - 1, comm));
  for (p = size - 1; p > 0; p--) {
    if (!range[p]) range[p] = range[p + 1]; /* for ranks with 0 rows, take the value from the next rank */
  }
  i[m] = range[rank + 1]; /* i[m] (the last i entry) equals the next rank's offset */
  /* Deduce rstart, rend, n and N from the ranges */
  PetscCall(PetscLayoutCreateFromRanges(comm, range, PETSC_OWN_POINTER, 1, &jmap));

  /* Convert global to local indexing of rows */
  for (p = 1; p < m + 1; ++p) i[p] -= i[0];
  i[0] = 0;

  /* Read array j (array of column indices) */
  PetscCall(ISCreate(comm, &is_j));
  PetscCall(PetscObjectSetName((PetscObject)is_j, j_name));
  PetscCall(PetscLayoutDuplicate(jmap, &is_j->map));
  PetscCall(ISLoad(is_j, viewer));
  PetscCall(ISGetIndices(is_j, &j));

  /* Read array a (array of values) */
  PetscCall(VecCreate(comm, &vec_a));
  PetscCall(PetscObjectSetName((PetscObject)vec_a, a_name));
  PetscCall(PetscLayoutDuplicate(jmap, &vec_a->map));
  PetscCall(VecLoad(vec_a, viewer));
  PetscCall(VecGetArrayRead(vec_a, &a));

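  /* Both preallocation calls below are safe on any AIJ matrix: each routine
     dispatches through PetscTryMethod(), so the variant that does not match
     the actual matrix type (sequential vs. MPI) is a no-op. */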
  /* Populate the matrix */
  if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocationCSR(mat, i, j, a));
  PetscCall(MatMPIAIJSetPreallocationCSR(mat, i, j, a));
  /*
  PetscCall(MatSeqBAIJSetPreallocationCSR(mat,bs,i,j,a));
  PetscCall(MatMPIBAIJSetPreallocationCSR(mat,bs,i,j,a));
  */

  if (format == PETSC_VIEWER_HDF5_MAT && mat->symmetric != PETSC_BOOL3_TRUE) {
    /* Transpose the input matrix back */
    PetscCall(MatTranspose(mat, MAT_INPLACE_MATRIX, &mat));
  }

  PetscCall(PetscViewerHDF5PopGroup(viewer));
  PetscCall(PetscFree(i_name));
  PetscCall(PetscFree(j_name));
  PetscCall(PetscFree(a_name));
  PetscCall(PetscFree(c_name));
  PetscCall(PetscLayoutDestroy(&jmap));
  PetscCall(PetscFree(i));
  PetscCall(ISRestoreIndices(is_i, &i_glob));
  PetscCall(ISRestoreIndices(is_j, &j));
  PetscCall(VecRestoreArrayRead(vec_a, &a));
  PetscCall(ISDestroy(&is_i));
  PetscCall(ISDestroy(&is_j));
  PetscCall(VecDestroy(&vec_a));
  PetscFunctionReturn(PETSC_SUCCESS);
}
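
For context, a minimal usage sketch (not part of the file above): this loader is not called directly, but dispatched by MatLoad() when the viewer is an HDF5 viewer. The file name "matrix.h5" and the object name "A" are placeholders, and PETSc must be configured with HDF5 support.

#include <petscmat.h>
#include <petscviewerhdf5.h>

int main(int argc, char **argv)
{
  Mat         A;
  PetscViewer viewer;

  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(PetscViewerHDF5Open(PETSC_COMM_WORLD, "matrix.h5", FILE_MODE_READ, &viewer));
  PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_HDF5_MAT)); /* reading a MATLAB -v7.3 file */
  PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
  PetscCall(PetscObjectSetName((PetscObject)A, "A")); /* selects the HDF5 group to read */
  PetscCall(MatLoad(A, viewer));
  PetscCall(PetscViewerPopFormat(viewer));
  PetscCall(PetscViewerDestroy(&viewer));
  /* ... use the matrix ... */
  PetscCall(MatDestroy(&A));
  PetscCall(PetscFinalize());
  return 0;
}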