diff --git a/examples/fluids/include/mat-ceed-impl.h b/examples/fluids/include/mat-ceed-impl.h index cfc1bd61f6..d035fd230d 100644 --- a/examples/fluids/include/mat-ceed-impl.h +++ b/examples/fluids/include/mat-ceed-impl.h @@ -36,7 +36,7 @@ PETSC_CEED_EXTERN PetscErrorCode MatCeedContextCreate(DM dm_x, DM dm_y, Vec X_lo PetscLogEvent log_event_ceed_mult_transpose, MatCeedContext *ctx); PETSC_CEED_EXTERN PetscErrorCode MatCeedContextReference(MatCeedContext ctx); PETSC_CEED_EXTERN PetscErrorCode MatCeedContextReferenceCopy(MatCeedContext ctx, MatCeedContext *ctx_copy); -PETSC_CEED_EXTERN PetscErrorCode MatCeedContextDestroy(MatCeedContext ctx); +PETSC_CEED_EXTERN PetscErrorCode MatCeedContextDestroy(MatCeedContext *ctx); // MatCEED PETSC_CEED_EXTERN PetscErrorCode MatGetDiagonal_Ceed(Mat A, Vec D); diff --git a/examples/fluids/include/mat-ceed.h b/examples/fluids/include/mat-ceed.h index 42a192fa12..d392c6532f 100644 --- a/examples/fluids/include/mat-ceed.h +++ b/examples/fluids/include/mat-ceed.h @@ -14,20 +14,25 @@ #define MATCEED "ceed" // Core functionality -PETSC_CEED_EXTERN PetscErrorCode MatCeedCreate(DM dm_x, DM dm_y, CeedOperator op_mult, CeedOperator op_mult_transpose, Mat *mat); +PETSC_CEED_EXTERN PetscErrorCode MatCreateCeed(DM dm_x, DM dm_y, CeedOperator op_mult, CeedOperator op_mult_transpose, Mat *mat); PETSC_CEED_EXTERN PetscErrorCode MatCeedCopy(Mat mat_ceed, Mat mat_other); +PETSC_CEED_EXTERN PetscErrorCode MatCeedSetAssemblyDataUpdateNeeded(Mat mat_ceed, PetscBool update_needed); PETSC_CEED_EXTERN PetscErrorCode MatCeedCreateMatCOO(Mat mat_ceed, Mat *mat_coo); PETSC_CEED_EXTERN PetscErrorCode MatCeedSetPreallocationCOO(Mat mat_ceed, Mat mat_coo); PETSC_CEED_EXTERN PetscErrorCode MatCeedAssembleCOO(Mat mat_ceed, Mat mat_coo); PETSC_CEED_INTERN PetscErrorCode MatCeedSetContextDouble(Mat mat, const char *name, double value); PETSC_CEED_INTERN PetscErrorCode MatCeedGetContextDouble(Mat mat, const char *name, double *value); +PETSC_CEED_EXTERN PetscErrorCode MatCeedSetContextReal(Mat mat, const char *name, PetscReal value); +PETSC_CEED_EXTERN PetscErrorCode MatCeedGetContextReal(Mat mat, const char *name, PetscReal *value); +PETSC_CEED_INTERN PetscErrorCode MatCeedSetTime(Mat mat, PetscReal time); +PETSC_CEED_INTERN PetscErrorCode MatCeedGetTime(Mat mat, PetscReal *time); +PETSC_CEED_INTERN PetscErrorCode MatCeedSetDt(Mat mat, PetscReal dt); +PETSC_CEED_INTERN PetscErrorCode MatCeedSetShifts(Mat mat, PetscReal shift_v, PetscReal shift_a); // Advanced functionality -PETSC_CEED_EXTERN PetscErrorCode MatCeedSetContext(Mat mat, PetscErrorCode (*f)(void *), void *ctx); +PETSC_CEED_EXTERN PetscErrorCode MatCeedSetContext(Mat mat, PetscCtxDestroyFn f, void *ctx); PETSC_CEED_EXTERN PetscErrorCode MatCeedGetContext(Mat mat, void *ctx); -PETSC_CEED_EXTERN PetscErrorCode MatCeedSetContextReal(Mat mat, const char *name, PetscReal value); -PETSC_CEED_EXTERN PetscErrorCode MatCeedGetContextReal(Mat mat, const char *name, PetscReal *value); PETSC_CEED_EXTERN PetscErrorCode MatCeedSetOperation(Mat mat, MatOperation op, void (*g)(void)); PETSC_CEED_EXTERN PetscErrorCode MatCeedSetCOOMatType(Mat mat, MatType type); diff --git a/examples/fluids/src/differential_filter.c b/examples/fluids/src/differential_filter.c index 12c8771ca3..e6c0db2120 100644 --- a/examples/fluids/src/differential_filter.c +++ b/examples/fluids/src/differential_filter.c @@ -159,7 +159,7 @@ PetscErrorCode DifferentialFilterCreateOperators(Ceed ceed, User user, CeedData PetscCallCeed(ceed, 
CeedElemRestrictionDestroy(&elem_restr_grid_aniso)); PetscCallCeed(ceed, CeedOperatorGetContextFieldLabel(op_lhs, "filter width scaling", &diff_filter->filter_width_scaling_label)); - PetscCall(MatCeedCreate(dm_filter, dm_filter, op_lhs, NULL, &mat_lhs)); + PetscCall(MatCreateCeed(dm_filter, dm_filter, op_lhs, NULL, &mat_lhs)); PetscCall(KSPCreate(PetscObjectComm((PetscObject)dm_filter), &diff_filter->ksp)); PetscCall(KSPSetOptionsPrefix(diff_filter->ksp, "diff_filter_")); diff --git a/examples/fluids/src/grid_anisotropy_tensor.c b/examples/fluids/src/grid_anisotropy_tensor.c index 15692ee7d6..02f78bbb67 100644 --- a/examples/fluids/src/grid_anisotropy_tensor.c +++ b/examples/fluids/src/grid_anisotropy_tensor.c @@ -75,7 +75,7 @@ PetscErrorCode GridAnisotropyTensorProjectionSetupApply(Ceed ceed, User user, Ce { // -- Setup KSP for L^2 projection Mat mat_mass; - PetscCall(MatCeedCreate(grid_aniso_proj->dm, grid_aniso_proj->dm, op_mass, NULL, &mat_mass)); + PetscCall(MatCreateCeed(grid_aniso_proj->dm, grid_aniso_proj->dm, op_mass, NULL, &mat_mass)); PetscCall(KSPCreate(comm, &ksp)); PetscCall(KSPSetOptionsPrefix(ksp, "grid_anisotropy_tensor_projection_")); diff --git a/examples/fluids/src/mat-ceed.c b/examples/fluids/src/mat-ceed.c index cd164598ed..f5fb608935 100644 --- a/examples/fluids/src/mat-ceed.c +++ b/examples/fluids/src/mat-ceed.c @@ -7,7 +7,10 @@ #include #include #include -#include +#include +#include +#include +#include #include #include @@ -163,79 +166,76 @@ static PetscErrorCode MatCeedAssembleInnerBlockDiagonalMat(Mat mat_ceed, PetscBo } /** - @brief Get `MATCEED` diagonal block for Jacobi. + @brief Get `MATCEED` variable block diagonal for Jacobi. Collective across MPI processes. @param[in] mat_ceed `MATCEED` to invert - @param[out] mat_block The diagonal block matrix + @param[out] mat_vblock The variable diagonal block matrix @return An error code: 0 - success, otherwise - failure **/ -static PetscErrorCode MatGetDiagonalBlock_Ceed(Mat mat_ceed, Mat *mat_block) { - Mat mat_inner = NULL; +static PetscErrorCode MatGetVariableBlockDiagonal_Ceed(Mat mat_ceed, Mat *mat_vblock) { MatCeedContext ctx; PetscFunctionBeginUser; PetscCall(MatShellGetContext(mat_ceed, &ctx)); // Assemble inner mat if needed - PetscCall(MatCeedAssembleInnerBlockDiagonalMat(mat_ceed, ctx->is_ceed_pbd_valid, &mat_inner)); - - // Get block diagonal - PetscCall(MatGetDiagonalBlock(mat_inner, mat_block)); + PetscCall(MatCeedAssembleInnerBlockDiagonalMat(mat_ceed, ctx->is_ceed_vpbd_valid, mat_vblock)); + PetscCall(PetscObjectReference((PetscObject)*mat_vblock)); PetscFunctionReturn(PETSC_SUCCESS); } /** - @brief Invert `MATCEED` diagonal block for Jacobi. + @brief Get `MATCEED` block diagonal for Jacobi. Collective across MPI processes. 
- @param[in] mat_ceed `MATCEED` to invert - @param[out] values The block inverses in column major order + @param[in] mat_ceed `MATCEED` to invert + @param[out] mat_block The variable diagonal block matrix @return An error code: 0 - success, otherwise - failure **/ -static PetscErrorCode MatInvertBlockDiagonal_Ceed(Mat mat_ceed, const PetscScalar **values) { - Mat mat_inner = NULL; +static PetscErrorCode MatGetBlockDiagonal_Ceed(Mat mat_ceed, Mat *mat_block) { MatCeedContext ctx; PetscFunctionBeginUser; PetscCall(MatShellGetContext(mat_ceed, &ctx)); // Assemble inner mat if needed - PetscCall(MatCeedAssembleInnerBlockDiagonalMat(mat_ceed, ctx->is_ceed_pbd_valid, &mat_inner)); - - // Invert PB diagonal - PetscCall(MatInvertBlockDiagonal(mat_inner, values)); + PetscCall(MatCeedAssembleInnerBlockDiagonalMat(mat_ceed, ctx->is_ceed_pbd_valid, mat_block)); + PetscCall(PetscObjectReference((PetscObject)*mat_block)); PetscFunctionReturn(PETSC_SUCCESS); } /** - @brief Invert `MATCEED` variable diagonal block for Jacobi. + @brief Get on-process diagonal block of `MATCEED` + + This is a block per-process of the diagonal of the global matrix. + This is NOT the diagonal blocks associated with the block size of the matrix (i.e. `MatSetBlockSize()` has no effect on this function). Collective across MPI processes. - @param[in] mat_ceed `MATCEED` to invert - @param[in] num_blocks The number of blocks on the process - @param[in] block_sizes The size of each block on the process - @param[out] values The block inverses in column major order + @param[in] mat_ceed `MATCEED` to invert + @param[out] mat_block The diagonal block matrix @return An error code: 0 - success, otherwise - failure **/ -static PetscErrorCode MatInvertVariableBlockDiagonal_Ceed(Mat mat_ceed, PetscInt num_blocks, const PetscInt *block_sizes, PetscScalar *values) { - Mat mat_inner = NULL; +static PetscErrorCode MatGetDiagonalBlock_Ceed(Mat mat_ceed, Mat *mat_block) { MatCeedContext ctx; PetscFunctionBeginUser; PetscCall(MatShellGetContext(mat_ceed, &ctx)); - // Assemble inner mat if needed - PetscCall(MatCeedAssembleInnerBlockDiagonalMat(mat_ceed, ctx->is_ceed_vpbd_valid, &mat_inner)); + // Check if COO pattern set + if (!ctx->mat_assembled_full_internal) PetscCall(MatCeedCreateMatCOO(mat_ceed, &ctx->mat_assembled_full_internal)); + + // Assemble mat_assembled_full_internal + PetscCall(MatCeedAssembleCOO(mat_ceed, ctx->mat_assembled_full_internal)); - // Invert PB diagonal - PetscCall(MatInvertVariableBlockDiagonal(mat_inner, num_blocks, block_sizes, values)); + // Get diagonal block + PetscCall(MatGetDiagonalBlock(ctx->mat_assembled_full_internal, mat_block)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -252,7 +252,7 @@ static PetscErrorCode MatInvertVariableBlockDiagonal_Ceed(Mat mat_ceed, PetscInt static PetscErrorCode MatView_Ceed(Mat mat_ceed, PetscViewer viewer) { PetscBool is_ascii; PetscViewerFormat format; - PetscMPIInt size; + PetscMPIInt size, rank; MatCeedContext ctx; PetscFunctionBeginUser; @@ -264,18 +264,35 @@ static PetscErrorCode MatView_Ceed(Mat mat_ceed, PetscViewer viewer) { PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat_ceed), &size)); if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS); + PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mat_ceed), &rank)); + if (rank != 0) PetscFunctionReturn(PETSC_SUCCESS); + PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &is_ascii)); { - FILE *file; - - PetscCall(PetscViewerASCIIPrintf(viewer, "MatCEED:\n 
Default COO MatType:%s\n", ctx->coo_mat_type)); + PetscBool is_detailed = format == PETSC_VIEWER_ASCII_INFO_DETAIL; + char rank_string[16] = {'\0'}; + FILE *file; + + PetscCall(PetscViewerASCIIPrintf(viewer, "MatCEED:\n")); + PetscCall(PetscViewerASCIIPushTab(viewer)); // MatCEED + PetscCall(PetscViewerASCIIPrintf(viewer, "Default COO MatType: %s\n", ctx->coo_mat_type)); + PetscCall(PetscSNPrintf(rank_string, 16, "on Rank %d", rank)); + PetscCall(PetscViewerASCIIPrintf(viewer, "CeedOperator Apply %s:\n", is_detailed ? rank_string : "Summary")); + PetscCall(PetscViewerASCIIPrintf(viewer, "libCEED PB Diagonal Assembly: %s\n", ctx->is_ceed_pbd_valid ? "True" : "False")); + PetscCall(PetscViewerASCIIPrintf(viewer, "libCEED VPB Diagonal Assembly: %s\n", ctx->is_ceed_vpbd_valid ? "True" : "False")); PetscCall(PetscViewerASCIIGetPointer(viewer, &file)); - PetscCall(PetscViewerASCIIPrintf(viewer, " libCEED Operator:\n")); - PetscCallCeed(ctx->ceed, CeedOperatorView(ctx->op_mult, file)); + PetscCall(PetscViewerASCIIPushTab(viewer)); // CeedOperator + if (is_detailed) PetscCallCeed(ctx->ceed, CeedOperatorView(ctx->op_mult, file)); + else PetscCallCeed(ctx->ceed, CeedOperatorViewTerse(ctx->op_mult, file)); + PetscCall(PetscViewerASCIIPopTab(viewer)); // CeedOperator if (ctx->op_mult_transpose) { - PetscCall(PetscViewerASCIIPrintf(viewer, " libCEED Transpose Operator:\n")); - PetscCallCeed(ctx->ceed, CeedOperatorView(ctx->op_mult_transpose, file)); + PetscCall(PetscViewerASCIIPrintf(viewer, "CeedOperator ApplyTranspose %s:\n", is_detailed ? rank_string : "Summary")); + PetscCall(PetscViewerASCIIPushTab(viewer)); // CeedOperator + if (is_detailed) PetscCallCeed(ctx->ceed, CeedOperatorView(ctx->op_mult_transpose, file)); + else PetscCallCeed(ctx->ceed, CeedOperatorViewTerse(ctx->op_mult_transpose, file)); + PetscCall(PetscViewerASCIIPopTab(viewer)); // CeedOperator } + PetscCall(PetscViewerASCIIPopTab(viewer)); // MatCEED } PetscFunctionReturn(PETSC_SUCCESS); } @@ -297,7 +314,7 @@ static PetscErrorCode MatView_Ceed(Mat mat_ceed, PetscViewer viewer) { @return An error code: 0 - success, otherwise - failure **/ -PetscErrorCode MatCeedCreate(DM dm_x, DM dm_y, CeedOperator op_mult, CeedOperator op_mult_transpose, Mat *mat) { +PetscErrorCode MatCreateCeed(DM dm_x, DM dm_y, CeedOperator op_mult, CeedOperator op_mult_transpose, Mat *mat) { PetscInt X_l_size, X_g_size, Y_l_size, Y_g_size; VecType vec_type; MatCeedContext ctx; @@ -449,14 +466,14 @@ PetscErrorCode MatCeedCreate(DM dm_x, DM dm_y, CeedOperator op_mult, CeedOperato PetscCall(PetscStrallocpy(coo_mat_type, &ctx->coo_mat_type)); } // -- Set mat operations - PetscCall(MatShellSetContextDestroy(*mat, (PetscErrorCode(*)(void *))MatCeedContextDestroy)); + PetscCall(MatShellSetContextDestroy(*mat, (PetscCtxDestroyFn *)MatCeedContextDestroy)); PetscCall(MatShellSetOperation(*mat, MATOP_VIEW, (void (*)(void))MatView_Ceed)); PetscCall(MatShellSetOperation(*mat, MATOP_MULT, (void (*)(void))MatMult_Ceed)); if (op_mult_transpose) PetscCall(MatShellSetOperation(*mat, MATOP_MULT_TRANSPOSE, (void (*)(void))MatMultTranspose_Ceed)); PetscCall(MatShellSetOperation(*mat, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiagonal_Ceed)); PetscCall(MatShellSetOperation(*mat, MATOP_GET_DIAGONAL_BLOCK, (void (*)(void))MatGetDiagonalBlock_Ceed)); - PetscCall(MatShellSetOperation(*mat, MATOP_INVERT_BLOCK_DIAGONAL, (void (*)(void))MatInvertBlockDiagonal_Ceed)); - PetscCall(MatShellSetOperation(*mat, MATOP_INVERT_VBLOCK_DIAGONAL, (void (*)(void))MatInvertVariableBlockDiagonal_Ceed)); + 
PetscCall(MatShellSetOperation(*mat, MATOP_GET_BLOCK_DIAGONAL, (void (*)(void))MatGetBlockDiagonal_Ceed)); + PetscCall(MatShellSetOperation(*mat, MATOP_GET_VBLOCK_DIAGONAL, (void (*)(void))MatGetVariableBlockDiagonal_Ceed)); PetscCall(MatShellSetVecType(*mat, vec_type)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -515,14 +532,14 @@ PetscErrorCode MatCeedCopy(Mat mat_ceed, Mat mat_other) { PetscCall(MatShellGetContext(mat_ceed, &ctx)); PetscCall(MatCeedContextReference(ctx)); PetscCall(MatShellSetContext(mat_other, ctx)); - PetscCall(MatShellSetContextDestroy(mat_other, (PetscErrorCode(*)(void *))MatCeedContextDestroy)); + PetscCall(MatShellSetContextDestroy(mat_other, (PetscCtxDestroyFn *)MatCeedContextDestroy)); PetscCall(MatShellSetOperation(mat_other, MATOP_VIEW, (void (*)(void))MatView_Ceed)); PetscCall(MatShellSetOperation(mat_other, MATOP_MULT, (void (*)(void))MatMult_Ceed)); if (ctx->op_mult_transpose) PetscCall(MatShellSetOperation(mat_other, MATOP_MULT_TRANSPOSE, (void (*)(void))MatMultTranspose_Ceed)); PetscCall(MatShellSetOperation(mat_other, MATOP_GET_DIAGONAL, (void (*)(void))MatGetDiagonal_Ceed)); PetscCall(MatShellSetOperation(mat_other, MATOP_GET_DIAGONAL_BLOCK, (void (*)(void))MatGetDiagonalBlock_Ceed)); - PetscCall(MatShellSetOperation(mat_other, MATOP_INVERT_BLOCK_DIAGONAL, (void (*)(void))MatInvertBlockDiagonal_Ceed)); - PetscCall(MatShellSetOperation(mat_other, MATOP_INVERT_VBLOCK_DIAGONAL, (void (*)(void))MatInvertVariableBlockDiagonal_Ceed)); + PetscCall(MatShellSetOperation(mat_other, MATOP_GET_BLOCK_DIAGONAL, (void (*)(void))MatGetBlockDiagonal_Ceed)); + PetscCall(MatShellSetOperation(mat_other, MATOP_GET_VBLOCK_DIAGONAL, (void (*)(void))MatGetVariableBlockDiagonal_Ceed)); { PetscInt block_size; @@ -542,6 +559,32 @@ PetscErrorCode MatCeedCopy(Mat mat_ceed, Mat mat_other) { PetscFunctionReturn(PETSC_SUCCESS); } +/** + @brief Mark `CeedQFunction` data as updated and the `CeedQFunction` as requiring re-assembly for a `MatCEED`. + + Collective across MPI processes. + + @param[in] mat_ceed `MATCEED` + @param[out] update_needed Boolean flag indicating `CeedQFunction` update needed + + @return An error code: 0 - success, otherwise - failure +**/ +PetscErrorCode MatCeedSetAssemblyDataUpdateNeeded(Mat mat_ceed, PetscBool update_needed) { + MatCeedContext ctx; + + PetscFunctionBeginUser; + PetscCall(MatShellGetContext(mat_ceed, &ctx)); + PetscCallCeed(ctx->ceed, CeedOperatorSetQFunctionAssemblyDataUpdateNeeded(ctx->op_mult, update_needed)); + if (ctx->op_mult_transpose) { + PetscCallCeed(ctx->ceed, CeedOperatorSetQFunctionAssemblyDataUpdateNeeded(ctx->op_mult_transpose, update_needed)); + } + if (update_needed) { + PetscCall(MatAssemblyBegin(mat_ceed, MAT_FINAL_ASSEMBLY)); + PetscCall(MatAssemblyEnd(mat_ceed, MAT_FINAL_ASSEMBLY)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + /** @brief Setup a `Mat` with the same COO pattern as a `MatCEED`. 
@@ -720,15 +763,25 @@ PetscErrorCode MatCeedSetContextDouble(Mat mat, const char *name, double value) PetscCallCeed(ctx->ceed, CeedOperatorGetContextFieldLabel(ctx->op_mult, name, &label)); if (label) { - PetscCallCeed(ctx->ceed, CeedOperatorSetContextDouble(ctx->op_mult, label, &value)); - was_updated = PETSC_TRUE; + double set_value = 2 * value + 1.0; + + PetscCall(MatCeedGetContextDouble(mat, name, &set_value)); + if (set_value != value) { + PetscCallCeed(ctx->ceed, CeedOperatorSetContextDouble(ctx->op_mult, label, &value)); + was_updated = PETSC_TRUE; + } } if (ctx->op_mult_transpose) { label = NULL; PetscCallCeed(ctx->ceed, CeedOperatorGetContextFieldLabel(ctx->op_mult_transpose, name, &label)); if (label) { - PetscCallCeed(ctx->ceed, CeedOperatorSetContextDouble(ctx->op_mult_transpose, label, &value)); - was_updated = PETSC_TRUE; + double set_value = 2 * value + 1.0; + + PetscCall(MatCeedGetContextDouble(mat, name, &set_value)); + if (set_value != value) { + PetscCallCeed(ctx->ceed, CeedOperatorSetContextDouble(ctx->op_mult_transpose, label, &value)); + was_updated = PETSC_TRUE; + } } } } @@ -739,25 +792,6 @@ PetscErrorCode MatCeedSetContextDouble(Mat mat, const char *name, double value) PetscFunctionReturn(PETSC_SUCCESS); } -/** - @brief Set the current `PetscReal` value of a context field for a `MatCEED`. - - Not collective across MPI processes. - - @param[in,out] mat `MatCEED` - @param[in] name Name of the context field - @param[in] value New context field value - - @return An error code: 0 - success, otherwise - failure -**/ -PetscErrorCode MatCeedSetContextReal(Mat mat, const char *name, PetscReal value) { - double value_double = value; - - PetscFunctionBeginUser; - PetscCall(MatCeedSetContextDouble(mat, name, value_double)); - PetscFunctionReturn(PETSC_SUCCESS); -} - /** @brief Get the current value of a context field for a `MatCEED`. @@ -795,6 +829,25 @@ PetscErrorCode MatCeedGetContextDouble(Mat mat, const char *name, double *value) PetscFunctionReturn(PETSC_SUCCESS); } +/** + @brief Set the current `PetscReal` value of a context field for a `MatCEED`. + + Not collective across MPI processes. + + @param[in,out] mat `MatCEED` + @param[in] name Name of the context field + @param[in] value New context field value + + @return An error code: 0 - success, otherwise - failure +**/ +PetscErrorCode MatCeedSetContextReal(Mat mat, const char *name, PetscReal value) { + double value_double = value; + + PetscFunctionBeginUser; + PetscCall(MatCeedSetContextDouble(mat, name, value_double)); + PetscFunctionReturn(PETSC_SUCCESS); +} + /** @brief Get the current `PetscReal` value of a context field for a `MatCEED`. @@ -807,7 +860,7 @@ PetscErrorCode MatCeedGetContextDouble(Mat mat, const char *name, double *value) @return An error code: 0 - success, otherwise - failure **/ PetscErrorCode MatCeedGetContextReal(Mat mat, const char *name, PetscReal *value) { - double value_double; + double value_double = 0.0; PetscFunctionBeginUser; PetscCall(MatCeedGetContextDouble(mat, name, &value_double)); @@ -815,6 +868,94 @@ PetscErrorCode MatCeedGetContextReal(Mat mat, const char *name, PetscReal *value PetscFunctionReturn(PETSC_SUCCESS); } +/** + @brief Set the current time for a `MatCEED`. + + Not collective across MPI processes. 
+ + @param[in,out] mat `MatCEED` + @param[in] time Current time + + @return An error code: 0 - success, otherwise - failure +**/ +PetscErrorCode MatCeedSetTime(Mat mat, PetscReal time) { + PetscFunctionBeginUser; + { + double time_ceed = time; + + PetscCall(MatCeedSetContextDouble(mat, "time", time_ceed)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/** + @brief Get the current time for a `MatCEED`. + + Not collective across MPI processes. + + @param[in] mat `MatCEED` + @param[out] time Current time, or -1.0 if the boundary evaluator has no time field + + @return An error code: 0 - success, otherwise - failure +**/ +PetscErrorCode MatCeedGetTime(Mat mat, PetscReal *time) { + PetscFunctionBeginUser; + *time = -1.0; + { + double time_ceed = -1.0; + + PetscCall(MatCeedGetContextDouble(mat, "time", &time_ceed)); + *time = time_ceed; + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/** + @brief Set the current time step for a `MatCEED`. + + Not collective across MPI processes. + + @param[in,out] mat `MatCEED` + @param[in] dt Current time step + + @return An error code: 0 - success, otherwise - failure +**/ +PetscErrorCode MatCeedSetDt(Mat mat, PetscReal dt) { + PetscFunctionBeginUser; + { + double dt_ceed = dt; + + PetscCall(MatCeedSetContextDouble(mat, "dt", dt_ceed)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + +/** + @brief Set the Jacobian shifts for a `MatCEED`. + + Not collective across MPI processes. + + @param[in,out] mat `MatCEED` + @param[in] shift_v Velocity shift + @param[in] shift_a Acceleration shift + + @return An error code: 0 - success, otherwise - failure +**/ +PetscErrorCode MatCeedSetShifts(Mat mat, PetscReal shift_v, PetscReal shift_a) { + PetscFunctionBeginUser; + { + double shift_v_ceed = shift_v; + + PetscCall(MatCeedSetContextDouble(mat, "shift v", shift_v_ceed)); + } + if (shift_a) { + double shift_a_ceed = shift_a; + + PetscCall(MatCeedSetContextDouble(mat, "shift a", shift_a_ceed)); + } + PetscFunctionReturn(PETSC_SUCCESS); +} + /** @brief Set user context for a `MATCEED`. 
@@ -826,14 +967,14 @@ PetscErrorCode MatCeedGetContextReal(Mat mat, const char *name, PetscReal *value @return An error code: 0 - success, otherwise - failure **/ -PetscErrorCode MatCeedSetContext(Mat mat, PetscErrorCode (*f)(void *), void *ctx) { +PetscErrorCode MatCeedSetContext(Mat mat, PetscCtxDestroyFn f, void *ctx) { PetscContainer user_ctx = NULL; PetscFunctionBeginUser; if (ctx) { PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)mat), &user_ctx)); PetscCall(PetscContainerSetPointer(user_ctx, ctx)); - PetscCall(PetscContainerSetUserDestroy(user_ctx, f)); + PetscCall(PetscContainerSetCtxDestroy(user_ctx, f)); } PetscCall(PetscObjectCompose((PetscObject)mat, "MatCeed user context", (PetscObject)user_ctx)); PetscCall(PetscContainerDestroy(&user_ctx)); @@ -1316,7 +1457,7 @@ PetscErrorCode MatCeedContextReference(MatCeedContext ctx) { PetscErrorCode MatCeedContextReferenceCopy(MatCeedContext ctx, MatCeedContext *ctx_copy) { PetscFunctionBeginUser; PetscCall(MatCeedContextReference(ctx)); - PetscCall(MatCeedContextDestroy(*ctx_copy)); + PetscCall(MatCeedContextDestroy(ctx_copy)); *ctx_copy = ctx; PetscFunctionReturn(PETSC_SUCCESS); } @@ -1330,33 +1471,33 @@ PetscErrorCode MatCeedContextReferenceCopy(MatCeedContext ctx, MatCeedContext *c @return An error code: 0 - success, otherwise - failure **/ -PetscErrorCode MatCeedContextDestroy(MatCeedContext ctx) { +PetscErrorCode MatCeedContextDestroy(MatCeedContext *ctx) { PetscFunctionBeginUser; - if (!ctx || --ctx->ref_count > 0) PetscFunctionReturn(PETSC_SUCCESS); + if (!ctx || --(*ctx)->ref_count > 0) PetscFunctionReturn(PETSC_SUCCESS); // PETSc objects - PetscCall(DMDestroy(&ctx->dm_x)); - PetscCall(DMDestroy(&ctx->dm_y)); - PetscCall(VecDestroy(&ctx->X_loc)); - PetscCall(VecDestroy(&ctx->Y_loc_transpose)); - PetscCall(MatDestroy(&ctx->mat_assembled_full_internal)); - PetscCall(MatDestroy(&ctx->mat_assembled_pbd_internal)); - PetscCall(PetscFree(ctx->coo_mat_type)); - PetscCall(PetscFree(ctx->mats_assembled_full)); - PetscCall(PetscFree(ctx->mats_assembled_pbd)); + PetscCall(DMDestroy(&(*ctx)->dm_x)); + PetscCall(DMDestroy(&(*ctx)->dm_y)); + PetscCall(VecDestroy(&(*ctx)->X_loc)); + PetscCall(VecDestroy(&(*ctx)->Y_loc_transpose)); + PetscCall(MatDestroy(&(*ctx)->mat_assembled_full_internal)); + PetscCall(MatDestroy(&(*ctx)->mat_assembled_pbd_internal)); + PetscCall(PetscFree((*ctx)->coo_mat_type)); + PetscCall(PetscFree((*ctx)->mats_assembled_full)); + PetscCall(PetscFree((*ctx)->mats_assembled_pbd)); // libCEED objects - PetscCallCeed(ctx->ceed, CeedVectorDestroy(&ctx->x_loc)); - PetscCallCeed(ctx->ceed, CeedVectorDestroy(&ctx->y_loc)); - PetscCallCeed(ctx->ceed, CeedVectorDestroy(&ctx->coo_values_full)); - PetscCallCeed(ctx->ceed, CeedVectorDestroy(&ctx->coo_values_pbd)); - PetscCallCeed(ctx->ceed, CeedOperatorDestroy(&ctx->op_mult)); - PetscCallCeed(ctx->ceed, CeedOperatorDestroy(&ctx->op_mult_transpose)); - PetscCheck(CeedDestroy(&ctx->ceed) == CEED_ERROR_SUCCESS, PETSC_COMM_SELF, PETSC_ERR_LIB, "destroying libCEED context object failed"); + PetscCallCeed((*ctx)->ceed, CeedVectorDestroy(&(*ctx)->x_loc)); + PetscCallCeed((*ctx)->ceed, CeedVectorDestroy(&(*ctx)->y_loc)); + PetscCallCeed((*ctx)->ceed, CeedVectorDestroy(&(*ctx)->coo_values_full)); + PetscCallCeed((*ctx)->ceed, CeedVectorDestroy(&(*ctx)->coo_values_pbd)); + PetscCallCeed((*ctx)->ceed, CeedOperatorDestroy(&(*ctx)->op_mult)); + PetscCallCeed((*ctx)->ceed, CeedOperatorDestroy(&(*ctx)->op_mult_transpose)); + PetscCheck(CeedDestroy(&(*ctx)->ceed) == CEED_ERROR_SUCCESS, 
PETSC_COMM_SELF, PETSC_ERR_LIB, "destroying libCEED context object failed"); // Deallocate - ctx->is_destroyed = PETSC_TRUE; // Flag as destroyed in case someone has stale ref - PetscCall(PetscFree(ctx)); + (*ctx)->is_destroyed = PETSC_TRUE; // Flag as destroyed in case someone has stale ref + PetscCall(PetscFree(*ctx)); PetscFunctionReturn(PETSC_SUCCESS); } @@ -1434,11 +1575,11 @@ PetscErrorCode MatMult_Ceed(Mat A, Vec X, Vec Y) { PetscCall(VecPetscToCeed(Y_loc, &y_mem_type, ctx->y_loc)); // Apply libCEED operator - PetscCall(PetscLogGpuTimeBegin()); PetscCall(PetscLogEventBegin(ctx->log_event_ceed_mult, A, X, Y, NULL)); + PetscCall(PetscLogGpuTimeBegin()); PetscCallCeed(ctx->ceed, CeedOperatorApplyAdd(ctx->op_mult, ctx->x_loc, ctx->y_loc, CEED_REQUEST_IMMEDIATE)); - PetscCall(PetscLogEventEnd(ctx->log_event_ceed_mult, A, X, Y, NULL)); PetscCall(PetscLogGpuTimeEnd()); + PetscCall(PetscLogEventEnd(ctx->log_event_ceed_mult, A, X, Y, NULL)); // Restore PETSc vectors PetscCall(VecReadCeedToPetsc(ctx->x_loc, x_mem_type, X_loc)); @@ -1495,11 +1636,11 @@ PetscErrorCode MatMultTranspose_Ceed(Mat A, Vec Y, Vec X) { PetscCall(VecPetscToCeed(X_loc, &x_mem_type, ctx->x_loc)); // Apply libCEED operator - PetscCall(PetscLogGpuTimeBegin()); PetscCall(PetscLogEventBegin(ctx->log_event_ceed_mult_transpose, A, Y, X, NULL)); + PetscCall(PetscLogGpuTimeBegin()); PetscCallCeed(ctx->ceed, CeedOperatorApplyAdd(ctx->op_mult_transpose, ctx->y_loc, ctx->x_loc, CEED_REQUEST_IMMEDIATE)); - PetscCall(PetscLogEventEnd(ctx->log_event_ceed_mult_transpose, A, Y, X, NULL)); PetscCall(PetscLogGpuTimeEnd()); + PetscCall(PetscLogEventEnd(ctx->log_event_ceed_mult_transpose, A, Y, X, NULL)); // Restore PETSc vectors PetscCall(VecReadCeedToPetsc(ctx->y_loc, y_mem_type, Y_loc)); diff --git a/examples/fluids/src/setuplibceed.c b/examples/fluids/src/setuplibceed.c index 1754d5e521..9ce48ae762 100644 --- a/examples/fluids/src/setuplibceed.c +++ b/examples/fluids/src/setuplibceed.c @@ -70,7 +70,7 @@ static PetscErrorCode CreateKSPMass(User user, ProblemData problem) { PetscCall(DMCreateLocalVector(dm, &Zeros_loc)); PetscCall(VecZeroEntries(Zeros_loc)); - PetscCall(MatCeedCreate(dm, dm, op_mass, NULL, &mat_mass)); + PetscCall(MatCreateCeed(dm, dm, op_mass, NULL, &mat_mass)); PetscCall(MatCeedSetLocalVectors(mat_mass, Zeros_loc, NULL)); PetscCall(KSPCreate(comm, &user->mass_ksp)); @@ -469,7 +469,7 @@ PetscErrorCode SetupLibceed(Ceed ceed, CeedData ceed_data, DM dm, User user, App PetscCallCeed(ceed, CeedOperatorGetContextFieldLabel(user->op_ifunction, "timestep size", &user->phys->timestep_size_label)); if (op_ijacobian) { - PetscCall(MatCeedCreate(user->dm, user->dm, op_ijacobian, NULL, &user->mat_ijacobian)); + PetscCall(MatCreateCeed(user->dm, user->dm, op_ijacobian, NULL, &user->mat_ijacobian)); PetscCall(MatCeedSetLocalVectors(user->mat_ijacobian, user->Q_dot_loc, NULL)); PetscCallCeed(ceed, CeedOperatorDestroy(&op_ijacobian)); } diff --git a/examples/fluids/src/turb_spanstats.c b/examples/fluids/src/turb_spanstats.c index 08f9ef36b9..6c27978fde 100644 --- a/examples/fluids/src/turb_spanstats.c +++ b/examples/fluids/src/turb_spanstats.c @@ -326,7 +326,7 @@ PetscErrorCode SetupL2ProjectionStats(Ceed ceed, User user, CeedData ceed_data, Mat mat_mass; KSP ksp; - PetscCall(MatCeedCreate(user->spanstats.dm, user->spanstats.dm, op_mass, NULL, &mat_mass)); + PetscCall(MatCreateCeed(user->spanstats.dm, user->spanstats.dm, op_mass, NULL, &mat_mass)); PetscCall(KSPCreate(PetscObjectComm((PetscObject)user->spanstats.dm), &ksp)); 
PetscCall(KSPSetOptionsPrefix(ksp, "turbulence_spanstats_")); diff --git a/examples/fluids/src/velocity_gradient_projection.c b/examples/fluids/src/velocity_gradient_projection.c index 931b69f57d..0ee457139a 100644 --- a/examples/fluids/src/velocity_gradient_projection.c +++ b/examples/fluids/src/velocity_gradient_projection.c @@ -107,7 +107,7 @@ PetscErrorCode VelocityGradientProjectionSetup(Ceed ceed, User user, CeedData ce Mat mat_mass; MPI_Comm comm = PetscObjectComm((PetscObject)grad_velo_proj->dm); - PetscCall(MatCeedCreate(grad_velo_proj->dm, grad_velo_proj->dm, op_mass, NULL, &mat_mass)); + PetscCall(MatCreateCeed(grad_velo_proj->dm, grad_velo_proj->dm, op_mass, NULL, &mat_mass)); PetscCall(KSPCreate(comm, &grad_velo_proj->ksp)); PetscCall(KSPSetOptionsPrefix(grad_velo_proj->ksp, "velocity_gradient_projection_")); diff --git a/examples/petsc/dmswarm.c b/examples/petsc/dmswarm.c index 1dc3618fd8..113730e15a 100644 --- a/examples/petsc/dmswarm.c +++ b/examples/petsc/dmswarm.c @@ -398,7 +398,7 @@ PetscErrorCode DMSwarmInterpolateFromCellToSwarm_Petsc(DM dm_swarm, const char * PetscCall(DMRestoreWorkArray(dm_mesh, num_points_in_cell * dim, MPIU_REAL, &coords_points_cell_true)); PetscCall(DMRestoreWorkArray(dm_mesh, num_points_in_cell * dim, MPIU_REAL, &coords_points_cell_ref)); PetscCall(PetscTabulationDestroy(&tabulation)); - PetscCall(PetscFree(points_cell)); + PetscCall(DMSwarmSortRestorePointsPerCell(dm_swarm, cell, &num_points_in_cell, &points_cell)); } // Cleanup @@ -486,7 +486,7 @@ PetscErrorCode DMSwarmCheckSwarmValues(DM dm_swarm, const char *field, PetscScal } // -- Cleanup - PetscCall(PetscFree(points)); + PetscCall(DMSwarmSortRestorePointsPerCell(dm_swarm, cell, &num_points_in_cell, &points)); } // Cleanup diff --git a/examples/petsc/src/swarmutils.c b/examples/petsc/src/swarmutils.c index f736581ee5..ee047b724f 100644 --- a/examples/petsc/src/swarmutils.c +++ b/examples/petsc/src/swarmutils.c @@ -391,7 +391,7 @@ PetscErrorCode DMSwarmCreateReferenceCoordinates(DM dm_swarm, IS *is_points, Vec } // -- Cleanup - PetscCall(PetscFree(points_in_cell)); + PetscCall(DMSwarmSortRestorePointsPerCell(dm_swarm, cell, &num_points_in_cell, &points_in_cell)); } cell_points[points_offset - 1] = num_points_local + points_offset;
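
Note: a minimal sketch of how the MatCeedSetTime / MatCeedSetShifts / MatCeedSetAssemblyDataUpdateNeeded helpers added in this patch might be driven from a TS IJacobian callback. Only the MatCeed* calls and their signatures are taken from the patch; the UserData layout, the FormIJacobian_User name, and the mapping of the TS shift onto the "shift v"/"shift a" context fields are assumptions for illustration, and the time/shift setters are declared PETSC_CEED_INTERN, so this mirrors example-internal usage rather than a public API.

#include <petscts.h>
#include "mat-ceed.h"  // examples/fluids/include

typedef struct {
  Mat mat_ijacobian;  // MATCEED built with MatCreateCeed()
} UserData;

// Hypothetical IJacobian callback; the shift-to-"shift v" mapping is an assumption
static PetscErrorCode FormIJacobian_User(TS ts, PetscReal t, Vec Q, Vec Q_dot, PetscReal shift, Mat J, Mat J_pre, void *ctx) {
  UserData *user = (UserData *)ctx;

  PetscFunctionBeginUser;
  PetscCall(MatCeedSetTime(user->mat_ijacobian, t));            // updates the "time" context field, if present
  PetscCall(MatCeedSetShifts(user->mat_ijacobian, shift, 0.0)); // "shift v" (and "shift a" only when nonzero)
  // Mark the QFunction assembly data stale so any assembled COO copies are refreshed
  PetscCall(MatCeedSetAssemblyDataUpdateNeeded(user->mat_ijacobian, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}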
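Note: MatCeedSetContext now takes a PetscCtxDestroyFn, matching the switch to PetscContainerSetCtxDestroy above. A sketch of a conforming destroy callback, assuming PetscCtxDestroyFn receives the context by reference (void **) as in recent PETSc; MyPhysicsCtx, MyCtxDestroy, and AttachUserContext are hypothetical names, not part of the patch.

#include <petscmat.h>
#include "mat-ceed.h"

typedef struct {
  PetscReal reference_length;  // hypothetical user data
} MyPhysicsCtx;

// Destroy callback conforming to PetscCtxDestroyFn: frees and nulls the context pointer
static PetscErrorCode MyCtxDestroy(void **ctx) {
  PetscFunctionBeginUser;
  PetscCall(PetscFree(*ctx));
  *ctx = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode AttachUserContext(Mat mat_ceed) {
  MyPhysicsCtx *phys;

  PetscFunctionBeginUser;
  PetscCall(PetscNew(&phys));
  phys->reference_length = 1.0;
  PetscCall(MatCeedSetContext(mat_ceed, MyCtxDestroy, phys));  // the composed container now owns phys
  PetscFunctionReturn(PETSC_SUCCESS);
}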
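Note: the dmswarm.c and swarmutils.c hunks replace PetscFree() of the per-cell point list with DMSwarmSortRestorePointsPerCell(), pairing each Get with a Restore. A sketch of the resulting pattern; the CountSwarmPointsPerCell wrapper and the surrounding loop are illustrative, only the Get/Restore pairing comes from the patch.

#include <petscdmswarm.h>
#include <petscdmplex.h>

static PetscErrorCode CountSwarmPointsPerCell(DM dm_swarm, DM dm_mesh) {
  PetscInt cell_start, cell_end;

  PetscFunctionBeginUser;
  PetscCall(DMSwarmSortGetAccess(dm_swarm));
  PetscCall(DMPlexGetHeightStratum(dm_mesh, 0, &cell_start, &cell_end));
  for (PetscInt cell = cell_start; cell < cell_end; cell++) {
    PetscInt  num_points_in_cell;
    PetscInt *points_in_cell;

    // Get the swarm point indices located in this cell
    PetscCall(DMSwarmSortGetPointsPerCell(dm_swarm, cell, &num_points_in_cell, &points_in_cell));
    // ... use points_in_cell ...
    // Return the list via the matching Restore call instead of PetscFree()
    PetscCall(DMSwarmSortRestorePointsPerCell(dm_swarm, cell, &num_points_in_cell, &points_in_cell));
  }
  PetscCall(DMSwarmSortRestoreAccess(dm_swarm));
  PetscFunctionReturn(PETSC_SUCCESS);
}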