Merge pull request #121 from wenqing/fixing
Fixed bugs in the excavation modelling in the coupled HM processes
wenqing authored Dec 21, 2018
2 parents 08268fa + 202643a commit 400ebfc
Showing 3 changed files with 84 additions and 117 deletions.
169 changes: 58 additions & 111 deletions FEM/fem_ele_vec.cpp
@@ -1026,7 +1026,7 @@ void CFiniteElementVec::LocalAssembly(const int update)
}
else
#endif //#if !defined(USE_PETSC) // && !defined(other parallel libs)//03.3012.
//WW
// WW
{
#if defined(USE_PETSC) // || defined (other parallel solver lib). 04.2012 WW
// TODO
@@ -1056,7 +1056,7 @@ void CFiniteElementVec::LocalAssembly(const int update)
}
else
#endif //#if !defined(USE_PETSC) // && !defined(other parallel libs)//03.3012.
//WW
// WW
#if !defined(USE_PETSC) // && !defined(other parallel libs)//03.3012. WW
{
for (int i = 0; i < nnodesHQ; i++)
@@ -1637,71 +1637,6 @@ void CFiniteElementVec::GlobalAssembly_RHS()
if (dynamic)
Residual = true;

bool onExBoundaryState[max_nnodes_QE_3D] = {false};
if (excavation)
{
int valid = 0;
excavation = true;
bool onExBoundary = false;
CNode* node;
CElem* elem;
CSolidProperties* smat_e;

for (int i = 0; i < nnodesHQ; i++)
{
node = MeshElement->nodes[i];
onExBoundary = false;
const std::size_t n_elements(node->getConnectedElementIDs().size());
for (std::size_t j = 0; j < n_elements; j++)
{
elem =
pcs->m_msh->ele_vector[node->getConnectedElementIDs()[j]];
if (!elem->GetMark())
continue;

smat_e = msp_vector[elem->GetPatchIndex()];
if (smat_e->excavation > 0)
{
if (fabs(GetCurveValue(smat_e->excavation, 0, aktuelle_zeit,
&valid) -
1.0) < DBL_MIN)
{
onExBoundary = true;
break;
}
}
else if (pcs->ExcavMaterialGroup > -1)
{
double const* ele_center(elem->GetGravityCenter());
const double expected_coordinate =
GetCurveValue(pcs->ExcavCurve, 0, aktuelle_zeit,
&valid) +
pcs->ExcavBeginCoordinate;
const double max_excavation_range = std::max(
expected_coordinate, pcs->ExcavBeginCoordinate);
if (ele_center[pcs->ExcavDirection] > max_excavation_range)
{
onExBoundary = true;
break;
}
else if (elem->GetPatchIndex() !=
static_cast<size_t>(pcs->ExcavMaterialGroup))
{
onExBoundary = true;
break;
}
}
else
{
onExBoundary = true;
break;
}
}
if (onExBoundary)
onExBoundaryState[i] = 1;
}
}

if (Residual)
{
const double biot = smat->biot_const;
@@ -1714,14 +1649,6 @@ void CFiniteElementVec::GlobalAssembly_RHS()
case FiniteElement::LIQUID_FLOW:
for (int i = 0; i < nnodes; i++)
nodal_pore_p[i] = LoadFactor * _nodal_p1[i];
if (excavation)
{
for (int i = 0; i < nnodes; i++)
{
if (onExBoundaryState[i] == 1) // WX:02.2013
nodal_pore_p[i] = 0.0;
}
}
if (pcs->Neglect_H_ini == 2)
{
for (int i = 0; i < nnodes; i++)
@@ -1754,14 +1681,6 @@ void CFiniteElementVec::GlobalAssembly_RHS()
nodal_pore_p[i] =
LoadFactor * _nodal_S[i] * _nodal_p1[i];
}
if (excavation)
{
for (int i = 0; i < nnodes; i++)
{
if (onExBoundaryState[i] == 1) // WX:02.2013
nodal_pore_p[i] = 0.0;
}
}
if (pcs->Neglect_H_ini == 2)
{
for (int i = 0; i < nnodes; i++)
@@ -1790,15 +1709,6 @@ void CFiniteElementVec::GlobalAssembly_RHS()
smat->getBishopCoefficient(S_e, val_n) * val_n;
}
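Note on the branch above: each nodal fluid pressure is weighted by Bishop's coefficient chi(S_e) from getBishopCoefficient before it enters the mechanical right-hand side. For reference, the classical Bishop effective-stress definition this corresponds to is (a hedged reading of the hunk; sign conventions differ between codes)

\sigma' = (\sigma - u_a \mathbf{I}) + \chi(S_e)\,(u_a - u_w)\,\mathbf{I}

with u_a the gas pressure, u_w the water pressure, and chi -> 1 at full saturation, which recovers Terzaghi's \sigma' = \sigma - u_w \mathbf{I}.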

if (excavation)
{
for (int i = 0; i < nnodes; i++)
{
if (onExBoundaryState[i] == 1)
nodal_pore_p[i] = 0.0;
}
}

if (pcs->Neglect_H_ini == 2)
{
for (int i = 0; i < nnodes; i++)
@@ -1834,15 +1744,6 @@ void CFiniteElementVec::GlobalAssembly_RHS()
nodal_pore_p[i] = pore_p * LoadFactor;
}

if (excavation)
{
for (int i = 0; i < nnodes; i++)
{
if (onExBoundaryState[i] == 1) // WX:02.2013
nodal_pore_p[i] = 0.;
}
}

if (pcs->Neglect_H_ini == 2)
{
for (int i = 0; i < nnodes; i++)
@@ -1873,15 +1774,6 @@ void CFiniteElementVec::GlobalAssembly_RHS()
nodal_pore_p[i] = pore_p * LoadFactor;
}

if (excavation)
{
for (int i = 0; i < nnodes; i++)
{
if (onExBoundaryState[i] == 1) // WX:02.2013
nodal_pore_p[i] = 0.;
}
}

if (pcs->Neglect_H_ini == 2)
{
for (int i = 0; i < nnodes; i++)
@@ -1947,9 +1839,64 @@ void CFiniteElementVec::GlobalAssembly_RHS()

if (excavation)
{
int valid = 0;
excavation = true;
bool onExBoundary = false;
CNode* node;
CElem* elem;
CSolidProperties* smat_e;

for (int i = 0; i < nnodesHQ; i++)
{
if (!onExBoundaryState[i])
node = MeshElement->nodes[i];
onExBoundary = false;
const std::size_t n_elements(node->getConnectedElementIDs().size());
for (std::size_t j = 0; j < n_elements; j++)
{
elem =
pcs->m_msh->ele_vector[node->getConnectedElementIDs()[j]];
if (!elem->GetMark())
continue;

smat_e = msp_vector[elem->GetPatchIndex()];
if (smat_e->excavation > 0)
{
if (fabs(GetCurveValue(smat_e->excavation, 0, aktuelle_zeit,
&valid) -
1.0) < DBL_MIN)
{
onExBoundary = true;
break;
}
}
else if (pcs->ExcavMaterialGroup > -1)
{
double const* ele_center(elem->GetGravityCenter());
const double expected_coordinate =
GetCurveValue(pcs->ExcavCurve, 0, aktuelle_zeit,
&valid) +
pcs->ExcavBeginCoordinate;
const double max_excavation_range = std::max(
expected_coordinate, pcs->ExcavBeginCoordinate);
if (ele_center[pcs->ExcavDirection] > max_excavation_range)
{
onExBoundary = true;
break;
}
else if (elem->GetPatchIndex() !=
static_cast<size_t>(pcs->ExcavMaterialGroup))
{
onExBoundary = true;
break;
}
}
else
{
onExBoundary = true;
break;
}
}
if (!onExBoundary)
{
for (std::size_t j = 0; j < dim; j++)
b_rhs[eqs_number[i] + NodeShift[j]] = 0.0;
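The hunk above carries the core of the fix: the per-node excavation-boundary test now runs directly inside the final if (excavation) block of GlobalAssembly_RHS(), where the right-hand-side entries of nodes strictly inside the excavated region are zeroed. Below is a minimal, self-contained C++ sketch of that test; ElementStub and all parameter names are hypothetical simplifications of CElem, CSolidProperties and CRFProcess, not OGS5 API.

#include <algorithm>
#include <cfloat>
#include <cmath>
#include <vector>

// Hypothetical stand-in for the data read from CElem / CSolidProperties.
struct ElementStub
{
    bool marked;              // CElem::GetMark(): element is active in this step
    int excavation_curve;     // CSolidProperties::excavation; > 0 if curve-driven
    double curve_value;       // current value of that curve (1.0 == excavated)
    double center_coordinate; // gravity-center coordinate along ExcavDirection
    int patch_index;          // material group of the element
};

// Returns true if a node, described by its connected elements, lies on the
// excavation boundary: some active neighbour is fully excavated, sits beyond
// the current excavation front, or belongs to a non-excavated material group.
bool nodeOnExcavationBoundary(const std::vector<ElementStub>& connected_elements,
                              double excavation_front,      // curve value + ExcavBeginCoordinate
                              double excavation_begin,      // ExcavBeginCoordinate
                              int excavated_material_group) // ExcavMaterialGroup, -1 if unused
{
    const double max_range = std::max(excavation_front, excavation_begin);
    for (const ElementStub& elem : connected_elements)
    {
        if (!elem.marked)
            continue;
        if (elem.excavation_curve > 0)
        {
            if (std::fabs(elem.curve_value - 1.0) < DBL_MIN)
                return true; // neighbour is already fully excavated
        }
        else if (excavated_material_group > -1)
        {
            if (elem.center_coordinate > max_range)
                return true; // neighbour lies beyond the excavation front
            if (elem.patch_index != excavated_material_group)
                return true; // neighbour is outside the excavated material group
        }
        else
        {
            return true; // neighbour carries no excavation definition at all
        }
    }
    return false;
}

In the hunk above, nodes for which this test returns false (nodes fully inside the excavated region) get their entries in b_rhs set to zero.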
21 changes: 15 additions & 6 deletions FEM/rf_pcs.cpp
@@ -19,9 +19,9 @@
solver and parellelisation of them 02/2008 PCH OpenMP parallelization for Lis
matrix solver
**************************************************************************/
#include "FEMEnums.h"
#include "Output.h"
#include "MathTools.h"
#include "rf_pcs.h"

#include <limits> // std::numeric_limits

/*--------------------- MPI Parallel -------------------*/
#if defined(USE_MPI) || defined(USE_MPI_PARPROC) || defined(USE_MPI_REGSOIL)
@@ -55,11 +55,14 @@ matrix solver
// GEOLib
#include "PointWithID.h"

#include "FEMEnums.h"
#include "Output.h"
#include "MathTools.h"

#include "PhysicalConstant.h"

/* Objects */
#include "pcs_dm.h"
#include "rf_pcs.h"
#include "rf_st_new.h" // ST
//#include "rf_bc_new.h" // ST
//#include "rf_mmp_new.h" // MAT
@@ -4617,8 +4620,14 @@ bool CRFProcess::isPointInExcavatedDomain(double const* point,
point[ExcavDirection];
max_excavation_range = std::max(expected_coordinate, ExcavBeginCoordinate);
min_excavation_range = std::min(expected_coordinate, ExcavBeginCoordinate);
if (element_center_x_in_excavation_direction > min_excavation_range &&
element_center_x_in_excavation_direction < max_excavation_range)
if ((element_center_x_in_excavation_direction > min_excavation_range ||
(std::fabs(element_center_x_in_excavation_direction -
min_excavation_range) <
std::numeric_limits<double>::epsilon())) &&(
(element_center_x_in_excavation_direction < max_excavation_range) ||
(std::fabs(element_center_x_in_excavation_direction -
max_excavation_range) <
std::numeric_limits<double>::epsilon())))
{
return true;
}
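The widened comparison above makes the excavated-domain test inclusive at both ends: with the former strict inequalities, an element center lying exactly on ExcavBeginCoordinate or exactly on the current excavation front fell outside the range and was not treated as excavated. A minimal sketch of the new check, assuming the same absolute tolerance of std::numeric_limits<double>::epsilon(); inExcavatedRange is a hypothetical helper, not an OGS5 function.

#include <algorithm>
#include <cmath>
#include <limits>

// Inclusive range test with an absolute tolerance of machine epsilon,
// mirroring the logic of isPointInExcavatedDomain() above.
bool inExcavatedRange(double x, double expected_coordinate, double begin_coordinate)
{
    const double lo = std::min(expected_coordinate, begin_coordinate);
    const double hi = std::max(expected_coordinate, begin_coordinate);
    const double eps = std::numeric_limits<double>::epsilon();
    const bool at_or_above_lo = (x > lo) || (std::fabs(x - lo) < eps);
    const bool at_or_below_hi = (x < hi) || (std::fabs(x - hi) < eps);
    return at_or_above_lo && at_or_below_hi;
}

// Example: inExcavatedRange(2.0, 2.0, 0.0) is true,
// whereas the former strict check (x > 0.0 && x < 2.0) returned false.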
11 changes: 11 additions & 0 deletions scripts/cmake/benchmarks/WW.cmake
@@ -84,6 +84,17 @@ Benchmark(AUTHOR WW
h2_Liako_domain_hex.tec
)

Benchmark(AUTHOR WW
PATH HM/excavation/BoreholeExcavation/borehole_excav
CONFIG FEM
NUM_PROCESSORS 1
RUNTIME 40
OUTPUT_FILES
borehole_excav_ply_horizon_t2.tec
borehole_excav_ply_vertikal_t3.tec
)


Benchmark(AUTHOR WW
PATH TH2M/H2M_TEP/w_exp
CONFIG FEM