Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

48 simplify notch approximation lawbinned #60

Merged
merged 10 commits into from
Jan 26, 2024
18 changes: 12 additions & 6 deletions src/pylife/materiallaws/notch_approximation_law.py
Original file line number Diff line number Diff line change
Expand Up @@ -583,10 +583,11 @@ def strain(self, stress, load):
'''Get the strain of the primary path in the stress-strain diagram at a given stress and load
by using the value of the look-up table.

This method performs the task for multiple points at once,
i.e. delta_load is a DataFrame with values for every node.

Parameters
----------
stress : array-like float
The stress
load : array-like float
The load

Expand Down Expand Up @@ -655,7 +656,10 @@ def strain(self, stress, load):

def stress_secondary_branch(self, delta_load, *, rtol=1e-5, tol=1e-6):
'''Get the stress on secondary branches in the stress-strain diagram at a given load
by using the value of the look-up table.
by using the value of the look-up table (lut).

This method performs the task for multiple points at once,
i.e. delta_load is a DataFrame with values for every node.

Parameters
----------
Expand Down Expand Up @@ -734,12 +738,14 @@ def stress_secondary_branch(self, delta_load, *, rtol=1e-5, tol=1e-6):

def strain_secondary_branch(self, delta_stress, delta_load):
'''Get the strain on secondary branches in the stress-strain diagram at a given stress and load
by using the value of the look-up table.
by using the value of the look-up table (lut).
The lut is a DataFrame with MultiIndex with levels class_index and node_id.

This method performs the task for multiple points at once,
i.e. delta_load is a DataFrame with values for every node.

Parameters
----------
delta_stress : array-like float
The stress increment
delta_load : array-like float
The load increment

Expand Down
34 changes: 18 additions & 16 deletions src/pylife/strength/damage_parameter.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def _compute_values(self):
self._M_sigma = self._constants.a_M * 1e-3 * R_m + self._constants.b_M

# compute k according to eq. (2.6-83)
self._collective["k"] = 0
self._collective["k"] = 0.0
self._collective.loc[self._collective["S_m"]>=0, "k"] = self._M_sigma * (self._M_sigma + 2)
self._collective.loc[self._collective["S_m"]<0, "k"] = self._M_sigma/3 * (self._M_sigma/3 + 2)

Expand All @@ -98,7 +98,7 @@ def _compute_values(self):
self._collective["P_RAM"] = np.where(self._collective["discriminant"] >= 0,
np.sqrt(self._collective["discriminant"]
* self._collective["epsilon_a"]
* self._assessment_parameters.E), 0)
* self._assessment_parameters.E), 0.0)

# delete temporary columns
self._collective.drop(columns = ["discriminant", "k"], inplace=True)
Expand Down Expand Up @@ -295,37 +295,39 @@ def _compute_crack_opening_loop(self):
"""compute crack opening strain with history (chapter 2.8.9.3, chapter 2.9.8.1 is better)"""

# initialize new columns in collective DataFrame
self._collective["epsilon_open_alt"] = 0
self._collective["epsilon_open"] = 0
self._collective["P_RAJ"] = 0
self._collective["D"] = 0
self._collective["S_close"] = 0
self._collective["epsilon_open_alt"] = 0.0
self._collective["epsilon_open"] = 0.0
self._collective["P_RAJ"] = 0.0
self._collective["D"] = 0.0
self._collective["S_close"] = 0.0
self._collective["case_name"] = ""

self._collective["epsilon_min_alt_SP"] = 0
self._collective["epsilon_max_alt_SP"] = 0
self._collective["epsilon_min_alt_SP"] = 0.0
self._collective["epsilon_max_alt_SP"] = 0.0

assessment_point_index = self._collective[self._collective.index.get_level_values("hysteresis_index")==0]\
.index.get_level_values("assessment_point_index")

# initialize variables
epsilon_open_alt = -np.inf # initialized according to 2.9.7 point 2
epsilon_open_alt = pd.Series(0, index=assessment_point_index)
epsilon_open_alt = pd.Series(0.0, index=assessment_point_index)

epsilon_min_alt_SP = pd.Series(np.infty, index=assessment_point_index)
epsilon_max_alt_SP = pd.Series(-np.infty, index=assessment_point_index)

epsilon_min_alt_SP = pd.Series(0, index=assessment_point_index)
epsilon_max_alt_SP = pd.Series(0, index=assessment_point_index)
epsilon_min_alt_SP = pd.Series(0.0, index=assessment_point_index)
epsilon_max_alt_SP = pd.Series(0.0, index=assessment_point_index)

# cumulative damage value
D_akt = pd.Series(0, index=assessment_point_index)
D_akt = pd.Series(0.0, index=assessment_point_index)

# initialize current fatigue limit
P_RAJ_D = pd.Series(self._P_RAJ_D_0, index=assessment_point_index)

P_RAJ_D = pd.Series(self._P_RAJ_D_0, index=assessment_point_index)

# initialize helper variables
epsilon_open = pd.Series(0, index=assessment_point_index)
epsilon_open = pd.Series(0.0, index=assessment_point_index)

# Find the last hysteresis of the first run of the HCM algorithm. After this hysteresis, we need to initialize some variables for the second HCM run.
last_index_of_first_run = self._collective[(self._collective["run_index"]==1) & (self._collective["run_index"].shift(-1)==2)].index
Expand Down Expand Up @@ -505,7 +507,7 @@ def optimize(row):
# compute value of P_RAJ, eq. (2.9-110)
P_RAJ = np.where(is_damage_in_current_hysteresis,
self._calculate_P_RAJ(delta_S_eff, delta_epsilon_eff),
0)
0.0)

# store value in collective DataFrame for current hysteresis
self._collective.loc[self._collective.index.get_level_values("hysteresis_index")==index,"P_RAJ"] = P_RAJ
Expand All @@ -524,7 +526,7 @@ def optimize(row):

# compute damage contribution
D = np.where(group.is_closed_hysteresis,
1/N,
1.0/N,
0.5/N)

# store damage in collective
Expand Down
58 changes: 29 additions & 29 deletions tests/mesh/test_gradient.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def test_grad_constant():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dx():
Expand All @@ -68,7 +68,7 @@ def test_grad_dx():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dx_flipped_index_levels():
Expand All @@ -88,7 +88,7 @@ def test_grad_dx_flipped_index_levels():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dx_shuffle():
Expand All @@ -110,7 +110,7 @@ def test_grad_dx_shuffle():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dy():
Expand All @@ -132,7 +132,7 @@ def test_grad_dy():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dy_shuffle():
Expand All @@ -153,7 +153,7 @@ def test_grad_dy_shuffle():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dxy_simple():
Expand All @@ -174,7 +174,7 @@ def test_grad_dxy_simple():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dxy_complex():
Expand All @@ -195,7 +195,7 @@ def test_grad_dxy_complex():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dxy_simple_shuffle():
Expand All @@ -217,7 +217,7 @@ def test_grad_dxy_simple_shuffle():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


def test_grad_dxy_complex_shuffle():
Expand All @@ -239,7 +239,7 @@ def test_grad_dxy_complex_shuffle():

grad = df.gradient.gradient_of('fct')

pd.testing.assert_frame_equal(grad, expected)
pd.testing.assert_frame_equal(grad, expected, rtol=1e-12)


# ---- gradient_3D
Expand All @@ -260,7 +260,7 @@ def test_gradient_3D_constant():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)

def test_gradient_3D_is_not_3D():
fkt = [5, 6, 7, 8, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]
Expand All @@ -279,7 +279,7 @@ def test_gradient_3D_is_not_3D():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)


def test_gradient_3D_dx():
Expand Down Expand Up @@ -316,7 +316,7 @@ def test_gradient_3D_dx():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)


def test_gradient_3D_dx_flipped_index_levels():
Expand All @@ -338,7 +338,7 @@ def test_gradient_3D_dx_flipped_index_levels():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)


def test_gradient_3D_dy():
Expand All @@ -361,7 +361,7 @@ def test_gradient_3D_dy():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)

def test_gradient_3D_dy_flipped_index_levels():

Expand All @@ -383,7 +383,7 @@ def test_gradient_3D_dy_flipped_index_levels():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)


def test_gradient_3D_dxy():
Expand All @@ -406,7 +406,7 @@ def test_gradient_3D_dxy():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)

def test_gradient_3D_dxy_flipped_index_levels():
fkt_x = np.array([1, 4, 4, 1, 1, 4, 4, 1, 4, 7, 7, 4, 4, 7, 7, 4, 1, 4, 4, 1, 1, 4, 4, 1, 4, 7, 7, 4, 4, 7, 7, 4]) # x: 1 4 7, y: 1 5 9
Expand All @@ -428,7 +428,7 @@ def test_gradient_3D_dxy_flipped_index_levels():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)


def test_gradient_3D_dxyz_8nodes():
Expand Down Expand Up @@ -460,7 +460,7 @@ def test_gradient_3D_dxyz_8nodes():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index())
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), rtol=1e-12)

def test_gradient_3D_dxyz_8_nodes_flipped_index_levels():

Expand Down Expand Up @@ -492,7 +492,7 @@ def test_gradient_3D_dxyz_8_nodes_flipped_index_levels():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index())
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), rtol=1e-12)

def test_gradient_3D_dxyz_16nodes():

Expand Down Expand Up @@ -523,7 +523,7 @@ def test_gradient_3D_dxyz_16nodes():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index())
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), rtol=1e-12)


def test_gradient_3D_dxyz_20nodes():
Expand Down Expand Up @@ -555,7 +555,7 @@ def test_gradient_3D_dxyz_20nodes():

grad = df.gradient_3D.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index())
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), rtol=1e-12)


def test_gradient_3D_tetrahedron():
Expand All @@ -577,7 +577,7 @@ def test_gradient_3D_tetrahedron():

grad = df.gradient_3D.gradient_of('f').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index())
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), rtol=1e-12)


def test_gradient_3D_tetrahedron_10_nodes():
Expand All @@ -599,7 +599,7 @@ def test_gradient_3D_tetrahedron_10_nodes():

grad = df.gradient_3D.gradient_of('f').sort_index()

pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index())
pd.testing.assert_frame_equal(grad.reset_index(), expected.reset_index(), rtol=1e-12)


def test_gradient_3D_tetrahedron_compare():
Expand All @@ -624,7 +624,7 @@ def test_gradient_3D_tetrahedron_compare():

# the following fails, which means that the Gradient class is less accurate for tet elements
#pd.testing.assert_frame_equal(grad_3D.reset_index(), grad.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad_3D.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad_3D.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)


def test_gradient_3D_hex_compare_1():
Expand Down Expand Up @@ -658,8 +658,8 @@ def test_gradient_3D_hex_compare_1():
grad_3D = df.gradient_3D.gradient_of('fct').sort_index()
grad = df.gradient.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad_3D.reset_index(), grad.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad_3D.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad_3D.reset_index(), grad.reset_index(), check_dtype=False, rtol=1e-12)
pd.testing.assert_frame_equal(grad_3D.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)


def test_gradient_3D_hex_compare_2():
Expand Down Expand Up @@ -693,5 +693,5 @@ def test_gradient_3D_hex_compare_2():
grad_3D = df.gradient_3D.gradient_of('fct').sort_index()
grad = df.gradient.gradient_of('fct').sort_index()

pd.testing.assert_frame_equal(grad_3D.reset_index(), grad.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad_3D.reset_index(), expected.reset_index(), check_dtype=False)
pd.testing.assert_frame_equal(grad_3D.reset_index(), grad.reset_index(), check_dtype=False, rtol=1e-12)
pd.testing.assert_frame_equal(grad_3D.reset_index(), expected.reset_index(), check_dtype=False, rtol=1e-12)
Loading