-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathmicrostructure_ve.py
803 lines (650 loc) · 24.9 KB
/
microstructure_ve.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
import pathlib
import shutil
import subprocess
from functools import partial, cache
from itertools import product
from os import PathLike
from typing import Optional, Sequence, List, Union, TextIO, Iterable, Literal, Dict
import numpy as np
from dataclasses import dataclass, field
BASE_PATH = pathlib.Path(__file__).parent
###################
# Keyword Classes #
###################
# Each keyword class represents a specific ABAQUS keyword.
# They know what the structure of the keyword section is and what data
# are needed to fill it out. They should minimize computation outside
# the to_inp method that actually writes directly to the input file.
@dataclass
class Heading:
    """ABAQUS *Heading keyword: free-text description at the top of the deck."""

    text: str = ""

    def to_inp(self, inp_file_obj):
        """Write the *Heading keyword followed by the heading text."""
        inp_file_obj.write(f"*Heading\n{self.text}\n")
# NOTE: every "1 +" you see is correcting the array indexing mismatch between
# python and abaqus (python has zero-indexed, abaqus has one-indexed arrays)
# however "+ 1" actually indicates an extra step
@dataclass
class GridNodes:
    """Regular grid of ABAQUS nodes plus the named boundary node sets.

    ``shape`` is the node count per axis (pixels + 1) and ``scale`` is the
    physical spacing between adjacent nodes.
    """

    shape: np.ndarray
    scale: float
    # Fix: the forward reference must be quoted -- NodeSet is defined later in
    # this module, and an unquoted name is evaluated at class-creation time,
    # raising NameError on import.
    nsets: Dict[str, "NodeSet"] = field(init=False)

    @classmethod
    def from_matl_img(cls, matl_img, scale):
        """Build the node grid for a material image (one node per pixel corner)."""
        nodes_shape = np.array(matl_img.shape) + 1
        return cls(nodes_shape, scale)

    def __post_init__(self):
        self.node_nums = range(1, 1 + np.prod(self.shape))  # 1-indexing for ABAQUS
        # one extra node number past the grid, used as a "virtual" node below
        self.virtual_node = self.node_nums[-1] + 1
        # create nsets for every face/edge/vertex of the grid
        self.nsets = {}
        make_set = partial(NodeSet.from_slice, nodes=self)
        if self.dim == 2:
            # Declare nsets using "Sides_2d" slicer dictionary
            for side, sl in Sides_2d.items():
                self.nsets[side] = make_set(side, sl)
        elif self.dim == 3:
            # Declare nsets using "Sides_3d" slicer dictionary
            for side, sl in Sides_3d.items():
                self.nsets[side] = make_set(side, sl)
        else:
            raise ValueError('GridNodes has illegal number of dimensions', self.dim)

    @property
    def dim(self):
        """Number of spatial dimensions (2 or 3)."""
        return len(self.shape)

    def to_inp(self, inp_file_obj):
        """Write the *Node records followed by every nset."""
        # [::-1] flips axis order so coordinates are written X, Y[, Z]
        pos = self.scale * np.indices(self.shape)[::-1]
        inp_file_obj.write("*Node\n")
        for node_num, *p in zip(self.node_nums, *map(np.ravel, pos)):
            inp_file_obj.write(f"{node_num:d}")
            for d in p:
                inp_file_obj.write(f",\t{d:.6e}")
            inp_file_obj.write("\n")
        # quirk: we abuse the loop variables to put another "virtual" node at the corner
        inp_file_obj.write(f"{self.virtual_node:d}")
        # noinspection PyUnboundLocalVariable
        for d in p:
            inp_file_obj.write(f",\t{d:.6e}")
        inp_file_obj.write("\n")
        for nset in self.nsets.values():
            nset.to_inp(inp_file_obj)
@dataclass
class GridElements:
    """ABAQUS *Element records for a regular grid: one reduced-integration
    4-node (2D) or 8-node (3D) element per pixel/voxel of the image."""

    nodes: GridNodes
    # element type must match the dimensionality of `nodes` (checked below)
    type: Literal["CPE4R", "CPS4R", "C3D8R"] = "C3D8R"

    def __post_init__(self):
        dim = self.nodes.dim
        if dim == 2:
            if self.type not in {"CPE4R", "CPS4R"}:
                raise ValueError("Need a 2D element type, got:", self.type)
        elif dim == 3:
            if self.type not in {"C3D8R"}:
                raise ValueError("Need a 3D element type, got:", self.type)
        else:
            raise ValueError('GridNodes has illegal number of dimensions', dim)
        # one element per grid cell: cells-per-axis is nodes-per-axis minus one
        self.element_nums = range(1, 1 + np.prod(self.nodes.shape - 1))

    def to_inp(self, inp_file_obj):
        # strategy: generate one array representing all nodes, then make slices of it
        # that represent offsets to e.g. the right or top to iterate
        all_nodes = 1 + np.ravel_multi_index(
            np.indices(self.nodes.shape), self.nodes.shape
        )
        # each slice picks the "low" (:-1) or "high" (1:) node along one axis,
        # so the product enumerates all 2**dim corners of a cell
        node_slices = list(product(
            (np.s_[:-1], np.s_[1:]),
            repeat=self.nodes.dim
        ))
        # elements are defined counterclockwise, but product produces zigzag
        # swapping the third and fourth elements works for squares and cubes
        node_slices[2], node_slices[3] = node_slices[3], node_slices[2]
        try:
            node_slices[6], node_slices[7] = node_slices[7], node_slices[6]
        except IndexError:
            pass  # it's 2D
        inp_file_obj.write(f"*Element, type={self.type}\n")
        for elem_num, *ns in zip(
            self.element_nums,
            *(all_nodes[sl].ravel() for sl in node_slices),
        ):
            inp_file_obj.write(f"{elem_num:d}")
            for n in ns:
                inp_file_obj.write(f",\t{n:d}")
            inp_file_obj.write("\n")
# Slicers selecting each boundary region (faces, edges, vertices) of a 3D node grid.
Sides_3d = {
    # Abaqus interprets this as [Z, Y, X]
    # Remember that np arrays are written in [H, W, D] or [Y, X, Z]
    # FACES
    "X0": np.s_[1:-1, 1:-1, 0],  # Left (Face)
    "X1": np.s_[1:-1, 1:-1, -1],  # Right (Face)
    "Y0": np.s_[1:-1, 0, 1:-1],  # Bottom (Face)
    "Y1": np.s_[1:-1, -1, 1:-1],  # Top (Face)
    "Z0": np.s_[0, 1:-1, 1:-1],  # Back (Face)
    "Z1": np.s_[-1, 1:-1, 1:-1],  # Front (Face)
    # EDGES
    # on x axis
    "Y0Z0": np.s_[0, 0, 1:-1],  # Bottom Back (Edge)
    "Y0Z1": np.s_[-1, 0, 1:-1],  # Bottom Front (Edge)
    "Y1Z0": np.s_[0, -1, 1:-1],  # Top Back (Edge)
    "Y1Z1": np.s_[-1, -1, 1:-1],  # Top Front (Edge)
    # on y axis
    "X0Z0": np.s_[0, 1:-1, 0],  # Left Back (Edge)
    "X0Z1": np.s_[-1, 1:-1, 0],  # Left Front (Edge)
    "X1Z0": np.s_[0, 1:-1, -1],  # Right Back (Edge)
    "X1Z1": np.s_[-1, 1:-1, -1],  # Right Front (Edge)
    # on z axis (these edges vary along the first index; comment fixed,
    # it previously repeated "on x axis")
    "X0Y0": np.s_[1:-1, 0, 0],  # Left Bottom (Edge)
    "X0Y1": np.s_[1:-1, -1, 0],  # Left Top (Edge)
    "X1Y0": np.s_[1:-1, 0, -1],  # Right Bottom (Edge)
    "X1Y1": np.s_[1:-1, -1, -1],  # Right Top (Edge)
    # VERTICES
    "X0Y0Z0": np.s_[0, 0, 0],  # Left Bottom Back (Vertex)
    "X0Y1Z0": np.s_[0, -1, 0],  # Left Top Back (Vertex)
    "X1Y0Z0": np.s_[0, 0, -1],  # Right Bottom Back (Vertex)
    "X1Y1Z0": np.s_[0, -1, -1],  # Right Top Back (Vertex)
    "X0Y0Z1": np.s_[-1, 0, 0],  # Left Bottom Front (Vertex)
    "X0Y1Z1": np.s_[-1, -1, 0],  # Left Top Front (Vertex)
    "X1Y0Z1": np.s_[-1, 0, -1],  # Right Bottom Front (Vertex)
    "X1Y1Z1": np.s_[-1, -1, -1],  # Right Top Front (Vertex)
}
# Slicers selecting each boundary region (edges, vertices) of a 2D node grid.
Sides_2d = {
    # 2D
    # EDGES
    # on x axis
    "Y0": np.s_[0, 1:-1],  # Bottom (Edge)
    "Y1": np.s_[-1, 1:-1],  # Top (Edge)
    # on y axis
    "X0": np.s_[1:-1, 0],  # Left (Edge)
    "X1": np.s_[1:-1, -1],  # Right (Edge)
    # VERTICES
    "X0Y0": np.s_[0, 0],  # Left Bottom (Vertex)
    "X0Y1": np.s_[-1, 0],  # Left Top (Vertex)
    "X1Y0": np.s_[0, -1],  # Right Bottom (Vertex)
    "X1Y1": np.s_[-1, -1],  # Right Top (Vertex)
}
@dataclass(eq=False)
class NodeSet:
    """A named ABAQUS node set (*Nset) holding 1-indexed node numbers."""

    name: str
    node_inds: Union[np.ndarray, List[int]]

    @classmethod
    def from_slice(cls, name, slice_, nodes):
        """Build the nset of all grid nodes selected by `slice_`."""
        grid = np.indices(nodes.shape)
        selected = tuple(axis[slice_].ravel() for axis in grid)
        # 1 + converts zero-indexed python offsets into one-indexed abaqus numbers
        node_inds = 1 + np.ravel_multi_index(selected, dims=nodes.shape)
        return cls(name, node_inds)

    def __str__(self):
        # keywords elsewhere refer to the set purely by its name
        return self.name

    def to_inp(self, inp_file_obj):
        """Write the *Nset keyword with one node number per line."""
        inp_file_obj.write(f"*Nset, nset={self.name}\n")
        inp_file_obj.write("".join(f"{ind:d}\n" for ind in self.node_inds))
@dataclass
class SequentialDifferenceEquation:
    """*Equation constraints tying aligned node pairs to two reference nodes.

    For each aligned pair (a, b) from the first two nsets, enforces
    a - b - c + d = 0 in the given dof, where c and d are nsets[2] and
    nsets[3] (single reference sets or node numbers).
    """

    nsets: Sequence[Union[NodeSet, int]]
    dof: int  # degree of freedom, 1-indexed per ABAQUS convention

    def to_inp(self, inp_file_obj):
        # walk the two node sets in lockstep (the original enumerated one set
        # and re-indexed both, leaving an unused loop variable)
        for a, b in zip(self.nsets[0].node_inds, self.nsets[1].node_inds):
            inp_file_obj.write(
                f"""\
*Equation
4
{a}, {self.dof}, 1.
{b}, {self.dof}, -1.
{self.nsets[2]}, {self.dof}, -1.
{self.nsets[3]}, {self.dof}, 1.
"""
            )
@dataclass
class EqualityEquation:
    """*Equation forcing one dof of two nsets/nodes to be equal."""

    nsets: Sequence[Union[NodeSet, int]]
    dof: int

    def to_inp(self, inp_file_obj):
        lhs, rhs = self.nsets[0], self.nsets[1]
        inp_file_obj.write(
            f"*Equation\n2\n{lhs}, {self.dof}, 1.\n{rhs}, {self.dof}, -1.\n"
        )
@dataclass
class DriveEquation(EqualityEquation):
    """EqualityEquation variant that adds a drive node's displacement term."""

    drive_node: Union[NodeSet, int]

    def to_inp(self, inp_file_obj):
        body = (
            "*Equation\n3\n"
            f"{self.nsets[0]}, {self.dof}, 1.\n"
            f"{self.nsets[1]}, {self.dof}, -1.\n"
            f"{self.drive_node}, {self.dof}, 1.\n"
        )
        inp_file_obj.write(body)
@dataclass
class ElementSet:
    """An ABAQUS *Elset: the element numbers sharing one material code."""

    matl_code: int
    elements: np.ndarray

    @classmethod
    def from_matl_img(cls, matl_img):
        """Produce a list of ElementSets corresponding to unique pixel values.

        Materials are ordered by the value in each of the pixels.
        """
        flat = matl_img.ravel()
        element_ids = np.arange(1, 1 + flat.size)  # abaqus one-indexed elements
        # np.unique returns the codes sorted, which fixes the material ordering
        return [cls(code, element_ids[flat == code]) for code in np.unique(flat)]

    def to_inp(self, inp_file_obj):
        """Write the *Elset keyword with one element number per line."""
        inp_file_obj.write(f"*Elset, elset=SET-{self.matl_code:d}\n")
        inp_file_obj.write("".join(f"{e:d}\n" for e in self.elements))
#################
# Combo classes #
#################
# These represent the structure of several keywords that need to be
# ordered or depend on each other's information somehow. They create a graph
# of information for a complete conceptual component of the input file.
class BoundaryConditions:
    """Base class for boundary conditions; the default writes nothing."""

    def to_inp(self, inp_file_obj):
        # no-op: subclasses override to emit their *Boundary keywords
        pass
@dataclass
class FixedBoundaryCondition(BoundaryConditions):
    """*Boundary keyword listing each given dof of a node/nset on its own line."""

    node: Union[NodeSet, int]
    dofs: Iterable

    def to_inp(self, inp_file_obj):
        inp_file_obj.write("*Boundary\n")
        for dof in self.dofs:
            # first dof == last dof, so each line names exactly one dof
            inp_file_obj.write(f"{self.node}, {dof}, {dof}\n")
@dataclass
class DisplacementBoundaryCondition(BoundaryConditions):
    """*Boundary, type=displacement: prescribe a displacement on a dof range."""

    nset: Union[NodeSet, int]
    first_dof: int
    last_dof: int
    displacement: float

    def to_inp(self, inp_file_obj):
        header = "*Boundary, type=displacement\n"
        data = (
            f"{self.nset}, {self.first_dof}, {self.last_dof}, "
            f"{self.displacement}\n"
        )
        inp_file_obj.write(header + data)
@dataclass
class Material:
    """Elastic material definition tied to an element set (section + material)."""

    elset: ElementSet
    density: float  # kg/micron^3
    poisson: float
    youngs: float  # MPa, long term, low freq modulus

    def to_inp(self, inp_file_obj):
        # the elset must be written first so the section can reference it
        self.elset.to_inp(inp_file_obj)
        mc = self.elset.matl_code
        inp_file_obj.write(
            f"*Solid Section, elset=SET-{mc:d}, material=MAT-{mc:d}\n"
            "1.\n"
            f"*Material, name=MAT-{mc:d}\n"
            "*Density\n"
            f"{self.density:.6e}\n"
            "*Elastic\n"
            f"{self.youngs:.6e}, {self.poisson:.6e}\n"
        )
@dataclass
class TabularViscoelasticMaterial(Material):
    """Material with tabulated frequency-domain VE data (*Viscoelastic, TABULAR)."""

    freq: np.ndarray  # excitation freq in Hz
    youngs_cplx: np.ndarray  # complex youngs modulus
    shift: float = 0.0  # frequency shift induced relative to nominal properties
    left_broadening: float = 1.0  # 1 is no broadening
    right_broadening: float = 1.0  # 1 is no broadening

    def apply_shift(self):
        """Apply shift and broadening factors to frequency.
        left and right refer to frequencies below and above tand peak"""
        # work in log10(frequency) so shift and broadening act per decade
        freq = np.log10(self.freq) - self.shift
        # shift relative to tand peak (tan delta = loss/storage = imag/real)
        i = np.argmax(self.youngs_cplx.imag / self.youngs_cplx.real)
        f = freq[i]
        # stretch distances from the peak independently on each side
        freq[:i] = self.left_broadening * (freq[:i] - f) + f
        freq[i:] = self.right_broadening * (freq[i:] - f) + f
        return 10**freq

    def normalize_modulus(self):
        """Convert to abaqus's preferred normalized moduli"""
        # NOTE(review): a previous comment here said "Only works with
        # frequency-dependent poisson's ratio", yet the math below applies the
        # scalar self.poisson at every frequency -- confirm intended meaning.
        shear_cplx = self.youngs_cplx / (2 * (1 + self.poisson))
        bulk_cplx = self.youngs_cplx / (3 * (1 - 2 * self.poisson))
        # special normalized shear modulus used by abaqus
        wgstar = np.empty_like(shear_cplx)
        # [0] is the first tabulated point; assumes freq is sorted ascending
        # so this is the long-term (lowest-frequency) modulus -- TODO confirm
        shear_inf = shear_cplx[0].real
        wgstar.real = shear_cplx.imag / shear_inf
        wgstar.imag = 1 - shear_cplx.real / shear_inf
        # special normalized bulk modulus used by abaqus
        wkstar = np.empty_like(shear_cplx)
        bulk_inf = bulk_cplx[0].real
        wkstar.real = bulk_cplx.imag / bulk_inf
        wkstar.imag = 1 - bulk_cplx.real / bulk_inf
        return wgstar, wkstar

    def normalize_constant_bulk_modulus(self):
        # assume bulk modulus of glassy system
        bulk_inf = self.youngs_cplx[-1].real / (3 * (1 - 2 * self.poisson))
        # shear from youngs under the constant-bulk assumption
        shear_cplx = 3 * bulk_inf * self.youngs_cplx / (9 * bulk_inf - self.youngs_cplx)
        # special normalized shear modulus used by abaqus
        wgstar = np.empty_like(shear_cplx)
        shear_inf = shear_cplx[0].real
        wgstar.real = shear_cplx.imag / shear_inf
        wgstar.imag = 1 - shear_cplx.real / shear_inf
        # special normalized bulk modulus used by abaqus
        # if bulk_cplx = bulk_inf, wgstar is all zeros
        wkstar = np.zeros_like(shear_cplx)
        return wgstar, wkstar

    def normalize_constant_nu_modulus(self):
        # special normalized bulk modulus used by abaqus
        # if poisson's ratio is frequency-independent, then
        # youngs=shear=bulk when normalized
        wgstar = np.empty_like(self.youngs_cplx)
        youngs_inf = self.youngs_cplx[0].real
        wgstar.real = self.youngs_cplx.imag / youngs_inf
        wgstar.imag = 1 - self.youngs_cplx.real / youngs_inf
        return wgstar, wgstar

    def to_inp(self, inp_file_obj):
        # write the base elastic definition first, then the tabular VE block
        super().to_inp(inp_file_obj)
        inp_file_obj.write("*Viscoelastic, frequency=TABULAR\n")
        wgstar, wkstar = self.normalize_constant_nu_modulus()
        freq = self.apply_shift()
        # one row per frequency: g real/imag, k real/imag, frequency
        for wgr, wgi, wkr, wki, f in zip(
            wgstar.real, wgstar.imag, wkstar.real, wkstar.imag, freq
        ):
            inp_file_obj.write(f"{wgr:.6e}, {wgi:.6e}, {wkr:.6e}, {wki:.6e}, {f:.6e}\n")
@dataclass
class PeriodicBoundaryCondition:
    """Tie opposing boundary node sets together with *Equation constraints.

    Each node_pairs entry is [A, B, C, D], read as "A - B = C - D" per dof
    (see SequentialDifferenceEquation): an opposing face/edge/vertex pair
    plus two reference vertices.
    """

    nodes: GridNodes

    def __post_init__(self):
        nsets = self.nodes.nsets
        if self.nodes.dim == 2:
            # 2D boundaries
            self.node_pairs: List[List[NodeSet]] = [
                # Vertices
                [nsets["X1Y1"], nsets["X0Y1"], nsets["X1Y0"], nsets["X0Y0"]],  # 2-1 = 4-3
                # Edges
                [nsets["X1"], nsets["X0"], nsets["X1Y0"], nsets["X0Y0"]],  # e6-e5 = 4-3
                [nsets["Y1"], nsets["Y0"], nsets["X0Y1"], nsets["X0Y0"]],  # e10-e9 = 1-3
            ]
        elif self.nodes.dim == 3:
            # 3D boundaries
            self.node_pairs: List[List[NodeSet]] = [
                # Vertices
                [nsets["X1Y1Z0"], nsets["X0Y1Z0"], nsets["X1Y0Z0"], nsets["X0Y0Z0"]],  # 2-1 = 4-3
                [nsets["X1Y1Z1"], nsets["X0Y1Z1"], nsets["X1Y0Z0"], nsets["X0Y0Z0"]],  # 6-5 = 4-3
                [nsets["X1Y0Z1"], nsets["X0Y0Z1"], nsets["X1Y0Z0"], nsets["X0Y0Z0"]],  # 8-7 = 4-3
                [nsets["X0Y1Z1"], nsets["X0Y0Z1"], nsets["X0Y1Z0"], nsets["X0Y0Z0"]],  # 5-7 = 1-3
                # Edges
                [nsets["X1Y0"], nsets["X0Y0"], nsets["X1Y0Z0"], nsets["X0Y0Z0"]],  # e2-e1 = 4-3
                [nsets["X1Y1"], nsets["X0Y1"], nsets["X1Y0Z0"], nsets["X0Y0Z0"]],  # e3-e4 = 4-3
                [nsets["X1Z0"], nsets["X0Z0"], nsets["X1Y0Z0"], nsets["X0Y0Z0"]],  # e6-e5 = 4-3
                [nsets["X1Z1"], nsets["X0Z1"], nsets["X1Y0Z0"], nsets["X0Y0Z0"]],  # e7-e8 = 4-3
                [nsets["Y1Z0"], nsets["Y0Z0"], nsets["X0Y1Z0"], nsets["X0Y0Z0"]],  # e10-e9 = 1-3
                [nsets["Y1Z1"], nsets["Y0Z1"], nsets["X0Y1Z0"], nsets["X0Y0Z0"]],  # e11-e12 = 1-3
                [nsets["X0Y1"], nsets["X0Y0"], nsets["X0Y1Z0"], nsets["X0Y0Z0"]],  # e4-e1 = 1-3
                [nsets["X0Z1"], nsets["X0Z0"], nsets["X0Y0Z1"], nsets["X0Y0Z0"]],  # e8-e5 = 7-3
                [nsets["Y0Z1"], nsets["Y0Z0"], nsets["X0Y0Z1"], nsets["X0Y0Z0"]],  # e12-e9 = 7-3
                # Faces
                [nsets["X1"], nsets["X0"], nsets["X1Y0Z0"], nsets["X0Y0Z0"]],  # xFront-xBack = 4-3
                [nsets["Y1"], nsets["Y0"], nsets["X0Y1Z0"], nsets["X0Y0Z0"]],  # yTop-yBottom = 1-3
                [nsets["Z1"], nsets["Z0"], nsets["X0Y0Z1"], nsets["X0Y0Z0"]],  # zLeft-zRight = 7-3
            ]
        else:
            raise ValueError('GridNodes has illegal number of dimensions', self.nodes.dim)

    def to_inp(self, inp_file_obj):
        # one SequentialDifferenceEquation per pair per degree of freedom
        for node_pair in self.node_pairs:
            for i in range(self.nodes.dim):
                SequentialDifferenceEquation(node_pair, i + 1).to_inp(inp_file_obj)
@dataclass
class PronyViscoelasticMaterial(Material):
    """Viscoelastic material given as a Prony series (*Viscoelastic, PRONY).

    The coefficient arrays hold per-term shear/bulk moduli (same units as
    youngs) with matching relaxation times.
    """

    shear_modulus_coefficients: np.ndarray
    bulk_modulus_coefficients: np.ndarray
    relaxation_times: np.ndarray

    def to_inp(self, inp_file_obj):
        super().to_inp(inp_file_obj)
        # Abaqus wants these normalized by the instantaneous modulus.
        # Long-term shear modulus is E / (2*(1+nu)) -- fixed: this previously
        # divided by 3, which contradicts the standard isotropic relation and
        # the 2*(1+nu) used in TabularViscoelasticMaterial.normalize_modulus.
        g_0 = self.youngs / 2 / (1 + self.poisson) + np.sum(
            self.shear_modulus_coefficients
        )
        g_ratios = self.shear_modulus_coefficients / g_0
        # long-term bulk modulus is E / (3*(1-2*nu))
        k_0 = self.youngs / 3 / (1 - 2 * self.poisson) + np.sum(
            self.bulk_modulus_coefficients
        )
        k_ratios = self.bulk_modulus_coefficients / k_0
        inp_file_obj.write("*Viscoelastic, frequency=PRONY\n")
        for g, k, t in zip(g_ratios, k_ratios, self.relaxation_times):
            inp_file_obj.write(f"{g:.6e}, {k:.6e}, {t:.6e}\n")
@dataclass
class OldPeriodicBoundaryCondition(DisplacementBoundaryCondition):
    """Legacy periodic BC driven through a displacement boundary condition.

    NOTE(review): the slicers used here (np.s_[:, -1] etc.) are 2D -- confirm
    this class is never instantiated with a 3D node grid.
    """

    nodes: GridNodes

    def __post_init__(self):
        # helper: build an nset from a named 2D side slicer
        def make_set(name):
            return NodeSet.from_slice(name, Sides_2d[name], self.nodes)

        ndim = len(self.nodes.shape)
        # the whole right column of nodes is driven by the displacement BC
        self.driven_nset = NodeSet.from_slice("X1ALL", np.s_[:, -1], self.nodes)
        self.node_pairs: List[List[NodeSet]] = [
            [NodeSet.from_slice("X0ALL", np.s_[:, 0], self.nodes), self.driven_nset],
            [make_set("Y0"), make_set("Y1")],
            [make_set("X1Y0"), make_set("X1Y1")],
        ]
        # Displacement at any surface node is equal to the opposing surface
        # node in both degrees of freedom unless one of the surfaces is a driver.
        # in that case, add the avg displacement from the drive node
        self.eq_pairs: List[List[EqualityEquation]] = [
            [EqualityEquation(p, x + 1) for x in range(ndim)]
            if (self.driven_nset not in p)
            else [
                DriveEquation(p, x + 1, drive_node=self.nset)
                if x in range(self.first_dof - 1, self.last_dof)
                else EqualityEquation(p, x + 1)
                for x in range(ndim)
            ]
            for p in self.node_pairs
        ]

    def to_inp(self, inp_file_obj):
        # write each pair's nsets and equations, then the *Boundary itself
        for node_pair, eq_pair in zip(self.node_pairs, self.eq_pairs):
            node_pair[0].to_inp(inp_file_obj)
            node_pair[1].to_inp(inp_file_obj)
            eq_pair[0].to_inp(inp_file_obj)
            eq_pair[1].to_inp(inp_file_obj)
        super().to_inp(inp_file_obj)
@dataclass
class Static:
    """Data for an ABAQUS STATIC subsection of STEP"""

    long_term: bool = False

    def to_inp(self, inp_file_obj):
        suffix = ", LONG TERM" if self.long_term else ""
        inp_file_obj.write(f"*STATIC{suffix}\n")
@dataclass
class Dynamic:
    """Data for an ABAQUS STEADY STATE DYNAMICS subsection of STEP"""

    f_initial: float  # first excitation frequency, Hz
    f_final: float  # last excitation frequency, Hz
    f_count: int  # number of frequency points
    bias: int  # abaqus point-spacing bias parameter

    def to_inp(self, inp_file_obj):
        header = "*STEADY STATE DYNAMICS, DIRECT\n"
        data = f"{self.f_initial}, {self.f_final}, {self.f_count}, {self.bias}\n"
        inp_file_obj.write(header + data)
@dataclass
class Step:
    """An ABAQUS *STEP container wrapping its subsection keywords."""

    subsections: Iterable
    perturbation: bool = False

    def to_inp(self, inp_file_obj):
        opening = "*STEP,PERTURBATION\n" if self.perturbation else "*STEP\n"
        inp_file_obj.write(opening)
        for subsection in self.subsections:
            subsection.to_inp(inp_file_obj)
        inp_file_obj.write("*END STEP\n")
@dataclass
class Model:
    """The mesh, materials, and boundary conditions making up one model."""

    nodes: GridNodes
    elements: GridElements
    materials: Iterable[Material]
    bcs: Iterable[BoundaryConditions] = ()
    nsets: Iterable[NodeSet] = ()

    def to_inp(self, inp_file_obj):
        """Write every component in the order the input file expects."""
        self.nodes.to_inp(inp_file_obj)
        for extra_nset in self.nsets:
            extra_nset.to_inp(inp_file_obj)
        self.elements.to_inp(inp_file_obj)
        for material in self.materials:
            material.to_inp(inp_file_obj)
        for condition in self.bcs:
            condition.to_inp(inp_file_obj)
@dataclass
class Simulation:
    """A full input deck: optional heading, one model, and any number of steps."""

    model: Model
    heading: Optional[Heading] = None
    steps: Iterable[Step] = ()

    def to_inp(self, inp_file_obj: TextIO):
        """Write the complete ABAQUS input file to an open text stream."""
        if self.heading is not None:
            self.heading.to_inp(inp_file_obj)
        self.model.to_inp(inp_file_obj)
        for step in self.steps:
            step.to_inp(inp_file_obj)
####################
# Helper functions #
####################
# High level functions representing important transformations or steps.
# Probably the most important part is the name and docstring, to explain
# WHY a certain procedure is being taken/option being input.
def in_sorted(arr, val):
    """Determine if val is contained in arr, assuming arr is sorted"""
    index = np.searchsorted(arr, val)
    # searchsorted returns len(arr) when val is beyond the last element
    return index < len(arr) and val == arr[index]
def load_matlab_microstructure(matfile, var_name):
    """Load a microstructure variable from a MATLAB .mat file.

    :param matfile: file name (or open file) of the .mat microstructure
    :param var_name: name of the variable inside the .mat file that holds
        the 2D 0-1 microstructure matrix
    :return: the variable's array as loaded by scipy
    """
    from scipy.io import loadmat

    contents = loadmat(matfile, matlab_compatible=True)
    return contents[var_name]
def assign_intph(microstructure: np.ndarray, num_layers_list: List[int]) -> np.ndarray:
    """Generate interphase layers around the particles.

    Microstructure must have at least one zero value.

    :param microstructure: the microstructure array; particles must be zero,
        matrix must be nonzero
    :param num_layers_list: interphase thicknesses in pixels; layer codes are
        assigned by sorted distance from the particles (near -> far)
    :return: uint8 array of codes: 0 = particle, then one code per shell,
        with the largest code for pixels beyond every threshold
    """
    from scipy.ndimage import distance_transform_edt

    dists = distance_transform_edt(microstructure)
    # start with 0 on particles, 1 everywhere else...
    intph_img = (dists != 0).view("u1")
    # ...then bump the code once for each distance threshold that is exceeded
    for thickness in sorted(num_layers_list):
        intph_img += dists > thickness
    return intph_img
def periodic_assign_intph(
    microstructure: np.ndarray, num_layers_list: List[int]
) -> np.ndarray:
    """Generate interphase layers around the particles with periodic BC.

    Microstructure must have at least one zero value.

    :param microstructure: the microstructure array; particles must be zero,
        matrix must be nonzero
    :param num_layers_list: interphase thicknesses in pixels; layer codes are
        assigned by sorted distance from the particles (near -> far)
    """
    # tile 3x along every axis so distances wrap across the periodic boundary
    reps = (3,) * microstructure.ndim
    intph_tiled = assign_intph(np.tile(microstructure, reps), num_layers_list)
    # keep only the central tile: each axis spans [dim, 2*dim)
    center = tuple(slice(dim, 2 * dim) for dim in microstructure.shape)
    # copy() frees the view on the big tiled array's memory
    return intph_tiled[center].copy()
def load_viscoelasticity(matrl_name):
    """load VE data from a text file according to ABAQUS requirements

    mainly the frequency array needs to be strictly increasing, but also having
    the storage/loss data in complex numbers helps our calculations.
    """
    freq, storage, loss = np.loadtxt(matrl_name, unpack=True)
    # combine storage (real) and loss (imag) into one complex modulus array
    youngs = storage + 1j * loss
    # sort so frequency increases monotonically, as ABAQUS requires
    order = np.argsort(freq)
    return freq[order], youngs[order]
@cache
def find_command(command: str) -> Optional[PathLike]:
    """Locate an executable, falling back to an interactive-bash alias lookup.

    Returns the resolved path, or None if the command cannot be found.
    Results are cached per command name for the life of the process.
    """
    x = shutil.which(command)
    if x is None:
        # maybe it's a shell alias?
        if shutil.which("bash") is None:
            return None
        # -i runs bash interactively so the user's aliases are loaded
        p = subprocess.run(
            ["bash", "-i", "-c", f"alias {command}"],
            capture_output=True,
        )
        if p.returncode:
            return None
        # `alias x` prints e.g. alias x='/path/to/x'; take the single-quoted part
        x = p.stdout.split(b"'")[1].decode()
    try:
        # strict=True raises if the path doesn't actually exist
        return pathlib.Path(x).resolve(strict=True)
    except FileNotFoundError:
        return None
def run_job(job_name, cpus):
    """feed .inp file to ABAQUS and wait for the result"""
    abaqus = find_command("abaqus")
    args = [abaqus, f"job={job_name}", f"cpus={cpus}", "interactive"]
    # check=True raises if ABAQUS exits nonzero
    subprocess.run(args, check=True)
def read_odb(job_name, drive_nset):
    """Extract viscoelastic response from abaqus output ODB

    Uses abaqus python api which is stuck in python 2.7 ancient history,
    so we need to farm it out to a subprocess.
    """
    args = [
        find_command("abaqus"),
        "python",
        BASE_PATH / "readODB.py",
        job_name,
        drive_nset.name,
    ]
    subprocess.run(args, check=True)