Skip to content

Commit

Permalink
Merge pull request #6 from StructuralPython/chores/github-actions
Browse files Browse the repository at this point in the history
Update python-pytest.yml
  • Loading branch information
connorferster authored Jun 20, 2024
2 parents b8761b5 + dcfef29 commit 162ddb7
Show file tree
Hide file tree
Showing 16 changed files with 214 additions and 183 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python-pytest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ jobs:
python -m uv pip install black
- name: Test black formatted
run: |
black papermodels --check
black src --check
- name: Test with pytest
run: |
pytest
1 change: 1 addition & 0 deletions src/papermodels/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""
Load networks
"""

from . import db
from . import loads
from . import paper
Expand Down
72 changes: 41 additions & 31 deletions src/papermodels/datatypes/analysis_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ class AnalysisModel:
analyzed: bool = False
reactions: Optional[dict] = None


def create_model(self):
raise NotImplemented

Expand All @@ -34,7 +33,6 @@ def get_reaction_type(self):
raise NotImplemented



@dataclass
class PyNiteFEModel:
def create_model(self):
Expand All @@ -52,13 +50,15 @@ def create_model(self):
if support_type == "P":
beam_model.def_support(node_name, True, True, True, True, True, False)
elif support_type == "R":
beam_model.def_support(node_name, False, True, False, False, False, False)
beam_model.def_support(
node_name, False, True, False, False, False, False
)
elif support_type == "F":
beam_model.def_support(node_name, True, True, True, True, True, True)

shear_modulus = calc_shear_modulus(element_data["E"], element_data["nu"])
beam_model.add_material(
element_data['material_name'],
element_data["material_name"],
element_data["E"],
shear_modulus,
element_data["nu"],
Expand All @@ -71,7 +71,7 @@ def create_model(self):
element_data["Name"],
"N0",
node_name,
material=element_data['material_name'],
material=element_data["material_name"],
Iy=element_data["Iy"],
Iz=element_data["Iz"],
J=element_data["J"],
Expand All @@ -86,7 +86,7 @@ def add_load(self, load_data: dict) -> None:
Sets self.analyzed to False to force a re-analysis.
"""
load_cases = list(self.analysis_model.LoadCombos.keys())
if load_data['Type'] == "Point":
if load_data["Type"] == "Point":
self.analysis_model.add_member_pt_load(
self.structured_element_data["Name"],
load_data["Direction"],
Expand All @@ -98,21 +98,20 @@ def add_load(self, load_data: dict) -> None:
load_cases.append(load_data["Case"])

elif load_data["Type"] == "Dist":
self.analysis_model.add_member_dist_load(
self.structured_element_data["Name"],
load_data["Direction"],
load_data["Start Magnitude"],
load_data["End Magnitude"],
load_data["Start Location"],
load_data["End Location"],
case=load_data["Case"],
)
if load_data["Case"] not in load_cases:
load_cases.append(load_data["Case"])
self.analysis_model.add_member_dist_load(
self.structured_element_data["Name"],
load_data["Direction"],
load_data["Start Magnitude"],
load_data["End Magnitude"],
load_data["Start Location"],
load_data["End Location"],
case=load_data["Case"],
)
if load_data["Case"] not in load_cases:
load_cases.append(load_data["Case"])
self.analyzed = False
self.reactions = None


def _add_loads(self) -> None:
element_data = self.structured_element_data
load_cases = []
Expand Down Expand Up @@ -144,44 +143,55 @@ def _add_loads(self) -> None:
for load_case in load_cases:
self.analysis_model.add_load_combo(load_case, {load_case: 1.0})


def analyze(
    self,
    analyze_linear: bool = True,
    check_stability: bool = True,
    check_statics=False,
):
    """
    Run the underlying PyNite analysis and cache the results.

    'analyze_linear': when True, use the (faster) linear solver; otherwise
        run the full analysis.
    'check_stability' / 'check_statics': forwarded to the PyNite solver.

    Side effects: sets self.analyzed to True and refreshes self.reactions
    from the newly-solved model.
    """
    # Choose the solver once, then call it with the shared keyword args.
    solver = (
        self.analysis_model.analyze_linear
        if analyze_linear
        else self.analysis_model.analyze
    )
    solver(check_stability=check_stability, check_statics=check_statics)
    self.analyzed = True
    self.reactions = self.get_reactions()


def get_reactions(self):
    """
    Return the reactions for every node in the analyzed model as a nested
    dict: {node_name: {direction: reaction_combos}} for directions
    "Fx", "Fy", "Fz", "Mx", "My", "Mz".

    Raises UserWarning if the model has not been analyzed since the last
    change (self.analyzed is False).
    """
    if not self.analyzed:
        raise UserWarning(
            f"{self.analyzed=}. Re-run .analyze() to ensure results are current."
        )
    model_nodes = self.analysis_model.Nodes

    reactions = {}
    # Fix: iterating the mapping directly yields only the keys, which would
    # be unpacked character-by-character; .items() yields (name, node) pairs.
    # NOTE(review): assumes .Nodes is a dict-like of name -> node objects —
    # confirm against the PyNite API in use.
    for node_name, node_obj in model_nodes.items():
        reactions[node_name] = {}
        for reaction_dir in ("Fx", "Fy", "Fz", "Mx", "My", "Mz"):
            reaction_key = f"Rxn{reaction_dir.upper()}"
            reaction_combos = getattr(node_obj, reaction_key)
            # Fix: results were previously written to the top-level dict
            # (reactions[reaction_dir] = ...), clobbering a single entry per
            # direction instead of nesting the results under each node.
            reactions[node_name][reaction_dir] = reaction_combos
    return reactions


def get_forces(self, n_points: int = 200):
    """
    Return a dict of member force arrays for this element, keyed by action.

    'n_points': number of sample points along the member for each array.

    Raises UserWarning if the model has not been analyzed since the last
    change (self.analyzed is False).

    NOTE(review): only the axial ("N") action is extracted so far; the other
    actions in 'force_actions' ("Vz", "Vy", "Mz", "My", "T") still need
    their corresponding extraction calls (e.g. shear/moment/torque arrays).
    """
    if not self.analyzed:
        raise UserWarning(
            f"{self.analyzed=}. Re-run .analyze() to ensure results are current."
        )
    member_name = self.structured_element_data["Name"]
    member_obj = self.analysis_model.Members[member_name]

    forces = {}
    force_actions = ("N", "Vz", "Vy", "Mz", "My", "T")
    # Fix: the loop previously iterated over the undefined singular name
    # 'force_action' instead of 'force_actions', raising NameError at runtime.
    for force_action in force_actions:
        if force_action == "N":
            forces[force_action] = member_obj.axial_array(n_points=n_points)
    # Fix: the computed dict was previously built and then discarded
    # (the function fell off the end, implicitly returning None).
    return forces


def get_node_locations(
def calc_shear_modulus(E: float, nu: float) -> float:
    """
    Return the shear modulus (G) for an isotropic material, derived from
    the elastic modulus and Poisson's ratio: G = E / (2 * (1 + nu)).

    'E': elastic modulus
    'nu': Poisson's ratio
    """
    G = E / (2 * (1 + nu))
    return G
11 changes: 5 additions & 6 deletions src/papermodels/datatypes/element.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ class Element:
They are generated in a separate process and are not part of the capability
of the class. These attributes are simply to keep track of pre-discovered
intersections and correspondents.
'tag': str, represent a unique name for this element, as per the designer's preference
'type': str, describing what "type" of element it is. This is not an enumeration
and can take any designer-defined value. It is for user-level categorization.
Expand Down Expand Up @@ -108,15 +108,14 @@ def get_elements_by_page(elements: list[Element]) -> dict[int, list[Element]]:
def get_normalized_coordinate(element: Element, intersection_point: Point) -> float:
    """
    Return the x-coordinate of 'intersection_point' measured along the
    geometry of 'element', as the distance from the element's first vertex.
    """
    start_point = Point(element.geometry.coords[0])
    return start_point.distance(intersection_point)


def get_structured_model_data(element: Element) -> dict:
    """
    Not yet implemented.

    NOTE(review): annotated to return a dict but currently returns None —
    presumably intended to extract structured analysis data from 'element';
    confirm before relying on the return value.
    """
    return None
57 changes: 28 additions & 29 deletions src/papermodels/datatypes/element_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,39 +37,39 @@ class ElementModel:
@classmethod
def from_element(cls, element: Element):
    """Alternate constructor: build this model type around an existing Element."""
    new_model = cls(element=element)
    return new_model

@classmethod
def from_file(cls, filepath: pathlib.Path | str):
    """
    Alternate constructor: build this model type from a file on disk.
    Subclasses are expected to provide the implementation.
    """
    # Fix: 'NotImplemented' is a comparison sentinel, not an exception —
    # raising it produces "TypeError: exceptions must derive from
    # BaseException". NotImplementedError is the correct exception.
    raise NotImplementedError

def determine_loads(self, loads: list[dict]) -> None:
    """
    Returns None. Performs a geometric intersection with each load present
    in 'loads' to determine if any of the loads intersect with the
    self.trib_area of the element. Intersecting loads are accumulated in
    self.structured_element_data["Loads"].
    """
    if self.trib_area is None:
        return
    for load in loads:
        # NOTE(review): the guard above checks trib_area but the
        # intersection uses trib_region — confirm this is intentional.
        if load["geometry"].intersects(self.trib_region):
            applied_load = go.get_applied_load(load["geometry"], self.trib_region)
            if self.structured_element_data is None:
                self.structured_element_data = {}
            # Fix: indexing ["Loads"] on a freshly-created empty dict raised
            # KeyError; setdefault creates the list on first use.
            self.structured_element_data.setdefault("Loads", []).append(applied_load)

def get_reactions(self) -> tuple[float]:
raise NotImplemented

def create_model(self) -> None:
raise NotImplemented

def analyze_model(self) -> None:
raise NotImplemented


@dataclass
class BeamModel(ElementModel):

def from_file(cls, filepath: pathlib.Path):
raw_data = read_csv_file(filepath)
structured_beam_data = get_structured_beam_data(raw_data)
Expand All @@ -78,10 +78,9 @@ def from_file(cls, filepath: pathlib.Path):
beam_model = cls(
structured_element_data=structured_beam_data,
reaction_type=ReactionType.POINT,

)
)
return beam_model


@dataclass
class ColumnModel(ElementModel):
Expand All @@ -99,16 +98,16 @@ class JoistArrayModel(ElementModel):
between the supports may vary linearly.
"""

initial_offset: float | int = 0.0,
joist_at_start: bool = True,
joist_at_end: bool = False,
cantilever_tolerance: float = 1e-2,
initial_offset: float | int = (0.0,)
joist_at_start: bool = (True,)
joist_at_end: bool = (False,)
cantilever_tolerance: float = (1e-2,)

def __post_init__(self):
joist_supports = [inter[2] for inter in self.element.intersections]
joist_prototype = self.element.geometry
self.id = self.element.tag
self.spacing = 400 # Need to include this in the legend and thus, the Element
self.spacing = 400 # Need to include this in the legend and thus, the Element
self.initial_offset = float(self.initial_offset)
self._joist_prototype = joist_prototype
self._cantilever_tolerance = self.cantilever_tolerance
Expand Down Expand Up @@ -136,23 +135,21 @@ def __post_init__(self):
self.joist_trib_areas = [
self.generate_trib_area(idx) for idx, _ in enumerate(self.joist_locations)
]

# def __repr__(self):
# return class_representation(self)

@classmethod
def from_element(
    cls,
    element: Optional[Element],
    initial_offset: float | int = 0.0,
    joist_at_start: bool = True,
    joist_at_end: bool = False,
    cantilever_tolerance: float = 1e-2,
) -> JoistArrayModel:
    """
    Alternate constructor: build a JoistArrayModel from an Element plus the
    array-layout options (offset of the first joist, whether joists sit at
    the start/end supports, and the cantilever detection tolerance).
    """
    args = (element, initial_offset, joist_at_start, joist_at_end, cantilever_tolerance)
    return cls(*args)

def generate_joist(self, index: int):
Expand Down Expand Up @@ -238,9 +235,9 @@ def get_joist_trib_widths(self, index) -> tuple[float, float]:
if index < 0:
# Convert -ve index lookup to a +ve index lookup
index = len(self.joist_locations) + index
if index == 0: # The first joist
if index == 0: # The first joist
trib_widths = (0.0, self.joist_locations[1] / 2.0)
elif index == len(self.joist_locations) - 1: # The last joist
elif index == len(self.joist_locations) - 1: # The last joist
spacing_left = self.joist_locations[-1] - self.joist_locations[-2]
trib_widths = (spacing_left / 2.0, 0.0)
else:
Expand All @@ -265,7 +262,9 @@ def generate_trib_area(self, index: int) -> Polygon:
if trib_left != 0.0:
i_left = go.project_node(i_node, self.vector_normal, trib_left)
j_left = go.project_node(j_node, self.vector_normal, trib_left)
trib_area_left = go.get_convex_hull(go.create_multipolygon([i_left, j_left, j_node, i_node]))
trib_area_left = go.get_convex_hull(
go.create_multipolygon([i_left, j_left, j_node, i_node])
)
else:
trib_area_left = go.create_polygon([])

Expand Down
8 changes: 4 additions & 4 deletions src/papermodels/datatypes/geometry_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,10 @@ def __init__(self):
super().__init__()
self.node_hash = None


@classmethod
def from_elements(cls, elements: list[Element], floor_elevations: Optional[dict] = None) -> GeometryGraph:
def from_elements(
cls, elements: list[Element], floor_elevations: Optional[dict] = None
) -> GeometryGraph:
"""
Returns a LoadGraph (networkx.DiGraph) based upon the intersections and correspondents
of the 'elements'.
Expand All @@ -40,7 +41,6 @@ def from_elements(cls, elements: list[Element], floor_elevations: Optional[dict]
g.add_edge(element.tag, intersection[0])
return g


def hash_nodes(self):
"""
Returns None. Sets the value of self.node_hash based on the hashed values of
Expand All @@ -52,4 +52,4 @@ def hash_nodes(self):
element_hash = self.nodes[node_name]["sha256"]
hashes.append(element_hash)
graph_hash = hashlib.sha256(str(tuple(hashes)).encode()).hexdigest()
self.node_hash = graph_hash
self.node_hash = graph_hash
Loading

0 comments on commit 162ddb7

Please sign in to comment.