added openMPI/mpich support + fix for issue 15 nicogodet#15
tomsail committed Mar 19, 2024
1 parent 7d9ad55 commit 2228ff1
Showing 8 changed files with 1,183 additions and 38 deletions.
674 changes: 674 additions & 0 deletions LICENSE.txt


378 changes: 378 additions & 0 deletions global.patch
@@ -0,0 +1,378 @@
diff --git a/scripts/python3/pretel/extract_contour.py b/scripts/python3/pretel/extract_contour.py
index 9572723dc..e4473075d 100644
--- a/scripts/python3/pretel/extract_contour.py
+++ b/scripts/python3/pretel/extract_contour.py
@@ -2,6 +2,8 @@
import numpy as np
import matplotlib.path as mpltPath
from data_manip.extraction.telemac_file import TelemacFile
+from utils.progressbar import ProgressBar
+

def detecting_boundaries(connectivity_table, npoints):
"""
@@ -17,69 +19,36 @@ def detecting_boundaries(connectivity_table, npoints):
@return bnd_points (np.array of shape (number of boundary points, 1)) a
numpy array which contains the indexes of all boundary points of the mesh
"""
+ # Building adjacency list from connectivity table
+ adjacency_list = {i: [] for i in range(npoints)}
+ for row in connectivity_table:
+ for i in range(3):
+ adjacency_list[row[i]].extend(row[np.arange(3) != i])
+
buf = []
connectivity_bnd_elt = []

- div = (npoints - npoints % 10) // 10
- if div == 0:
- div = 1
-
- if np.__version__ < '1.19.0':
- import collections
-
- for i in range(npoints):
- if i % div == 0:
- print(i // div * 10, '%')
-
- connectivity_rows = collections.Counter(
- connectivity_table[
- np.where((connectivity_table[:, 0] == i)
- + (connectivity_table[:, 1] == i)
- + (connectivity_table[:, 2] == i))
- ].flatten())
-
-
- temp = np.array(
- list(connectivity_rows.keys()))[np.where(
- np.array(list(connectivity_rows.values())) == 1)].tolist()
- buf.extend(temp)
-
- if temp != []:
- #to handle overconstrained element
- if i in temp:
- temp.remove(i)
- temp.append(i)
- connectivity_bnd_elt.append(temp)
-
- else:
- for i in range(npoints):
- if i % div == 0:
- print(i // div * 10, '%')
-
- connectivity_rows = connectivity_table[
- np.where((connectivity_table[:, 0] == i)
- + (connectivity_table[:, 1] == i)
- + (connectivity_table[:, 2] == i))
- ].flatten()
-
- uniq, count = np.unique(connectivity_rows, return_counts=True)
- temp = uniq[np.where(count == 1)].tolist()
- buf.extend(temp)
+ pbar = ProgressBar(npoints)
+ for i in range(npoints):
+ pbar.update(i)
+ # Directly accessing the connections for each point
+ connections = adjacency_list[i]

- if temp != []:
- #to handle overconstrained element
- if i in temp:
- temp.remove(i)
- temp.append(i)
- connectivity_bnd_elt.append(temp)
+ uniq, count = np.unique(connections, return_counts=True)
+ temp = uniq[count == 1].tolist()
+ buf.extend(temp)

+ if temp:
+ if i in temp:
+ temp.remove(i)
+ temp.append(i)
+ connectivity_bnd_elt.append(temp)

- buf = np.array(buf)
- connectivity_bnd_elt = np.array(connectivity_bnd_elt)
-
+ pbar.finish()
bnd_points = np.unique(buf)

- return connectivity_bnd_elt, bnd_points
+ return np.array(connectivity_bnd_elt), bnd_points
+

def detecting_boundaries_with_bnd_file(connectivity_bnd_table, bnd_points):
"""
@@ -215,7 +184,45 @@ def sorting_boundary(tel, first_pt_idx, connectivity_bnd_elt, clockwise=True):

return bnd

-def sorting_boundaries(tel, bnd_points, connectivity_bnd_elt):
+def create_shifted_paths(path):
+ """
+ Duplicate paths across the -180/180 meridian.
+ This allows the sorting_boundary function to find the correct inside/outside points.
+
+ @param path (mpltPath.Path) a mpltPath.Path object
+ @return left_path (mpltPath.Path) a mpltPath.Path object
+ @return right_path (mpltPath.Path) a mpltPath.Path object
+ """
+ left_path_vertices = path.vertices.copy()
+ right_path_vertices = path.vertices.copy()
+
+ # Shift for the left path
+ left_path_vertices[:, 0] = np.where(left_path_vertices[:, 0] >= -50,
+ left_path_vertices[:, 0] - 360,
+ left_path_vertices[:, 0])
+ # Shift for the right path
+ right_path_vertices[:, 0] = np.where(right_path_vertices[:, 0] < -50,
+ right_path_vertices[:, 0] + 360,
+ right_path_vertices[:, 0])
+ # -50° is chosen because of Eurasia: we don't want a path that jumps across the -180/180° line,
+ # so we pick a split meridian that does not touch the path when separating the left and right parts
+ left_path = mpltPath.Path(left_path_vertices)
+ right_path = mpltPath.Path(right_path_vertices)
+
+ return left_path, right_path
+
+def has_meridian_crossing(path):
+ """
+ Check if the path crosses the -180/180 meridian.
+ Returns True if it crosses, False otherwise.
+ """
+ for point1, point2 in zip(path.vertices[:-1], path.vertices[1:]):
+ if abs(point1[0] - point2[0]) > 180:
+ return True
+ return False
+
+
+def sorting_boundaries(tel, bnd_points, connectivity_bnd_elt, global_ = False):
"""
Sort boundaries in the case there is islands and detect
the case of separated domains
@@ -242,7 +249,6 @@ def sorting_boundaries(tel, bnd_points, connectivity_bnd_elt):
first_pt_idx = get_first_point(tel, bnd_points)
boundaries.append(sorting_boundary(tel, first_pt_idx, connectivity_bnd_elt))
if len(boundaries[0]) - 1 == len(bnd_points):
- print("No islands")
return boundaries, np.array([]), np.array([])

left_bnd_elt = connectivity_bnd_elt
@@ -250,6 +256,7 @@ def sorting_boundaries(tel, bnd_points, connectivity_bnd_elt):

poly = np.column_stack((tel.meshx[boundaries[0]], tel.meshy[boundaries[0]]))
path = mpltPath.Path(poly)
+
for i in range(len(boundaries[j])):
left_bnd_elt = np.delete(left_bnd_elt, np.where(
(left_bnd_elt[:, 0] == boundaries[0][i]) |
@@ -261,10 +268,23 @@ def sorting_boundaries(tel, bnd_points, connectivity_bnd_elt):
left_pts = np.column_stack((tel.meshx[left_pts_idx],
tel.meshy[left_pts_idx]))

- left_pts_idx_inside = left_pts_idx[path.contains_points(left_pts)]
+ if global_ and has_meridian_crossing(path):
+ # For inside points: Union of left and right points
+ left_path, right_path = create_shifted_paths(path)
+ left_pts_idx_inside1 = left_pts_idx[left_path.contains_points(left_pts)]
+ left_pts_idx_inside2 = left_pts_idx[right_path.contains_points(left_pts)]
+ left_pts_idx_inside = np.append(left_pts_idx_inside1, left_pts_idx_inside2)
+
+ # For outside points: Intersection of left and right points
+ left_pts_idx_outside_left = left_pts_idx[~left_path.contains_points(left_pts)]
+ left_pts_idx_outside_right = left_pts_idx[~right_path.contains_points(left_pts)]
+ left_pts_idx_outside = np.intersect1d(left_pts_idx_outside_left, left_pts_idx_outside_right)
+ else:
+ left_pts_idx_inside = left_pts_idx[
+ path.contains_points(left_pts)]
+ left_pts_idx_outside = left_pts_idx[
+ np.invert(path.contains_points(left_pts))]

- left_pts_idx_outside = left_pts_idx[
- np.invert(path.contains_points(left_pts))]
if left_pts_idx_outside.size > 0:
left_bnd_elt_outside = left_bnd_elt[
np.where(left_bnd_elt[:, 2] == left_pts_idx_outside)]
@@ -298,11 +318,8 @@ def sorting_boundaries(tel, bnd_points, connectivity_bnd_elt):
left_pts_idx_inside = left_bnd_elt_inside[:, 2]


-
-
return boundaries, left_pts_idx_outside, left_bnd_elt_outside

- print("No Islands")

return boundaries, left_pts_idx_outside, left_bnd_elt_outside

diff --git a/scripts/python3/pretel/generate_atm.py b/scripts/python3/pretel/generate_atm.py
index 4a77bc00b..cf4e7261e 100644
--- a/scripts/python3/pretel/generate_atm.py
+++ b/scripts/python3/pretel/generate_atm.py
@@ -30,8 +30,9 @@ from utils.exceptions import TelemacException
from data_manip.conversion.convert_utm import to_latlon
from data_manip.formats.selafin import Selafin
from data_manip.extraction.parser_selafin import \
- subset_variables_slf, get_value_history_slf
+ subset_variables_slf
from pretel.meshes import xys_locate_mesh
+from utils.geometry import get_weights, interp

# _____ ________________________________________________
# ____/ MAIN CALL /_______________________________________________/
@@ -77,7 +78,6 @@ def generate_atm(geo_file, slf_file, atm_file, ll2utm):
'seem to exist: {}\n\n'.format(geo_file))

# Find corresponding (x,y) in corresponding new mesh
- print(' +> getting hold of the GEO file')
geo = Selafin(geo_file)
if ll2utm is not None:
zone = int(ll2utm[:-1])
@@ -98,7 +98,6 @@ def generate_atm(geo_file, slf_file, atm_file, ll2utm):
slf.set_kd_tree()
slf.set_mpl_tri()

- print(' +> support extraction')
# Extract triangles and weights in 2D
support2d = []
ibar = 0
@@ -153,7 +152,6 @@ def generate_atm(geo_file, slf_file, atm_file, ll2utm):
atm.npoin3 = geo.npoin2*atm.nplan
atm.nelem2 = geo.nelem2

- print(' +> setting connectivity')
if atm.nplan > 1:
atm.nelem3 = geo.nelem2*(atm.nplan-1)
atm.ikle2 = geo.ikle2
@@ -181,7 +179,9 @@ def generate_atm(geo_file, slf_file, atm_file, ll2utm):
atm.meshx = geo.meshx
atm.meshy = geo.meshy

- print(' +> writing header')
+ in_xy = np.vstack((slf.meshx,slf.meshy)).T
+ out_xy = np.vstack((atm.meshx,atm.meshy)).T
+ vert, wgts, u_x, g_x = get_weights(in_xy, out_xy)
# Write header
atm.datetime = slf.datetime
atm.append_header_slf()
@@ -189,7 +189,6 @@ def generate_atm(geo_file, slf_file, atm_file, ll2utm):
# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ writes ATM core ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

- print(' +> setting variables')
# TIME and DATE extraction
atm.tags['times'] = slf.tags['times']
# VARIABLE extraction
@@ -199,10 +198,10 @@ def generate_atm(geo_file, slf_file, atm_file, ll2utm):
# Read / Write data, one time step at a time to support large files
pbar = ProgressBar(maxval=len(slf.tags['times'])).start()
for time in range(len(slf.tags['times'])):
-
- data = get_value_history_slf(slf.file, slf.tags, [time], support3d,
- slf.nvar, slf.npoin3, slf.nplan, vrs)
- # special cases ?
+ tmp = slf.get_values(time)
+ data = []
+ for i, v in zip(*vrs):
+ data = np.append(data,[interp(tmp[i], vert, wgts, u_x, g_x)])
atm.append_core_time_slf(time)
atm.append_core_vars_slf(np.reshape(np.transpose(\
np.reshape(np.ravel(data),
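
The new interpolation path in generate_atm() computes the Delaunay triangulation and barycentric weights once with get_weights() and then reuses them for every time step via interp(), instead of calling get_value_history_slf() per step. A sketch of that precompute-once pattern with synthetic source and target points (the point clouds and field values below are assumptions; the import path assumes scripts/python3 is on PYTHONPATH as inside the TELEMAC tree):

import numpy as np
from utils.geometry import get_weights, interp  # helpers added by this patch

rng = np.random.default_rng(0)
in_xy = rng.uniform(-1.0, 1.0, size=(200, 2))   # source mesh nodes (e.g. slf.meshx/meshy)
out_xy = rng.uniform(-1.0, 1.0, size=(50, 2))   # target mesh nodes (e.g. atm.meshx/meshy)

# Triangulate the source points and keep the weights once ...
vert, wgts, u_x, g_x = get_weights(in_xy, out_xy)

# ... then remap one field per time step without re-triangulating
for _ in range(3):
    values = rng.normal(size=len(in_xy))        # one variable at one time step
    out_values = interp(values, vert, wgts, u_x, g_x)
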
diff --git a/scripts/python3/utils/geometry.py b/scripts/python3/utils/geometry.py
index bb1a8abb4..c9b04c245 100644
--- a/scripts/python3/utils/geometry.py
+++ b/scripts/python3/utils/geometry.py
@@ -32,6 +32,8 @@ r"""
# ~~> dependencies towards standard python
import math
import numpy as np
+from scipy.spatial import Delaunay
+from scipy.spatial import cKDTree

# _____ __________________________________________
# ____/ Global Variables /_________________________________________/
@@ -41,6 +43,66 @@ import numpy as np
# ____/ General Toolbox /__________________________________________/
#

+def get_weights(in_xy, out_xy, d = 2):
+ """
+ Triangulate the input points and, for each output point, return the
+ enclosing triangle vertices and the corresponding barycentric weights.
+
+ Parameters
+ ----------
+ @param in_xy (np.ndarray): Nx2 array of input point coordinates.
+ @param out_xy (np.ndarray): Nx2 array of output point coordinates.
+ @param d (int): Dimension of the input points, by default 2.
+
+ @returns (4-tuple): (vert, wgts, out_idx_out, in_idx_out), with:
+ vert (np.ndarray): Nx3 array of enclosing-triangle vertex indices.
+ wgts (np.ndarray): Nx3 array of barycentric weights.
+ out_idx_out (np.ndarray): Boolean array indicating which points
+ are outside the input mesh.
+ in_idx_out (np.ndarray): Index array of the nearest input points
+ for points outside the input mesh.
+
+ """
+ t = Delaunay(in_xy) # triangulate the input points
+ s = t.find_simplex(out_xy)
+ vert = np.take(t.simplices, np.maximum(s, 0), axis=0) # Use max to avoid negative indices
+ t_ = np.take(t.transform, np.maximum(s, 0), axis=0)
+ delta = out_xy - t_[:, d]
+ bary = np.einsum('njk,nk->nj', t_[:, :d, :], delta)
+ wgts = np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True)))
+ # Output points falling outside the input triangulation
+ out_idx_out = s < 0
+ if np.any(out_idx_out):
+ # For points outside, find nearest neighbors
+ tree = cKDTree(in_xy)
+ _, in_idx_out = tree.query(out_xy[out_idx_out])
+ else :
+ in_idx_out = None
+ return vert, wgts, out_idx_out, in_idx_out
+
+
+def interp(values, vtx, wts, out_idx_out, in_idx_out):
+ """
+ Interpolate values using barycentric coordinates and weights
+
+ Parameters
+ ----------
+ @param values (np.ndarray): Array of values to be interpolated.
+ @param vtx (np.ndarray): Array of vertex indices.
+ @param wts (np.ndarray): Array of barycentric weights.
+ @param out_idx_out (np.ndarray): Boolean array indicating which points
+ are outside the input mesh.
+ @param in_idx_out (np.ndarray): Index array of the nearest input points
+ for points outside the input mesh.
+
+ @return (np.ndarray): Interpolated values
+
+ """
+ res = np.einsum('nj,nj->n', np.take(values, vtx), wts)
+ if in_idx_out is not None:
+ res[out_idx_out] = values[in_idx_out]
+ return res
+

def is_ccw(t_1, t_2, t_3):
"""@brief Checks if the element is conterclockwise oriented or not
diff --git a/sources/utils/partel/partel.F b/sources/utils/partel/partel.F
index b6c0ab0c0..6f276802e 100644
--- a/sources/utils/partel/partel.F
+++ b/sources/utils/partel/partel.F
@@ -637,6 +637,13 @@
CALL HASH_TABLE_INSERT(GELEGL,EF,I,NELEM_P(I))
ENDIF
ENDDO
+!
+ DO I = 1, NPOIN
+ IRAND(I) = 0
+ ENDDO
+ DO I = 1, NPTFR
+ IRAND(NBOR(I)) = 1
+ ENDDO!
!
CALL COMPUTE_BOUNDARY_AND_INTERFACE(NPARTS,NDP_2D,NPOIN_P,
& NPTFR_P, ELELG,
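
The added Fortran loops reset IRAND to 0 for every node and then set it to 1 for the nodes listed in the boundary array NBOR before COMPUTE_BOUNDARY_AND_INTERFACE is called. A NumPy sketch of the same flagging logic (the toy sizes and node numbers are hypothetical, mirroring the 1-based Fortran indexing):

import numpy as np

npoin = 8                        # toy number of mesh nodes (NPOIN)
nbor = np.array([1, 2, 5, 8])    # 1-based boundary node numbers (NBOR)

irand = np.zeros(npoin, dtype=int)   # DO I = 1, NPOIN: IRAND(I) = 0
irand[nbor - 1] = 1                  # DO I = 1, NPTFR: IRAND(NBOR(I)) = 1
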