diff --git a/.gitignore b/.gitignore index 3d6d6f9877..7fe82af4b8 100644 --- a/.gitignore +++ b/.gitignore @@ -25,3 +25,6 @@ *.coverage .cache/* bin/* +*.pvd +*.so +*.png diff --git a/Install.md b/Install.md index 7ac08626ca..bbf0a37f7c 100644 --- a/Install.md +++ b/Install.md @@ -18,10 +18,12 @@ The expectation is that it should work out nicely. If you try this, please let u # Setting up GMSH PorePy currently depends on `GMSH` for meshing of fractured domains. To make this work, you need gmsh installed on your system, and PorePy needs to know where to look for it. -First, visit the [GMSH webpage](http://gmsh.info) and download a suitable version. +First, visit the [Gmsh webpage](http://gmsh.info) and download a suitable version. Extract, and move the binary (probably located in the subfolder gmsh-x.x.x-Linux/bin or similar) to whereever you prefer. -Note to Linux users: Although GMSH is available through the standard packaging tools, it tends to be hopelessly outdated, +NOTE: We have experienced that some fracture geometries are best handled by somewhat older versions of Gmsh (2.11 seems to be a good version) - newer versions often result in error messages. Until this is resolved, we therefore recommend using Gmsh 2.11 with PorePy. + +Note to Linux users: Although Gmsh is available through the standard packaging tools, it tends to be hopelessly outdated, and resulted in severe issues for the fracture meshing last time we checked. Use the GMSH web page instead. diff --git a/examples/example3/test_simple_networks.py b/examples/example3/test_simple_networks.py index 75b38fefdf..acf1103393 100644 --- a/examples/example3/test_simple_networks.py +++ b/examples/example3/test_simple_networks.py @@ -199,7 +199,12 @@ def test_T_intersection_within_plane(**kwargs): domain = {'xmin':-1, 'xmax': 2, 'ymin': -2, 'ymax': 2, 'zmin': -1, 'zmax':2} - grids = meshing.simplex_grid([p1, p2], domain) + # This test, when used with certain versions of gmsh (<2.15?) gives + # a mismatch between 2d cells and 3d faces on fracture surfaces. The bug + # can be located in the .msh-file. To function as a test, we disable the + # test of cell-face relations.
+ grids = meshing.simplex_grid([p1, p2], domain, + ensure_matching_face_cell=False) def test_T_intersection_one_outside_plane(**kwargs): p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T @@ -216,3 +221,45 @@ def test_T_intersection_both_outside_plane(**kwargs): domain = {'xmin':-1, 'xmax': 2, 'ymin': -2, 'ymax': 2, 'zmin': -1, 'zmax':2} grids = meshing.simplex_grid([p1, p2], domain) + +def test_T_intersection_both_on_boundary(**kwargs): + p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T + p2 = np.array([[0., 0.5, 0], [1, 0.5, 0], [0.5, 0.5, 1.]]).T + + domain = {'xmin':-1, 'xmax': 2, 'ymin': -2, 'ymax': 2, 'zmin': -1, + 'zmax':2} + grids = meshing.simplex_grid([p1, p2], domain) + +def test_T_intersection_one_boundary_one_outside(**kwargs): + p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T + p2 = np.array([[-0.2, 0.5, 0], [1, 0.5, 0], [0.5, 0.5, 1.]]).T + + domain = {'xmin':-1, 'xmax': 2, 'ymin': -2, 'ymax': 2, 'zmin': -1, + 'zmax':2} + grids = meshing.simplex_grid([p1, p2], domain) + +def test_T_intersection_one_boundary_one_inside(**kwargs): + p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T + p2 = np.array([[0.2, 0.5, 0], [1, 0.5, 0], [0.5, 0.5, 1.]]).T + + domain = {'xmin':-1, 'xmax': 2, 'ymin': -2, 'ymax': 2, 'zmin': -1, + 'zmax':2} + grids = meshing.simplex_grid([p1, p2], domain) + +def test_issue_54(): + mesh_kwargs = {} + mesh_size = 5e-1 + mesh_kwargs['mesh_size'] = {'mode': 'constant', + 'value': mesh_size, 'bound_value': 1.2*mesh_size} + mesh_kwargs['file_name'] = 'bounding_box_test' + domain = {'xmin': -1, 'xmax': 1, + 'ymin': 0, 'ymax': 1, + 'zmin': 0, 'zmax': 1} + f_1 = np.array([[.5,.5,.5,.5], + [.4,.5,.5,.4], + [0.2,0.2,.8,.8]]) + f_2 = np.array([[0,.8,.8,0], + [.5,.5,.5,.5], + [0.2,0.2,.8,.8]]) + grids = meshing.simplex_grid([f_1, f_2], domain, + ensure_matching_face_cell=False) diff --git a/src/porepy/fracs/extrusion.py b/src/porepy/fracs/extrusion.py index 1dfc50dbc3..17d2b5c5de 100644 --- a/src/porepy/fracs/extrusion.py +++ b/src/porepy/fracs/extrusion.py @@ -1,9 +1,107 @@ +""" Simple functionality for extrusion of fractures (2D->3D). The main entry +point is the function fractures_from_outcrop(), this wraps the other functions. + +The extrusion is carried out to be consistent with a set of 2D lines, e.g. an +outcrop. Fracture discs are constructed from the exposed lines. In case of +intersections in the exposed plane, the fractures will either cross +(if X-intersection), or the the abutting relation will be preserved (if +Y/T-intersection). If the extruded discs intersect outside the plane of +exposure, this will (almost surely, no checks are actually made) become a +X-intersection. + +The fractures can be assigned a dip from the vertical, again taken as +consistent with the exposed line. + +No attempts are made to create fractures that do not cross the confined plane. + +KNOWN ISSUES: + * When the extrusion is applied with rotations relative to outcrop plane, + two fractures may meet in a point only. A warning is issued in this case. + The consequences in terms of meshing are hard to predict. To fix this, it + is likely necessary to constrain rotation angles to the extrusion angles + in T-intersections. + +""" import numpy as np +import warnings from porepy.utils import comp_geom as cg from porepy.fracs.fractures import EllipticFracture, Fracture +def fractures_from_outcrop(pt, edges, ensure_realistic_cuts=True, family=None, + **kwargs): + """ Create a set of fractures compatible with exposed lines in an outcrop. 
+ + See module-level documentation for further comments. + + Parameters: + pt (np.array, 2 x num_pts): Coordinates of start and endpoints of + extruded lines. + edges (np.array, 2 x num_edges): Connections between points. Crossings + should not have been removed beforehand. + ensure_realistic_cuts (boolean, defaults to True): If True, we ensure + that T-intersections do not have cut fractures that extend beyond + the confining fracture. May overrule user-supplied controls on + fracture sizes. + **kwargs: Potentially user defined options. Forwarded to + discs_from_exposure() and impose_inclines() + + Returns: + list of Fracture: Fracture planes. + + """ + assert edges.shape[0] == 2, 'Edges have two endpoints' + edges = np.vstack((edges, np.arange(edges.shape[1], dtype=np.int))) + + # identify crossings + split_pt, split_edges = cg.remove_edge_crossings(pt, edges, **kwargs) + + # Find t-intersections + abutment_pts, prim_frac, sec_frac, other_pt = t_intersections(split_edges) + + # Calculate fracture lengths + lengths = fracture_length(pt, edges) + + # Extrude to fracture discs + fractures, extrude_ang = discs_from_exposure(pt, edges, **kwargs) + + p0 = pt[:, edges[0]] + p1 = pt[:, edges[1]] + exposure = p1 - p0 + + # Impose incline. + rot_ang = impose_inlcine(fractures, exposure, p0, frac_family=family, + **kwargs) + + # Cut fractures + for prim, sec, p in zip(prim_frac, sec_frac, other_pt): + _, radius = cut_fracture_by_plane(fractures[sec], fractures[prim], + split_pt[:, p], **kwargs) + # If specified, ensure that cuts in T-intersections appear realistic. + if ensure_realistic_cuts and radius is not None: + ang = np.arctan2(0.5*lengths[prim], radius) + + # Ensure the centers of both fractures are on the same side of the + # exposed plane - if not, the cut will still be bad. + if extrude_ang[sec] > np.pi/2 and ang < np.pi/2: + ang = np.pi-ang + elif extrude_ang[sec] < np.pi/2 and ang > np.pi/2: + ang = np.pi-ang + + e0 = p0[:, prim] + e1 = p1[:, prim] + new_radius, center, _ = disc_radius_center(lengths[prim], e0, e1, + theta=ang) + strike = np.arctan2(e1[1] - e0[1], e1[0]- e0[0]) + f = create_fracture(center, new_radius, np.pi/2, strike, + np.vstack((e0, e1)).T) + rotate_fracture(f, e1-e0, rot_ang[prim], p0[:, prim]) + fractures[prim] = f + + return fractures + + def _intersection_by_num_node(edges, num): """ Find all edges involved in intersections with a certain number of intersecting lines. @@ -22,13 +120,13 @@ def _intersection_by_num_node(edges, num): num_crosses = crosses.size - edges_of_crosses = np.zeros((num_abut, num), dtype=np.int) - for i, pi in enumerate(abutments): + edges_of_crosses = np.zeros((num_crosses, num), dtype=np.int) + for i, pi in enumerate(crosses): edges_of_crosses[i] = np.where(np.any(edges[:2] == pi, axis=0))[0] return crosses, edges_of_crosses -def t_intersections(edges): +def t_intersections(edges, remove_three_families=True): """ Find points involved in T-intersections. A t-intersection is defined as a point involved in three fracture segments, @@ -41,6 +139,9 @@ def t_intersections(edges): edges (np.array, 3 x n): Fractures. First two rows give indices of start and endpoints. Last row gives index of fracture that the segment belongs to. + remove_three_families (boolean, defaults to True): If True, + T-intersections where fractures from three different families + meet will be removed.
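For illustration, a minimal hand-constructed case of the edge convention above (the indices and expected values below are worked out by hand from the logic that follows, not taken from the PorePy test suite):

    import numpy as np
    from porepy.fracs.extrusion import t_intersections

    # Point 1 is hit by three segments: two belong to fracture 0, one to
    # fracture 1, so fracture 1 abuts against fracture 0.
    edges = np.array([[0, 1, 1],
                      [1, 2, 3],
                      [0, 0, 1]])
    abut, prim, sec, other = t_intersections(edges)
    # Expected: abut = [1], prim = [0], sec = [1], other = [3]
    # (point 3 is the far endpoint of the abutting segment)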
Returns: abutments (np.ndarray, int): indices of points that are @@ -56,25 +157,47 @@ def t_intersections(edges): frac_num = edges[-1] abutments, edges_of_abutments = _intersection_by_num_node(edges, 3) + # Initialize fields for abutments. num_abut = abutments.size primal_frac = np.zeros(num_abut, dtype=np.int) sec_frac = np.zeros(num_abut, dtype=np.int) - for i, ei in enumerate(frac_num[edges_of_abutments]): - fi, count = np.unique(ei, return_counts=True) - assert fi.size == 2 + other_point = np.zeros(num_abut, dtype=np.int) + + # If fractures meeting in a T-intersection all have different family names, + # these will be removed from the list. + remove = np.zeros(num_abut, dtype=np.bool) + + for i, (pi, ei) in enumerate(zip(abutments, edges_of_abutments)): + # Count number of occurrences for each fracture associated with this + # intersection. + fi_all = frac_num[ei] + fi, count = np.unique(fi_all, return_counts=True) + if fi.size > 2: + remove[i] = 1 + continue + # Find the fracture number associated with main and abutting edge. if count[0] == 1: primal_frac[i] = fi[1] sec_frac[i] = fi[0] else: primal_frac[i] = fi[0] sec_frac[i] = fi[1] - - other_point = np.zeros(num_abut, dtype=np.int) - for i, pi in enumerate(abutments): - if edges[0, sec_frac[i]] == pi: - other_point[i] = edges[1, sec_frac[i]] + # Also find the other point of the abutting edge + ind = np.where(fi_all == sec_frac[i]) + ei_abut = ei[ind] + assert ei_abut.size == 1 + if edges[0, ei_abut] == pi: + other_point[i] = edges[1, ei_abut] else: - other_point[i] = edges[0, sec_frac[i]] + other_point[i] = edges[0, ei_abut] + + # Remove any T-intersections where more than two fractures meet. + if remove_three_families and remove.any(): + remove = np.where(remove)[0] + abutments = np.delete(abutments, remove) + primal_frac = np.delete(primal_frac, remove) + sec_frac = np.delete(sec_frac, remove) + other_point = np.delete(other_point, remove) return abutments, primal_frac, sec_frac, other_point @@ -126,20 +249,23 @@ def fracture_length(pt, e): return np.sqrt(np.power(x1-x0, 2) + np.power(y1-y0, 2)) -def _disc_radius_center(lengths, p0, p1): +def disc_radius_center(lengths, p0, p1, theta=None): """ Compute radius and center of a disc, based on the length of a chord through the disc, and assumptions on the location of the chord. - For the moment, it is assumed that the chord is struck at an arbitrary - hight of the circle. Also, we assume that the chord is horizontal. In the - context of an exposed fracture, this implies that the exposure is - horizontal (I believe), and that an arbitrary portion of the original - (disc-shaped) fracture has been eroded. + The relation between the exposure and the center of the fracture is + given by theta, which gives the angle between a vertical line through + the disc center and the line through the disc center and any of the + exposure endpoints. If no value is given, a random value is assigned, + corresponding to an arbitrary portion of the original (disc-shaped) + fracture having been eroded. Parameters: length (np.array, double, size: num_frac): Of the chords p0 (np.array, 2 x num_frac): One endpoint of fractures. p1 (np.array, 2 x num_frac): Second endpoint of fractures + theta (np.array, num_frac, optional): Angle determining disc center, + see description above. Defaults to random values.
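As a worked illustration of the relations implemented below (numbers chosen only for exposition): for a chord of length L and angle theta, the radius is L / (2 sin(theta)) and the center is offset radius * cos(theta) from the exposure plane. A trace of length 2 with theta = pi/2 thus gives radius 1 and a center in the exposure plane, while theta = pi/6 gives radius 2 and an offset of sqrt(3), approximately 1.73.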
Returns: np.array, num_frac: Radius of discs @@ -149,34 +275,45 @@ num_frac = lengths.size - # Angle between a vertical line through the disc center and the line - # through the disc center and one of the fracture endpoints. Assumed to be - # uniform, for the lack of more information. # Restrict the angle in the interval (0.1, 0.9) * pi to avoid excessively # large fractures. - theta = np.pi * (0.1 + 0.8 * np.random.rand(num_frac)) + if theta is None: + rnd = np.random.rand(num_frac) + # Angles of pi/2 will lead to point contacts that cannot be handled + # by the FractureNetwork. Point contacts also make little physical + # sense, so we avoid them. + hit = np.abs(rnd - 0.5) < 0.1 + rnd[hit] += 0.1 + theta = np.pi * (0.1 + 0.8 * rnd) radius = 0.5 * lengths / np.sin(theta) # Midpoint in (x, y)-coordinate given as midpoint of exposed line - mid_point = 0.5 * (p0 + p1) + mid_point = 0.5 * (p0 + p1).reshape((2, -1)) + # z-coordinate from angle depth = radius * np.cos(theta) - return radius, np.vstack((mid_point, depth)) + return radius, np.vstack((mid_point, depth)), theta -def discs_from_exposure(pt, edges): +def discs_from_exposure(pt, edges, exposure_angle=None, **kwargs): """ Create fracture discs based on exposed lines in an outrcrop. - The outcrop is assumed to be in the xy-plane. + The outcrop is assumed to be in the xy-plane. The returned disc will be + vertical, and the points on the outcrop will be included in the polygon + representation. - The returned disc will be vertical, and the points on the outcrop will be - included in the polygon representation. The disc center is calculated using - disc_radius_center(), see that function for assumptions. + The location of the center is calculated from the angle, see + disc_radius_center() for details. Parameters: pt (np.array, 2 x num_pts): Coordinates of exposed points. edges (np.array, 2 x num_fracs): Connections between fractures. + exposure_angle (np.array, num_fracs, optional): See above, and + disc_radius_center() for description. Values very close to pi/2, 0 + and pi will be modified to avoid unphysical extruded fractures. If + not provided, random values will be drawn. Measured in radians. + Should be between 0 and pi. Returns: list of Fracture: One per fracture trace. @@ -192,28 +329,107 @@ def discs_from_exposure(pt, edges): v = p1 - p0 strike_angle = np.arctan2(v[1], v[0]) - radius, center = _disc_radius_center(lengths, p0, p1) + if exposure_angle is not None: + # Angles of pi/2 will give point contacts + hit = np.abs(exposure_angle - np.pi/2) < 0.01 + exposure_angle[hit] = exposure_angle[hit] + 0.01 + + # Angles of 0 and pi give infinite fractures. + hit = exposure_angle < 0.05 + exposure_angle[hit] = 0.05 + hit = np.pi - exposure_angle < 0.05 + exposure_angle[hit] = 0.05 + + radius, center, ang = disc_radius_center(lengths, p0, p1, exposure_angle) fracs = [] for i in range(num_fracs): - # The simplest way of distributing points along the disc seems to be to - # create an elliptic fracture, and pick out the points.
- f = EllipticFracture(center=center[:, i], major_axis=radius[i], - minor_axis=radius[i], dip_angle=np.pi/2, - strike_angle=strike_angle[i], major_axis_angle=0) + z = 2 * center[2, i] + extra_point_depth = np.array([0, 0, z, z]) + extra_points = np.vstack((np.vstack((p0[:, i], p1[:, i], p0[:, i], + p1[:, i])).T, + extra_point_depth)) + + fracs.append(create_fracture(center[:, i], radius[i], np.pi/2, + strike_angle[i], extra_points)) + return fracs, ang - # Add the points on the exposed surface. This creates an unequal - # distribution of the points, but it is the only hard information we - # have on the fracture - f.add_points(np.vstack((np.hstack((p0[:, i], 0)), - np.hstack((p1[:, i], 0)))).T) - fracs.append(Fracture(f.p)) - return fracs +def create_fracture(center, radius, dip, strike, extra_points): + """ Create a single circular fracture consistent with a given exposure. + The exposed points will be added to the fracture description. -def impose_inlcine(fracs, exposure, family, family_mean, family_std): + Parameters: + center (np.array-like, dim 3): Center of the fracture. + radius (double): Fracture radius. + dip (double): dip angle of the fracture. See EllipticFracture for + details. + strike (np.array-like, dim 3): Strike angle for rotation. See + EllipticFracture for details. + extra_points (np.array, 3xnpt): Extra points to be added to the + fracture. The points are assumed to lie on the ellipsis. + + Returns: + Fracture: New fracture, according to the specifications. + + """ + if extra_points.shape[0] == 2: + extra_points = np.vstack((extra_points, + np.zeros(extra_points.shape[1]))) + + # The simplest way of distributing points along the disc seems to be to + # create an elliptic fracture, and pick out the points. + f = EllipticFracture(center=center, major_axis=radius, minor_axis=radius, + dip_angle=dip, strike_angle=strike, + major_axis_angle=0) + # Add the points on the exposed surface. This creates an unequal + # distribution of the points, but it is the only hard information we have + # on the fracture + f.add_points(extra_points, check_convexity=False) + # Not sure if f still shoudl be EllipticFracture here, or if we should + # create a new fracture with the same point distribution. + return f + + +def rotate_fracture(frac, vec, angle, exposure): + """ Rotate a fracture along a specified strike vector, and centered on a + given point on the fracture surface. + + Modification of the fracture coordinates is done in place. + + TODO: Move this to the fracture itself? + + Parameters: + frac (Fracture): To be rotated. Points are modified in-place. + vec (np.array-like): Rotation will be around this vector. + ang (double). Rotation angle. Measured in radians. + exposure (np.array-like): Point on the strike vector, rotation will be + centered around the line running through this point. + + """ + vec = np.asarray(vec) + exposure = np.asarray(exposure) + + if vec.size == 2: + vec = np.append(vec, 0) + if exposure.size == 2: + exposure = np.append(exposure, 0) + exposure = exposure.reshape((3, 1)) + + rot = cg.rot(angle, vec) + p = frac.p + frac.p = exposure + rot.dot(p - exposure) + + frac.points_2_ccw() + frac.compute_centroid() + frac.compute_normal() + + +def impose_inlcine(fracs, exposure_line, exposure_point, frac_family=None, + family_mean_incline=None, family_std_incline=None, + **kwargs): """ Impose incline on the fractures from family-based parameters. 
The incline for each family is specified in terms of its mean and standard @@ -223,36 +439,50 @@ def impose_inlcine(fracs, exposure, family, family_mean, family_std): Parameters: fracs (list of Frature): Fractures to be inclined. - exposure (np.array, 3xnum_frac): Exposed line for each fracture + exposure_line (np.array, 3xnum_frac): Exposed line for each fracture (visible in outcrop). Rotation will be around this line. + exposure_point (np.array, 3xnum_frac): Point on exposure line. This + point will not be rotated, it is kept fixed. family (np.array, num_fracs): For each fracture, which family does it - belong to. - family_mean (np.array, num_family): Mean value of incline for each - family. In radians. - family_std (np.array, num_family): Standard deviation of incine for - each family. In radians. + belong to. If not provided, all fractures are considered to belong + to the same family. + family_mean_incline (np.array, num_family): Mean value of incline for each + family. In radians. Defaults to zero. + family_std_incline (np.array, num_family): Standard deviation of incline for + each family. In radians. Defaults to zero. + + To set a value for each fracture, set frac_family = np.arange(len(fracs)), + family_mean_incline=prescribed_value, and family_std_incline=None. + + Returns: + np.array, size num_frac: Rotation angles. """ - def rotate_fracture(frac, vec, angle): - # Helper function to carry out rotation. - rot = cg.rot(angle, vec) - p = frac.p - center = np.mean(p, axis=1).reshape((-1, 1)) - frac.p = center + rot.dot(p - center) - frac.points_2_ccw() - - exposure = np.vstack((exposure, np.zeros(len(fracs)))) + if frac_family is None: + frac_family = np.zeros(len(fracs), dtype=np.int) + if family_mean_incline is None: + family_mean_incline = np.zeros(np.unique(frac_family).size) + if family_std_incline is None: + family_std_incline = np.zeros(np.unique(frac_family).size) + + exposure_line = np.vstack((exposure_line, np.zeros(len(fracs)))) + all_ang = np.zeros(len(fracs)) for fi, f in enumerate(fracs): - fam = family[fi] - ang = np.random.normal(loc=family_mean[fam], scale=family_std[fam]) - rotate_fracture(f, exposure[:, fi], ang) + fam = frac_family[fi] + ang = np.random.normal(loc=family_mean_incline[fam], + scale=family_std_incline[fam]) + rotate_fracture(f, exposure_line[:, fi], ang, exposure_point[:, fi]) + all_ang[fi] = ang + return all_ang -def cut_fracture_by_plane(main_frac, other_frac, reference_point, tol=1e-4): +def cut_fracture_by_plane(main_frac, other_frac, reference_point, tol=1e-4, + recompute_center=True, **kwargs): """ Cut a fracture by a plane, and confine it to one side of the plane. Intended use is to confine abutting fractures (T-intersections) to one side - of the fracture it hits. + of the fracture it hits. This is done by deleting points on the abutting + fracture. Parameters: main_frac (Fracture): The fracture to be cut. @@ -263,6 +493,10 @@ def cut_fracture_by_plane(main_frac, other_frac, reference_point, tol=1e-4): Returns: Fracture: A copy of the main fracture, cut by the other fracture. + double: In cases where one intersection point extends beyond the other + fracture, this is the distance between the center of the other + fracture and the intersection point. If both intersections are + within the polygon, None will be returned. Raises: ValueError if the points in the other fracture is too close.
This could @@ -278,7 +512,7 @@ def cut_fracture_by_plane(main_frac, other_frac, reference_point, tol=1e-4): main_min = main_frac.p.min(axis=1) main_max = main_frac.p.max(axis=1) - # Equation for the plane through the other fracture, on the form + # Equation for the plane through the other fracture, on the form # n_x(x-c_x) + n_y(y-c_y) + n_z(z-c_z) = 0 n = cg.compute_normal(other_frac.p).reshape((-1, 1)) c = other_frac.center @@ -290,7 +524,7 @@ def cut_fracture_by_plane(main_frac, other_frac, reference_point, tol=1e-4): # Define points in the plane of the second fracture with min and max # coordinates picked from the main fracture. # The below tricks with indices are needed to find a dimension with a - # non-zero gradient of the plane, so that we can divide safely. + # non-zero gradient of the plane, so that we can divide safely. # Implementation note: It might have been possible to do this with a # rotation to the natural plane of the other fracture, but it is not clear # this will really be simpler. @@ -299,7 +533,7 @@ def cut_fracture_by_plane(main_frac, other_frac, reference_point, tol=1e-4): # We should perhaps do a scaling of coordinates. non_zero = np.where(np.abs(n) > 1e-8)[0] if non_zero.size == 0: - raise ValueError('Points of second fracture too close') + raise ValueError('Could not compute normal vector of other fracture') ind = np.setdiff1d(np.arange(3), non_zero[0]) i0 = ind[0] i1 = ind[1] @@ -309,44 +543,84 @@ def cut_fracture_by_plane(main_frac, other_frac, reference_point, tol=1e-4): # A for-loop might have been possible here. p[i0, 0] = main_min[i0] p[i1, 0] = main_min[i1] - p[i2, 0] = c[i0] - (n[i0] * (main_min[i0] - c[i0]) - + n[i1] * (main_min[i1] - c[i1])) / n[i0] + p[i2, 0] = c[i2] - (n[i0] * (main_min[i0] - c[i0]) + + n[i1] * (main_min[i1] - c[i1])) / n[i2] p[i0, 1] = main_max[i0] p[i1, 1] = main_min[i1] - p[i2, 1] = c[i0] - (n[i0] * (main_max[i0] - c[i0]) - + n[i1] * (main_min[i1] - c[i1])) / n[i0] + p[i2, 1] = c[i2] - (n[i0] * (main_max[i0] - c[i0]) + + n[i1] * (main_min[i1] - c[i1])) / n[i2] p[i0, 2] = main_max[i0] p[i1, 2] = main_max[i1] - p[i2, 2] = c[i0] - (n[i0] * (main_max[i0] - c[i0]) - + n[i1] * (main_max[i1] - c[i1])) / n[i0] + p[i2, 2] = c[i2] - (n[i0] * (main_max[i0] - c[i0]) + + n[i1] * (main_max[i1] - c[i1])) / n[i2] p[i0, 3] = main_min[i0] p[i1, 3] = main_max[i1] - p[i2, 3] = c[i0] - (n[i0] * (main_min[i0] - c[i0]) - + n[i1] * (main_max[i1] - c[i1])) / n[i0] + p[i2, 3] = c[i2] - (n[i0] * (main_min[i0] - c[i0]) + + n[i1] * (main_max[i1] - c[i1])) / n[i2] # Create an auxiliary fracture that spans the same plane as the other # fracture, and with a larger extension than the main fracture. - aux_frac = Fracture(p) + aux_frac = Fracture(p, check_convexity=False) isect_pt, _, _ = main_frac.intersects(aux_frac, tol) + # The extension of the abutting fracture will cross the other fracture + # with a certain angle to the vertical. If the other fracture is rotated + # with a similar angle, point contact results. + if isect_pt.size == 0: + warnings.warn("""No intersection found in cutting of fractures. This is + likely caused by an unfortunate combination of + extrusion and rotation angles, which created fractures + that only intersect in a single point (the outcrop + plane. Will try to continue, but this may cause + trouble for meshing etc.""") + return main_frac, None + # Next step is to eliminate points in the main fracture that are on the # wrong side of the other fracture. 
v = main_frac.p - other_frac.center.reshape((-1, 1)) - sgn = np.sign(v * n) - right_sign = np.sign(reference_point * v) + sgn = np.sign(np.sum(v * n, axis=0)) + ref_v = reference_point - other_frac.center.reshape((-1, 1)) + right_sign = np.sign(np.sum(ref_v * n, axis=0)) + + # Eliminate points that are on the other side. + eliminate = np.where(sgn * right_sign < 0)[0] + main_frac.remove_points(eliminate) - # Eliminate points that are on the other side. - eliminate = np.where(sgn * right_sign < 0) # Add intersection points on the main fracture. One of these may already be # present, as the point of extrusion, but add_point will uniquify the point # cloud. # We add the points after elimination, to ensure that the points on the # plane are present in the final fracture. - main_frac.add_points(isect_pt) - - return main_frac + main_frac.add_points(isect_pt, check_convexity=False) + + if recompute_center: + main_frac.compute_centroid() + + # If the main fracture is too large compared to the other, the cut line + # will extend beyond the confining plane. In these cases, compute the + # distance from the fracture center to the outside intersection point. This + # can be used to extend the other fracture so as to avoid such strange + # configurations. + other_center = other_frac.center.reshape((-1, 1)) + other_p = other_frac.p + rot = cg.project_plane_matrix(other_p - other_center) + + other_rot = rot.dot(other_p - other_center)[:2] + isect_rot = rot.dot(isect_pt - other_center)[:2] + + is_inside = cg.is_inside_polygon(other_rot, isect_rot, tol, default=True) + # At one point (the exposed point) must be in the polygon of the other + # fracture. + assert is_inside.any() + + if not is_inside.all(): + hit = np.logical_not(is_inside) + r = np.sqrt(np.sum(isect_pt[:, hit]**2)) + return main_frac, r + else: + return main_frac, None diff --git a/src/porepy/fracs/fractures.py b/src/porepy/fracs/fractures.py index b366fa43e8..651dd1d376 100644 --- a/src/porepy/fracs/fractures.py +++ b/src/porepy/fracs/fractures.py @@ -47,17 +47,21 @@ class Fracture(object): - def __init__(self, points, index=None): + def __init__(self, points, index=None, check_convexity=True): self.p = points # Ensure the points are ccw self.points_2_ccw() self.compute_centroid() - self.normal = cg.compute_normal(points)[:, None] + self.compute_normal() self.orig_p = self.p.copy() self.index = index + assert self.is_planar(), 'Points define non-planar fracture' + if check_convexity: + assert self.check_convexity(), 'Points form non-convex polygon' + def set_index(self, i): self.index = i @@ -118,7 +122,7 @@ def points_2_ccw(self): """ Ensure that the points are sorted in a counter-clockwise order. - Implementation note: + mplementation note: For now, the ordering of nodes in based on a simple angle argument. This will not be robust for general point clouds, but we expect the fractures to be regularly shaped in this sense. In particular, we @@ -142,12 +146,17 @@ def points_2_ccw(self): def add_points(self, p, check_convexity=True, tol=1e-4): """ - Add a point to the polygon, and enforce ccw sorting. + Add a point to the polygon with ccw sorting enforced. + + Always run a test to check that the points are still planar. By + default, a check of convexity is also performed, however, this can be + turned off to speed up simulations (the test uses sympy, which turns + out to be slow in many cases). Parameters: p (np.ndarray, 3xn): Points to add check_convexity (boolean, optional): Verify that the polygon is - convex. Defaults to true + convex. 
Defaults to true. tol (double): Tolerance used to check if the point already exists. Return: @@ -157,9 +166,26 @@ def add_points(self, p, check_convexity=True, tol=1e-4): self.p = np.hstack((self.p, p)) self.p, _, _ = setmembership.unique_columns_tol(self.p, tol=tol) - self.points_2_ccw() + # Sort points to ccw + self.p = self.p[:, self.points_2_ccw()] + + if check_convexity: + return self.check_convexity() and self.is_planar(tol) + else: + return self.is_planar() - return self.check_convexity() + def remove_points(self, ind, keep_orig=False): + """ Remove points from the fracture definition + + Parameters: + ind (np array-like): Indices of points to remove. + keep_orig (boolean, optional): Whether to keep the original points + in the attribute orig_p. Defaults to False. + + """ + self.p = np.delete(self.p, ind, axis=1) + if not keep_orig: + self.orig_p = self.p def plane_coordinates(self): """ @@ -191,7 +217,23 @@ def check_convexity(self): boolean, true if the polygon is convex. """ - return self.as_sp_polygon().is_convex() + p_2d = self.plane_coordinates() + return self.as_sp_polygon(p_2d).is_convex() + + def is_planar(self, tol=1e-4): + """ Check if the points forming this fracture lies in a plane. + + Parameters: + tol (double): Tolerance for non-planarity. Treated as an absolute + quantity (no scaling with fracture extent) + + Returns: + boolean, True if the polygon is planar. False if not. + """ + p = self.p - np.mean(self.p, axis=1).reshape((-1, 1)) + rot = cg.project_plane_matrix(p) + p_2d = rot.dot(p) + return np.max(np.abs(p_2d[2])) < tol def compute_centroid(self): """ @@ -220,11 +262,29 @@ def compute_centroid(self): center = np.sum(cc * area, axis=1) / np.sum(area) # Project back again. - self.center = rot.transpose().dot(np.append(center, z)) + self.center = rot.transpose().dot(np.append(center, z)).reshape((3, 1)) + + def compute_normal(self): + """ Compute normal to the polygon. + """ + self.normal = cg.compute_normal(self.p)[:, None] + + def as_sp_polygon(self, p=None): + """ Represent polygon as a sympy object. - def as_sp_polygon(self): - sp = [sympy.geometry.Point(self.p[:, i]) - for i in range(self.p.shape[1])] + Parameters: + p (np.array, nd x npt, optional): Points for the polygon. Defaults + to None, in which case self.p is used. + + Returns: + sympy.geometry.Polygon: Representation of the polygon formed by p. + + """ + if p is None: + p = self.p + + sp = [sympy.geometry.Point(p[:, i]) + for i in range(p.shape[1])] return sympy.geometry.Polygon(*sp) def intersects(self, other, tol, check_point_contact=True): @@ -312,8 +372,14 @@ def intersects(self, other, tol, check_point_contact=True): # segment of the other. 
# Compute intersections, with both polygons as first argument - isect_self_other = cg.polygon_segment_intersect(self.p, other.p, tol=tol) - isect_other_self = cg.polygon_segment_intersect(other.p, self.p, tol=tol) + # Do not include boundary points of the polygons here, these will be + # processed later + isect_self_other = cg.polygon_segment_intersect(self.p, other.p, + tol=tol, + include_bound_pt=True) + isect_other_self = cg.polygon_segment_intersect(other.p, self.p, + tol=tol, + include_bound_pt=True) # Process data if isect_self_other is not None: @@ -335,14 +401,21 @@ def intersects(self, other, tol, check_point_contact=True): if int_points.shape[1] > 1: int_points, _, _ \ = setmembership.unique_columns_tol(int_points, tol=tol) - # There should be at most two of these points - assert int_points.shape[1] <= 2 - # There at at the most two intersection points between the fractures - # (assuming convexity). If two interior points are found, we can simply - # cut it short here. - if int_points.shape[1] == 2: - return int_points, on_boundary_self, on_boundary_other + # There should be at most two of these points. + # In some cases, likely involving extrusion, several segments may lay + # essentially in the fracture plane, producing more than two segments. + # Thus, if more than two colinear points are found, pick out the first + # and last one. + if int_points.shape[1] > 2: + if cg.is_collinear(int_points, tol): + sort_ind = cg.argsort_point_on_line(int_points, tol) + int_points = int_points[:, [sort_ind[0], sort_ind[-1]]] + else: + # This is a bug + raise ValueError(''' Found more than two intersection between + fracture polygons. + ''') #### # Next, check for intersections between the polygon boundaries @@ -357,25 +430,55 @@ def intersects(self, other, tol, check_point_contact=True): # points if len(bound_sect_self_other) == 0 and len(bound_sect_other_self) == 0: if check_point_contact and int_points.shape[1] == 1: - raise ValueError('Contact in a single point not allowed') + # contacts are not implemented. Give a warning, return no + # interseciton, and hope the meshing software is merciful + if hasattr(self, 'index') and hasattr(other, 'index'): + logger.warning("""Found a point contact between fracture + %i + and %i at (%.5f, %.5f, %.5f)""", self.index, + other.index, *int_points) + else: + logger.warning("""Found point contact between fractures + with no index. Coordinate: (%.5f, %.5f, + %.5f)""", *int_points) + return np.empty((3, 0)), on_boundary_self, on_boundary_other # None of the intersection points lay on the boundary - return int_points, on_boundary_self, on_boundary_other + else: + def point_on_segment(ip, poly): + # Check if a set of points are located on a single segment + if ip.shape[1] == 0: + return False + start = poly + end = np.roll(poly, 1, axis=1) + for si in range(start.shape[1]): + dist, cp = cg.dist_points_segments(ip, start[:, si], + end[:, si]) + if np.all(dist < tol): + return True + return False + + # The 'interior' points can still be on the boundary (naming + # of variables should be updated). The points form a boundary + # segment if they all lie on the a single segment of the + # fracture. 
+ on_boundary_self = point_on_segment(int_points, self.p) + on_boundary_other = point_on_segment(int_points, other.p) + return int_points, on_boundary_self, on_boundary_other # Else, we have boundary intersection, and need to process them - bound_pt_self, self_segment, _, self_cuts_through = \ - self._process_segment_isect(bound_sect_self_other, self.p, tol) + bound_pt_self, self_segment, is_vertex_self, self_cuts_through = \ + self._process_segment_isect(bound_sect_self_other, self.p, tol, + other.p) - bound_pt_other, other_segment, _, other_cuts_through = \ + bound_pt_other, other_segment, is_vertex_other, other_cuts_through = \ self._process_segment_isect(bound_sect_other_self, other.p, - tol) + tol, self.p) # Run some sanity checks # Convex polygons can intersect each other in at most two points (and a # line inbetween) - if int_points.shape[1] > 1: - assert bound_pt_self.shape[1] == 0 and bound_pt_other.shape[1] == 0 - elif int_points.shape[1] == 1: + if int_points.shape[1] == 1: # There should be exactly one unique boundary point. bp = np.hstack((bound_pt_self, bound_pt_other)) if bp.shape[1] == 0: @@ -401,8 +504,6 @@ def intersects(self, other, tol, check_point_contact=True): # If a segment runs through the polygon, there should be no interior points. # This may be sensitive to tolerances, should be a useful test to gauge # that. - if self_cuts_through or other_cuts_through: - assert int_points.shape[1] == 0 # Storage for intersection points located on the boundary bound_pt = [] @@ -411,28 +512,45 @@ def intersects(self, other, tol, check_point_contact=True): if self_segment: assert other_segment # This should be reflexive assert bound_pt_self.shape[1] == 2 - assert np.allclose(bound_pt_self, bound_pt_other) + + # Depending on whether the intersection points are also vertexes + # of the polygons, bound_pt_self and other need not be identical. + # Return the uniquified combination of the two + bound_pt = np.hstack((bound_pt_other, bound_pt_self)) + bound_pt, _, _ = setmembership.unique_columns_tol(bound_pt, + tol=tol) + on_boundary_self = [True, True] on_boundary_other = [True, True] - return bound_pt_self, on_boundary_self, on_boundary_other + return bound_pt, on_boundary_self, on_boundary_other # Case where one boundary segment of one fracture cuts through two # boundary segment (thus the surface) of the other fracture. Gives a # T-type intersection if self_cuts_through: - assert np.allclose(bound_pt_self, bound_pt_other) on_boundary_self = True on_boundary_other = False - return bound_pt_self, on_boundary_self, on_boundary_other + # Depending on whether the intersection points are also vertexes + # of the polygons, bound_pt_self and other need not be identical. + # Return the uniquified combination of the two + bound_pt = np.hstack((bound_pt_other, bound_pt_self)) + bound_pt, _, _ = setmembership.unique_columns_tol(bound_pt, + tol=tol) + + return bound_pt, on_boundary_self, on_boundary_other elif other_cuts_through: - assert np.allclose(bound_pt_self, bound_pt_other) on_boundary_self = False on_boundary_other = True - return bound_pt_self, on_boundary_self, on_boundary_other - # By now, there should be a single member of bound_pt - assert bound_pt_self.shape[1] == 1 or bound_pt_self.shape[1] == 2 - assert bound_pt_other.shape[1] == 1 or bound_pt_other.shape[1] == 2 + # Depending on whether the intersection points are also vertexes + # of the polygons, bound_pt_self and other need not be identical. 
+ # Return the uniquified combination of the two + bound_pt = np.hstack((bound_pt_other, bound_pt_self)) + bound_pt, _, _ = setmembership.unique_columns_tol(bound_pt, + tol=tol) + + return bound_pt, on_boundary_self, on_boundary_other + bound_pt = np.hstack((int_points, bound_pt_self)) # Check if there are two boundary points on the same segment. If so, @@ -448,22 +566,25 @@ def intersects(self, other, tol, check_point_contact=True): # Special case if a segment is cut as one interior point (by its # vertex), and a boundary point on the same segment, this is a boundary # segment - if int_points.shape[1] == 1 and bound_pt_self.shape[1] == 1: - is_vert, vi = self.is_vertex(int_points) - seg_i = bound_sect_self_other[0][0] - nfp = self.p.shape[1] - if is_vert and (vi == seg_i or (seg_i+1) % nfp == vi): - on_boundary_self = True - if int_points.shape[1] == 1 and bound_pt_other.shape[1] == 1: - is_vert, vi = other.is_vertex(int_points) - seg_i = bound_sect_other_self[0][0] - nfp = other.p.shape[1] - if is_vert and (vi == seg_i or (seg_i+1) % nfp == vi): - on_boundary_other = True - + if bound_pt_self.shape[1] == 1: + for ind in range(int_points.shape[1]): + is_vert, vi = self.is_vertex(int_points[:, ind]) + seg_i = bound_sect_self_other[0][0] + nfp = self.p.shape[1] + if is_vert and (vi == seg_i or (seg_i+1) % nfp == vi): + on_boundary_self = True + if bound_pt_other.shape[1] == 1: + for ind in range(int_points.shape[1]): + is_vert, vi = other.is_vertex(int_points[:, ind]) + seg_i = bound_sect_other_self[0][0] + nfp = other.p.shape[1] + if is_vert and (vi == seg_i or (seg_i+1) % nfp == vi): + on_boundary_other = True + + bound_pt, _, _ = setmembership.unique_columns_tol(bound_pt, tol=tol) return bound_pt, on_boundary_self, on_boundary_other - def _process_segment_isect(self, isect_bound, poly, tol): + def _process_segment_isect(self, isect_bound, poly, tol, other_poly): """ Helper function to interpret result from polygon_boundaries_intersect @@ -488,11 +609,23 @@ def _process_segment_isect(self, isect_bound, poly, tol): # Counter for how many times each segment is found num_occ = np.zeros(poly.shape[1]) + # Number of vertexes on the other polygon - used below to identify + # whether the intersection coincides with these. + num_pt_other = other_poly.shape[1] + # Loop over all intersections, process information for bi in isect_bound: # Index of the intersecting segments - num_occ[bi[0]] += 1 + + # Special case: For T-type intersection where the vertexes of one + # polygon lies on the segment of the other, they do not count + # towards cutting through the segment. + expanded = np.hstack((other_poly, bi[2])) + other_unique, _, _ = setmembership.unique_columns_tol(expanded, + tol=tol) + if other_unique.shape[1] > num_pt_other: + num_occ[bi[0]] += 1 # Coordinates of the intersections ip = bi[2] @@ -510,8 +643,8 @@ def _process_segment_isect(self, isect_bound, poly, tol): elif ip_unique.shape[1] == 1: # Either a vertex or single intersection point. poly_ext, _, _ = setmembership.unique_columns_tol( - np.hstack((self.p, ip_unique)), tol=tol) - if poly_ext.shape[1] == self.p.shape[1]: + np.hstack((poly, ip_unique)), tol=tol) + if poly_ext.shape[1] == poly.shape[1]: # This is a vertex, we skip it pass else: @@ -614,20 +747,26 @@ def outside_box(p, bound_i): # by the box, and extend the two others to cover segments that run # through the extension of the plane only. 
west = Fracture(np.array([[x0_box, x0_box, x0_box, x0_box], - [y0, y1, y1, y0], [z0, z0, z1, z1]])) + [y0, y1, y1, y0], [z0, z0, z1, z1]]), + check_convexity=False) east = Fracture(np.array([[x1_box, x1_box, x1_box, x1_box], [y0, y1, y1, y0], - [z0, z0, z1, z1]])) + [z0, z0, z1, z1]]), + check_convexity=False) south = Fracture(np.array([[x0, x1, x1, x0], [y0_box, y0_box, y0_box, y0_box], - [z0, z0, z1, z1]])) + [z0, z0, z1, z1]]), + check_convexity=False) north = Fracture(np.array([[x0, x1, x1, x0], [y1_box, y1_box, y1_box, y1_box], - [z0, z0, z1, z1]])) + [z0, z0, z1, z1]]), + check_convexity=False) bottom = Fracture(np.array([[x0, x1, x1, x0], [y0, y0, y1, y1], - [z0_box, z0_box, z0_box, z0_box]])) + [z0_box, z0_box, z0_box, z0_box]]), + check_convexity=False) top = Fracture(np.array([[x0, x1, x1, x0], [y0, y0, y1, y1], - [z1_box, z1_box, z1_box, z1_box]])) + [z1_box, z1_box, z1_box, z1_box]]), + check_convexity=False) # Collect in a list to allow iteration bound_planes = [west, east, south, north, bottom, top] @@ -737,7 +876,7 @@ def outside_box(p, bound_i): break def __repr__(self): - return str(self.as_sp_polygon()) + return self.__str__() def __str__(self): s = 'Points: \n' @@ -800,6 +939,8 @@ def __init__(self, center, major_axis, minor_axis, major_axis_angle, np.pi/6) """ + if center.ndim == 1: + center = center.reshape((-1, 1)) self.center = center # First, populate polygon in the xy-plane @@ -830,7 +971,7 @@ def __init__(self, center, major_axis, minor_axis, major_axis_angle, assert cg.is_planar(dip_pts) # Set the points, and store them in a backup. - self.p = center[:, np.newaxis] + dip_pts + self.p = center + dip_pts self.orig_p = self.p.copy() # Compute normal vector @@ -889,9 +1030,12 @@ def __init__(self, fractures=None, verbose=0, tol=1e-4): self.tol = tol self.verbose = verbose - # Initialize with an empty domain. Can be modified later by a call to - # 'impose_external_boundary()' - self.domain = None + # Initialize mesh size parameters as empty + self.h_min = None + self.h_ideal = None + + # No auxiliary points have been added + self.auxiliary_points_added = False def add(self, f): # Careful here, if we remove one fracture and then add, we'll have @@ -908,25 +1052,30 @@ def get_normal(self, frac): def get_center(self, frac): return self._fractures[frac].center - def get_intersections(self, frac): - """ - Get all intersections in the plane of a given fracture. + def intersections_of_fracture(self, frac): + """ Get all known intersections for a fracture. + + If called before find_intersections(), the returned list will be empty. 
+ + Parameters: + frac: A fracture in the network + + Returns: + np.array (Intersection): Array of intersections + """ + if isinstance(frac, int): + fi = frac + else: + fi = frac.index + frac_arr = [] + for i in self.intersections: + if i.coord.size == 0: + continue + if i.first.index == fi or i.second.index == fi: + frac_arr.append(i) + return frac_arr + def find_intersections(self, use_orig_points=False): """ @@ -1040,14 +1189,6 @@ def split_intersections(self): if self.verbose > 1: self._verify_fractures_in_plane(all_p, edges, edges_2_frac) - - # With all edges being non-intersecting, the next step is to split the - # fractures into polygons formed of boundary edges, intersection edges - # and auxiliary edges that connect the boundary and intersections. -# all_p, edges, is_boundary_edge, poly_segments, poly_2_frac\ -# = self._split_into_polygons(all_p, edges, edges_2_frac, -# is_boundary_edge) - # Store the full decomposition. self.decomposition = {'points': all_p, 'edges': edges.astype('int'), @@ -1087,6 +1228,22 @@ def split_intersections(self): logger.info('Finished fracture splitting after %.5f seconds', time.time() - start_time) + def _fracs_2_edges(self, edges_2_frac): + """ Invert the mapping between edges and fractures. + + Returns: + List of list: For each fracture, indices of all edges that point to + it. + """ + f2e = [] + for fi in range(len(self._fractures)): + f_l = [] + for ei, e in enumerate(edges_2_frac): + if fi in e: + f_l.append(ei) + f2e.append(f_l) + return f2e + def _point_and_edge_lists(self): """ Obtain lists of all points and connections necessary to describe @@ -1122,8 +1279,8 @@ def _point_and_edge_lists(self): # will deal with coinciding points later. for fi, frac in enumerate(self._fractures): num_p = all_p.shape[1] - num_p_loc = frac.orig_p.shape[1] - all_p = np.hstack((all_p, frac.orig_p)) + num_p_loc = frac.p.shape[1] + all_p = np.hstack((all_p, frac.p)) loc_e = num_p + np.vstack((np.arange(num_p_loc), (np.arange(num_p_loc) + 1) % num_p_loc)) @@ -1175,8 +1332,7 @@ def _uniquify_points_and_edges(self, all_p, edges, edges_2_frac, logger.info("""Uniquify points and edges, starting with %i points, %i edges""", all_p.shape[1], edges.shape[1]) - relative = np.amax(np.linalg.norm(all_p, axis=0)) - all_p = cg.snap_to_grid(all_p, tol=self.tol/relative) +# all_p = cg.snap_to_grid(all_p, tol=self.tol) # We now need to find points that occur in multiple places p_unique, unique_ind_p, \ @@ -1301,8 +1457,9 @@ def _remove_edge_intersections(self, all_p, edges, edges_2_frac, # obtain a more robust algorithm. Not sure about how to do this # consistent. p_new, edges_new = cg.remove_edge_crossings(p_2d, edges_2d, - tol=self.tol*5, - verbose=self.verbose) + tol=self.tol, + verbose=self.verbose, + snap=False) # Then, patch things up by converting new points to 3D, # From the design of the functions in cg, we know that new points @@ -1389,329 +1546,6 @@ def _remove_edge_intersections(self, all_p, edges, edges_2_frac, # way that ensures that no polygon lies on both sides of an # intersection edge.
- def _split_into_polygons(self, all_p, edges, edges_2_frac, is_boundary_edge): - """ - Split the fracture surfaces into non-intersecting polygons. - - Starting from a description of the fracture polygons and their - intersection lines (which should be non-intersecting), the fractures - are split into sub-polygons which they may share a boundary, but no - sub-polygon intersect the surface of another. This step is necessary - for the fracture network to be ammenable to gmsh. - - The sub-polygons are defined by drawing auxiliary lines between - fracture points. This expands the global set of edges, but not the - points. The auxiliary lines will be sent to gmsh as constraints on the - gridding algorithm. - - TODO: The restriction that the auxiliary lines can only be drawn - between existing lines can create unfortunate constraints in the - gridding, in terms of sharp angles etc. This can be alleviated by - adding additional points to the global description. However, if a point - is added to an intersection edge, this will require an update of all - fractures sharing that intersection. Not sure what the right thing to - do here is. - - Parameters: - all_p (np.ndarray, 3xn): Coordinates of all points used to describe - the fracture polygons, and their intersections. Should be - unique. - edges (np.ndarray, 2xn): Connections between points, formed either - by a fracture boundary, or a fracture intersection. - edges_2_frac (list): For each edge, index of all fractures that - point to the edge. - is_boundary_edge (np.ndarray of bool, size=num_edges): A flag - telling whether the edge is on the boundary of a fracture. - - Returns: - np.ndarray (3xn_pt): Coordinates of all points used to describe the - fractures. - np.ndarray (3xn_edges): Connections between points, formed by - fracture boundaries, fracture intersections, or auxiliary lines - needed to form sub-polygons. The auxiliary lines will be added - towards the end of the array. - np.ndarray (boolean, 3xn_edges_orig): Flag telling if an edge is on - the boundary of a fracture. NOTE: The list is *not* expanded as - auxiliary lines are added; these are known to not be on the - boundary. The legth of this output, relative to the second, can - thus be used to flag auxiliary lines. - list of np.ndarray: Each item contains a sub-polygon, specified by - the global indices of its vertices. - list of int: For each sub-polygon, index of the fracture it forms a - part of. - - Note that for the moment, the first and third outputs are not - modified compared to respective input. This will change if the - splitting is allowed to introduce new additional points. - - """ - logger.info('Split fractures into non-intersecting polygons') - start_time = time.time() - - # For each polygon, list of segments that make up the polygon - poly_segments = [] - # Which fracture is the polygon part of - poly_2_frac = [] - - for fi, _ in enumerate(self._fractures): - - if self.verbose > 1: - print(' Split fracture no ' + str(fi)) - - # Identify the edges associated with this fracture - # It would have been more convenient to use an inverse - # relationship frac_2_edge here, but that would have made the - # update for new edges (towards the end of this loop) more - # cumbersome. 
- edges_loc_ind = [] - for ei, e in enumerate(edges_2_frac): - if np.any(e == fi): - edges_loc_ind.append(ei) - - edges_loc = np.vstack((edges[:, edges_loc_ind], - np.array(edges_loc_ind))) - p_ind_loc = np.unique(edges_loc[:2]) - p_loc = all_p[:, p_ind_loc] - - p_2d, edges_2d, _, _ = self._points_2_plane(p_loc, edges_loc, - p_ind_loc) - - - # Add a tag to trace the edges during splitting - edges_2d[2] = edges_loc[2] - - # Flag of whether the constraint is an internal boundary (as - # opposed to boundary of the polygon) - internal_boundary = np.where([not is_boundary_edge[i] - for i in edges_2d[2]])[0] - - # Create a dictionary of input information to triangle - triangle_opts = {'vertices': p_2d.transpose(), - 'segments': edges_2d[:2].transpose(), - 'segment_markers': - edges_2d[2].transpose().astype('int32') - } - # Run a constrained Delaunay triangulation. - t = triangle.triangulate(triangle_opts, 'p') - - # Pull out information on the triangulation - segment_markers = \ - np.squeeze(np.asarray(t['segment_markers']).transpose()) - tri = np.sort(np.asarray(t['triangles']).transpose(), axis=0) - p = np.asarray(t['vertices']).transpose() - segments = np.sort(np.asarray(t['segments']).transpose(), axis=0) - - relative = np.amax(np.linalg.norm(p, axis=0)) - p = cg.snap_to_grid(p, tol=self.tol/relative) - - # It turns out that Triangle sometime creates (almost) duplicate - # points. Our algorithm are based on the points staying, thus these - # need to go. - # The problem is characterized by output from Triangle having more - # points than the input. - if p.shape[1] > p_2d.shape[1]: - - # Identify duplicate points - # Not sure about tolerance used here - p, _, old_2_new = \ - setmembership.unique_columns_tol(p, tol=np.sqrt(self.tol)) - # Update segment and triangle coordinates to refer to unique - # points - segments = old_2_new[segments] - tri = old_2_new[tri] - - # Remove segments with the same start and endpoint - segments_remove = np.argwhere(np.diff(segments, axis=0)[0] == 0) - segments = np.delete(segments, segments_remove, axis=1) - segment_markers = np.delete(segment_markers, segments_remove) - - # Remove segments that exist in duplicates (a, b) and (b, a) - segments.sort(axis=0) - segments, new_2_old_seg, _ = \ - setmembership.unique_columns_tol(segments) - segment_markers = segment_markers[new_2_old_seg] - - # Look for degenerate triangles, where the same point occurs - # twice - tri.sort(axis=0) - degenerate_tri = np.any(np.diff(tri, axis=0) == 0, axis=0) - tri = tri[:, np.logical_not(degenerate_tri)] - - # Remove duplicate triangles, if any - tri, _, _ = setmembership.unique_columns_tol(tri) - - - # We need a connection map between cells. The simplest option was - # to construct a simplex grid, and use the associated function. - # While we were at it, we could have used this to find cell - # centers, cell-face (segment) mappings etc, but for reason, that - # did not happen. 
- g = simplex.TriangleGrid(p, tri) - g.compute_geometry() - c2c = g.cell_connection_map() - - def cells_of_segments(cells, s): - # Find the cells neighboring - hit_0 = np.logical_or(cells[0] == s[0], cells[0] == - s[1]).astype('int') - hit_1 = np.logical_or( - cells[1] == s[0], cells[1] == s[1]).astype('int') - hit_2 = np.logical_or( - cells[2] == s[0], cells[2] == s[1]).astype('int') - hit = np.squeeze(np.where(hit_0 + hit_1 + hit_2 == 2)) - return hit - - px = p[0] - py = p[1] - # Cell centers (in 2d coordinates) - cell_c = np.vstack((np.mean(px[tri], axis=0), - np.mean(py[tri], axis=0))) - - # The target sub-polygons should have intersections as an external - # boundary. The sub-polygons are grown from triangles on each side of - # the intersection, denoted cw and ccw. - cw_cells = [] - ccw_cells = [] - - # For each internal boundary, find the cells on each side of the - # boundary; these will be assigned to different polygons. For the - # moment, there should be a single cell along each side of the - # boundary, but this may change in the future. - - if internal_boundary.size == 0: - # For non-intersecting fractures, we just need a single seed - cw_cells = [0] - else: - # Full treatment - for ib in internal_boundary: - segment_match = np.squeeze(np.where(segment_markers == - edges_2d[2, ib])) - loc_segments = segments[:, segment_match] - loc_cells = cells_of_segments(tri, loc_segments) - - p_0 = p_2d[:, edges_2d[0, ib]] - p_1 = p_2d[:, edges_2d[1, ib]] - - cw_loc = [] - ccw_loc = [] - - for ci in np.atleast_1d(loc_cells): - if cg.is_ccw_polyline(p_0, p_1, cell_c[:, ci]): - ccw_loc.append(ci) - else: - cw_loc.append(ci) - cw_cells.append(cw_loc) - ccw_cells.append(ccw_loc) - - polygon = np.zeros(tri.shape[1], dtype='int') - counter = 1 - for c in cw_cells + ccw_cells: - polygon[c] = counter - counter += 1 - - # A cell in the partitioning may border more than one internal - # constraint, and thus be assigned two values (one overwritten) in - # the above loop. This should be fine; the corresponding index - # will have no cells assigned to it, but that is taken care of - # below. - - num_iter = 0 - - # From the seeds, assign a polygon index to all triangles - while np.any(polygon == 0): - - num_occurences = np.bincount(polygon) - # Disregard the 0 (which we know is there, since the while loop - # took another round). - order = np.argsort(num_occurences[1:]) - for pi in order: - vec = 0 * polygon - # We know that the values in polygon are mapped to - vec[np.where(polygon == pi + 1)] = 1 - new_val = c2c * vec - not_found = np.where(np.logical_and(polygon == 0, - new_val > 0)) - polygon[not_found] = pi + 1 - - num_iter += 1 - if num_iter >= tri.shape[1]: - # Sanity check, since we're in a while loop. The code - # should terminate long before this, but I have not - # tried to find an exact maximum number of iterations. - raise ValueError('This must be too many iterations') - - logger.debug('Split fracture %i into %i polygons', fi, - np.unique(polygon).size) - - # For each cell, find its boundary - for poly in np.unique(polygon): - tri_loc = tri[:, np.squeeze(np.where(polygon == poly))] - # Special case of a single triangle - if tri_loc.size == 3: - tri_loc = tri_loc.reshape((-1, 1)) - - seg_loc = np.sort(np.hstack((tri_loc[:2], - tri_loc[1:], - tri_loc[::2])), axis=0) - # Really need something like accumarray here.. 
- unique_s, _, all_2_unique = setmembership.unique_columns_tol( - seg_loc) - bound_segment_ind = np.squeeze(np.where(np.bincount(all_2_unique) - == 1)) - bound_segment = unique_s[:, bound_segment_ind] - boundary = sort_points.sort_point_pairs(bound_segment) - - # Ensure that the boundary is stored ccw - b_points = p_2d[:, boundary[:, 0]] - if not cg.is_ccw_polygon(b_points): - boundary = boundary[::-1] - - # The boundary is expressed in terms of the local point - # numbering. Convert to global numbering. - boundary_glob = p_ind_loc[boundary].astype('int') - poly_segments.append(boundary_glob) - poly_2_frac.append(fi) - # End of processing this sub-polygon - - # End of processing this fracture. - - # We have now split all fractures into non-intersecting polygons. The - # polygon boundary consists of fracture boundaries, fracture - # intersections and auxiliary edges. The latter must be identified, and - # added to the global edge list. - all_edges = np.empty((2, 0), dtype='int') - for p in poly_segments: - all_edges = np.hstack((all_edges, p)) - - # Find edges of the polygons that can be found in the list of existing - # edges - edge_exist, _ = setmembership.ismember_rows(all_edges, edges) - # Also check with the reverse ordering of edge points - edge_exist_reverse, _ = setmembership.ismember_rows(all_edges[::-1], - edges) - # The edge exists if it is found of the orderings - edge_exist = np.logical_or(edge_exist, edge_exist_reverse) - # Where in the list of all edges are the new ones located? - new_edge_ind = np.where([~i for i in edge_exist])[0] - new_edges = all_edges[:, new_edge_ind] - - # If we have found new edges (will most likely happen if there are - # intersecting fractures in the network), these should be added to the - # edge list, in a unique representation. - if new_edges.size > 0: - # Sort the new edges to root out the same edge being found twice - # from different directions - new_edges = np.sort(new_edges, axis=0) - # Find unique representation - unique_new_edges, _, _\ - = setmembership.unique_columns_tol(new_edges) - edges = np.hstack((edges, unique_new_edges)) - - logger.info('Fracture splitting done. 
Elapsed time %.5f', time.time() - - start_time) - - return all_p, edges, is_boundary_edge, poly_segments, poly_2_frac - def report_on_decomposition(self, do_print=True, verbose=None): """ @@ -1849,8 +1683,9 @@ def _points_2_plane(self, p_loc, edges_loc, p_ind_loc): rot = cg.project_plane_matrix(p_loc, tol=self.tol) p_2d = rot.dot(p_loc) - assert np.amax(np.abs(p_2d[2]))/np.amax(np.abs(p_2d[:2])) < \ - 2*self.tol * np.sqrt(3) + extent = p_2d.max(axis=1) - p_2d.min(axis=1) + lateral_extent = np.maximum(np.max(extent[2]), 1) + assert extent[2] < lateral_extent * self.tol * 30 # Dump third coordinate p_2d = p_2d[:2] @@ -1885,41 +1720,14 @@ def plot_fractures(self, ind=None): f.plot_frame(ax) return fig - def compute_distances(self): - nf = len(self._fractures) - - # Distance between vertexes - p_2_p = np.zeros((nf, nf)) - # Distance between segments - s_2_s = np.zeros((nf, nf)) - - for i, f_1 in enumerate(self._fractures): - for j, f_2 in enumerate(self._fractures[i + 1:]): - d = np.Inf - for p_1 in f_1.points(): - for p_2 in f_2.points(): - d = np.minimum(d, cg.dist_point_pointset(p_1, p_2)[0]) - p_2_p[i, i + j + 1] = d - - d = np.Inf - for s_1 in f_1.segments(): - for s_2 in f_2.segments(): - d = np.minimum(d, - cg.distance_segment_segment(s_1[:, 0], - s_1[:, 1], - s_2[:, 0], - s_2[:, 1])) - s_2_s[i, i + j + 1] = d - return p_2_p, s_2_s - - - def impose_external_boundary(self, box, truncate_fractures=True): + def impose_external_boundary(self, box=None, truncate_fractures=True): """ Set an external boundary for the fracture set. The boundary takes the form of a 3D box, described by its minimum and - maximum coordinates. + maximum coordinates. If no bounding box is provided, a box will be + fited outside the fracture network. If desired, the fratures will be truncated to lay within the bounding box; that is, Fracture.p will be modified. The orginal coordinates of @@ -1937,6 +1745,20 @@ def impose_external_boundary(self, box, truncate_fractures=True): boundary will be truncated. """ + if box is None: + OVERLAP = 0.05 + cmin = np.ones((3, 1)) * float('inf') + cmax = -np.ones((3, 1)) * float('inf') + for f in self._fractures: + cmin = np.min(np.hstack((cmin, f.p)), axis=1).reshape((-1, 1)) + cmax = np.max(np.hstack((cmax, f.p)), axis=1).reshape((-1, 1)) + cmin = cmin[:, 0] + cmax = cmax[:, 0] + dx = OVERLAP * (cmax - cmin) + box = {'xmin': cmin[0] - dx[0], 'xmax': cmax[0] + dx[0], + 'ymin': cmin[1] - dx[1], 'ymax': cmax[1] + dx[1], + 'zmin': cmin[2] - dx[2], 'zmax': cmax[2] + dx[2]} + # Insert boundary in the form of a box, and kick out (parts of) # fractures outside the box self.domain = box @@ -2075,7 +1897,7 @@ def _determine_mesh_size(self, **kwargs): See the gmsh manual for further details. """ - mode = kwargs.get('mode', 'constant') + mode = kwargs.get('mode', 'distance') num_pts = self.decomposition['points'].shape[1] @@ -2091,13 +1913,139 @@ def _determine_mesh_size(self, **kwargs): else: mesh_size_bound = None return mesh_size, mesh_size_bound + elif mode == 'distance': + if self.h_min is None or self.h_ideal is None: + print('Found no information on mesh sizes. 
Returning') + return None, None + + p = self.decomposition['points'] + num_pts = p.shape[1] + dist = cg.dist_pointset(p, max_diag=True) + mesh_size = np.min(dist, axis=1) + print('Minimal distance between points encountered is ' + str(np.min(dist))) + mesh_size = np.maximum(mesh_size, self.h_min * np.ones(num_pts)) + mesh_size = np.minimum(mesh_size, self.h_ideal * np.ones(num_pts)) + + mesh_size_bound = self.h_ideal + return mesh_size, mesh_size_bound else: raise ValueError('Unknown mesh size mode ' + mode) - def distance_point_segment(): + def insert_auxiliary_points(self, h_ideal=None, h_min=None): + """ Insert auxiliary points on fracture edges. Used to guide gmsh mesh + size parameters. + + The function should only be called once to avoid insertion of multiple + layers of extra points, this will likely kill gmsh. + + The function is motivated by similar functionality for 2d domains, but + is considerably less mature. + + The function should be used in conjunction with _determine_mesh_size(), + called with mode='distance'. The ultimate goal is to set the mesh size + for geometrical points in Gmsh. To that end, this function inserts + additional points on the fracture boundaries. The mesh size is then + determined as the distance between all points in the fracture + description. + + Parameters: + h_ideal: Ideal mesh size. Will be added to all points that are + sufficiently far away from other points. + h_min: Minimal mesh size; we will make no attempts to enforce + even smaller mesh sizes upon Gmsh. + + """ + + + if self.auxiliary_points_added: + print('Auxiliary points already added. Returning.') + else: + self.auxiliary_points_added = True + + self.h_ideal = h_ideal + self.h_min = h_min + + def dist_p(a, b): + a = a.reshape((-1, 1)) + b = b.reshape((-1, 1)) + d = b - a + return np.sqrt(np.sum(d**2)) + + intersecting_fracs = [] + # Loop over all fractures + for f in self._fractures: + + # First compare segments with intersections to this fracture + nfp = f.p.shape[1] + + # Keep track of which other fractures are intersecting - will be + # needed later on + isect_f = [] + for i in self.intersections_of_fracture(f): + if f is i.first: + isect_f.append(i.second.index) + else: + isect_f.append(i.first.index) + + # Assuming the fracture is convex, the closest point for + dist, cp = cg.dist_points_segments(i.coord, f.p, + np.roll(f.p, 1, axis=1)) + # Insert a (candidate) point only at the segment closest to the + # intersection point. If the intersection line runs parallel + # with a segment, this may be insufficient, but we will deal + # with this if necessary. + closest_segment = np.argmin(dist, axis=1) + min_dist = dist[np.arange(i.coord.shape[1]), closest_segment] + + for pi, (si, di) in enumerate(zip(closest_segment, min_dist)): + if di < h_ideal: + d_1 = dist_p(cp[pi, si], f.p[:, si]) + d_2 = dist_p(cp[pi, si], f.p[:, (si+1)%nfp]) + # If the intersection point is not very close to any of + # the points on the segment, we split the segment. 
+                    if d_1 > h_min and d_2 > h_min:
+                        f.p = np.insert(f.p, (si+1)%nfp, cp[pi, si], axis=1)
+
+            # Take note of the intersecting fractures
+            intersecting_fracs.append(isect_f)
+
+        for fi, f in enumerate(self._fractures):
+            nfp = f.p.shape[1]
+            for of in self._fractures:
+                # Can do some box arguments here to avoid computations
+
+                # First, check if we are intersecting, this is covered already
+                if of.index in intersecting_fracs[fi]:
+                    continue
+
+                f_start = f.p
+                f_end = np.roll(f_start, 1, axis=1)
+                # Then, compare distance between segments
+                # Loop over fracture segments of this fracture
+                for si, fs in enumerate(f.segments()):
+                    of_start = of.p
+                    of_end = np.roll(of_start, 1, axis=1)
+                    # Compute distance to all segments of the other fracture,
+                    # and find the closest segment on the other fracture
+                    fs = f_start[:, si].squeeze()#.reshape((-1, 1))
+                    fe = f_end[:, si].squeeze()#.reshape((-1, 1))
+                    d, cp_f, _ = cg.dist_segment_segment_set(fs, fe, of_start,
+                                                             of_end)
+                    mi = np.argmin(d)
+                    # If the distance is smaller than the ideal length, but the
+                    # closest point is not too close to the segment endpoints,
+                    # we add a new point
+                    if d[mi] < h_ideal:
+                        d_1 = dist_p(cp_f[:, mi], f.p[:, si])
+                        d_2 = dist_p(cp_f[:, mi], f.p[:, (si+1)%nfp])
+                        if d_1 > h_min and d_2 > h_min:
+                            f.p = np.insert(f.p, (si+1)%nfp, cp_f[:, mi], axis=1)
+
+
+    def distance_point_segment(self):
         pass
 
-    def distance_segment_segment():
+    def distance_segment_segment(self):
         # What happens if two segments are close?
         pass
 
@@ -2198,12 +2146,13 @@ def to_gmsh(self, file_name, in_3d=True, **kwargs):
         self.zero_d_pt = intersection_points
 
         if 'mesh_size' in kwargs.keys():
+            # Legacy option, this should be removed.
+            print('Using old version of mesh size determination')
             mesh_size, mesh_size_bound = \
                 determine_mesh_size(self.decomposition['points'],
                                     **kwargs['mesh_size'])
         else:
-            mesh_size = None
-            mesh_size_bound = None
+            mesh_size, mesh_size_bound = self._determine_mesh_size()
 
         # The tolerance applied in gmsh should be consistent with the tolerance
         # used in the splitting of the fracture network. The documentation of
diff --git a/src/porepy/fracs/importer.py b/src/porepy/fracs/importer.py
index 25f076b8e0..e9b56035e0 100644
--- a/src/porepy/fracs/importer.py
+++ b/src/porepy/fracs/importer.py
@@ -41,7 +41,7 @@ def from_csv(f_name, mesh_kwargs, domain=None, pause=False,\
     # Define the domain as bounding-box if not defined
     if domain is None:
         overlap = kwargs.get('domain_overlap', 0)
-        domain = _bounding_box(pts, overlap)
+        domain = bounding_box(pts, overlap)
 
     if return_domain:
         return meshing.simplex_grid(f_set, domain, **mesh_kwargs), domain
@@ -472,7 +472,7 @@ def _dfn_grid_1d(grid_2d, nodes_id):
 
 #------------------------------------------------------------------------------#
 
-def _bounding_box(pts, overlap=0):
+def bounding_box(pts, overlap=0):
     """ Obtain a bounding box for a point cloud.
 
     Parameters:
diff --git a/src/porepy/fracs/meshing.py b/src/porepy/fracs/meshing.py
index e3f4879f46..ece4254279 100644
--- a/src/porepy/fracs/meshing.py
+++ b/src/porepy/fracs/meshing.py
@@ -9,6 +9,7 @@
 import numpy as np
 import scipy.sparse as sps
 import warnings
+import time
 
 from porepy.fracs import structured, simplex, split_grid
 from porepy.fracs.fractures import Intersection
@@ -19,7 +20,7 @@
 from porepy.utils import setmembership, mcolon
 
 
-def simplex_grid(fracs, domain, **kwargs):
+def simplex_grid(fracs=None, domain=None, network=None, verbose=0, **kwargs):
     """
     Main function for grid generation. Creates a fractured simiplex grid in 2
     or 3 dimensions.
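For orientation, here is a minimal usage sketch (not part of the patch; the fracture coordinates and the mesh sizes are made-up values) of how the new h_ideal/h_min keywords are intended to reach insert_auxiliary_points() and the new 'distance' mesh-size mode. It assumes the keyword forwarding from meshing.simplex_grid to simplex.tetrahedral_grid introduced further below:

import numpy as np
from porepy.fracs import meshing

# A single planar fracture, given as a 3 x n array of vertex coordinates.
f_1 = np.array([[0, 1, 1, 0],
                [0, 0, 1, 1],
                [0.5, 0.5, 0.5, 0.5]])
domain = {'xmin': -1, 'xmax': 2, 'ymin': -1, 'ymax': 2,
          'zmin': -1, 'zmax': 2}

# h_ideal and h_min are picked up from kwargs in tetrahedral_grid; auxiliary
# points are then inserted on the fracture boundaries, and the gmsh point
# sizes are set by the 'distance' mode of _determine_mesh_size().
gb = meshing.simplex_grid([f_1], domain, h_ideal=0.5, h_min=0.1)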
@@ -64,15 +65,23 @@ def simplex_grid(fracs, domain, **kwargs): gb = simplex_grid(fracs, domain) """ - if 'zmax' in domain: + if domain is None: + ndim = 3 + elif 'zmax' in domain: ndim = 3 elif 'ymax' in domain: ndim = 2 else: raise ValueError('simplex_grid only supported for 2 or 3 dimensions') + if verbose > 0: + print('Construct mesh') + tm_msh = time.time() + tm_tot = time.time() # Call relevant method, depending on grid dimensions. if ndim == 2: + assert fracs is not None, '2d requires definition of fractures' + assert domain is not None, '2d requires definition of domain' # Convert the fracture to a fracture dictionary. if len(fracs) == 0: f_lines = np.zeros((2, 0)) @@ -83,18 +92,42 @@ def simplex_grid(fracs, domain, **kwargs): frac_dic = {'points': f_pts, 'edges': f_lines} grids = simplex.triangle_grid(frac_dic, domain, **kwargs) elif ndim == 3: - grids = simplex.tetrahedral_grid(fracs, domain, **kwargs) + grids = simplex.tetrahedral_grid(fracs, domain, network, **kwargs) else: raise ValueError('Only support for 2 and 3 dimensions') + + if verbose > 0: + print('Done. Elapsed time ' + str(time.time() - tm_msh)) + # Tag tip faces tag_faces(grids) # Assemble grids in a bucket + + if verbose > 0: + print('Assemble in bucket') + tm_bucket = time.time() gb = assemble_in_bucket(grids, **kwargs) + if verbose > 0: + print('Done. Elapsed time ' + str(time.time() - tm_bucket)) + print('Compute geometry') + tm_geom = time.time() + gb.compute_geometry() # Split the grids. + if verbose > 0: + print('Done. Elapsed time ' + str(time.time() - tm_geom)) + print('Split fractures') + tm_split = time.time() split_grid.split_fractures(gb, **kwargs) + if verbose > 0: + print('Done. Elapsed time ' + str(time.time() - tm_split)) gb.assign_node_ordering() + + if verbose > 0: + print('Mesh construction completed. Total time ' + + str(time.time() - tm_tot)) + return gb #------------------------------------------------------------------------------# @@ -406,7 +439,7 @@ def assemble_in_bucket(grids, **kwargs): cell_2_face, cell = obtain_interdim_mappings( lg, fn, n_per_face, **kwargs) face_cells = sps.csc_matrix( - (np.array([True] * cell.size), (cell, cell_2_face)), + (np.ones(cell.size, dtype=bool), (cell, cell_2_face)), (lg.num_cells, hg.num_faces)) # This if may be unnecessary, but better safe than sorry. @@ -451,11 +484,11 @@ def obtain_interdim_mappings(lg, fn, n_per_face, if not (np.all(is_mem) or np.all(~is_mem)): if ensure_matching_face_cell: raise ValueError( - '''Either all cells should have a corresponding face in a higher + '''Either all cells should have a corresponding face in a higher dim grid or no cells should have a corresponding face in a higher dim grid. This likely is related to gmsh behavior. ''') else: warnings.warn('''Found inconsistency between cells and higher - dimensional faces. Continuing, faces crossed''') + dimensional faces. 
Continuing, fingers crossed''')
 
     low_dim_cell = np.where(is_mem)[0]
     return cell_2_face, low_dim_cell
diff --git a/src/porepy/fracs/simplex.py b/src/porepy/fracs/simplex.py
index 6a88671780..018b77aac0 100644
--- a/src/porepy/fracs/simplex.py
+++ b/src/porepy/fracs/simplex.py
@@ -7,12 +7,12 @@
 import numpy as np
 from meshio import gmsh_io
 
-import porepy
 from porepy.grids import constants
 from porepy.grids.gmsh import gmsh_interface, mesh_2_grid
 from porepy.fracs import fractures, utils
 import porepy.utils.comp_geom as cg
 from porepy.utils.setmembership import unique_columns_tol
+import porepy
 
 
 def tetrahedral_grid(fracs=None, box=None, network=None, **kwargs):
@@ -74,21 +74,28 @@ def tetrahedral_grid(fracs=None, box=None, network=None, **kwargs):
 
         frac_list = []
         for f in fracs:
-            if isinstance(f, fractures.Fracture) \
-               or isinstance(f, porepy.Fracture):
+            # Input can be either numpy arrays or predefined fractures. As a
+            # guide, we treat f as a fracture if it has an attribute p which is
+            # a numpy array.
+            # If f turns out not to be a fracture, strange errors will result
+            # when the program later tries to access non-existing methods.
+            # The correct treatment here would be several
+            # isinstance-statements, but that became less than elegant. To
+            # revisit.
+            if hasattr(f, 'p') and isinstance(f.p, np.ndarray):
+                frac_list.append(f)
             else:
-                # Convert the fractures from numpy representation to our 3D
-                # fracture data structure..
+                # Convert the fractures from numpy representation to our 3D
+                # fracture data structure.
                 frac_list.append(fractures.Fracture(f))
 
         # Combine the fractures into a network
         network = fractures.FractureNetwork(frac_list, verbose=verbose,
                                             tol=kwargs.get('tol', 1e-4))
 
-    # Impose domain boundary.
-    if box is not None:
-        network.impose_external_boundary(box)
+    # Impose external boundary. If box is None, a domain size somewhat larger
+    # than the network will be assigned.
+    network.impose_external_boundary(box)
 
     # Find intersections and split them, preparing the way for dumping the
     # network to gmsh
@@ -97,8 +104,42 @@
     else:
         print('Use existing intersections')
 
-    start_time = time.time()
-    pts, cells, cell_info, phys_names = _run_gmsh(file_name, network, **kwargs)
+    # If fields h_ideal and h_min are provided, try to estimate mesh sizes.
+    h_ideal = kwargs.get('h_ideal', None)
+    h_min = kwargs.get('h_min', None)
+    if h_ideal is not None and h_min is not None:
+        network.insert_auxiliary_points(h_ideal, h_min)
+        # In this case we need to recompute intersection decomposition anyhow.
+        network.split_intersections()
+
+    if not hasattr(network, 'decomposition'):
+        network.split_intersections()
+    else:
+        print('Use existing decomposition')
+
+    in_file = file_name + '.geo'
+    out_file = file_name + '.msh'
+
+    network.to_gmsh(in_file, **kwargs)
+
+    gmsh_opts = kwargs.get('gmsh_opts', {})
+    gmsh_verbose = kwargs.get('gmsh_verbose', verbose)
+    gmsh_opts['-v'] = gmsh_verbose
+    gmsh_status = gmsh_interface.run_gmsh(in_file, out_file, dims=3,
+                                          **gmsh_opts)
+
+    if verbose > 0:
+        start_time = time.time()
+        if gmsh_status == 0:
+            print('Gmsh processed file successfully')
+        else:
+            print('Gmsh failed with status ' + str(gmsh_status))
+
+    pts, cells, _, cell_info, phys_names = gmsh_io.read(out_file)
+
+    # Invert phys_names dictionary to map from physical tags to corresponding
+    # physical names
+    phys_names = {v: k for k, v in phys_names.items()}
 
     # Call upon helper functions to create grids in various dimensions.
# The constructors require somewhat different information, reflecting the diff --git a/src/porepy/fracs/split_grid.py b/src/porepy/fracs/split_grid.py index 8d10a590ea..7fdad2d9bc 100644 --- a/src/porepy/fracs/split_grid.py +++ b/src/porepy/fracs/split_grid.py @@ -7,7 +7,7 @@ import warnings from porepy.utils.half_space import half_space_int -from porepy.utils import sparse_mat +from porepy.utils import sparse_mat, setmembership from porepy.utils.graph import Graph from porepy.utils.mcolon import mcolon from porepy.grids.grid import Grid, FaceTag @@ -172,7 +172,7 @@ def split_nodes(gh, gl, gh_2_gl_nodes, offset=0): node_count = duplicate_nodes(gh, nodes, offset) # We remove the old nodes. - #gh = remove_nodes(gh, nodes) + # gh = remove_nodes(gh, nodes) # Update the number of nodes gh.num_nodes = gh.num_nodes + node_count # - nodes.size @@ -224,9 +224,9 @@ def duplicate_faces(gh, face_cells): node_start = gh.face_nodes.indptr[frac_id] node_end = gh.face_nodes.indptr[frac_id + 1] - #frac_nodes = gh.face_nodes[:, frac_id] + # frac_nodes = gh.face_nodes[:, frac_id] - #gh.face_nodes = sps.hstack((gh.face_nodes, frac_nodes)) + # gh.face_nodes = sps.hstack((gh.face_nodes, frac_nodes)) # We also copy the attributes of the original faces. gh.num_faces += frac_id.size gh.face_normals = np.hstack( @@ -253,12 +253,29 @@ def update_face_cells(face_cells, face_id, i): # The duplications should also be associated with grid i. # For the other lower-dim grids we just add zeros to conserve # the right matrix dimensions. + if face_id.size == 0: + return face_cells + for j, f_c in enumerate(face_cells): + assert f_c.getformat() == 'csc' if j == i: - f_c = sps.hstack((f_c, f_c[:, face_id])) + f_c_sliced = sparse_mat.slice_mat(f_c, face_id) + new_indptr = f_c_sliced.indptr + f_c.indptr[-1] + new_ind = f_c_sliced.indices + new_data = f_c_sliced.data + + f_c.indptr = np.append(f_c.indptr, new_indptr[1:]) + f_c.indices = np.append(f_c.indices, new_ind) + f_c.data = np.append(f_c.data, new_data) + f_c._shape = (f_c._shape[0], f_c._shape[1] + face_id.size) + #f_c = sps.hstack((f_c, f_c[:, face_id])) else: - empty = sps.csc_matrix((f_c.shape[0], face_id.size)) - f_c = sps.hstack((f_c, empty)) + new_indptr = f_c.indptr[-1] * \ + np.ones(face_id.size, dtype=f_c.indptr.dtype) + f_c.indptr = np.append(f_c.indptr, new_indptr) + f_c._shape = (f_c._shape[0], f_c._shape[1] + face_id.size) +# empty = sps.csc_matrix((f_c.shape[0], face_id.size)) +# f_c = sps.hstack((f_c, empty)) face_cells[j] = f_c return face_cells @@ -283,6 +300,7 @@ def update_cell_connectivity(g, face_id, normal, x0): """ # We find the cells attached to the tagged faces. 
+ g.cell_faces = g.cell_faces.tocsr() cell_frac = g.cell_faces[face_id, :] cell_face_id = np.argwhere(cell_frac) @@ -311,7 +329,7 @@ def update_cell_connectivity(g, face_id, normal, x0): row = cell_face_id[left_cell, 0] data = np.ravel(g.cell_faces[np.ravel(face_id[row]), col]) assert data.size == face_id.size - cell_frac_left = sps.csc_matrix((data, (row, col)), + cell_frac_left = sps.csr_matrix((data, (row, col)), (face_id.size, g.cell_faces.shape[1])) # We now update the cell_faces map of the faces on the right side of @@ -321,15 +339,21 @@ def update_cell_connectivity(g, face_id, normal, x0): col = cell_face_id[~left_cell, 1] row = cell_face_id[~left_cell, 0] data = np.ravel(g.cell_faces[np.ravel(face_id[row]), col]) - cell_frac_right = sps.csc_matrix((data, (row, col)), + cell_frac_right = sps.csr_matrix((data, (row, col)), (face_id.size, g.cell_faces.shape[1])) - g.cell_faces[face_id, :] = cell_frac_right + + assert g.cell_faces.getformat() == 'csr' + + sparse_mat.merge_matrices(g.cell_faces, cell_frac_right, face_id) + # g.cell_faces[face_id, :] = cell_frac_right # And then we add the new left-faces to the cell_face map. We do not # change the sign of the matrix since we did not flip the normals. # This means that the normals of right and left cells point in the same # direction, but their cell_faces values have oposite signs. - g.cell_faces = sps.vstack((g.cell_faces, cell_frac_left), format='csc') + sparse_mat.stack_mat(g.cell_faces, cell_frac_left) + g.cell_faces = g.cell_faces.tocsc() + #g.cell_faces = sps.vstack((g.cell_faces, cell_frac_left), format='csc') return 0 @@ -387,16 +411,17 @@ def duplicate_nodes(g, nodes, offset): _, iv = sort_sub_list(g.face_nodes.indices, g.face_nodes.indptr) g.face_nodes = g.face_nodes.tocsr() - # Iterate over each internal node and split it according to the graph. # For each cell attached to the node, we check wich color the cell has. # All cells with the same color is then attached to a new copy of the # node. + cell_nodes = g.cell_nodes().tocsr() for node in nodes: # t_node takes into account the added nodes. t_node = node + node_count # Find cells connected to node - cells = sparse_mat.slice_indices(g.cell_nodes().tocsr(), t_node) + + cells = sparse_mat.slice_indices(cell_nodes, node) # cell_nodes = g.cell_nodes().tocsr() # ind_ptr = cell_nodes.indptr # cells = cell_nodes.indices[ @@ -414,21 +439,27 @@ def duplicate_nodes(g, nodes, offset): new_nodes = np.repeat(g.nodes[:, t_node, None], colors.size, axis=1) faces = np.array([], dtype=int) face_pos = np.array([g.face_nodes.indptr[t_node]]) + assert g.cell_faces.getformat() == 'csc' + assert g.face_nodes.getformat() == 'csr' + faces_of_node_t = sparse_mat.slice_indices(g.face_nodes, t_node) for j in range(colors.size): # For each color we wish to add one node. First we find all faces that - # are connected to the fracture node, and have the correct cell + # are connected to the fracture node and have the correct cell # color - local_faces = (g.cell_faces[:, cells[ix == j]]).nonzero()[0] - local_faces = np.unique(local_faces) - con_to_node = np.ravel(g.face_nodes[t_node, local_faces].todense()) - faces = np.append(faces, local_faces[con_to_node]) - # These faces is then attached to new node number j. 
- face_pos = np.append(face_pos, face_pos[-1] + np.sum(con_to_node)) + colored_faces = sparse_mat.slice_indices( + g.cell_faces, cells[ix == j]) + colored_faces = np.unique(colored_faces) + is_colored = np.in1d( + faces_of_node_t, colored_faces, assume_unique=True) + + faces = np.append(faces, faces_of_node_t[is_colored]) + # These faces are then attached to new node number j. + face_pos = np.append(face_pos, face_pos[-1] + np.sum(is_colored)) # If an offset is given, we will change the position of the nodes. # We move the nodes a length of offset away from the fracture(s). if offset > 0 and colors.size > 1: new_nodes[:, j] -= avg_normal(g, - local_faces[con_to_node]) * offset + faces_of_node_t[is_colored]) * offset # The total number of faces should not have changed, only their # connection to nodes. We can therefore just update the indices and # indptr map. @@ -454,7 +485,7 @@ def duplicate_nodes(g, nodes, offset): def sort_sub_list(indices, indptr): ix = np.zeros(indices.size, dtype=int) for i in range(indptr.size - 1): - sub_ind = mcolon(indptr[i], indptr[i + 1]) + sub_ind = slice(indptr[i], indptr[i + 1]) loc_ix = np.argsort(indices[sub_ind]) ix[sub_ind] = loc_ix + indptr[i] indices = indices[ix] @@ -483,13 +514,16 @@ def find_cell_color(g, cells): """ c = np.sort(cells) # Local cell-face and face-node maps. - cf_sub, _ = __extract_submatrix(g.cell_faces, c) - child_cell_ind = np.array([-1] * g.num_cells, dtype=np.int) - child_cell_ind[c] = np.arange(cf_sub.shape[1]) + assert g.cell_faces.getformat() == 'csc' + cell_faces = sparse_mat.slice_mat(g.cell_faces, c) + child_cell_ind = -np.ones(g.num_cells, dtype=np.int) + child_cell_ind[c] = np.arange(cell_faces.shape[1]) # Create a copy of the cell-face relation, so that we can modify it at # will - cell_faces = cf_sub.copy() + # com Runar: I don't think this is neccessary as slice_mat creates a copy +# cell_faces = cf_sub.copy() + # Direction of normal vector does not matter here, only 0s and 1s cell_faces.data = np.abs(cell_faces.data) @@ -536,17 +570,3 @@ def remove_nodes(g, rem): g.face_nodes = g.face_nodes[rows_to_keep, :] g.nodes = g.nodes[:, rows_to_keep] return g - - -def __extract_submatrix(mat, ind): - """ From a matrix, extract the column specified by ind. All zero columns - are stripped from the sub-matrix. Mappings from global to local row numbers - are also returned. 
- """ - sub_mat = mat[:, ind] - cols = sub_mat.indptr - rows = sub_mat.indices - data = sub_mat.data - unique_rows, rows_sub = np.unique(sub_mat.indices, - return_inverse=True) - return sps.csc_matrix((data, rows_sub, cols)), unique_rows diff --git a/src/porepy/grids/gmsh/gmsh_interface.py b/src/porepy/grids/gmsh/gmsh_interface.py index b77573aef8..6eb3d00b63 100644 --- a/src/porepy/grids/gmsh/gmsh_interface.py +++ b/src/porepy/grids/gmsh/gmsh_interface.py @@ -41,6 +41,9 @@ def __init__(self, pts, lines, polygons=None, domain=None, nd=None, self.domain = domain + self.mesh_size = mesh_size + self.mesh_size_bound = mesh_size_bound + # Points that should be decleared physical (intersections between 3 # fractures) self.intersection_points = intersection_points @@ -153,10 +156,10 @@ def __write_boundary_3d(self): zmax = str(self.domain['zmax']) # + ', ' # Add mesh size on boundary points if these are provided - if self.lchar_bound is not None: + if self.mesh_size_bound is not None: zmin += ', ' zmax += ', ' - h = str(self.lchar_bound) + '};' + h = str(self.mesh_size_bound) + '};' else: h = '};' ls = '\n' @@ -208,8 +211,8 @@ def __write_points(self): s += 'p' + str(i) + ' = newp; Point(p' + str(i) + ') = ' s += '{' + str(p[0, i]) + ', ' + str(p[1, i]) + ', '\ + str(p[2, i]) - if self.lchar is not None: - s += ', ' + str(self.lchar[i]) + ' };\n' + if self.mesh_size is not None: + s += ', ' + str(self.mesh_size[i]) + ' };\n' else: s += '};\n' diff --git a/src/porepy/grids/grid.py b/src/porepy/grids/grid.py index e55c5f00e7..c0d9a47b4b 100644 --- a/src/porepy/grids/grid.py +++ b/src/porepy/grids/grid.py @@ -700,6 +700,32 @@ def bounding_box(self): """ return np.amin(self.nodes, axis=1), np.amax(self.nodes, axis=1) + def closest_cell(self, p): + """ For a set of points, find closest cell by cell center. + + Parameters: + p (np.ndarray, 3xn): Point coordinates. If p.shape[0] < 3, + additional points will be treated as zeros. + + Returns: + np.ndarray of ints: For each point, index of the cell with center + closest to the point. + """ + dim_p = p.shape[0] + if p.shape[0] < 3: + z = np.zeros((3 - p.shape[0], p.shape[1])) + p = np.vstack((p, z)) + + def min_dist(pts): + c = self.cell_centers + d = np.sum(np.power(c - pts, 2), axis=0) + return np.argmin(d) + + ci = np.empty(p.shape[1], dtype=np.int) + for i in range(p.shape[1]): + ci[i] = min_dist(p[:, i].reshape((3, -1))) + return ci + def add_face_tag(self, f, tag): self.face_tags[f] = np.bitwise_or(self.face_tags[f], tag) diff --git a/src/porepy/numerics/fv/biot.py b/src/porepy/numerics/fv/biot.py index 31a0237b70..edaa3d83d5 100644 --- a/src/porepy/numerics/fv/biot.py +++ b/src/porepy/numerics/fv/biot.py @@ -496,7 +496,7 @@ def slv(b): x = la.spsolve(A, b) return x elif solver == 'factorized': - slv = la.factorized(A.to_csc()) + slv = la.factorized(A.tocsc()) else: raise ValueError('Unknown solver ' + solver) @@ -587,9 +587,10 @@ def compute_stress(self, g, u, data): all stress values on the first face, then the second etc. 
""" + param = data['param'] stress_discr = data['stress'] bound_stress = data['bound_stress'] - bound_val = data['bc_val_mech'] + bound_val = param.get_bc_val_mechanics() d = self.extractD(g, u, as_vector=True) stress = np.squeeze(stress_discr * d) + (bound_stress * bound_val) return stress diff --git a/src/porepy/params/data.py b/src/porepy/params/data.py index 41e0e6ce22..8d5999ea70 100644 --- a/src/porepy/params/data.py +++ b/src/porepy/params/data.py @@ -478,7 +478,7 @@ def get_bc_transport(self): else: return BoundaryCondition(self.g) - conductivity = property(get_conductivity) + bc_transport = property(get_bc_transport) def get_bc_mechanics(self): """ Stiffness matrix, defined as fourth order tensor @@ -488,7 +488,7 @@ def get_bc_mechanics(self): else: return BoundaryCondition(self.g) - stiffness = property(get_stiffness) + bc_mechanics = property(get_bc_mechanics) # Boundary value diff --git a/src/porepy/utils/comp_geom.py b/src/porepy/utils/comp_geom.py index 75e466741c..2e9d6d153a 100644 --- a/src/porepy/utils/comp_geom.py +++ b/src/porepy/utils/comp_geom.py @@ -384,7 +384,7 @@ def _split_edge(vertices, edges, edge_ind, new_pt, **kwargs): #------------------------------------------------------**kwargs------------------------# -def _add_point(vertices, pt, tol=1e-3, **kwargs): +def _add_point(vertices, pt, tol=1e-3, snap=True, **kwargs): """ Add a point to a point set, unless the point already exist in the set. @@ -414,8 +414,9 @@ def _add_point(vertices, pt, tol=1e-3, **kwargs): nd = vertices.shape[0] # Before comparing coordinates, snap both existing and new point to the # underlying grid - vertices = snap_to_grid(vertices, **kwargs) - pt = snap_to_grid(pt, **kwargs) + if snap: + vertices = snap_to_grid(vertices, **kwargs) + pt = snap_to_grid(pt, **kwargs) new_pt = np.empty((nd, 0)) ind = [] @@ -442,7 +443,8 @@ def _add_point(vertices, pt, tol=1e-3, **kwargs): #-----------------------------------------------------------------------------# -def remove_edge_crossings(vertices, edges, tol=1e-3, verbose=0, **kwargs): +def remove_edge_crossings(vertices, edges, tol=1e-3, verbose=0, snap=True, + **kwargs): """ Process a set of points and connections between them so that the result is an extended point set and new connections that do not intersect. @@ -491,8 +493,9 @@ def remove_edge_crossings(vertices, edges, tol=1e-3, verbose=0, **kwargs): # Add tolerance to kwargs, this is later passed to split_edges, and further # on. kwargs['tol'] = tol - - vertices = snap_to_grid(vertices, **kwargs) + kwargs['snap'] = snap + if snap: + vertices = snap_to_grid(vertices, **kwargs) # Field used for debugging of edge splits. To see the meaning of the values # of each split, look in the source code of split_edges. @@ -621,7 +624,8 @@ def __min_dist(p): if new_pt is None: logger.debug('No intersection found') else: - new_pt = snap_to_grid(new_pt, tol=tol) + if snap: + new_pt = snap_to_grid(new_pt, tol=tol) # The case of segment intersections need special treatment. if new_pt.shape[-1] == 1: logger.debug('Found intersection (%.5f, %.5f)', new_pt[0], @@ -785,8 +789,7 @@ def is_ccw_polyline(p1, p2, p3, tol=0, default=False): if np.abs(cross_product) <= tol: return default - - return cross_product > 0 + return cross_product > -tol #----------------------------------------------------------------------------- @@ -815,6 +818,14 @@ def is_inside_polygon(poly, p, tol=0, default=False): else: pt = p + # The test uses is_ccw_polyline, and tacitly assumes that the polygon + # vertexes is sorted in a ccw fashion. 
If this is not the case, flip the + # order of the nodes on a copy, and use this for the testing. + # Note that if the nodes are not cw nor ccw (e.g. they are crossing), the + # test cannot be trusted anyhow. + if not is_ccw_polygon(poly): + poly = poly.copy()[:, ::-1] + poly_size = poly.shape[1] inside = np.ones(pt.shape[1], dtype=np.bool) @@ -994,10 +1005,10 @@ def segments_intersect_3d(start_1, end_1, start_2, end_2, tol=1e-8): """ # Convert input to numpy if necessary - start_1 = np.asarray(start_1).astype(np.float) - end_1 = np.asarray(end_1).astype(np.float) - start_2 = np.asarray(start_2).astype(np.float) - end_2 = np.asarray(end_2).astype(np.float) + start_1 = np.asarray(start_1).astype(np.float).ravel() + end_1 = np.asarray(end_1).astype(np.float).ravel() + start_2 = np.asarray(start_2).astype(np.float).ravel() + end_2 = np.asarray(end_2).astype(np.float).ravel() # Short hand for component of start and end points, as well as vectors # along lines. @@ -1246,15 +1257,15 @@ def polygon_boundaries_intersect(poly_1, poly_2, tol=1e-8): #---------------------------------------------------------- -def polygon_segment_intersect(poly_1, poly_2, tol=1e-8): +def polygon_segment_intersect(poly_1, poly_2, tol=1e-8, include_bound_pt=True): """ Find intersections between polygons embeded in 3D. The intersections are defined as between the interior of the first polygon - and the boundary of the second. - - TODO: - 1) Also cover case where the one polygon ends in the plane of the other. + and the boundary of the second, although intersections on the boundary of + both polygons can also be picked up sometimes. If you need to distinguish + between the two, the safer option is to also call + polygon_boundary_intersect(), and compare the results. Parameters: poly_1 (np.ndarray, 3xn1): Vertexes of polygon, assumed ordered as cw or @@ -1263,6 +1274,9 @@ def polygon_segment_intersect(poly_1, poly_2, tol=1e-8): as cw or ccw. tol (double, optional): Tolerance for when two points are equal. Defaults to 1e-8. + include_bound_pt (boolean, optional): Include cases where a segment is + in the plane of the first ploygon, and the segment crosses the + polygon boundary. Defaults to True. Returns: np.ndarray, size 3 x num_isect, coordinates of intersection points; or @@ -1284,13 +1298,13 @@ def polygon_segment_intersect(poly_1, poly_2, tol=1e-8): # Obtain the rotation matrix that projects p1 to the xy-plane rot_p_1 = project_plane_matrix(poly_1) irot = rot_p_1.transpose() - poly_1_xy = rot_p_1.dot(poly_1) + poly_1_rot = rot_p_1.dot(poly_1) # Sanity check: The points should lay on a plane assert np.amax(np.abs(poly_1_xy[2]))/np.amax(np.abs(poly_1_xy[:2])) < tol # Drop the z-coordinate - poly_1_xy = poly_1_xy[:2] + poly_1_xy = poly_1_rot[:2] # Make sure the xy-polygon is ccw. if not is_ccw_polygon(poly_1_xy): @@ -1301,9 +1315,9 @@ def polygon_segment_intersect(poly_1, poly_2, tol=1e-8): # If the rotation of whole second point cloud lies on the same side of z=0, # there are no intersections. 
- if poly_2_rot[2].min() > 0: + if poly_2_rot[2].min() > tol: return None - elif poly_2_rot[2].max() < 0: + elif poly_2_rot[2].max() < -tol: return None # Check if the second plane is parallel to the first (same xy-plane) @@ -1347,7 +1361,7 @@ def polygon_segment_intersect(poly_1, poly_2, tol=1e-8): pt_2 = poly_2_rot[:, ind[i+1]] # Check if segment crosses z=0 in the rotated coordinates - if max(pt_1[2], pt_2[2]) < 0 or min(pt_1[2], pt_2[2]) > 0: + if max(pt_1[2], pt_2[2]) < -tol or min(pt_1[2], pt_2[2]) > tol: continue dx = pt_2[0] - pt_1[0] @@ -1362,7 +1376,8 @@ def polygon_segment_intersect(poly_1, poly_2, tol=1e-8): # Sanity check. We have ruled out segments not crossing the # origin above. - assert t >= 0 and t <= 1 + if t < -tol or t > 1+tol: + continue # x and y-coordinate for z=0 x0 = pt_1[0] + dx * t @@ -1370,17 +1385,57 @@ def polygon_segment_intersect(poly_1, poly_2, tol=1e-8): # Representation as point p_00 = np.array([x0, y0]).reshape((-1, 1)) - # Check if the first polygon encloses the point. If the - # intersection is on the border, this will not be detected. + # Check if the first polygon encloses the point. When applied + # to fracture intersections of T-type (segment embedded in the + # plane of another fracture), it turned out to be useful to be + # somewhat generous with the definition of the intersection. + # Therefore, allow for intersections that are slightly outside + # the polygon, and use the projection onto the polygon. + dist, cp, ins = dist_points_polygon(_to3D(p_00), + _to3D(poly_1_xy)) + if (dist[0] < tol and include_bound_pt) or dist[0] < 1e-12: + isect = np.hstack((isect, irot.dot(cp) + center_1)) + + elif np.abs(pt_1[2]) < tol and np.abs(pt_2[2]) < tol: + # The segment lies completely within the polygon plane. + both_pts = np.vstack((pt_1, pt_2)).T + # Find points within tho polygon itself + inside = is_inside_polygon(poly_1_xy, both_pts[:2],tol=tol) + + if inside.all(): + # Both points are inside, add and go on + isect = np.hstack((isect, irot.dot(both_pts) + center_1)) + else: + # A single point is inside. Need to find the intersection between this line segment and the polygon + if inside.any(): + isect_loc = both_pts[:2, inside].reshape((2, -1)) + p1 = both_pts[:, inside] + p2 = both_pts[:, np.logical_not(inside)] + else: + isect_loc = np.empty((2, 0)) + p1 = both_pts[:, 0] + p2 = both_pts[:, 1] + + # If a single internal point is found + if isect_loc.shape[1] == 1 or include_bound_pt: + poly_1_start = poly_1_rot + poly_1_end = np.roll(poly_1_rot, 1, axis=1) + for j in range(poly_1.shape[1]): + ip = segments_intersect_3d(p1, p2, + poly_1_start[:, j], + poly_1_end[:, j]) + if ip is not None: + isect_loc = np.hstack((isect_loc, ip[:2])) + + isect = np.hstack((isect, irot.dot(_to3D(isect_loc)) + center_1)) - if is_inside_polygon(poly_1_xy, p_00, tol=tol): - # Back to physical coordinates by 1) expand to 3D, 2) - # inverse rotation, 3) translate to original coordinate. 
- isect = np.hstack((isect, irot.dot(_to3D(p_00)) + - center_1)) if isect.shape[1] == 0: isect = None + # For points lying in the plane of poly_1, the same points may be found + # several times + if isect is not None: + isect, _, _ = setmembership.unique_columns_tol(isect, tol=tol) return isect #------------------------------------------------------------------------------# @@ -1745,8 +1800,8 @@ def dist_segment_set(start, end): for j in range(i+1, ns): dl, cpi, cpj = dist_two_segments(start[:, i], end[:, i], start[:, j], end[:, j]) - d[i, j] = d - d[j, i] = d + d[i, j] = dl + d[j, i] = dl cp[i, j, :] = cpi cp[j, i, :] = cpj @@ -1754,6 +1809,34 @@ def dist_segment_set(start, end): #------------------------------------------------------------------------------# +def dist_segment_segment_set(start, end, start_set, end_set): + """ Compute distance and closest points between a segment and a set of + segments. + + Parameters: + + """ + start = np.squeeze(start) + end = np.squeeze(end) + + nd = start.shape[0] + ns = start_set.shape[1] + + d = np.zeros( ns) + cp_set = np.zeros(( nd, ns)) + cp = np.zeros((nd, ns)) + + for i in range(ns): + dl, cpi, cpj = dist_two_segments(start, end, start_set[:, i], + end_set[:, i]) + d[i] = dl + cp[:, i] = cpi + cp_set[:, i] = cpj + + return d, cp, cp_set + +#------------------------------------------------------------------------------# + def dist_two_segments(s1_start, s1_end, s2_start, s2_end): """ Compute the distance between two line segments. @@ -1799,7 +1882,7 @@ def dist_two_segments(s1_start, s1_end, s2_start, s2_end): dot_2_starts = d2.dot(d_starts) discr = dot_1_1 * dot_2_2 - dot_1_2 ** 2 # Sanity check - assert discr >= 0 + assert discr >= -SMALL_TOLERANCE * dot_1_1 * dot_2_2 sc = sN = sD = discr tc = tN = tD = discr @@ -1958,11 +2041,13 @@ def dist_point_pointset(p, pset, exponent=2): #----------------------------------------------------------------------------# -def dist_pointset(p): +def dist_pointset(p, max_diag=False): """ Compute mutual distance between all points in a point set. Parameters: p (np.ndarray, 3xn): Points + max_diag (boolean, defaults to True): If True, the diagonal values will + are set to a large value, rather than 0. Returns: np.array (nxn): Distance between points. @@ -1977,6 +2062,10 @@ def dist_pointset(p): for i in range(n): d[i] = dist_point_pointset(p[:, i], p) + if max_diag: + for i in range(n): + d[i, i] = 2 * np.max(d[i]) + return d #----------------------------------------------------------------------------# @@ -1997,6 +2086,8 @@ def dist_points_polygon(p, poly, tol=1e-5): np.array (n_pts): Distance from points to polygon np.array (nd x n_pts): For each point, the closest point on the polygon. + np.array (n_pts, bool): True if the point is found in the interior, + false if on a bounding segment. """ @@ -2053,7 +2144,7 @@ def dist_points_polygon(p, poly, tol=1e-5): cp[:, in_poly] = center + irot.dot(cp_inpoly) if np.all(in_poly): - return d, cp + return d, cp, in_poly # Next, points that are outside the extruded polygons. 
These will have
    # their closest point among one of the edges
@@ -2070,7 +2161,7 @@
            d[pi] = d_outside[i, mi]
            cp[:, pi] = p_outside[i, mi, :]
 
-    return d, cp
+    return d, cp, in_poly
 
 #----------------------------------------------------------------------------#
 
@@ -2181,8 +2272,8 @@ def dist_segments_polygon(start, end, poly, tol=1e-5):
         end = orig_end
 
     # Distance from endpoints to
-    d_start_poly, cp_s_p = dist_points_polygon(start, poly)
-    d_end_poly, cp_e_p = dist_points_polygon(end, poly)
+    d_start_poly, cp_s_p, s_in_poly = dist_points_polygon(start, poly)
+    d_end_poly, cp_e_p, e_in_poly = dist_points_polygon(end, poly)
 
     # Loop over all segments that did not cross the polygon. The minimum is
     # found either by the endpoints, or as between two segments.
diff --git a/src/porepy/utils/sparse_mat.py b/src/porepy/utils/sparse_mat.py
index 64835aef7e..a7a4e6e6e1 100644
--- a/src/porepy/utils/sparse_mat.py
+++ b/src/porepy/utils/sparse_mat.py
@@ -1,12 +1,15 @@
 """ module for operations on sparse matrices
 """
+import numpy as np
+import scipy.sparse as sps
+
 from porepy.utils.mcolon import mcolon
 
 
 def zero_columns(A, cols):
     '''
-    Function to zero out columns in matrix A. Note that this function does not 
+    Function to zero out columns in matrix A. Note that this function does not
     change the sparcity structure of the matrix, it only changes the column
     values to 0
 
@@ -29,6 +32,122 @@ def zero_columns(A, cols):
     A.data[col_indptr] = 0
 
 
+def merge_matrices(A, B, lines):
+    '''
+    Replace rows/columns of matrix A with rows/columns of matrix B.
+    If A and B are csc matrices this function is equivalent to
+    A[:, lines] = B
+    If A and B are csr matrices this function is equivalent to
+    A[lines, :] = B
+
+    Parameters
+    ----------
+    A (scipy.sparse.spmatrix): A sparse matrix
+    B (scipy.sparse.spmatrix): A sparse matrix
+    lines (ndarray): Lines of A to be replaced by B.
+
+    Returns
+    -------
+    None
+
+
+    '''
+    if A.getformat() != 'csc' and A.getformat() != 'csr':
+        raise ValueError('Need a csc or csr matrix')
+    elif A.getformat() != B.getformat():
+        raise ValueError('A and B must be of the same matrix type')
+    if A.getformat() == 'csc':
+        if A.shape[0] != B.shape[0]:
+            raise ValueError('A.shape[0] must equal B.shape[0]')
+    if A.getformat() == 'csr':
+        if A.shape[1] != B.shape[1]:
+            raise ValueError('A.shape[1] must equal B.shape[1]')
+
+    if B.getformat() == 'csc':
+        if lines.size != B.shape[1]:
+            raise ValueError('B.shape[1] must equal size of lines')
+    if B.getformat() == 'csr':
+        if lines.size != B.shape[0]:
+            raise ValueError('B.shape[0] must equal size of lines')
+
+    indptr = A.indptr
+    indices = A.indices
+    data = A.data
+
+    ind_ix = mcolon(indptr[lines], indptr[lines + 1])
+
+    # First we remove the old data
+    num_rem = np.zeros(indptr.size, dtype=np.int32)
+    num_rem[lines + 1] = indptr[lines + 1] - indptr[lines]
+    num_rem = np.cumsum(num_rem, dtype=num_rem.dtype)
+
+    indptr = indptr - num_rem
+
+    keep = np.ones(A.data.size, dtype=np.bool)
+    keep[ind_ix] = False
+    indices = indices[keep]
+    data = data[keep]
+
+    # Then we add the new
+    b_indptr = B.indptr
+    b_indices = B.indices
+    b_data = B.data
+
+    num_added = np.zeros(indptr.size, dtype=np.int32)
+    num_added[lines + 1] = b_indptr[1:] - b_indptr[:-1]
+    num_added = np.cumsum(num_added, dtype=num_added.dtype)
+
+    rep = np.diff(b_indptr)
+    indPos = np.repeat(indptr[lines], rep)
+
+    A.indices = np.insert(indices, indPos, b_indices)
+    A.data = np.insert(data, indPos, b_data)
+    A.indptr = indptr + num_added
+
+
+def stack_mat(A, B):
+    '''
+    Stack matrix B at the end of matrix A.
+    If A and B are csc matrices this function is equivalent to
+    A = scipy.sparse.hstack((A, B))
+    If A and B are csr matrices this function is equivalent to
+    A = scipy.sparse.vstack((A, B))
+
+    Parameters
+    ----------
+    A (scipy.sparse.spmatrix): A sparse matrix
+    B (scipy.sparse.spmatrix): A sparse matrix
+
+    Returns
+    -------
+    None
+
+
+    '''
+    if A.getformat() != 'csc' and A.getformat() != 'csr':
+        raise ValueError('Need a csc or csr matrix')
+    elif A.getformat() != B.getformat():
+        raise ValueError('A and B must be of the same matrix type')
+    if A.getformat() == 'csc':
+        if A.shape[0] != B.shape[0]:
+            raise ValueError('A.shape[0] must equal B.shape[0]')
+    if A.getformat() == 'csr':
+        if A.shape[1] != B.shape[1]:
+            raise ValueError('A.shape[1] must equal B.shape[1]')
+
+    if B.indptr.size == 1:
+        return
+
+    A.indptr = np.append(A.indptr, B.indptr[1:] + A.indptr[-1])
+    A.indices = np.append(A.indices, B.indices)
+    A.data = np.append(A.data, B.data)
+
+    if A.getformat() == 'csc':
+        A._shape = (A._shape[0], A._shape[1] + B._shape[1])
+    if A.getformat() == 'csr':
+        A._shape = (A._shape[0] + B._shape[0], A._shape[1])
+
+
 def slice_indices(A, slice_ind):
     """ Function for slicing sparse matrix along rows or columns.
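For reference, a small usage sketch (not part of the patch; the matrices are toy examples) of the in-place semantics described in the merge_matrices() and stack_mat() docstrings above:

import numpy as np
import scipy.sparse as sps
from porepy.utils import sparse_mat

A = sps.csc_matrix(np.array([[1., 0., 2.],
                             [0., 3., 0.],
                             [4., 0., 5.]]))
B = sps.csc_matrix(np.array([[7.], [0.], [8.]]))

# Replace column 1 of A in place; for csc this is equivalent to A[:, 1] = B.
sparse_mat.merge_matrices(A, B, np.array([1]))
# Append B as a new column in place; equivalent to A = sps.hstack((A, B)).
sparse_mat.stack_mat(A, B)
assert A.shape == (3, 4)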
@@ -52,5 +171,63 @@ def slice_indices(A, slice_ind): rows = slice_indices(A, np.array([0,2,3])) """ assert A.getformat() == 'csc' or A.getformat() == 'csr' - indices = A.indices[mcolon(A.indptr[slice_ind], A.indptr[slice_ind + 1])] + if isinstance(slice_ind, int): + indices = A.indices[slice( + A.indptr[int(slice_ind)], A.indptr[int(slice_ind + 1)])] + elif slice_ind.size == 1: + indices = A.indices[slice( + A.indptr[int(slice_ind)], A.indptr[int(slice_ind + 1)])] + else: + indices = A.indices[mcolon( + A.indptr[slice_ind], A.indptr[slice_ind + 1])] return indices + + +def slice_mat(A, ind): + """ + Function for slicing sparse matrix along rows or columns. + If A is a csc_matrix A will be sliced along columns, while if A is a + csr_matrix A will be sliced along the rows. + + Parameters + ---------- + A (scipy.sparse.csc/csr_matrix): A sparse matrix. + ind (np.array): Array containing indices to be sliced. + + Returns + ------- + A_sliced (scipy.sparse.csc/csr_matrix): The sliced matrix + if A is a csc_matrix A_sliced = A[:, ind] + if A is a csr_matrix A_slice = A[ind, :] + + Examples + -------- + A = sps.csc_matrix(np.eye(10)) + rows = slice_mat(A, np.array([0,2,3])) + """ + assert A.getformat() == 'csc' or A.getformat() == 'csr' + + if isinstance(ind, int): + N = 1 + indptr = np.zeros(2) + ind_slice = slice( + A.indptr[int(ind)], A.indptr[int(ind + 1)]) + elif ind.size == 1: + N = 1 + indptr = np.zeros(2) + ind_slice = slice( + A.indptr[int(ind)], A.indptr[int(ind + 1)]) + else: + N = ind.size + indptr = np.zeros(ind.size + 1) + ind_slice = mcolon( + A.indptr[ind], A.indptr[ind + 1]) + + indices = A.indices[ind_slice] + indptr[1:] = np.cumsum(A.indptr[ind + 1] - A.indptr[ind]) + data = A.data[ind_slice] + + if A.getformat() == 'csc': + return sps.csc_matrix((data, indices, indptr), shape=(A.shape[0], N)) + elif A.getformat() == 'csr': + return sps.csr_matrix((data, indices, indptr), shape=(N, A.shape[1])) diff --git a/test/integration/test_fracture_extrusion.py b/test/integration/test_fracture_extrusion.py new file mode 100644 index 0000000000..ca4f5c8798 --- /dev/null +++ b/test/integration/test_fracture_extrusion.py @@ -0,0 +1,40 @@ +import unittest +import numpy as np + +from porepy.fracs import extrusion + +class TestFractureExtrusion(unittest.TestCase): + + def test_cut_off(self): + # Two fractures, the second abutting. Check that the second fracture is + # cut correctly. 
+ tol = 1e-4 + p = np.array([[0, 0], [2, 0], [1, 0], [1, 10]]).T + e = np.array([[0, 1], [2, 3]]).T + + fractures = extrusion.fractures_from_outcrop(p, e, tol=tol) + assert np.all(fractures[1].p[1] > -tol) + + def test_not_aligned_outcrop(self): + # The fractures are not aligned with the mesh + p = np.array([[0, 0], [2, 2], [1, 1], [1, 3]]).T + e = np.array([[0, 1], [2, 3]]).T + + tol = 1e-4 + + fractures = extrusion.fractures_from_outcrop(p, e, tol=tol) + # + assert np.all(fractures[1].p[1] >= fractures[1].p[0]-tol) + + + def test_no_family_given(self): + # Assign fracture family automatically + tol = 1e-2 + p = np.array([[0, 0], [2, 0], [1, 0], [1, 3]]).T + e = np.array([[0, 1], [2, 3]]).T + + fractures = extrusion.fractures_from_outcrop(p, e, tol=tol) + assert np.all(fractures[1].p[1] >= -tol) + + if __name__ == '__main__': + unittest.main() diff --git a/test/unit/test_distance_computations.py b/test/unit/test_distance_computations.py index ea67d760f8..5efb4c0917 100644 --- a/test/unit/test_distance_computations.py +++ b/test/unit/test_distance_computations.py @@ -226,11 +226,12 @@ def test_norot_poly(self): p = np.array([[0.5, 0.5, 0], [0.5, 0, 0], [0.5, 0.5, 1], [0, 0, 1], [0.5, 0, 1], [2, 0.5, 0], [2, 0, 1]]).T - d, cp = cg.dist_points_polygon(p, poly) + d, cp, in_poly = cg.dist_points_polygon(p, poly) known_d = np.array([0, 0, 1, 1, 1, 1, np.sqrt(2)]) known_cp = np.array([[0.5, 0.5, 0], [0.5, 0, 0], [0.5, 0.5, 0], [0, 0, 0], [0.5, 0, 0], [1, 0.5, 0], [1, 0, 0]]).T + known_inp = np.array([1, 0, 0, 0, 0, 0], dtype=np.bool) assert np.allclose(d, known_d) assert np.allclose(cp, known_cp) @@ -241,12 +242,13 @@ def test_rot_poly(self): p = np.array([[0, 0, 0], [0, 0.5, 0.5], [2, 0.5, 0.5], [0, 0, 0.5], [0, -1, 0.5], [1, 0, 0], [1, 0.5, 0.5]]).T - d, cp = cg.dist_points_polygon(p, poly) + d, cp, in_poly = cg.dist_points_polygon(p, poly) known_d = np.array([1, 1, 1, 1, np.sqrt(2), 0, 0]) known_cp = np.array([[1, 0, 0], [1, 0.5, 0.5], [1, 0.5, 0.5], [1, 0, 0.5], [1, 0, 0.5], [1, 0, 0], [1, 0.5, 0.5]]).T + known_inp = np.array([0, 1, 0, 0, 0, 0, 1], dtype=np.bool) assert np.allclose(d, known_d) assert np.allclose(cp, known_cp) diff --git a/test/unit/test_fracture_center.py b/test/unit/test_fracture_center.py index 149afdf006..8cf0451325 100644 --- a/test/unit/test_fracture_center.py +++ b/test/unit/test_fracture_center.py @@ -9,31 +9,31 @@ class TestFractureCenters(unittest.TestCase): def test_frac_1(self): f_1 = Fracture(np.array([[0, 2, 2, 0], [0, 2, 2, 0], - [-1, -1, 1, 1]])) - c_known = np.array([1, 1, 0]) + [-1, -1, 1, 1]]), check_convexity=False) + c_known = np.array([1, 1, 0]).reshape((3, 1)) assert np.allclose(c_known, f_1.center) def test_frac_2(self): f_1 = Fracture(np.array([[0, 2, 2, 0], [0, 2, 2, 0], - [0, 0, 1, 1]])) - c_known = np.array([1, 1, 0.5]) + [0, 0, 1, 1]]), check_convexity=False) + c_known = np.array([1, 1, 0.5]).reshape((3, 1)) assert np.allclose(c_known, f_1.center) def test_frac_3(self): # Fracture plane defined by x + y + z = 1 f_1 = Fracture(np.array([[0, 1, 1, 0], [0, 0, 1, 1], - [1, 0, -1, 0]])) - c_known = np.array([0.5, 0.5, 0]) + [1, 0, -1, 0]]), check_convexity=False) + c_known = np.array([0.5, 0.5, 0]).reshape((3, 1)) assert np.allclose(c_known, f_1.center) def test_frac_4(self): # Fracture plane defined by x + y + z = 4 f_1 = Fracture(np.array([[0, 1, 1, 0], [0, 0, 1, 1], - [4, 3, 2, 3]])) - c_known = np.array([0.5, 0.5, 3]) + [4, 3, 2, 3]]), check_convexity=False) + c_known = np.array([0.5, 0.5, 3]).reshape((3, 1)) assert np.allclose(c_known, f_1.center) 
def test_frac_rand(self): @@ -41,8 +41,8 @@ def test_frac_rand(self): x = np.array([0, 1, 1, 0]) y = np.array([0, 0, 1, 1]) z = (r[0] - r[1] * x - r[2] * y) / r[3] - f = Fracture(np.vstack((x, y, z))) + f = Fracture(np.vstack((x, y, z)), check_convexity=False) z_cc = (r[0] - 0.5 * r[1] - 0.5 * r[2]) / r[3] - c_known = np.array([0.5, 0.5, z_cc]) + c_known = np.array([0.5, 0.5, z_cc]).reshape((3, 1)) assert np.allclose(c_known, f.center) diff --git a/test/unit/test_fracture_extrusion.py b/test/unit/test_fracture_extrusion.py new file mode 100644 index 0000000000..735aca0864 --- /dev/null +++ b/test/unit/test_fracture_extrusion.py @@ -0,0 +1,151 @@ +import unittest +import numpy as np + +from porepy.fracs.fractures import Fracture +from porepy.fracs import extrusion + +class TestFractureExtrusion(unittest.TestCase): + + def test_single_t_intersection(self): + # Three fractures meeting in a T, same family tag for two of them + edges = np.array([[0, 1, 0], [1, 2, 0], [1, 3, 1]]).T + abut, prim, sec, other = extrusion.t_intersections(edges) + + known_abut = [1] + known_prim = [0] + known_sec = [1] + known_other = [3] + + assert np.all(known_abut == abut) + assert np.all(known_prim == prim) + assert np.all(known_sec == sec) + assert np.all(known_other == other) + + def test_single_t_intersection_different_family(self): + # Three fractures meeting in a T. Different family on all, so no + # intersections found + edges = np.array([[0, 1, 0], [1, 2, 1], [1, 3, 2]]).T + abut, prim, sec, other = extrusion.t_intersections(edges) + + assert abut.size == 0 + assert prim.size == 0 + assert sec.size == 0 + assert other.size == 0 + + def test_single_t_intersection_disregard_family_tag(self): + # Three fractures meeting in a T. Different family on all, but we + # disregard family tags, so a T-intersection is found + edges = np.array([[0, 1, 0], [1, 2, 1], [1, 3, 2]]).T + abut, prim, sec, other = \ + extrusion.t_intersections(edges, remove_three_families=True) + known_abut = [1] + known_prim = [0] + known_sec = [1] + known_other = [3] + + assert np.all(known_abut == abut) + assert np.all(known_prim == prim) + assert np.all(known_sec == sec) + assert np.all(known_other == other) + + def test_disc_radius_center(self): + + p0 = np.array([[0], [0]]) + p1 = np.array([[0], [1]]) + length = np.array([1]) + theta = np.array([np.pi/2]) + + known_cc = np.array([[0], [0.5], [0]]) + + rad, cc, _ = extrusion.disc_radius_center(length, p0, p1, theta) + assert rad[0] == 0.5 + assert np.allclose(cc, known_cc) + + def compare_arrays(self, a, b): + assert a.shape == b.shape + + for i in range(a.shape[1]): + ai = a[:, i].reshape((3, 1)) + assert np.min(np.sum((b - ai)**2, axis=0)) < 1e-5 + bi = b[:, i].reshape((3, 1)) + assert np.min(np.sum((a - bi)**2, axis=0)) < 1e-5 + + def test_fracture_rotation_90_deg(self): + + p = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T + f = Fracture(p, check_convexity=False) + + p_rot_1_known = np.array([[0, 0, 0], [1, 0, 0], [1, 0, 1], [0, 0, 1]]).T + exposure = np.array([[0], [0], [0]]) + extrusion.rotate_fracture(f, [1, 0, 0], np.pi/2, exposure) + self.compare_arrays(f.p, p_rot_1_known) + + p_rot_2_known = np.array([[0, 0.5, -0.5], [1, 0.5, -0.5], + [1, 0.5, 0.5], [0, 0.5, 0.5]]).T + exposure = np.array([[0], [0.5], [0]]) + f = Fracture(p, check_convexity=False) + extrusion.rotate_fracture(f, [1, 0, 0], np.pi/2, exposure) + self.compare_arrays(f.p, p_rot_2_known) + + def test_cut_fracture_no_intersection(self): + p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T + f1 = 
Fracture(p1, check_convexity=False) + + p2 = np.array([[0.5, -0.5, 0.2], [0.5, 0.5, 0.2], [0.5, 0.5, 0.8], + [0.5, -0.5, 0.8]]).T + p_known = p2.copy() + f2 = Fracture(p2, check_convexity=False) + + ref_pt = np.array([[0], [1], [0]]) + extrusion.cut_fracture_by_plane(f2, f1, ref_pt) + + self.compare_arrays(p_known, f2.p) + + def test_cut_fracture_simple_intersection(self): + p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 0, 1], [0, 0, 1]]).T + f1 = Fracture(p1, check_convexity=False) + + p2 = np.array([[0.5, -0.5, 0.2], [0.5, 0.5, 0.2], [0.5, 0.5, 0.8], + [0.5, -0.5, 0.8]]).T + p_known = np.array([[0.5, 0, 0.2], [0.5, 0.5, 0.2], [0.5, 0.5, 0.8], + [0.5, 0, 0.8]]).T + f2 = Fracture(p2, check_convexity=False) + + ref_pt = np.array([[0], [1], [0]]) + extrusion.cut_fracture_by_plane(f2, f1, ref_pt) + + self.compare_arrays(p_known, f2.p) + + def test_cut_fracture_one_inclined(self): + p1 = np.array([[0, 1, -0.5], [1, 1, -0.5], [1, -1, 1.5], [0, -1, 1.5]]).T + f1 = Fracture(p1, check_convexity=False) + + p2 = np.array([[0.5, -0.5, 0.2], [0.5, 0.5, 0.2], [0.5, 0.5, 0.8], + [0.5, -1.5, 0.8]]).T + p_known = np.array([[0.5, 0.3, 0.2], [0.5, 0.5, 0.2], [0.5, 0.5, 0.8], + [0.5, -0.3, 0.8]]).T + f2 = Fracture(p2, check_convexity=False) + + ref_pt = np.array([[0], [1], [0]]) + extrusion.cut_fracture_by_plane(f2, f1, ref_pt) + + self.compare_arrays(p_known, f2.p) + + def test_rotation(self): + # Test a case with a fracture terminated in both ends (H-configuration) + # This turned out to be problematic for certain cases. + # The test is simply that it runs - should have something more + # insightful here + pt = np.array([[ 0.4587156 , 0.76605504, 0.60586278, 0.74934585, + 0.46570401, 0.55721055], + [ 0.28593274, 0.35321103, 0.31814406, 0.028759 , + 0.44178218, 0.30749384]]) + edges = np.array([[0, 1], [2, 3], [4, 5]]).T + + np.random.seed(7) + family = np.zeros(3, dtype=np.int) + extrusion.fractures_from_outcrop(pt, edges, family=family, + family_std_incline=[0.3]) + + if __name__ == '__main__': + unittest.main() diff --git a/test/unit/test_fracture_intersect_boundary.py b/test/unit/test_fracture_intersect_boundary.py index 4b7dc122ea..55267b6e5d 100644 --- a/test/unit/test_fracture_intersect_boundary.py +++ b/test/unit/test_fracture_intersect_boundary.py @@ -16,7 +16,7 @@ class TestFractureBoundaryIntersection(unittest.TestCase): def setup(self): self.f_1 = Fracture(np.array([[0, 1, 1, 0], [.5, .5, .5, .5], - [0, 0, 1, 1]])) + [0, 0, 1, 1]]), check_convexity=False) self.domain = {'xmin': 0, 'xmax': 1, 'ymin': 0, 'ymax': 1, 'zmin': 0, 'zmax': 1} @@ -96,7 +96,7 @@ def test_full_incline(self): p = np.array([[-0.5, 0.5, 0.5, -0.5], [0.5, 0.5, 1.5, 1.5], [-0.5, -0.5, 1, 1]]) - f = Fracture(p) + f = Fracture(p, check_convexity=False) network = FractureNetwork([f]) network.impose_external_boundary(self.domain) p_known = np.array([[0., 0.5, 0.5, 0], diff --git a/test/unit/test_inside_polygon.py b/test/unit/test_inside_polygon.py index 2b33a701a0..8334a64074 100644 --- a/test/unit/test_inside_polygon.py +++ b/test/unit/test_inside_polygon.py @@ -52,6 +52,25 @@ def test_multiple_points(self): assert inside[0] assert not inside[1] + def test_large_polygon(self): + a = np.array([[ -2.04462568e-01, -1.88898782e-01, -1.65916617e-01, + -1.44576869e-01, -7.82444375e-02, -1.44018389e-16, + 7.82444375e-02, 1.03961083e-01, 1.44576869e-01, + 1.88898782e-01, 2.04462568e-01, 1.88898782e-01, + 1.44576869e-01, 7.82444375e-02, 1.44018389e-16, + -7.82444375e-02, -1.44576869e-01, -1.88898782e-01], + [ -1.10953147e-16, 7.82444375e-02, 
+                         1.44576869e-01,  1.88898782e-01,  2.04462568e-01,
+                         1.88898782e-01,  1.76059749e-01,  1.44576869e-01,
+                         7.82444375e-02,  1.31179355e-16, -7.82444375e-02,
+                        -1.44576869e-01, -1.88898782e-01, -2.04462568e-01,
+                        -1.88898782e-01, -1.44576869e-01, -7.82444375e-02]])
+        b = np.array([[ 0.1281648 ,  0.04746067],
+                      [-0.22076491,  0.16421546]])
+        inside = cg.is_inside_polygon(a, b)
+        assert not inside[0]
+        assert inside[1]
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/test/unit/test_polygon_segment_intersection.py b/test/unit/test_polygon_segment_intersection.py
index bd80a6cb37..db3293de21 100644
--- a/test/unit/test_polygon_segment_intersection.py
+++ b/test/unit/test_polygon_segment_intersection.py
@@ -38,7 +38,7 @@ def test_mutual_intersection(self):
         p_i_known_2 = np.array([0, 0, 1.0]).reshape((-1, 1))
         assert np.min(np.sum(np.abs(p_1_3 - p_i_known_1), axis=0)) < 1e-5
 
-        # Then intersection of plane of 3 by edges of 1. 
+        # Then intersection of plane of 3 by edges of 1.
         p_3_1 = cg.polygon_segment_intersect(p3, p1)
         p_i_known_2 = np.array([0, 0, 1.0]).reshape((-1, 1))
@@ -56,7 +56,7 @@ def test_mutual_intersection_not_at_origin(self):
         p_i_known_1 = np.array([0, 0, 0.5]).reshape((-1, 1)) + incr
         assert np.min(np.sum(np.abs(p_1_3 - p_i_known_1), axis=0)) < 1e-5
 
-        # Then intersection of plane of 3 by edges of 1. 
+        # Then intersection of plane of 3 by edges of 1.
         p_3_1 = cg.polygon_segment_intersect(p3, p1)
         p_i_known_2 = np.array([0, 0, 1.0]).reshape((-1, 1)) + incr
@@ -76,18 +76,27 @@ def test_extension_would_intersect(self):
         assert isect is None
 
     def test_segments_intersect(self):
-        # Test where the planes intersect in a way where segments only touches
-        # segments, which does not qualify as intersection.
+        # Test where the planes intersect so that a vertex of one polygon only
+        # touches a vertex of the other. This now counts as an intersection.
         p_1, _, _, p_4 = self.setup_polygons()
+
         isect = cg.polygon_segment_intersect(p_1, p_4)
-        assert isect is None
+
+        isect_known_1 = np.array([[0, 0, -1]]).T
+        isect_known_2 = np.array([[0, 0, 1]]).T
+
+        assert np.min(np.sum(np.abs(isect - isect_known_1), axis=0)) < 1e-5
+        assert np.min(np.sum(np.abs(isect - isect_known_2), axis=0)) < 1e-5
+
         # Also try the other way around
         isect = cg.polygon_segment_intersect(p_4, p_1)
-        assert isect is None
+        assert np.min(np.sum(np.abs(isect - isect_known_1), axis=0)) < 1e-5
+        assert np.min(np.sum(np.abs(isect - isect_known_2), axis=0)) < 1e-5
 
     def test_issue_16(self):
         # Test motivated from debuging Issue #16 (GitHub)
-        # Two polygons meet in a common vertex; return should be None
+        # After updates of the code, we should find both the intersection at
+        # the vertex and the intersections internal to segments of both polygons
         frac1 = np.array([[1, 2, 4],
                           [1, 4, 1],
                           [2, 2, 2]])
@@ -95,12 +104,49 @@ def test_issue_16(self):
         frac2 = np.array([[2, 2, 2],
                           [2, 4, 1],
                           [1, 2, 4]])
-
-        isect_known = np.array([[2], [5/3], [2]])
+
+        # End points of the intersection segment
+        isect_known_1 = np.array([[2, 5/3, 2]]).T
+        isect_known_2 = np.array([[2, 4, 2]]).T
 
         isect = cg.polygon_segment_intersect(frac1, frac2)
-        assert np.allclose(isect, isect_known)
+        assert np.min(np.sum(np.abs(isect - isect_known_1), axis=0)) < 1e-5
+        assert np.min(np.sum(np.abs(isect - isect_known_2), axis=0)) < 1e-5
 
         isect = cg.polygon_segment_intersect(frac1[:, [0, 2, 1]], frac2)
+        assert np.min(np.sum(np.abs(isect - isect_known_1), axis=0)) < 1e-5
+        assert np.min(np.sum(np.abs(isect - isect_known_2), axis=0)) < 1e-5
+
+    def test_segment_in_polygon_plane(self):
+        # One segment lies fully within a plane
+        p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T
+        p2 = np.array([[0.3, 0.5, 0],
+                       [0.7, 0.5, 0],
+                       [0.7, 0.5, 1],
+                       [0.3, 0.5, 1]]).T
+        isect_known = np.array([[0.3, 0.5, 0],
+                                [0.7, 0.5, 0]]).T
+        isect = cg.polygon_segment_intersect(p1, p2)
+        assert np.allclose(isect, isect_known)
+
+    def test_segment_in_plane_but_not_in_polygon(self):
+        p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T
+        p2 = np.array([[1.3, 0.5, 0],
+                       [1.7, 0.5, 0],
+                       [1.7, 0.5, 1],
+                       [1.3, 0.5, 1]]).T
+        isect = cg.polygon_segment_intersect(p1, p2)
+        assert isect is None
+
+    def test_segment_partly_in_polygon(self):
+        # Segment in plane, one point inside, one point outside polygon
+        p1 = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]]).T
+        p2 = np.array([[0.3, 0.5, 0],
+                       [1.7, 0.5, 0],
+                       [0.7, 0.5, 1],
+                       [0.3, 0.5, 1]]).T
+        isect_known = np.array([[0.3, 0.5, 0],
+                                [1, 0.5, 0]]).T
+        isect = cg.polygon_segment_intersect(p1, p2)
         assert np.allclose(isect, isect_known)
 
 if __name__ == '__main__':
diff --git a/test/unit/test_sparse_mat.py b/test/unit/test_sparse_mat.py
index df43adebb0..8a868c1c4a 100644
--- a/test/unit/test_sparse_mat.py
+++ b/test/unit/test_sparse_mat.py
@@ -28,8 +28,9 @@ def test_csr_slice(self):
         A = sps.csr_matrix(np.array([[0, 0, 0],
                                      [1, 0, 0],
                                      [0, 0, 3]]))
+
         cols_0 = sparse_mat.slice_indices(A, np.array([0]))
-        cols_2 = sparse_mat.slice_indices(A, np.array([2]))
+        cols_2 = sparse_mat.slice_indices(A, 2)
         cols0_2 = sparse_mat.slice_indices(A, np.array([0, 1, 2]))
 
         assert cols_0.size == 0
@@ -41,8 +42,8 @@ def test_csc_slice(self):
         A = sps.csc_matrix(np.array([[0, 0, 0],
                                      [1, 0, 0],
                                      [0, 0, 3]]))
-        rows_0 = sparse_mat.slice_indices(A, np.array([0]))
-        rows_2 = sparse_mat.slice_indices(A, np.array([2]))
+        rows_0 = sparse_mat.slice_indices(A, np.array([0], dtype=int))
+        rows_2 = sparse_mat.slice_indices(A, 2)
         rows0_2 = sparse_mat.slice_indices(A, np.array([0, 1, 2]))
 
         assert rows_0 == np.array([1])
@@ -67,13 +68,140 @@ def test_zero_columns(self):
         A0 = A.copy()
         A2 = A.copy()
         A0_2 = A.copy()
-        sparse_mat.zero_columns(A0, np.array([0]))
-        sparse_mat.zero_columns(A2, np.array([2]))
+        sparse_mat.zero_columns(A0, np.array([0], dtype=int))
+        sparse_mat.zero_columns(A2, 2)
         sparse_mat.zero_columns(A0_2, np.array([0, 1, 2]))
 
         assert np.sum(A0 != A0_t) == 0
         assert np.sum(A2 != A2_t) == 0
         assert np.sum(A0_2 != A0_2_t) == 0
 
+    #------------------ Test sliced_mat() -----------------------
+    def test_sliced_mat_columns(self):
+        # Test column slicing of a csc_matrix
+        A = sps.csc_matrix(np.array([[0, 0, 0],
+                                     [1, 0, 0],
+                                     [0, 0, 3]]))
+
+        A0_t = sps.csc_matrix(np.array([[0, 0],
+                                        [0, 0],
+                                        [0, 3]]))
+        A1_t = sps.csc_matrix(np.array([[0, 0],
+                                        [1, 0],
+                                        [0, 0]]))
+        A2_t = sps.csc_matrix(np.array([[0],
+                                        [0],
+                                        [3]]))
+        A3_t = sps.csc_matrix(np.array([[],
+                                        [],
+                                        []]))
+
+        A0 = sparse_mat.slice_mat(A, np.array([1, 2], dtype=int))
+        A1 = sparse_mat.slice_mat(A, np.array([0, 1]))
+        A2 = sparse_mat.slice_mat(A, 2)
+        A3 = sparse_mat.slice_mat(A, np.array([], dtype=np.int))
+
+        assert np.sum(A0 != A0_t) == 0
+        assert np.sum(A1 != A1_t) == 0
+        assert np.sum(A2 != A2_t) == 0
+        assert np.sum(A3 != A3_t) == 0
+
+    def test_sliced_mat_rows(self):
+        # Test row slicing of a csr_matrix
+        A = sps.csr_matrix(np.array([[0, 0, 0],
+                                     [1, 0, 0],
+                                     [0, 0, 3]]))
+
+        A0_t = sps.csr_matrix(np.array([[1, 0, 0],
+                                        [0, 0, 3]]))
+        A1_t = sps.csr_matrix(np.array([[0, 0, 0],
+                                        [1, 0, 0]]))
+        A2_t = sps.csr_matrix(np.array([[0, 0, 3]]))
+        A3_t = sps.csr_matrix(np.atleast_2d(np.array([[], [], []])).T)
+
+        A0 = sparse_mat.slice_mat(A, np.array([1, 2], dtype=int))
+        A1 = sparse_mat.slice_mat(A, np.array([0, 1]))
+        A2 = sparse_mat.slice_mat(A, 2)
+        A3 = sparse_mat.slice_mat(A, np.array([], dtype=np.int))
+
+        assert np.sum(A0 != A0_t) == 0
+        assert np.sum(A1 != A1_t) == 0
+        assert np.sum(A2 != A2_t) == 0
+        assert np.sum(A3 != A3_t) == 0
+
+    #------------------ Test stack_mat() -----------------------
+    def test_stack_mat_columns(self):
+        # Test stacking of columns (csc matrices)
+        A = sps.csc_matrix(np.array([[0, 0, 0],
+                                     [1, 0, 0],
+                                     [0, 0, 3]]))
+
+        B = sps.csc_matrix(np.array([[0, 2],
+                                     [3, 1],
+                                     [1, 0]]))
+
+        A_t = sps.csc_matrix(np.array([[0, 0, 0, 0, 2],
+                                       [1, 0, 0, 3, 1],
+                                       [0, 0, 3, 1, 0]]))
+
+        sparse_mat.stack_mat(A, B)
+
+        assert np.sum(A != A_t) == 0
+
+    def test_stack_mat_rows(self):
+        # Test stacking of rows (csr matrices)
+        A = sps.csr_matrix(np.array([[0, 0, 0],
+                                     [1, 0, 0],
+                                     [0, 0, 3]]))
+
+        B = sps.csr_matrix(np.array([[0, 2, 2],
+                                     [3, 1, 3]]))
+
+        A_t = sps.csr_matrix(np.array([[0, 0, 0],
+                                       [1, 0, 0],
+                                       [0, 0, 3],
+                                       [0, 2, 2],
+                                       [3, 1, 3]]))
+
+        sparse_mat.stack_mat(A, B)
+
+        assert np.sum(A != A_t) == 0
+
+    #------------------ Test merge_mat() -----------------------
+
+    def test_merge_mat_columns(self):
+        # Test merging of columns (csc matrices)
+        A = sps.csc_matrix(np.array([[0, 0, 0],
+                                     [1, 0, 0],
+                                     [0, 0, 3]]))
+
+        B = sps.csc_matrix(np.array([[0, 2],
+                                     [3, 1],
+                                     [1, 0]]))
+
+        A_t = sps.csc_matrix(np.array([[0, 0, 2],
+                                       [1, 3, 1],
+                                       [0, 1, 0]]))
+
+        sparse_mat.merge_matrices(A, B, np.array([1, 2], dtype=np.int))
+
+        assert np.sum(A != A_t) == 0
+
+    def test_merge_mat_rows(self):
+        # Test merging of rows (csr matrices)
+        A = sps.csr_matrix(np.array([[0, 0, 0],
+                                     [1, 0, 0],
+                                     [0, 0, 3]]))
+
+        B = sps.csr_matrix(np.array([[0, 2, 2],
+                                     [3, 1, 3]]))
+
+        A_t = sps.csr_matrix(np.array([[0, 2, 2],
+                                       [3, 1, 3],
+                                       [0, 0, 3]]))
+
+        sparse_mat.merge_matrices(A, B, np.array([0, 1], dtype=np.int))
+
+        assert np.sum(A != A_t) == 0
+
 if __name__ == '__main__':
     unittest.main()
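The new sparse_mat tests above pin down the intended behaviour of slice_mat, stack_mat and merge_matrices. As a rough reference, the expected results used in those tests can be reproduced with plain scipy/numpy operations. The sketch below is an interpretation of that behaviour based only on the test data; the helper names are taken from the tests, and the scipy-based equivalents are assumptions, not the actual PorePy implementation.

    import numpy as np
    import scipy.sparse as sps

    A = sps.csc_matrix(np.array([[0, 0, 0],
                                 [1, 0, 0],
                                 [0, 0, 3]]))
    B = sps.csc_matrix(np.array([[0, 2],
                                 [3, 1],
                                 [1, 0]]))

    # slice_mat(A, ind) on a csc matrix should behave like column slicing
    # (for csr matrices, rows are sliced instead).
    slice_equiv = A[:, np.array([1, 2])]

    # stack_mat(A, B) on csc matrices appends the columns of B to A in place,
    # so the result should equal a horizontal stack (vertical stack for csr).
    stack_equiv = sps.hstack([A, B], format='csc')

    # merge_matrices(A, B, ind) on csc matrices overwrites the columns of A
    # listed in ind with the columns of B (rows for csr).
    merged = A.toarray()
    merged[:, [1, 2]] = B.toarray()
    merge_equiv = sps.csc_matrix(merged)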