From 6fd9c8a3c99ee18702464d648b9ebfb1c8501472 Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Tue, 19 Dec 2023 23:48:29 -0600 Subject: [PATCH 01/93] First stab at barycentric splitting, macro quadrature rule --- FIAT/macro.py | 78 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 FIAT/macro.py diff --git a/FIAT/macro.py b/FIAT/macro.py new file mode 100644 index 000000000..9af0b0008 --- /dev/null +++ b/FIAT/macro.py @@ -0,0 +1,78 @@ +import numpy as np + +from FIAT.quadrature import QuadratureRule, map_quadrature +from FIAT.quadrature_schemes import create_quadrature +from FIAT.reference_element import Cell, ufc_simplex + + +def bary_to_xy(verts, bary, result=None): + # verts is (sdim + 1) x sdim so verts[i, :] is i:th vertex + # bary is [npts, sdim + 1] + # result is [npts, sdim] + + if result is None: + return bary @ verts + else: + np.dot(bary, verts, out=result) + return result + + +def xy_to_bary(verts, pts, result=None): + # verts is (sdim + 1) x sdim so verts[i, :] is i:th vertex + # result is [npts, sdim] + # bary is [npts, sdim + 1] + npts = pts.shape[0] + sdim = verts.shape[1] + + mat = np.vstack((verts.T, np.ones((1, sdim+1)))) + + b = np.vstack((pts.T, np.ones((1, npts)))) + + foo = np.linalg.solve(mat, b) + + if result is None: + return np.copy(foo.T) + else: + result[:, :] = foo[:, :].T + return result + + +def barycentric_split(ref_el): + d = ref_el.get_dimension() + vs = np.asarray(T.get_vertices()) + # existing vertices plus the barycenter + newvs = np.vstack((vs, np.average(vs, axis=0))) + # cells comprising each face plus the barycenter + subcell2vert = np.asarray( + [[j for j in range(d+1) if j != i] + [d+1] for i in range(d+1)]) + return newvs, subcell2vert + + +def split_to_cells(ref_el, splitting): + newvs, subcell2vert = splitting(ref_el) + top = ref_el.get_topology() + shape = ref_el.shape + ncells = subcell2vert.shape[0] + return [Cell(shape, newvs[subcell2vert[i, :]], top) + for i in range(ncells)] + + +class MacroQuadratureRule(QuadratureRule): + def __init__(self, rule, splitting): + ref_el = rule.ref_el + pts = np.asarray(rule.pts) + wts = np.asarray(rule.wts) + new_els = split_to_cells(ref_el, splitting) + new_rules = [map_quadrature(pts, wts, ref_el, new_el) + for new_el in new_els] + super(MacroQuadratureRule, self).__init__( + ref_el, + np.vstack([np.asarray(new_rule[0]) for new_rule in new_rules]), + np.hstack([np.asarray(new_rule[1]) for new_rule in new_rules])) + + +T = ufc_simplex(2) +Q = create_quadrature(T, 2) +macro_Q = MacroQuadratureRule(Q, barycentric_split) +print(macro_Q.pts) +print(macro_Q.wts) From b986e284246494bf412c0bfbbc3548b915fcea5b Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 21 Mar 2024 15:42:16 -0500 Subject: [PATCH 02/93] first stab at alfeld --- FIAT/alfeld.py | 48 +++++++++++++++++++++++++++++++++++++++ FIAT/reference_element.py | 26 ++++++++++----------- 2 files changed, 61 insertions(+), 13 deletions(-) create mode 100644 FIAT/alfeld.py diff --git a/FIAT/alfeld.py b/FIAT/alfeld.py new file mode 100644 index 000000000..704efe224 --- /dev/null +++ b/FIAT/alfeld.py @@ -0,0 +1,48 @@ +import copy + +import numpy + +from FIAT import Lagrange +from FIAT.reference_element import SimplicialComplex, ufc_simplex + + +class AlfeldSplit(SimplicialComplex): + def __init__(self, T): + self.parent = T + sdim = T.get_spatial_dimension() + old_vs = T.get_vertices() + + b = numpy.average(old_vs, axis=0) + + new_verts = old_vs + (tuple(b),) + + new_topology = copy.deepcopy(T.topology) + + 
new_vert_id = len(T.topology[0]) + new_topology[0] = {i: (i,) for i in range(new_vert_id+1)} + new_topology[sdim] = {} + + for dim_cur in range(1, sdim + 1): + start = len(new_topology[dim_cur]) + for eid, vs in T.topology[dim_cur-1].items(): + new_topology[dim_cur][start+eid] = vs + (new_vert_id,) + + super(AlfeldSplit, self).__init__(T.shape, new_verts, new_topology) + + +if __name__ == "__main__": + sdim = 3 + + T = ufc_simplex(sdim) + + TA = AlfeldSplit(T) + + TAT = TA.topology + + # degree = sdim+1 + # print(T.vertices) + # for i in range(4): + # print("subcell", i, TA.get_vertices_of_subcomplex(TAT[3][i])) + # print("points", TA.make_points(sdim, i, degree)) + # print(T.connectivity) + # print(TA.connectivity) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index dcef5d0a1..e5e6700fe 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -19,17 +19,18 @@ Currently implemented are UFC and Default Line, Triangle and Tetrahedron. """ -from itertools import chain, product, count -from functools import reduce -from collections import defaultdict import operator +from collections import defaultdict +from functools import reduce +from itertools import chain, count, product from math import factorial -from recursivenodes.nodes import _recursive, _decode_family -from FIAT.orientation_utils import make_cell_orientation_reflection_map_simplex, make_cell_orientation_reflection_map_tensorproduct - import numpy +from recursivenodes.nodes import _decode_family, _recursive +from FIAT.orientation_utils import ( + make_cell_orientation_reflection_map_simplex, + make_cell_orientation_reflection_map_tensorproduct) POINT = 0 LINE = 1 @@ -214,7 +215,7 @@ def get_connectivity(self): def get_vertices_of_subcomplex(self, t): """Returns the tuple of vertex coordinates associated with the labels contained in the iterable t.""" - return tuple([self.vertices[ti] for ti in t]) + return tuple(self.vertices[ti] for ti in t) def get_dimension(self): """Returns the subelement dimension of the cell. For tensor @@ -250,7 +251,7 @@ def cell_orientation_reflection_map(self): raise NotImplementedError("Should be implemented in a subclass.") -class Simplex(Cell): +class SimplicialComplex(Cell): r"""Abstract class for a reference simplex. 
Orientation of a physical cell is computed systematically @@ -414,13 +415,11 @@ def make_points(self, dim, entity_id, order, variant=None): include in each direction.""" if dim == 0: return (self.get_vertices()[entity_id], ) - elif 0 < dim < self.get_spatial_dimension(): + elif 0 < dim <= self.get_spatial_dimension(): entity_verts = \ self.get_vertices_of_subcomplex( self.get_topology()[dim][entity_id]) return make_lattice(entity_verts, order, 1, variant=variant) - elif dim == self.get_spatial_dimension(): - return make_lattice(self.get_vertices(), order, 1, variant=variant) else: raise ValueError("illegal dimension") @@ -443,7 +442,7 @@ def compute_scaled_normal(self, facet_i): def compute_reference_normal(self, facet_dim, facet_i): """Returns the unit normal in infinity norm to facet_i.""" assert facet_dim == self.get_spatial_dimension() - 1 - n = Simplex.compute_normal(self, facet_i) # skip UFC overrides + n = SimplicialComplex.compute_normal(self, facet_i) # skip UFC overrides return n / numpy.linalg.norm(n, numpy.inf) def get_entity_transform(self, dim, entity): @@ -512,7 +511,8 @@ def cell_orientation_reflection_map(self): return make_cell_orientation_reflection_map_simplex(self.get_dimension()) -# Backwards compatible name +# Backwards compatible names +Simplex = SimplicialComplex ReferenceElement = Simplex From 4036e01a26b0f5dff041f2aa515898be666e803d Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 21 Mar 2024 16:39:50 -0500 Subject: [PATCH 03/93] Split up simplex from simplicial complex --- FIAT/alfeld.py | 25 +++++--- FIAT/reference_element.py | 123 ++++++++++++++++++++++---------------- 2 files changed, 88 insertions(+), 60 deletions(-) diff --git a/FIAT/alfeld.py b/FIAT/alfeld.py index 704efe224..eee25ae58 100644 --- a/FIAT/alfeld.py +++ b/FIAT/alfeld.py @@ -2,7 +2,6 @@ import numpy -from FIAT import Lagrange from FIAT.reference_element import SimplicialComplex, ufc_simplex @@ -29,9 +28,17 @@ def __init__(self, T): super(AlfeldSplit, self).__init__(T.shape, new_verts, new_topology) + def construct_subelement(self, dimension): + """Constructs the reference element of a cell subentity + specified by subelement dimension. + + :arg dimension: subentity dimension (integer) + """ + return self.parent.construct_subelement(dimension) + if __name__ == "__main__": - sdim = 3 + sdim = 2 T = ufc_simplex(sdim) @@ -39,10 +46,10 @@ def __init__(self, T): TAT = TA.topology - # degree = sdim+1 - # print(T.vertices) - # for i in range(4): - # print("subcell", i, TA.get_vertices_of_subcomplex(TAT[3][i])) - # print("points", TA.make_points(sdim, i, degree)) - # print(T.connectivity) - # print(TA.connectivity) + for i in range(1, sdim+1): + TX = TA.construct_subelement(i) + b = numpy.average(TX.get_vertices(), axis=0) + for entity in TAT[i].keys(): + mapped_bary = TA.get_entity_transform(i, entity)(b) + computed_bary = numpy.average(TA.get_vertices_of_subcomplex(TAT[i][entity]), axis=0) + assert numpy.allclose(mapped_bary, computed_bary) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index e5e6700fe..3dce57192 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -252,73 +252,58 @@ def cell_orientation_reflection_map(self): class SimplicialComplex(Cell): - r"""Abstract class for a reference simplex. - - Orientation of a physical cell is computed systematically - by comparing the canonical orderings of its facets and - the facets in the FIAT reference cell. 
- - As an example, we compute the orientation of a - triangular cell: - - + + - | \ | \ - 1 0 47 42 - | \ | \ - +--2---+ +--43--+ - FIAT canonical Mapped example physical cell - - Suppose that the facets of the physical cell - are canonically ordered as: + r"""Abstract class for a simplicial complex. - C = [43, 42, 47] + This consists of list of vertex locations and a topology map defining facets. + """ + def __init__(self, shape, vertices, topology): + # Make sure that every facet has the right number of vertices to be + # a simplex. + for dim in topology: + for entity in topology[dim]: + assert len(topology[dim][entity]) == dim + 1 - FIAT facet to Physical facet map is given by: + super(SimplicialComplex, self).__init__(shape, vertices, topology) - M = [42, 47, 43] + def compute_normal(self, facet_i): + """Returns the unit normal vector to facet i of codimension 1.""" - Then the orientation of the cell is computed as: + t = self.get_topology() + sd = self.get_spatial_dimension() - C.index(M[0]) = 1; C.remove(M[0]) - C.index(M[1]) = 1; C.remove(M[1]) - C.index(M[2]) = 0; C.remove(M[2]) + # To handle simplicial complex case: + # Find a subcell of which facet_i is on the boundary + # Note: this is trivial and vastly overengineered for the single-cell + # case. + for k, facets in enumerate(self.connectivity[(sd, sd-1)]): + if facet_i in facets: + break + vertices = self.get_vertices_of_subcomplex(t[sd][k]) - o = (1 * 2!) + (1 * 1!) + (0 * 0!) = 3 - """ - def compute_normal(self, facet_i): - """Returns the unit normal vector to facet i of codimension 1.""" # Interval case if self.get_shape() == LINE: - verts = numpy.asarray(self.vertices) + verts = numpy.asarray(vertices) v_i, = self.get_topology()[0][facet_i] n = verts[v_i] - verts[[1, 0][v_i]] return n / numpy.linalg.norm(n) - # first, let's compute the span of the simplex - # This is trivial if we have a d-simplex in R^d. - # Not so otherwise. - vert_vecs = [numpy.array(v) - for v in self.vertices] - vert_vecs_foo = numpy.array([vert_vecs[i] - vert_vecs[0] - for i in range(1, len(vert_vecs))]) + # vectors from vertex 0 to each other vertex. + vert_vecs = numpy.asarray(vertices) + vert_vecs_from_v0 = vert_vecs[1:, :] - vert_vecs[0][None, :] - (u, s, vt) = numpy.linalg.svd(vert_vecs_foo) + (u, s, _) = numpy.linalg.svd(vert_vecs_from_v0) rank = len([si for si in s if si > 1.e-10]) # this is the set of vectors that span the simplex spanu = u[:, :rank] - t = self.get_topology() - sd = self.get_spatial_dimension() vert_coords_of_facet = \ self.get_vertices_of_subcomplex(t[sd-1][facet_i]) # now I find everything normal to the facet. - vcf = [numpy.array(foo) - for foo in vert_coords_of_facet] - facet_span = numpy.array([vcf[i] - vcf[0] - for i in range(1, len(vcf))]) - (uf, sf, vft) = numpy.linalg.svd(facet_span) + vcf = numpy.asarray(vert_coords_of_facet) + facet_span = vcf[1:, :] - vcf[0][None, :] + (_, sf, vft) = numpy.linalg.svd(facet_span) # now get the null space from vft rankfacet = len([si for si in sf if si > 1.e-10]) @@ -338,7 +323,7 @@ def compute_normal(self, facet_i): nfoo = foo[:, 0] # what is the vertex not in the facet? 
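+        # (k, found above, is a top-level cell that contains facet_i; a simplicial
+        # complex may hold several cells, so cell 0 can no longer be assumed here)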
- verts_set = set(t[sd][0]) + verts_set = set(t[sd][k]) verts_facet = set(t[sd - 1][facet_i]) verts_diff = verts_set.difference(verts_facet) if len(verts_diff) != 1: @@ -424,9 +409,11 @@ def make_points(self, dim, entity_id, order, variant=None): raise ValueError("illegal dimension") def volume(self): - """Computes the volume of the simplex in the appropriate + """Computes the volume of the simplicial complex in the appropriate dimensional measure.""" - return volume(self.get_vertices()) + sd = self.get_spatial_dimension() + return sum(self.volume_of_subcomplex(sd, k) + for k in self.topology[sd]) def volume_of_subcomplex(self, dim, facet_no): vids = self.topology[dim][facet_no] @@ -460,7 +447,7 @@ def get_entity_transform(self, dim, entity): i, = topology[dim][entity] vertex = self.get_vertices()[i] return lambda point: vertex - elif dim == celldim: + elif dim == celldim and len(self.topology[celldim]) == 1: assert entity == 0 return lambda point: point @@ -503,6 +490,41 @@ def get_dimension(self): spatial dimension.""" return self.get_spatial_dimension() + +class Simplex(SimplicialComplex): + r"""Abstract class for a reference simplex. + + Orientation of a physical cell is computed systematically + by comparing the canonical orderings of its facets and + the facets in the FIAT reference cell. + + As an example, we compute the orientation of a + triangular cell: + + + + + | \ | \ + 1 0 47 42 + | \ | \ + +--2---+ +--43--+ + FIAT canonical Mapped example physical cell + + Suppose that the facets of the physical cell + are canonically ordered as: + + C = [43, 42, 47] + + FIAT facet to Physical facet map is given by: + + M = [42, 47, 43] + + Then the orientation of the cell is computed as: + + C.index(M[0]) = 1; C.remove(M[0]) + C.index(M[1]) = 1; C.remove(M[1]) + C.index(M[2]) = 0; C.remove(M[2]) + + o = (1 * 2!) + (1 * 1!) + (0 * 0!) 
= 3 + """ def symmetry_group_size(self, dim): return numpy.math.factorial(dim + 1) @@ -511,8 +533,7 @@ def cell_orientation_reflection_map(self): return make_cell_orientation_reflection_map_simplex(self.get_dimension()) -# Backwards compatible names -Simplex = SimplicialComplex +# Backwards compatible name ReferenceElement = Simplex From 047a1b063a08388610918a8992989440d4458b68 Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 21 Mar 2024 17:04:06 -0500 Subject: [PATCH 04/93] macro test, WIP on uniform --- FIAT/macro.py | 95 ++++++++++++++++++++--------------------- test/unit/test_macro.py | 37 ++++++++++++++++ 2 files changed, 84 insertions(+), 48 deletions(-) create mode 100644 test/unit/test_macro.py diff --git a/FIAT/macro.py b/FIAT/macro.py index 9af0b0008..3f8950041 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -1,8 +1,8 @@ -import numpy as np +import copy -from FIAT.quadrature import QuadratureRule, map_quadrature -from FIAT.quadrature_schemes import create_quadrature -from FIAT.reference_element import Cell, ufc_simplex +import numpy + +from FIAT.reference_element import SimplicialComplex, ufc_simplex def bary_to_xy(verts, bary, result=None): @@ -13,7 +13,7 @@ def bary_to_xy(verts, bary, result=None): if result is None: return bary @ verts else: - np.dot(bary, verts, out=result) + numpy.dot(bary, verts, out=result) return result @@ -24,55 +24,54 @@ def xy_to_bary(verts, pts, result=None): npts = pts.shape[0] sdim = verts.shape[1] - mat = np.vstack((verts.T, np.ones((1, sdim+1)))) + mat = numpy.vstack((verts.T, numpy.ones((1, sdim+1)))) - b = np.vstack((pts.T, np.ones((1, npts)))) + b = numpy.vstack((pts.T, numpy.ones((1, npts)))) - foo = np.linalg.solve(mat, b) + foo = numpy.linalg.solve(mat, b) if result is None: - return np.copy(foo.T) + return numpy.copy(foo.T) else: result[:, :] = foo[:, :].T return result -def barycentric_split(ref_el): - d = ref_el.get_dimension() - vs = np.asarray(T.get_vertices()) - # existing vertices plus the barycenter - newvs = np.vstack((vs, np.average(vs, axis=0))) - # cells comprising each face plus the barycenter - subcell2vert = np.asarray( - [[j for j in range(d+1) if j != i] + [d+1] for i in range(d+1)]) - return newvs, subcell2vert - - -def split_to_cells(ref_el, splitting): - newvs, subcell2vert = splitting(ref_el) - top = ref_el.get_topology() - shape = ref_el.shape - ncells = subcell2vert.shape[0] - return [Cell(shape, newvs[subcell2vert[i, :]], top) - for i in range(ncells)] - - -class MacroQuadratureRule(QuadratureRule): - def __init__(self, rule, splitting): - ref_el = rule.ref_el - pts = np.asarray(rule.pts) - wts = np.asarray(rule.wts) - new_els = split_to_cells(ref_el, splitting) - new_rules = [map_quadrature(pts, wts, ref_el, new_el) - for new_el in new_els] - super(MacroQuadratureRule, self).__init__( - ref_el, - np.vstack([np.asarray(new_rule[0]) for new_rule in new_rules]), - np.hstack([np.asarray(new_rule[1]) for new_rule in new_rules])) - - -T = ufc_simplex(2) -Q = create_quadrature(T, 2) -macro_Q = MacroQuadratureRule(Q, barycentric_split) -print(macro_Q.pts) -print(macro_Q.wts) +class AlfeldSplit(SimplicialComplex): + def __init__(self, T): + self.parent = T + sdim = T.get_spatial_dimension() + old_vs = T.get_vertices() + + b = numpy.average(old_vs, axis=0) + + new_verts = old_vs + (tuple(b),) + + new_topology = copy.deepcopy(T.topology) + + new_vert_id = len(T.topology[0]) + new_topology[0] = {i: (i,) for i in range(new_vert_id+1)} + new_topology[sdim] = {} + + for dim_cur in range(1, sdim + 1): + start = 
len(new_topology[dim_cur]) + for eid, vs in T.topology[dim_cur-1].items(): + new_topology[dim_cur][start+eid] = vs + (new_vert_id,) + + super(AlfeldSplit, self).__init__(T.shape, new_verts, new_topology) + + def construct_subelement(self, dimension): + """Constructs the reference element of a cell subentity + specified by subelement dimension. + + :arg dimension: subentity dimension (integer) + """ + return self.parent.construct_subelement(dimension) + + +# Does a uniform split +class UniformSplit(SimplicialComplex): + def __init__(self, T): + self.parent = T + sdim = T.get_spatial_dimension() + old_vs = T.get_vertices() diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py new file mode 100644 index 000000000..3f7b86bb1 --- /dev/null +++ b/test/unit/test_macro.py @@ -0,0 +1,37 @@ +import numpy +import pytest +from FIAT import ufc_simplex +from FIAT.macro import AlfeldSplit + + +@pytest.mark.parametrize("sdim", (2, 3)) +def test_split_entity_transform(sdim): + T = ufc_simplex(sdim) + TA = AlfeldSplit(T) + TAT = TA.topology + + for i in range(1, sdim+1): + TX = TA.construct_subelement(i) + b = numpy.average(TX.get_vertices(), axis=0) + for entity in TAT[i]: + mapped_bary = TA.get_entity_transform(i, entity)(b) + computed_bary = numpy.average(TA.get_vertices_of_subcomplex(TAT[i][entity]), axis=0) + assert numpy.allclose(mapped_bary, computed_bary) + + +@pytest.mark.parametrize("sdim", (2, 3)) +@pytest.mark.parametrize("degree", (4,)) +@pytest.mark.parametrize("variant", ("gll", "equispaced")) +def test_split_make_points(sdim, degree, variant): + T = ufc_simplex(sdim) + TA = AlfeldSplit(T) + TAT = TA.topology + + for i in range(1, sdim+1): + TX = TA.construct_subelement(i) + pts_ref = TX.make_points(i, 0, degree, variant=variant) + for entity in TAT[i]: + pts_entity = TA.make_points(i, entity, degree, variant=variant) + mapping = TA.get_entity_transform(i, entity) + mapped_pts = list(map(mapping, pts_ref)) + assert numpy.allclose(mapped_pts, pts_entity) From 8e85585f3f8afcd705ae3b73e910e6205991ebe8 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 21 Mar 2024 19:48:45 -0500 Subject: [PATCH 05/93] MacroQuadratureRule, UniformSplit WIP --- FIAT/macro.py | 123 ++++++++++++++++++++++++++++++++-------- test/unit/test_macro.py | 18 +++++- 2 files changed, 116 insertions(+), 25 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index 3f8950041..1ebd35a25 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -1,8 +1,8 @@ import copy - import numpy - -from FIAT.reference_element import SimplicialComplex, ufc_simplex +from FIAT.reference_element import SimplicialComplex +from FIAT.finite_element import FiniteElement +from FIAT.quadrature import QuadratureRule, FacetQuadratureRule def bary_to_xy(verts, bary, result=None): @@ -38,27 +38,27 @@ def xy_to_bary(verts, pts, result=None): class AlfeldSplit(SimplicialComplex): - def __init__(self, T): - self.parent = T - sdim = T.get_spatial_dimension() - old_vs = T.get_vertices() - b = numpy.average(old_vs, axis=0) + def __init__(self, ref_el): + self.parent = ref_el + sd = ref_el.get_spatial_dimension() + old_verts = ref_el.get_vertices() - new_verts = old_vs + (tuple(b),) + b = numpy.average(old_verts, axis=0) + new_verts = old_verts + (tuple(b),) - new_topology = copy.deepcopy(T.topology) + new_topology = copy.deepcopy(ref_el.topology) - new_vert_id = len(T.topology[0]) - new_topology[0] = {i: (i,) for i in range(new_vert_id+1)} - new_topology[sdim] = {} + new_vert_id = len(ref_el.topology[0]) + new_topology[0] = {i: (i,) for i in 
range(new_vert_id + 1)} + new_topology[sd] = {} - for dim_cur in range(1, sdim + 1): - start = len(new_topology[dim_cur]) - for eid, vs in T.topology[dim_cur-1].items(): - new_topology[dim_cur][start+eid] = vs + (new_vert_id,) + for dim in range(1, sd + 1): + offset = len(new_topology[dim]) + for entity, ids in ref_el.topology[dim-1].items(): + new_topology[dim][offset+entity] = ids + (new_vert_id,) - super(AlfeldSplit, self).__init__(T.shape, new_verts, new_topology) + super(AlfeldSplit, self).__init__(ref_el.shape, new_verts, new_topology) def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity @@ -69,9 +69,86 @@ def construct_subelement(self, dimension): return self.parent.construct_subelement(dimension) -# Does a uniform split class UniformSplit(SimplicialComplex): - def __init__(self, T): - self.parent = T - sdim = T.get_spatial_dimension() - old_vs = T.get_vertices() + + def __init__(self, ref_el): + self.parent = ref_el + sd = ref_el.get_spatial_dimension() + old_verts = ref_el.get_vertices() + + new_verts = old_verts + tuple(tuple(numpy.average(old_verts[list(ids)], axis=0)) + for ids in ref_el.topology[1].values()) + + new_topology = {} + new_topology[0] = {i: (i,) for i in range(len(new_verts))} + new_topology[1] = {} + + # Split each edge + offset = len(old_verts) + for entity, verts in ref_el.topology[1].items(): + midpoint = offset + entity + new_topology[1][2*entity] = (verts[0], midpoint) + new_topology[1][2*entity+1] = (verts[1], midpoint) + + # Add edges connecting midpoints + num_old_edges = len(ref_el.topology[1]) + cur = len(new_topology[1]) + for j in range(num_old_edges): + for i in range(j+1, num_old_edges): + new_topology[1][cur] = (offset+j, offset+i) + cur = cur + 1 + + # TODO add higher dimensional entites + for dim in range(2, sd+1): + new_topology[dim] = {} + + + super(UniformSplit, self).__init__(ref_el.shape, new_verts, new_topology) + + + def construct_subelement(self, dimension): + """Constructs the reference element of a cell subentity + specified by subelement dimension. 
+ + :arg dimension: subentity dimension (integer) + """ + return self.parent.construct_subelement(dimension) + + +class MacroElement(FiniteElement): + """ + A macro element built from a base finite element on a split of the reference cell + """ + + def __init__(self, element, split): + ref_el = element.get_reference_element() + dual = None + order = element.get_order() + formdegree = element.get_formdegree() + mapping = element._mapping + self.element = element + self.cell_complex = split(ref_el) + super(MacroElement, self).__init__(ref_el, dual, order, formdegree=formdegree, mapping=mapping) + + def tabulate(self, order, points, entity=None): + raise NotImplementedError("Wait for it") + # tabulate the reference element on each sub-cell and scatter with the local to global mapping + + +class MacroQuadratureRule(QuadratureRule): + + def __init__(self, cell_complex, Q_ref): + pts = [] + wts = [] + sd = cell_complex.get_spatial_dimension() + ref_el = cell_complex.construct_subelement(sd) + t = cell_complex.get_topology() + dim = Q_ref.ref_el.get_spatial_dimension() + for entity in t[dim]: + Q_cur = FacetQuadratureRule(cell_complex, dim, entity, Q_ref) + pts.extend(Q_cur.pts) + wts.extend(Q_cur.wts) + + pts = tuple(pts) + wts = tuple(wts) + super(MacroQuadratureRule, self).__init__(ref_el, pts, wts) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 3f7b86bb1..86229c2b1 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -1,7 +1,8 @@ import numpy import pytest -from FIAT import ufc_simplex -from FIAT.macro import AlfeldSplit +from FIAT.reference_element import ufc_simplex, symmetric_simplex +from FIAT.macro import AlfeldSplit, MacroQuadratureRule +from FIAT.quadrature_schemes import create_quadrature @pytest.mark.parametrize("sdim", (2, 3)) @@ -35,3 +36,16 @@ def test_split_make_points(sdim, degree, variant): mapping = TA.get_entity_transform(i, entity) mapped_pts = list(map(mapping, pts_ref)) assert numpy.allclose(mapped_pts, pts_entity) + + +@pytest.mark.parametrize("sdim", (2, 3)) +def test_macro_quadrature(sdim): + T = symmetric_simplex(sdim) + TA = AlfeldSplit(T) + + degree = 6 + Q_ref = create_quadrature(T, degree) + Q = MacroQuadratureRule(TA, Q_ref) + # import matplotlib.pyplot as plt + # plt.scatter(*Q.get_points().T) + # plt.show() From 0598d35847b6ce0eb146afbba438c029c53df280 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 21 Mar 2024 22:16:46 -0500 Subject: [PATCH 06/93] flake --- FIAT/macro.py | 2 -- test/unit/test_macro.py | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index 1ebd35a25..497cd2817 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -102,10 +102,8 @@ def __init__(self, ref_el): for dim in range(2, sd+1): new_topology[dim] = {} - super(UniformSplit, self).__init__(ref_el.shape, new_verts, new_topology) - def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity specified by subelement dimension. 
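
A minimal usage sketch of the quadrature machinery introduced in the previous patch (illustrative only, not part of any diff; the cell and degree are arbitrary choices): mapping a reference rule onto every subcell of an Alfeld split should preserve the total weight, since the subcells partition the parent simplex.

    from FIAT.reference_element import ufc_simplex
    from FIAT.quadrature_schemes import create_quadrature
    from FIAT.macro import AlfeldSplit, MacroQuadratureRule

    T = ufc_simplex(2)
    # rule on the parent triangle, remapped onto the three Alfeld subcells
    Q = MacroQuadratureRule(AlfeldSplit(T), create_quadrature(T, 4))
    # sanity check: the composite weights should sum to the parent volume
    assert abs(sum(Q.wts) - T.volume()) < 1e-12
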
diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 86229c2b1..2f4736df6 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -46,6 +46,7 @@ def test_macro_quadrature(sdim): degree = 6 Q_ref = create_quadrature(T, degree) Q = MacroQuadratureRule(TA, Q_ref) + Q.get_points() # import matplotlib.pyplot as plt # plt.scatter(*Q.get_points().T) # plt.show() From 1010dcc310cd7290a801ec1101139f6476e77aa2 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 22 Mar 2024 19:00:26 -0500 Subject: [PATCH 07/93] UniformSplit working --- FIAT/macro.py | 120 +++++++++++++++++++++++----------------- test/unit/test_macro.py | 78 +++++++++++++++----------- 2 files changed, 114 insertions(+), 84 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index 497cd2817..bc4e6bdb8 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -1,6 +1,6 @@ import copy import numpy -from FIAT.reference_element import SimplicialComplex +from FIAT.reference_element import make_lattice, lattice_iter, SimplicialComplex from FIAT.finite_element import FiniteElement from FIAT.quadrature import QuadratureRule, FacetQuadratureRule @@ -37,28 +37,17 @@ def xy_to_bary(verts, pts, result=None): return result -class AlfeldSplit(SimplicialComplex): +class SplitSimplicialComplex(SimplicialComplex): + """Abstract class to implement a split on a Simplex + """ def __init__(self, ref_el): self.parent = ref_el - sd = ref_el.get_spatial_dimension() - old_verts = ref_el.get_vertices() - - b = numpy.average(old_verts, axis=0) - new_verts = old_verts + (tuple(b),) - - new_topology = copy.deepcopy(ref_el.topology) + vertices, topology = self.split_topology(ref_el) + super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology) - new_vert_id = len(ref_el.topology[0]) - new_topology[0] = {i: (i,) for i in range(new_vert_id + 1)} - new_topology[sd] = {} - - for dim in range(1, sd + 1): - offset = len(new_topology[dim]) - for entity, ids in ref_el.topology[dim-1].items(): - new_topology[dim][offset+entity] = ids + (new_vert_id,) - - super(AlfeldSplit, self).__init__(ref_el.shape, new_verts, new_topology) + def split_topology(self, ref_el): + raise NotImplementedError def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity @@ -69,48 +58,75 @@ def construct_subelement(self, dimension): return self.parent.construct_subelement(dimension) -class UniformSplit(SimplicialComplex): +class AlfeldSplit(SplitSimplicialComplex): - def __init__(self, ref_el): - self.parent = ref_el + def split_topology(self, ref_el): sd = ref_el.get_spatial_dimension() - old_verts = ref_el.get_vertices() + new_topology = copy.deepcopy(ref_el.topology) + new_topology[sd] = {} - new_verts = old_verts + tuple(tuple(numpy.average(old_verts[list(ids)], axis=0)) - for ids in ref_el.topology[1].values()) + barycenter = ref_el.make_points(sd, 0, sd+1) + new_verts = ref_el.vertices + tuple(barycenter) + new_vert_id = len(ref_el.vertices) + + new_topology[0][new_vert_id] = (new_vert_id,) + for dim in range(1, sd + 1): + offset = len(new_topology[dim]) + for entity, ids in ref_el.topology[dim-1].items(): + new_topology[dim][offset+entity] = ids + (new_vert_id,) + return new_verts, new_topology + + +class UniformSplit(SplitSimplicialComplex): + + def split_topology(self, ref_el): + sd = ref_el.get_spatial_dimension() + old_verts = ref_el.get_vertices() + new_verts = make_lattice(old_verts, 2) new_topology = {} new_topology[0] = {i: (i,) for i in range(len(new_verts))} new_topology[1] = {} - # Split each 
edge - offset = len(old_verts) - for entity, verts in ref_el.topology[1].items(): - midpoint = offset + entity - new_topology[1][2*entity] = (verts[0], midpoint) - new_topology[1][2*entity+1] = (verts[1], midpoint) - - # Add edges connecting midpoints - num_old_edges = len(ref_el.topology[1]) - cur = len(new_topology[1]) - for j in range(num_old_edges): - for i in range(j+1, num_old_edges): - new_topology[1][cur] = (offset+j, offset+i) - cur = cur + 1 - - # TODO add higher dimensional entites + # Loop through vertex pairs + # Edges are oriented from low vertex id to high vertex id to avoid duplicates + # Place a new edge when the two lattice multiindices are at Manhattan distance < 3, + # this connects the midpoints of edges within a face + # Only include diagonal edges that are parallel to the simplex edges, + # we take the diagonal that goes through vertices of the same "polynomial degree" + cur = 0 + distance = lambda x, y: sum(abs(b-a) for a, b in zip(x, y)) + for j, v1 in enumerate(lattice_iter(0, 3, sd)): + for i, v0 in enumerate(lattice_iter(0, 3, sd)): + if i < j and distance(v0, v1) < 3 and sum(v1) - sum(v0) <= 1: + new_topology[1][cur] = (i, j) + cur = cur + 1 + if sd == 3: + # Cut the octahedron + # FIXME do this more generalically + new_topology[1][cur] = (1, 8) + + # Get an adjacency list for each vertex + adjacency = {} + for v in new_topology[0]: + cur_neigh = [] + for entity in new_topology[1]: + if v in new_topology[1][entity]: + cur_neigh.extend(new_topology[1][entity]) + adjacency[v] = set(cur_neigh) + + # Complete the higher dimensional facets by appending a vertex + # adjacent to the vertices of co-dimension 1 facets for dim in range(2, sd+1): - new_topology[dim] = {} - - super(UniformSplit, self).__init__(ref_el.shape, new_verts, new_topology) - - def construct_subelement(self, dimension): - """Constructs the reference element of a cell subentity - specified by subelement dimension. 
- - :arg dimension: subentity dimension (integer) - """ - return self.parent.construct_subelement(dimension) + entities = [] + for entity in new_topology[dim-1]: + facet = new_topology[dim-1][entity] + for v in range(min(facet)): + if set(facet) < adjacency[v]: + entities.append((v,) + facet) + + new_topology[dim] = dict(enumerate(entities)) + return new_verts, new_topology class MacroElement(FiniteElement): diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 2f4736df6..565af6df3 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -1,52 +1,66 @@ import numpy import pytest -from FIAT.reference_element import ufc_simplex, symmetric_simplex -from FIAT.macro import AlfeldSplit, MacroQuadratureRule +from FIAT.reference_element import ufc_simplex +from FIAT.macro import AlfeldSplit, UniformSplit, MacroQuadratureRule from FIAT.quadrature_schemes import create_quadrature -@pytest.mark.parametrize("sdim", (2, 3)) -def test_split_entity_transform(sdim): - T = ufc_simplex(sdim) - TA = AlfeldSplit(T) - TAT = TA.topology +@pytest.fixture(params=("T", "S")) +def cell(request): + return {"T": ufc_simplex(2), + "S": ufc_simplex(3)}[request.param] - for i in range(1, sdim+1): - TX = TA.construct_subelement(i) - b = numpy.average(TX.get_vertices(), axis=0) - for entity in TAT[i]: - mapped_bary = TA.get_entity_transform(i, entity)(b) - computed_bary = numpy.average(TA.get_vertices_of_subcomplex(TAT[i][entity]), axis=0) + +@pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) +def test_split_entity_transform(split, cell): + split_cell = split(cell) + top = split_cell.get_topology() + sdim = cell.get_spatial_dimension() + for dim in range(1, sdim+1): + ref_el = split_cell.construct_subelement(dim) + b = numpy.average(ref_el.get_vertices(), axis=0) + for entity in top[dim]: + mapped_bary = split_cell.get_entity_transform(dim, entity)(b) + computed_bary = numpy.average(split_cell.get_vertices_of_subcomplex(top[dim][entity]), axis=0) assert numpy.allclose(mapped_bary, computed_bary) -@pytest.mark.parametrize("sdim", (2, 3)) @pytest.mark.parametrize("degree", (4,)) @pytest.mark.parametrize("variant", ("gll", "equispaced")) -def test_split_make_points(sdim, degree, variant): - T = ufc_simplex(sdim) - TA = AlfeldSplit(T) - TAT = TA.topology - +@pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) +def test_split_make_points(split, cell, degree, variant): + split_cell = split(cell) + top = split_cell.get_topology() + sdim = cell.get_spatial_dimension() for i in range(1, sdim+1): - TX = TA.construct_subelement(i) - pts_ref = TX.make_points(i, 0, degree, variant=variant) - for entity in TAT[i]: - pts_entity = TA.make_points(i, entity, degree, variant=variant) - mapping = TA.get_entity_transform(i, entity) + ref_el = split_cell.construct_subelement(i) + pts_ref = ref_el.make_points(i, 0, degree, variant=variant) + for entity in top[i]: + pts_entity = split_cell.make_points(i, entity, degree, variant=variant) + mapping = split_cell.get_entity_transform(i, entity) mapped_pts = list(map(mapping, pts_ref)) assert numpy.allclose(mapped_pts, pts_entity) -@pytest.mark.parametrize("sdim", (2, 3)) -def test_macro_quadrature(sdim): - T = symmetric_simplex(sdim) - TA = AlfeldSplit(T) +@pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) +def test_macro_quadrature(split, cell): + split_cell = split(cell) - degree = 6 - Q_ref = create_quadrature(T, degree) - Q = MacroQuadratureRule(TA, Q_ref) + degree = 12 + Q_ref = create_quadrature(cell.construct_subelement(1), degree) 
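+    # Q_ref lives on the 1-dimensional subelement, so MacroQuadratureRule maps it
+    # onto every edge of the split complex and concatenates the points and weights.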
+ Q = MacroQuadratureRule(split_cell, Q_ref) Q.get_points() + # import matplotlib.pyplot as plt - # plt.scatter(*Q.get_points().T) + # fig = plt.figure() + # sdim = cell.get_spatial_dimension() + # if sdim == 3: + # ax = fig.add_subplot(projection='3d') + # else: + # ax = fig.add_subplot() + # for i, vert in enumerate(split_cell.vertices): + # ax.text(*vert, str(i)) + # + # ax.scatter(*Q.get_points().T) + # ax.axis("equal") # plt.show() From dc809bf627b9cafcd8f3ca699091c7a2caa30466 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 22 Mar 2024 19:34:40 -0500 Subject: [PATCH 08/93] Add child_to_parent --- FIAT/macro.py | 27 +++++++++++++++++++++++++++ test/unit/test_macro.py | 9 +++++++++ 2 files changed, 36 insertions(+) diff --git a/FIAT/macro.py b/FIAT/macro.py index bc4e6bdb8..d4e753c04 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -37,6 +37,16 @@ def xy_to_bary(verts, pts, result=None): return result +def invert_cell_topology(T): + return {dim: {T[dim][entity]: entity for entity in T[dim]} for dim in T} + + +def support_of_many(xs, tol=1.e-12): + # xs is an iterable of tuples (barycentric coordinates) + # produce set of indices where some x is nonzero + return tuple(sorted(set(i for x in xs for (i, xi) in enumerate(x) if abs(xi) > tol))) + + class SplitSimplicialComplex(SimplicialComplex): """Abstract class to implement a split on a Simplex """ @@ -49,6 +59,23 @@ def __init__(self, ref_el): def split_topology(self, ref_el): raise NotImplementedError + def get_child_to_parent(self): + bary = xy_to_bary(numpy.asarray(self.parent.get_vertices()), + numpy.asarray(self.get_vertices())) + mapping = {} + top = self.get_topology() + parent_inv_top = invert_cell_topology(self.parent.get_topology()) + for dim in top: + mapping[dim] = {} + for entity in top[dim]: + facet_ids = top[dim][entity] + facet_coords = bary[list(facet_ids), :] + parent_verts = support_of_many(facet_coords) + parent_dim = len(parent_verts) - 1 + parent_entity = parent_inv_top[parent_dim][parent_verts] + mapping[dim][entity] = (parent_dim, parent_entity) + return mapping + def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity specified by subelement dimension. 
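
For orientation, a small sketch of what the new child-to-parent map produces (illustrative, not taken from the diffs; the expected tuples are worked out by hand from the code above): on the Alfeld split of the UFC triangle, the original edges map to themselves, while the barycenter and every entity touching it map to the interior of the parent cell.

    from FIAT.reference_element import ufc_simplex
    from FIAT.macro import AlfeldSplit

    c2p = AlfeldSplit(ufc_simplex(2)).get_child_to_parent()
    assert c2p[0][3] == (2, 0)   # vertex 3 is the barycenter, interior to the parent
    assert c2p[1][0] == (1, 0)   # child edge 0 coincides with parent edge 0
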
diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 565af6df3..0c164504a 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -42,6 +42,15 @@ def test_split_make_points(split, cell, degree, variant): assert numpy.allclose(mapped_pts, pts_entity) +@pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) +def test_split_child_to_parent(split, cell): + split_cell = split(cell) + mapping = split_cell.get_child_to_parent() + print("") + for dim in mapping: + print(mapping[dim]) + + @pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) def test_macro_quadrature(split, cell): split_cell = split(cell) From 4a079fe4fbe1d052c08b97136518d6bdde2838d8 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sun, 24 Mar 2024 16:53:26 -0500 Subject: [PATCH 09/93] First stab at piecewise polynomial ExpansionSets on simplicial complexes --- FIAT/expansions.py | 74 ++++++++++++++++++++++++++++++++++------------ FIAT/macro.py | 65 +++++++++++----------------------------- 2 files changed, 72 insertions(+), 67 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 854855d8e..d80033357 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -223,6 +223,15 @@ def xi_tetrahedron(eta): return xi1, xi2, xi3 +def apply_mapping(A, b, pts): + if isinstance(pts, numpy.ndarray) and len(pts.shape) == 2: + return numpy.dot(A, pts) + b[:, None] + else: + m1, m2 = A.shape + return [sum((A[i, j] * pts[j] for j in range(m2)), b[i]) + for i in range(m1)] + + class ExpansionSet(object): def __new__(cls, *args, **kwargs): """Returns an ExpansionSet instance appopriate for the given @@ -244,12 +253,15 @@ def __new__(cls, *args, **kwargs): def __init__(self, ref_el, scale=None, variant=None): self.ref_el = ref_el self.variant = variant - dim = ref_el.get_spatial_dimension() - self.base_ref_el = reference_element.default_simplex(dim) - v1 = ref_el.get_vertices() + sd = ref_el.get_spatial_dimension() + top = ref_el.get_topology() + self.base_ref_el = reference_element.default_simplex(sd) v2 = self.base_ref_el.get_vertices() - self.A, self.b = reference_element.make_affine_mapping(v1, v2) - self.mapping = lambda x: numpy.dot(self.A, x) + self.b + self.affine_mappings = [reference_element.make_affine_mapping( + ref_el.get_vertices_of_subcomplex(top[sd][cell]), v2) + for cell in top[sd]] + # self.mapping = lambda x: numpy.dot(self.A, x) + self.b + self._dmats_cache = {} if scale is None: scale = math.sqrt(1.0 / self.base_ref_el.volume()) @@ -262,22 +274,45 @@ def __init__(self, ref_el, scale=None, variant=None): self.scale = scale def get_num_members(self, n): - D = self.ref_el.get_spatial_dimension() - return math.comb(n + D, D) - - def _mapping(self, pts): - if isinstance(pts, numpy.ndarray) and len(pts.shape) == 2: - return numpy.dot(self.A, pts) + self.b[:, None] + sd = self.ref_el.get_spatial_dimension() + top = self.ref_el.get_topology() + if len(top[sd]) == 1: + return math.comb(n + sd, sd) + else: + # TODO macro elements + raise NotImplementedError + + def get_cell_node_map(self, n): + sd = self.ref_el.get_spatial_dimension() + top = self.ref_el.get_topology() + if len(top[sd]) == 1: + return (slice(None, None),) + else: + # TODO macro elements + raise NotImplementedError + + def get_point_cell_map(self, pts): + sd = self.ref_el.get_spatial_dimension() + top = self.ref_el.get_topology() + if len(top[sd]) == 1: + return (slice(None, None),) else: - m1, m2 = self.A.shape - return [sum((self.A[i, j] * pts[j] for j in range(m2)), self.b[i]) - for i in range(m1)] + # 
TODO macro elements + raise NotImplementedError def _tabulate(self, n, pts, order=0): """A version of tabulate() that also works for a single point. """ - D = self.ref_el.get_spatial_dimension() - return dubiner_recurrence(D, n, order, self._mapping(pts), self.A, self.scale, variant=self.variant) + sd = self.ref_el.get_spatial_dimension() + cell_node_map = self.get_cell_node_map(n) + point_cell_map = self.get_point_cell_map(pts) + for ibfs, ipts, (A, b) in zip(cell_node_map, point_cell_map, self.affine_mappings): + # TODO indirection + # NOTE results is a tuple of tabulations of derivatives up to the given order + ref_pts = apply_mapping(A, b, pts) + results = dubiner_recurrence(sd, n, order, ref_pts, A, + self.scale, variant=self.variant) + return results def get_dmats(self, degree): """Returns a numpy array with the expansion coefficients dmat[k, j, i] @@ -380,10 +415,11 @@ def __init__(self, ref_el, **kwargs): def _tabulate(self, n, pts, order=0): """Returns a tuple of (vals, derivs) such that vals[i,j] = phi_i(pts[j]), derivs[i,j] = D vals[i,j].""" - if self.variant is not None: + if self.variant is not None or len(self.affine_mappings) > 1: return super(LineExpansionSet, self)._tabulate(n, pts, order=order) - xs = self._mapping(pts).T + A, b = self.affine_mappings[0] + xs = apply_mapping(A, b, pts).T results = [] scale = self.scale * numpy.sqrt(2 * numpy.arange(n+1) + 1) for k in range(order+1): @@ -392,7 +428,7 @@ def _tabulate(self, n, pts, order=0): v[k:] = jacobi.eval_jacobi_batch(k, k, n-k, xs) for p in range(n + 1): v[p] *= scale[p] - scale[p] *= 0.5 * (p + k + 1) * self.A[0, 0] + scale[p] *= 0.5 * (p + k + 1) * A[0, 0] shape = v.shape shape = shape[:1] + (1,) * k + shape[1:] results.append(v.reshape(shape)) diff --git a/FIAT/macro.py b/FIAT/macro.py index d4e753c04..99f2aaa83 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -1,7 +1,7 @@ import copy import numpy +from itertools import chain from FIAT.reference_element import make_lattice, lattice_iter, SimplicialComplex -from FIAT.finite_element import FiniteElement from FIAT.quadrature import QuadratureRule, FacetQuadratureRule @@ -9,12 +9,7 @@ def bary_to_xy(verts, bary, result=None): # verts is (sdim + 1) x sdim so verts[i, :] is i:th vertex # bary is [npts, sdim + 1] # result is [npts, sdim] - - if result is None: - return bary @ verts - else: - numpy.dot(bary, verts, out=result) - return result + return numpy.dot(bary, verts, out=result) def xy_to_bary(verts, pts, result=None): @@ -37,14 +32,14 @@ def xy_to_bary(verts, pts, result=None): return result -def invert_cell_topology(T): - return {dim: {T[dim][entity]: entity for entity in T[dim]} for dim in T} +def facet_support(facet_coords, tol=1.e-12): + # facet_coords is an iterable of tuples (barycentric coordinates) + # return vertex ids where some x is nonzero + return tuple(sorted(set(i for x in facet_coords for (i, xi) in enumerate(x) if abs(xi) > tol))) -def support_of_many(xs, tol=1.e-12): - # xs is an iterable of tuples (barycentric coordinates) - # produce set of indices where some x is nonzero - return tuple(sorted(set(i for x in xs for (i, xi) in enumerate(x) if abs(xi) > tol))) +def invert_cell_topology(T): + return {dim: {T[dim][entity]: entity for entity in T[dim]} for dim in T} class SplitSimplicialComplex(SimplicialComplex): @@ -62,19 +57,19 @@ def split_topology(self, ref_el): def get_child_to_parent(self): bary = xy_to_bary(numpy.asarray(self.parent.get_vertices()), numpy.asarray(self.get_vertices())) - mapping = {} top = self.get_topology() parent_inv_top 
= invert_cell_topology(self.parent.get_topology()) + child_to_parent = {} for dim in top: - mapping[dim] = {} + child_to_parent[dim] = {} for entity in top[dim]: facet_ids = top[dim][entity] facet_coords = bary[list(facet_ids), :] - parent_verts = support_of_many(facet_coords) + parent_verts = facet_support(facet_coords) parent_dim = len(parent_verts) - 1 parent_entity = parent_inv_top[parent_dim][parent_verts] - mapping[dim][entity] = (parent_dim, parent_entity) - return mapping + child_to_parent[dim][entity] = (parent_dim, parent_entity) + return child_to_parent def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity @@ -120,7 +115,7 @@ def split_topology(self, ref_el): # Place a new edge when the two lattice multiindices are at Manhattan distance < 3, # this connects the midpoints of edges within a face # Only include diagonal edges that are parallel to the simplex edges, - # we take the diagonal that goes through vertices of the same "polynomial degree" + # we take the diagonal that goes through vertices of the same multiindex sum cur = 0 distance = lambda x, y: sum(abs(b-a) for a, b in zip(x, y)) for j, v1 in enumerate(lattice_iter(0, 3, sd)): @@ -134,16 +129,11 @@ def split_topology(self, ref_el): new_topology[1][cur] = (1, 8) # Get an adjacency list for each vertex - adjacency = {} - for v in new_topology[0]: - cur_neigh = [] - for entity in new_topology[1]: - if v in new_topology[1][entity]: - cur_neigh.extend(new_topology[1][entity]) - adjacency[v] = set(cur_neigh) + edges = new_topology[1].values() + adjacency = {v: set(chain.from_iterable(verts for verts in edges if v in verts)) for v in new_topology[0]} # Complete the higher dimensional facets by appending a vertex - # adjacent to the vertices of co-dimension 1 facets + # adjacent to the vertices of codimension 1 facets for dim in range(2, sd+1): entities = [] for entity in new_topology[dim-1]: @@ -151,31 +141,10 @@ def split_topology(self, ref_el): for v in range(min(facet)): if set(facet) < adjacency[v]: entities.append((v,) + facet) - new_topology[dim] = dict(enumerate(entities)) return new_verts, new_topology -class MacroElement(FiniteElement): - """ - A macro element built from a base finite element on a split of the reference cell - """ - - def __init__(self, element, split): - ref_el = element.get_reference_element() - dual = None - order = element.get_order() - formdegree = element.get_formdegree() - mapping = element._mapping - self.element = element - self.cell_complex = split(ref_el) - super(MacroElement, self).__init__(ref_el, dual, order, formdegree=formdegree, mapping=mapping) - - def tabulate(self, order, points, entity=None): - raise NotImplementedError("Wait for it") - # tabulate the reference element on each sub-cell and scatter with the local to global mapping - - class MacroQuadratureRule(QuadratureRule): def __init__(self, cell_complex, Q_ref): From e224ada176c1eb8c5b6a87a1b0a6d14390681820 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Mon, 25 Mar 2024 10:12:40 -0500 Subject: [PATCH 10/93] ExpansionSet: add indirection --- FIAT/expansions.py | 14 +++++++------- FIAT/macro.py | 5 +++-- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index d80033357..b9d3f5666 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -260,8 +260,6 @@ def __init__(self, ref_el, scale=None, variant=None): self.affine_mappings = [reference_element.make_affine_mapping( ref_el.get_vertices_of_subcomplex(top[sd][cell]), v2) for 
cell in top[sd]] - # self.mapping = lambda x: numpy.dot(self.A, x) + self.b - self._dmats_cache = {} if scale is None: scale = math.sqrt(1.0 / self.base_ref_el.volume()) @@ -306,12 +304,14 @@ def _tabulate(self, n, pts, order=0): sd = self.ref_el.get_spatial_dimension() cell_node_map = self.get_cell_node_map(n) point_cell_map = self.get_point_cell_map(pts) + nphis = self.get_num_members(n) + results = tuple(numpy.zeros((nphis,) + (sd, )*k + pts.shape[1:]) for k in range(order+1)) for ibfs, ipts, (A, b) in zip(cell_node_map, point_cell_map, self.affine_mappings): - # TODO indirection - # NOTE results is a tuple of tabulations of derivatives up to the given order - ref_pts = apply_mapping(A, b, pts) - results = dubiner_recurrence(sd, n, order, ref_pts, A, - self.scale, variant=self.variant) + ref_pts = apply_mapping(A, b, pts[ipts]) + phis = dubiner_recurrence(sd, n, order, ref_pts, A, + self.scale, variant=self.variant) + for result, phi in zip(results, phis): + result[ibfs, ..., ipts] = phi return results def get_dmats(self, degree): diff --git a/FIAT/macro.py b/FIAT/macro.py index 99f2aaa83..85061b891 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -125,12 +125,13 @@ def split_topology(self, ref_el): cur = cur + 1 if sd == 3: # Cut the octahedron - # FIXME do this more generalically + # FIXME do this more generically new_topology[1][cur] = (1, 8) # Get an adjacency list for each vertex edges = new_topology[1].values() - adjacency = {v: set(chain.from_iterable(verts for verts in edges if v in verts)) for v in new_topology[0]} + adjacency = {v: set(chain.from_iterable(verts for verts in edges if v in verts)) + for v in new_topology[0]} # Complete the higher dimensional facets by appending a vertex # adjacent to the vertices of codimension 1 facets From 43bab790d4ef7950a75d76b0bf52b61bc2445fe6 Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Mon, 25 Mar 2024 10:49:54 -0500 Subject: [PATCH 11/93] Added plausible macro element constructor that builds entity_ids and cell_node_map --- FIAT/macro.py | 63 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/FIAT/macro.py b/FIAT/macro.py index 99f2aaa83..f0d6f557c 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -162,3 +162,66 @@ def __init__(self, cell_complex, Q_ref): pts = tuple(pts) wts = tuple(wts) super(MacroQuadratureRule, self).__init__(ref_el, pts, wts) + + +class MacroElement(): + + def __init__(self, finite_element, splitting): + ref_el = finite_element.ref_el + sdim = ref_el.get_spatial_dimension() + sc = splitting(ref_el) + + ref_eids = finite_element.entity_dofs() + ndofs_per_dim = {d: len(ref_eids[d][0]) for d in ref_eids} + + sc_facet_to_dofs = {} + + sc_t = sc.topology + + # Enumerate dofs, attach to complex facets + dof_cur = 0 + for dim in sc_t: + for facet_id in sc_t[dim]: + facet = sc_t[dim][facet_id] + ndof_cur = ndofs_per_dim[dim] + sc_facet_to_dofs[facet] = (dof_cur, dof_cur + ndof_cur) + dof_cur += ndof_cur + + # cell_node_map + num_cells = len(sc_t[dim]) + dofs_per_cell = finite_element.space_dimension() + + # This is used in evaluation. 
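+        # cell_node_map[i, j] is the macro-element dof assigned to local reference
+        # dof j of subcell i; it is assembled from the complex connectivity below.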
+ cell_node_map = numpy.zeros((num_cells, dofs_per_cell), int) + conn = sc.connectivity + for cid, in conn[(sdim, sdim)]: + for dim in range(sdim+1): + for ref_fid, fid in enumerate(conn[(sdim, dim)][cid]): + facet = sc_t[dim][fid] + dofs = list(range(*sc_facet_to_dofs[facet])) + ref_dofs = ref_eids[dim][ref_fid] + cell_node_map[cid, ref_dofs] = dofs + + # collect dofs from complex onto facets of main cell + # This needs to go into a dual something or other somewhere + c2p = sc.get_child_to_parent() + ref_t = ref_el.topology + entity_ids = {d: {f: [] for f in ref_t[d]} for d in ref_t} + for dim in c2p: + for fid in c2p[dim]: + (parent_dim, parent_id) = c2p[dim][fid] + dofs_cur = list(range(*sc_facet_to_dofs[sc_t[dim][fid]])) + entity_ids[parent_dim][parent_id].extend(dofs_cur) + + print(f"Cell node map:\n{cell_node_map}") + print(f"DOFs per facet in reference cell:\n{entity_ids}") + + +if __name__ == "__main__": + from reference_element import ufc_simplex + from lagrange import Lagrange + K = ufc_simplex(2) + L = Lagrange(K, 3) + + ML = MacroElement(L, AlfeldSplit) + From bf060aed5a26ee6f868a0b9b572195d98d2582ba Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 26 Mar 2024 09:19:40 -0500 Subject: [PATCH 12/93] Some infrastructure for C0 ExpansionSet --- FIAT/expansions.py | 105 +++++++++++++++++++++++++++++++++++++-------- FIAT/macro.py | 10 ++--- 2 files changed, 92 insertions(+), 23 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index b9d3f5666..4accc07db 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -281,38 +281,42 @@ def get_num_members(self, n): raise NotImplementedError def get_cell_node_map(self, n): - sd = self.ref_el.get_spatial_dimension() - top = self.ref_el.get_topology() - if len(top[sd]) == 1: - return (slice(None, None),) - else: - # TODO macro elements - raise NotImplementedError + return make_cell_node_map(self.ref_el, n, self.variant) def get_point_cell_map(self, pts): sd = self.ref_el.get_spatial_dimension() top = self.ref_el.get_topology() if len(top[sd]) == 1: + if len(pts.shape) == 1: + return tuple() return (slice(None, None),) else: # TODO macro elements + return (slice(None, None),) raise NotImplementedError def _tabulate(self, n, pts, order=0): """A version of tabulate() that also works for a single point. 
""" - sd = self.ref_el.get_spatial_dimension() - cell_node_map = self.get_cell_node_map(n) + phis = [] point_cell_map = self.get_point_cell_map(pts) - nphis = self.get_num_members(n) - results = tuple(numpy.zeros((nphis,) + (sd, )*k + pts.shape[1:]) for k in range(order+1)) - for ibfs, ipts, (A, b) in zip(cell_node_map, point_cell_map, self.affine_mappings): + sd = self.ref_el.get_spatial_dimension() + for ipts, (A, b) in zip(point_cell_map, self.affine_mappings): ref_pts = apply_mapping(A, b, pts[ipts]) - phis = dubiner_recurrence(sd, n, order, ref_pts, A, - self.scale, variant=self.variant) - for result, phi in zip(results, phis): - result[ibfs, ..., ipts] = phi - return results + phis.append(dubiner_recurrence(sd, n, order, ref_pts, A, + self.scale, variant=self.variant)) + if len(self.affine_mappings) == 1: + return phis[0] + + results = [] + cell_node_map = self.get_cell_node_map(n) + num_phis = numpy.max(cell_node_map) + 1 + for k in range(order+1): + result = numpy.zeros((num_phis,) + (sd,) * k + pts.shape[1:]) + for ibfs, ipts, phi in zip(cell_node_map, point_cell_map, phis): + result[ibfs, ..., ipts] = phi[k] + results.append(result) + return tuple(results) def get_dmats(self, degree): """Returns a numpy array with the expansion coefficients dmat[k, j, i] @@ -467,3 +471,70 @@ def polynomial_dimension(ref_el, degree): return max(0, (degree + 1) * (degree + 2) * (degree + 3) // 6) else: raise ValueError("Unknown reference element type.") + + +def support(alpha): + return tuple(i for (i, ai) in enumerate(alpha) if abs(ai) > 0) + + +def invert_cell_topology(T): + return {T[dim][entity]: (dim, entity) for dim in T for entity in T[dim]} + + + +def make_entity_ids(ref_el, n, variant): + top = ref_el.get_topology() + sd = ref_el.get_spatial_dimension() + entity_ids = {dim: {entity: [] for entity in top[dim]} for dim in top} + if variant == "integral": + idx = (lambda p: p, morton_index2, morton_index3)[sd-1] + inv_top = invert_cell_topology(top) + if len(top[sd]) == 1: + for alpha in reference_element.lattice_iter(0, n+1, sd): + beta = list(alpha) + for k in range(len(beta)): + m = n - sum(beta) + if beta[k] == 0: + beta[k] = m + + beta += [n - sum(beta)] + dim, entity = inv_top[support(beta)] + entity_ids[dim][entity].append(idx(*alpha)) + else: + # C0 numbering + cur = 0 + for dim in top: + dofs = math.comb(n - 1, dim) + for entity in top[dim]: + entity_ids[dim][entity] = list(range(cur, cur + dofs)) + cur += dofs + else: + # DG numbering + cur = 0 + dofs = math.comb(n + sd, sd) + for entity in top[sd]: + entity_ids[sd][entity] = list(range(cur, cur + dofs)) + cur += dofs + + print(entity_ids) + return entity_ids + + +def make_cell_node_map(ref_el, degree, variant): + top = ref_el.get_topology() + sd = ref_el.get_spatial_dimension() + + ref_entity_ids = make_entity_ids(ref_el.construct_subelement(sd), degree, variant) + entity_ids = make_entity_ids(ref_el, degree, variant) + + num_cells = len(top[sd]) + dofs_per_cell = sum(len(ref_entity_ids[dim][entity]) + for dim in ref_entity_ids for entity in ref_entity_ids[dim]) + cell_node_map = numpy.zeros((num_cells, dofs_per_cell), dtype=int) + conn = ref_el.connectivity + for cell in top[sd]: + for dim in range(sd+1): + for ref_entity, entity in enumerate(conn[(sd, dim)][cell]): + ref_dofs = ref_entity_ids[dim][ref_entity] + cell_node_map[cell, ref_dofs] = entity_ids[dim][entity] + return cell_node_map diff --git a/FIAT/macro.py b/FIAT/macro.py index 063e0dfe6..1f39f3688 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -131,7 +131,7 @@ def 
split_topology(self, ref_el): # Get an adjacency list for each vertex edges = new_topology[1].values() adjacency = {v: set(chain.from_iterable(verts for verts in edges if v in verts)) - for v in new_topology[0]} + for v in new_topology[0]} # Complete the higher dimensional facets by appending a vertex # adjacent to the vertices of codimension 1 facets @@ -174,7 +174,7 @@ def __init__(self, finite_element, splitting): ref_eids = finite_element.entity_dofs() ndofs_per_dim = {d: len(ref_eids[d][0]) for d in ref_eids} - + sc_facet_to_dofs = {} sc_t = sc.topology @@ -193,7 +193,7 @@ def __init__(self, finite_element, splitting): dofs_per_cell = finite_element.space_dimension() # This is used in evaluation. - cell_node_map = numpy.zeros((num_cells, dofs_per_cell), int) + cell_node_map = numpy.zeros((num_cells, dofs_per_cell), int) conn = sc.connectivity for cid, in conn[(sdim, sdim)]: for dim in range(sdim+1): @@ -216,13 +216,11 @@ def __init__(self, finite_element, splitting): print(f"Cell node map:\n{cell_node_map}") print(f"DOFs per facet in reference cell:\n{entity_ids}") - + if __name__ == "__main__": from reference_element import ufc_simplex from lagrange import Lagrange K = ufc_simplex(2) L = Lagrange(K, 3) - ML = MacroElement(L, AlfeldSplit) - From c0117ab7b4e3bb776f07fa389580fb594deb4c84 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 26 Mar 2024 14:25:44 -0500 Subject: [PATCH 13/93] C0 basis is sorted by entity --- FIAT/expansions.py | 176 +++++++++++++++++++---------------------- FIAT/polynomial_set.py | 13 ++- 2 files changed, 85 insertions(+), 104 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 4accc07db..5e2dca965 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -62,6 +62,55 @@ def jacobi_factors(x, y, z, dx, dy, dz): return fa, fb, fc, dfa, dfb, dfc +def C0_basis(dim, n, phi): + idx = (lambda p: p, morton_index2, morton_index3)[dim-1] + # recover facet bubbles + icur = 0 + phi[icur] *= -1 + for inext in range(1, dim+1): + phi[icur] -= phi[inext] + + if dim == 2: + for i in range(2, n+1): + phi[idx(0, i)] -= phi[idx(1, i-1)] + + elif dim == 3: + for i in range(2, n+1): + for j in range(0, n+1-i): + phi[idx(0, i, j)] -= phi[idx(1, i-1, j)] + icur = idx(0, 0, i) + phi[icur] -= phi[idx(1, 0, i-1)] + phi[icur] -= phi[idx(0, 1, i-1)] + + # reorder by dimension and entity on the reference simplex + dofs = list(range(dim+1)) + if dim == 1: + dofs.extend(range(2, n+1)) + elif dim == 2: + dofs.extend(idx(1, i-1) for i in range(2, n+1)) + dofs.extend(idx(0, i) for i in range(2, n+1)) + dofs.extend(idx(i, 0) for i in range(2, n+1)) + + dofs.extend(idx(i, j) for j in range(1, n+1) for i in range(2, n-j+1)) + else: + dofs.extend(idx(0, 1, i-1) for i in range(2, n+1)) + dofs.extend(idx(1, 0, i-1) for i in range(2, n+1)) + dofs.extend(idx(1, i-1, 0) for i in range(2, n+1)) + dofs.extend(idx(0, 0, i) for i in range(2, n+1)) + dofs.extend(idx(0, i, 0) for i in range(2, n+1)) + dofs.extend(idx(i, 0, 0) for i in range(2, n+1)) + + dofs.extend(idx(1, i-1, j) for j in range(1, n+1) for i in range(2, n-j+1)) + dofs.extend(idx(0, i, j) for j in range(1, n+1) for i in range(2, n-j+1)) + dofs.extend(idx(i, 0, j) for j in range(1, n+1) for i in range(2, n-j+1)) + dofs.extend(idx(i, j, 0) for j in range(1, n+1) for i in range(2, n-j+1)) + + dofs.extend(idx(i, j, k) for k in range(1, n+1) for j in range(1, n-k+1) for i in range(2, n-j-k+1)) + + result = [phi[i] for i in dofs] + return result + + def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, 
variant=None): """Tabulate a Dubiner expansion set using the recurrence from (Kirby 2010). @@ -171,38 +220,6 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): scale = math.sqrt(norm2) for result in results: result[icur] *= scale - - # recover facet bubbles - if variant == "integral": - icur = 0 - for result in results: - result[icur] *= -1 - for inext in range(1, dim+1): - for result in results: - result[icur] -= result[inext] - - if dim == 2: - for i in range(2, n+1): - icur = idx(0, i) - iprev = idx(1, i-1) - for result in results: - result[icur] -= result[iprev] - - elif dim == 3: - for i in range(2, n+1): - for j in range(0, n+1-i): - icur = idx(0, i, j) - iprev = idx(1, i-1, j) - for result in results: - result[icur] -= result[iprev] - - icur = idx(0, 0, i) - iprev0 = idx(1, 0, i-1) - iprev1 = idx(0, 1, i-1) - for result in results: - result[icur] -= result[iprev0] - result[icur] -= result[iprev1] - return results @@ -234,7 +251,7 @@ def apply_mapping(A, b, pts): class ExpansionSet(object): def __new__(cls, *args, **kwargs): - """Returns an ExpansionSet instance appopriate for the given + """Returns an ExpansionSet instance appropriate for the given reference element.""" if cls is not ExpansionSet: return super(ExpansionSet, cls).__new__(cls) @@ -272,16 +289,10 @@ def __init__(self, ref_el, scale=None, variant=None): self.scale = scale def get_num_members(self, n): - sd = self.ref_el.get_spatial_dimension() - top = self.ref_el.get_topology() - if len(top[sd]) == 1: - return math.comb(n + sd, sd) - else: - # TODO macro elements - raise NotImplementedError + return polynomial_dimension(self.ref_el, n, variant=self.variant) def get_cell_node_map(self, n): - return make_cell_node_map(self.ref_el, n, self.variant) + return polynomial_cell_node_map(self.ref_el, n, self.variant) def get_point_cell_map(self, pts): sd = self.ref_el.get_spatial_dimension() @@ -303,8 +314,11 @@ def _tabulate(self, n, pts, order=0): sd = self.ref_el.get_spatial_dimension() for ipts, (A, b) in zip(point_cell_map, self.affine_mappings): ref_pts = apply_mapping(A, b, pts[ipts]) - phis.append(dubiner_recurrence(sd, n, order, ref_pts, A, - self.scale, variant=self.variant)) + cur_phis = dubiner_recurrence(sd, n, order, ref_pts, A, + self.scale, variant=self.variant) + if self.variant == "integral": + cur_phis = tuple(C0_basis(sd, n, cur_phi) for cur_phi in cur_phis) + phis.append(cur_phis) if len(self.affine_mappings) == 1: return phis[0] @@ -456,76 +470,46 @@ def __init__(self, ref_el, **kwargs): super(TetrahedronExpansionSet, self).__init__(ref_el, **kwargs) -def polynomial_dimension(ref_el, degree): +def polynomial_dimension(ref_el, n, variant=None): """Returns the dimension of the space of polynomials of degree no greater than degree on the reference element.""" if ref_el.get_shape() == reference_element.POINT: if degree > 0: raise ValueError("Only degree zero polynomials supported on point elements.") return 1 - elif ref_el.get_shape() == reference_element.LINE: - return max(0, degree + 1) - elif ref_el.get_shape() == reference_element.TRIANGLE: - return max((degree + 1) * (degree + 2) // 2, 0) - elif ref_el.get_shape() == reference_element.TETRAHEDRON: - return max(0, (degree + 1) * (degree + 2) * (degree + 3) // 6) + top = ref_el.get_topology() + if variant == "integral": + space_dimension = sum(math.comb(n - 1, dim) * len(top[dim]) for dim in top) else: - raise ValueError("Unknown reference element type.") - - -def support(alpha): - return tuple(i for (i, ai) in enumerate(alpha) if 
abs(ai) > 0) - + dim = ref_el.get_spatial_dimension() + space_dimension = math.comb(n + dim, dim) * len(top[dim]) + return space_dimension -def invert_cell_topology(T): - return {T[dim][entity]: (dim, entity) for dim in T for entity in T[dim]} - - -def make_entity_ids(ref_el, n, variant): +def polynomial_entity_ids(ref_el, n, variant=None): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() - entity_ids = {dim: {entity: [] for entity in top[dim]} for dim in top} - if variant == "integral": - idx = (lambda p: p, morton_index2, morton_index3)[sd-1] - inv_top = invert_cell_topology(top) - if len(top[sd]) == 1: - for alpha in reference_element.lattice_iter(0, n+1, sd): - beta = list(alpha) - for k in range(len(beta)): - m = n - sum(beta) - if beta[k] == 0: - beta[k] = m - - beta += [n - sum(beta)] - dim, entity = inv_top[support(beta)] - entity_ids[dim][entity].append(idx(*alpha)) + entity_ids = {} + cur = 0 + for dim in sorted(top): + if variant == "integral": + # CG numbering + dofs = math.comb(n - 1, dim) else: - # C0 numbering - cur = 0 - for dim in top: - dofs = math.comb(n - 1, dim) - for entity in top[dim]: - entity_ids[dim][entity] = list(range(cur, cur + dofs)) - cur += dofs - else: - # DG numbering - cur = 0 - dofs = math.comb(n + sd, sd) - for entity in top[sd]: - entity_ids[sd][entity] = list(range(cur, cur + dofs)) + # DG numbering + dofs = math.comb(n + dim, dim) if dim == sd else 0 + entity_ids[dim] = {} + for entity in sorted(top[dim]): + entity_ids[dim][entity] = list(range(cur, cur + dofs)) cur += dofs - - print(entity_ids) return entity_ids -def make_cell_node_map(ref_el, degree, variant): +def polynomial_cell_node_map(ref_el, n, variant=None): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() - - ref_entity_ids = make_entity_ids(ref_el.construct_subelement(sd), degree, variant) - entity_ids = make_entity_ids(ref_el, degree, variant) + ref_entity_ids = polynomial_entity_ids(ref_el.construct_subelement(sd), n, variant) + entity_ids = polynomial_entity_ids(ref_el, n, variant) num_cells = len(top[sd]) dofs_per_cell = sum(len(ref_entity_ids[dim][entity]) @@ -533,7 +517,7 @@ def make_cell_node_map(ref_el, degree, variant): cell_node_map = numpy.zeros((num_cells, dofs_per_cell), dtype=int) conn = ref_el.connectivity for cell in top[sd]: - for dim in range(sd+1): + for dim in top: for ref_entity, entity in enumerate(conn[(sd, dim)][cell]): ref_dofs = ref_entity_ids[dim][ref_entity] cell_node_map[cell, ref_dofs] = entity_ids[dim][entity] diff --git a/FIAT/polynomial_set.py b/FIAT/polynomial_set.py index 64271edd5..0abe2ca01 100644 --- a/FIAT/polynomial_set.py +++ b/FIAT/polynomial_set.py @@ -250,17 +250,14 @@ def make_bubbles(ref_el, degree, shape=()): """ dim = ref_el.get_spatial_dimension() poly_set = ONPolynomialSet(ref_el, degree, shape=shape, scale="L2 piola", variant="integral") - degrees = chain(range(dim + 1, degree+1, 2), range(dim + 2, degree+1, 2)) - if dim == 1: + # odd / even reordering + degrees = chain(range(dim+1, degree+1, 2), range(dim+2, degree+1, 2)) indices = list(degrees) else: - idx = (expansions.morton_index2, expansions.morton_index3)[dim-2] - indices = [] - for p in degrees: - for alpha in mis(dim, p): - if alpha[0] > 1 and min(alpha[1:]) > 0: - indices.append(idx(*alpha)) + idofs = expansions.polynomial_dimension(ref_el, degree-dim-1) + ndofs = poly_set.get_num_members() + indices = list(range(ndofs-idofs, ndofs)) if shape != (): ncomp = numpy.prod(shape) From b545b6f63f3d98227d722991c911a4e8276b50d7 Mon Sep 17 00:00:00 2001 
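# --- Illustrative aside (not part of the patch series) ----------------------
# A standalone sanity check of the entity-wise dimension count that the
# variant="integral" (C0) numbering above relies on: comb(n-1, dim) basis
# functions sit in the interior of each entity of dimension dim.  Assumes only
# FIAT's ufc_simplex and Python's math.comb; on a single (unsplit) triangle the
# count must agree with the usual dimension of P_n.
import math
from FIAT.reference_element import ufc_simplex

K = ufc_simplex(2)
top = K.get_topology()
n = 3
c0_dim = sum(math.comb(n - 1, dim) * len(top[dim]) for dim in top)
assert c0_dim == math.comb(n + 2, 2)  # 3 vertex + 3*2 edge + 1 face dofs = 10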
From: Pablo Brubeck Date: Tue, 26 Mar 2024 14:33:46 -0500 Subject: [PATCH 14/93] fix tests --- FIAT/expansions.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 5e2dca965..7fac3fc3b 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -289,9 +289,10 @@ def __init__(self, ref_el, scale=None, variant=None): self.scale = scale def get_num_members(self, n): - return polynomial_dimension(self.ref_el, n, variant=self.variant) + return polynomial_dimension(self.ref_el, n, self.variant) def get_cell_node_map(self, n): + # TODO cache by degree return polynomial_cell_node_map(self.ref_el, n, self.variant) def get_point_cell_map(self, pts): @@ -299,7 +300,7 @@ def get_point_cell_map(self, pts): top = self.ref_el.get_topology() if len(top[sd]) == 1: if len(pts.shape) == 1: - return tuple() + return (tuple(),) return (slice(None, None),) else: # TODO macro elements @@ -474,7 +475,7 @@ def polynomial_dimension(ref_el, n, variant=None): """Returns the dimension of the space of polynomials of degree no greater than degree on the reference element.""" if ref_el.get_shape() == reference_element.POINT: - if degree > 0: + if n > 0: raise ValueError("Only degree zero polynomials supported on point elements.") return 1 top = ref_el.get_topology() From 6f9bbe61be1975c01fd48ec71e7b9329acec0182 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 26 Mar 2024 15:38:54 -0500 Subject: [PATCH 15/93] ExpansionSet: Point to cell mapping and local to global mapping --- FIAT/expansions.py | 70 ++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 58 insertions(+), 12 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 7fac3fc3b..1bf6cc465 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -278,6 +278,7 @@ def __init__(self, ref_el, scale=None, variant=None): ref_el.get_vertices_of_subcomplex(top[sd][cell]), v2) for cell in top[sd]] self._dmats_cache = {} + self._cell_node_map_cache = {} if scale is None: scale = math.sqrt(1.0 / self.base_ref_el.volume()) elif isinstance(scale, str): @@ -292,10 +293,13 @@ def get_num_members(self, n): return polynomial_dimension(self.ref_el, n, self.variant) def get_cell_node_map(self, n): - # TODO cache by degree - return polynomial_cell_node_map(self.ref_el, n, self.variant) + try: + return self._cell_node_map_cache[n] + except KeyError: + cell_node_map = polynomial_cell_node_map(self.ref_el, n, self.variant) + return self._cell_node_map_cache.setdefault(n, cell_node_map) - def get_point_cell_map(self, pts): + def get_cell_point_map(self, pts): sd = self.ref_el.get_spatial_dimension() top = self.ref_el.get_topology() if len(top[sd]) == 1: @@ -303,18 +307,18 @@ def get_point_cell_map(self, pts): return (tuple(),) return (slice(None, None),) else: - # TODO macro elements - return (slice(None, None),) - raise NotImplementedError + return compute_point_cell_map(self.ref_el, pts) def _tabulate(self, n, pts, order=0): """A version of tabulate() that also works for a single point. 
""" phis = [] - point_cell_map = self.get_point_cell_map(pts) + cell_point_map = self.get_cell_point_map(pts) + print(cell_point_map) + sd = self.ref_el.get_spatial_dimension() - for ipts, (A, b) in zip(point_cell_map, self.affine_mappings): - ref_pts = apply_mapping(A, b, pts[ipts]) + for ipts, (A, b) in zip(cell_point_map, self.affine_mappings): + ref_pts = apply_mapping(A, b, pts[:, ipts]) cur_phis = dubiner_recurrence(sd, n, order, ref_pts, A, self.scale, variant=self.variant) if self.variant == "integral": @@ -327,9 +331,11 @@ def _tabulate(self, n, pts, order=0): cell_node_map = self.get_cell_node_map(n) num_phis = numpy.max(cell_node_map) + 1 for k in range(order+1): - result = numpy.zeros((num_phis,) + (sd,) * k + pts.shape[1:]) - for ibfs, ipts, phi in zip(cell_node_map, point_cell_map, phis): - result[ibfs, ..., ipts] = phi[k] + result = numpy.zeros((num_phis,) + (sd,)*k + pts.shape[1:]) + shape_slices = (slice(None, None),)*k + for ibfs, ipts, phi in zip(cell_node_map, cell_point_map, phis): + indices = (ibfs,) + shape_slices + (ipts,) + result[numpy.ix_(*indices)] = phi[k] results.append(result) return tuple(results) @@ -509,6 +515,7 @@ def polynomial_entity_ids(ref_el, n, variant=None): def polynomial_cell_node_map(ref_el, n, variant=None): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() + ref_entity_ids = polynomial_entity_ids(ref_el.construct_subelement(sd), n, variant) entity_ids = polynomial_entity_ids(ref_el, n, variant) @@ -523,3 +530,42 @@ def polynomial_cell_node_map(ref_el, n, variant=None): ref_dofs = ref_entity_ids[dim][ref_entity] cell_node_map[cell, ref_dofs] = entity_ids[dim][entity] return cell_node_map + + +def compute_point_cell_map(ref_el, pts): + top = ref_el.get_topology() + sd = ref_el.get_spatial_dimension() + + bins = [] + ref_vertices = numpy.eye(sd+1, sd) + for entity in top[sd]: + vertices = ref_el.get_vertices_of_subcomplex(top[dim][entity]) + A, b = reference_element.make_affine_mapping(vertices, ref_vertices) + x = numpy.dot(A, pts) + b[:, None] + sx = numpy.sum(x, axis=0) + pts_on_cell = numpy.all(numpy.logical_and(numpy.logical_and(x >= 0, x <= 1), + numpy.logical_and(sx >= 0, sx <= 1)), + axis=0) + bins.append(numpy.where(pts_on_cell)[0]) + return bins + + +if __name__ == "__main__": + from reference_element import ufc_simplex + from quadrature_schemes import create_quadrature + from macro import AlfeldSplit + from macro import MacroQuadratureRule + dim = 3 + K = ufc_simplex(dim) + ref_el = AlfeldSplit(K) + + degree = 2 + Q_ref = create_quadrature(ref_el.construct_subelement(dim), degree) + Q = MacroQuadratureRule(ref_el, Q_ref) + pts = Q.get_points().T + cells = compute_point_cell_map(ref_el, pts) + print(cells) + + U = ExpansionSet(ref_el, variant="integral") + phis = U.tabulate(degree, Q.get_points()) + print(phis) From 8274927b4686f51f54914e30ab90273f4247a712 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 26 Mar 2024 20:03:01 -0500 Subject: [PATCH 16/93] unisolvent P2-iso-P1 --- FIAT/expansions.py | 66 +++++++++++++++++++++++++++--------------- FIAT/lagrange.py | 4 +-- FIAT/macro.py | 33 +++++++++++++-------- FIAT/polynomial_set.py | 10 +++---- 4 files changed, 71 insertions(+), 42 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 1bf6cc465..fc3934da7 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -314,7 +314,6 @@ def _tabulate(self, n, pts, order=0): """ phis = [] cell_point_map = self.get_cell_point_map(pts) - print(cell_point_map) sd = self.ref_el.get_spatial_dimension() for 
ipts, (A, b) in zip(cell_point_map, self.affine_mappings): @@ -328,8 +327,8 @@ def _tabulate(self, n, pts, order=0): return phis[0] results = [] + num_phis = self.get_num_members(n) cell_node_map = self.get_cell_node_map(n) - num_phis = numpy.max(cell_node_map) + 1 for k in range(order+1): result = numpy.zeros((num_phis,) + (sd,)*k + pts.shape[1:]) shape_slices = (slice(None, None),)*k @@ -516,9 +515,8 @@ def polynomial_cell_node_map(ref_el, n, variant=None): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() - ref_entity_ids = polynomial_entity_ids(ref_el.construct_subelement(sd), n, variant) entity_ids = polynomial_entity_ids(ref_el, n, variant) - + ref_entity_ids = polynomial_entity_ids(ref_el.construct_subelement(sd), n, variant) num_cells = len(top[sd]) dofs_per_cell = sum(len(ref_entity_ids[dim][entity]) for dim in ref_entity_ids for entity in ref_entity_ids[dim]) @@ -532,40 +530,62 @@ def polynomial_cell_node_map(ref_el, n, variant=None): return cell_node_map -def compute_point_cell_map(ref_el, pts): +def compute_point_cell_map(ref_el, pts, tol=1E-12): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() + low, high = -tol, 1 + tol bins = [] ref_vertices = numpy.eye(sd+1, sd) for entity in top[sd]: - vertices = ref_el.get_vertices_of_subcomplex(top[dim][entity]) + vertices = ref_el.get_vertices_of_subcomplex(top[sd][entity]) A, b = reference_element.make_affine_mapping(vertices, ref_vertices) + A = numpy.vstack((A, numpy.sum(A, axis=0))) + b = numpy.hstack((b, numpy.sum(b, axis=0))) x = numpy.dot(A, pts) + b[:, None] - sx = numpy.sum(x, axis=0) - pts_on_cell = numpy.all(numpy.logical_and(numpy.logical_and(x >= 0, x <= 1), - numpy.logical_and(sx >= 0, sx <= 1)), - axis=0) + + pts_on_cell = numpy.all(numpy.logical_and(x >= low, x <= high), axis=0) bins.append(numpy.where(pts_on_cell)[0]) return bins if __name__ == "__main__": from reference_element import ufc_simplex - from quadrature_schemes import create_quadrature - from macro import AlfeldSplit - from macro import MacroQuadratureRule - dim = 3 + from macro import AlfeldSplit, UniformSplit + from lagrange import Lagrange + from polynomial_set import ONPolynomialSet + dim = 2 K = ufc_simplex(dim) ref_el = AlfeldSplit(K) + ref_el = UniformSplit(K) degree = 2 - Q_ref = create_quadrature(ref_el.construct_subelement(dim), degree) - Q = MacroQuadratureRule(ref_el, Q_ref) - pts = Q.get_points().T - cells = compute_point_cell_map(ref_el, pts) - print(cells) - - U = ExpansionSet(ref_el, variant="integral") - phis = U.tabulate(degree, Q.get_points()) - print(phis) + variant = "integral" + + pts = [] + top = ref_el.get_topology() + for dim in sorted(top): + for entity in sorted(top[dim]): + pts.extend(ref_el.make_points(dim, entity, degree)) + + pt_array = numpy.asarray(pts) + pts_on_cell = pt_array[polynomial_cell_node_map(ref_el, degree, variant)] + print("points\n", pt_array) + print("points\n", pts_on_cell) + + U = ExpansionSet(ref_el, variant=variant, scale="L2 piola") + V = U.tabulate(degree, pts) + print("ExpansionSet\n", V) + + poly_set = ONPolynomialSet(ref_el, degree, variant=variant, scale="L2 piola") + V = poly_set.tabulate(pts)[(0,)*dim] + print("PolySet\n", V) + + istart, iend = 4, 7 + sub = poly_set.take(range(istart, iend)) + V = poly_set.tabulate(pts[istart:iend])[(0,)*dim] + print(f"PolySet at {pts[istart:iend]}\n", V) + + fe = Lagrange(ref_el, degree) + phis = fe.tabulate(0, pts)[(0,)*dim] + print("Lagrange\n", phis) diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index d46c589e7..796461c64 100644 
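# --- Illustrative aside (not part of the patch series) ----------------------
# The point-to-cell binning above maps points to the reference simplex of each
# subcell and keeps those whose mapped coordinates stay within [0, 1].  The
# same membership test can be phrased with barycentric coordinates; a minimal
# numpy-only sketch (the helper name points_in_simplex is made up here):
import numpy as np

def points_in_simplex(verts, pts, tol=1e-12):
    # verts: (sdim+1, sdim) vertices of one subcell; pts: (npts, sdim) points.
    # A point lies in the subcell iff all its barycentric coordinates are >= -tol.
    V = np.vstack((np.asarray(verts, dtype=float).T, np.ones(len(verts))))
    B = np.vstack((np.asarray(pts, dtype=float).T, np.ones(len(pts))))
    bary = np.linalg.solve(V, B)  # one column of barycentric coordinates per point
    return np.where(np.all(bary > -tol, axis=0))[0]

# One Alfeld subcell of the UFC triangle: edge {(0,0), (1,0)} plus the barycenter.
subcell = [(0.0, 0.0), (1.0, 0.0), (1.0/3.0, 1.0/3.0)]
print(points_in_simplex(subcell, [(0.25, 0.1), (0.1, 0.5)]))  # -> [0]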
--- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -48,7 +48,7 @@ class Lagrange(finite_element.CiarletElement): def __init__(self, ref_el, degree, variant="equispaced"): dual = LagrangeDualSet(ref_el, degree, variant=variant) - if ref_el.shape == LINE: + if ref_el.shape == LINE and False: # In 1D we can use the primal basis as the expansion set, # avoiding any round-off coming from a basis transformation points = [] @@ -58,6 +58,6 @@ def __init__(self, ref_el, degree, variant="equispaced"): points.append(pt) poly_set = LagrangePolynomialSet(ref_el, points) else: - poly_set = polynomial_set.ONPolynomialSet(ref_el, degree) + poly_set = polynomial_set.ONPolynomialSet(ref_el, degree, variant="integral", scale="L2 piola") formdegree = 0 # 0-form super(Lagrange, self).__init__(poly_set, dual, degree, formdegree) diff --git a/FIAT/macro.py b/FIAT/macro.py index 1f39f3688..cda501336 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -46,9 +46,9 @@ class SplitSimplicialComplex(SimplicialComplex): """Abstract class to implement a split on a Simplex """ - def __init__(self, ref_el): + def __init__(self, ref_el, splits=1): self.parent = ref_el - vertices, topology = self.split_topology(ref_el) + vertices, topology = self.split_topology(ref_el, splits=splits) super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology) def split_topology(self, ref_el): @@ -79,32 +79,40 @@ def construct_subelement(self, dimension): """ return self.parent.construct_subelement(dimension) + def get_entity_transform(self, dim, entity): + # This is to trick FiniteElement.tabulate + return self.parent.get_entity_transform(dim, entity) + class AlfeldSplit(SplitSimplicialComplex): - def split_topology(self, ref_el): + def split_topology(self, ref_el, splits=1): + assert splits == 1 sd = ref_el.get_spatial_dimension() - new_topology = copy.deepcopy(ref_el.topology) + top = ref_el.get_topology() + new_topology = copy.deepcopy(top) new_topology[sd] = {} barycenter = ref_el.make_points(sd, 0, sd+1) - new_verts = ref_el.vertices + tuple(barycenter) - new_vert_id = len(ref_el.vertices) + old_verts = ref_el.get_vertices() + new_verts = old_verts + tuple(barycenter) + new_vert_id = len(old_verts) new_topology[0][new_vert_id] = (new_vert_id,) for dim in range(1, sd + 1): offset = len(new_topology[dim]) - for entity, ids in ref_el.topology[dim-1].items(): + for entity, ids in top[dim-1].items(): new_topology[dim][offset+entity] = ids + (new_vert_id,) return new_verts, new_topology class UniformSplit(SplitSimplicialComplex): - def split_topology(self, ref_el): + def split_topology(self, ref_el, splits=1): + depth = splits + 1 sd = ref_el.get_spatial_dimension() old_verts = ref_el.get_vertices() - new_verts = make_lattice(old_verts, 2) + new_verts = make_lattice(old_verts, depth) new_topology = {} new_topology[0] = {i: (i,) for i in range(len(new_verts))} @@ -115,17 +123,18 @@ def split_topology(self, ref_el): # Place a new edge when the two lattice multiindices are at Manhattan distance < 3, # this connects the midpoints of edges within a face # Only include diagonal edges that are parallel to the simplex edges, - # we take the diagonal that goes through vertices of the same multiindex sum + # we take the diagonal that goes through vertices at the same depth cur = 0 distance = lambda x, y: sum(abs(b-a) for a, b in zip(x, y)) - for j, v1 in enumerate(lattice_iter(0, 3, sd)): - for i, v0 in enumerate(lattice_iter(0, 3, sd)): + for j, v1 in enumerate(lattice_iter(0, depth+1, sd)): + for i, v0 in enumerate(lattice_iter(0, 
depth+1, sd)): if i < j and distance(v0, v1) < 3 and sum(v1) - sum(v0) <= 1: new_topology[1][cur] = (i, j) cur = cur + 1 if sd == 3: # Cut the octahedron # FIXME do this more generically + assert splits == 1 new_topology[1][cur] = (1, 8) # Get an adjacency list for each vertex diff --git a/FIAT/polynomial_set.py b/FIAT/polynomial_set.py index 0abe2ca01..75d82b2c2 100644 --- a/FIAT/polynomial_set.py +++ b/FIAT/polynomial_set.py @@ -128,7 +128,7 @@ def __init__(self, ref_el, degree, shape=tuple(), scale=None, variant=None): else: flat_shape = numpy.ravel(shape) num_components = numpy.prod(flat_shape) - num_exp_functions = expansions.polynomial_dimension(ref_el, degree) + num_exp_functions = expansions.polynomial_dimension(ref_el, degree, variant) num_members = num_components * num_exp_functions embedded_degree = degree expansion_set = expansions.ExpansionSet(ref_el, scale=scale, variant=variant) @@ -142,7 +142,7 @@ def __init__(self, ref_el, degree, shape=tuple(), scale=None, variant=None): # use functional's index_iterator function cur_bf = 0 for idx in index_iterator(shape): - n = expansions.polynomial_dimension(ref_el, embedded_degree) + n = expansions.polynomial_dimension(ref_el, embedded_degree, variant) for exp_bf in range(n): cur_idx = (cur_bf, *idx, exp_bf) coeffs[cur_idx] = 1.0 @@ -212,18 +212,18 @@ class ONSymTensorPolynomialSet(PolynomialSet): """ - def __init__(self, ref_el, degree, size=None, scale=None): + def __init__(self, ref_el, degree, size=None, scale=None, variant=None): sd = ref_el.get_spatial_dimension() if size is None: size = sd shape = (size, size) - num_exp_functions = expansions.polynomial_dimension(ref_el, degree) + num_exp_functions = expansions.polynomial_dimension(ref_el, degree, variant) num_components = size * (size + 1) // 2 num_members = num_components * num_exp_functions embedded_degree = degree - expansion_set = expansions.ExpansionSet(ref_el, scale=scale) + expansion_set = expansions.ExpansionSet(ref_el, scale=scale, variant=variant) # set up coefficients for symmetric tensors coeffs_shape = (num_members, *shape, num_exp_functions) From e6ca9313fcac7278288abde604642c9957dd1f87 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 26 Mar 2024 22:36:26 -0500 Subject: [PATCH 17/93] Attempt to hack coordinate mapping orientation --- FIAT/expansions.py | 43 ++++++++++++++++++++++--------------------- FIAT/macro.py | 4 +--- 2 files changed, 23 insertions(+), 24 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index fc3934da7..691ee8645 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -273,10 +273,16 @@ def __init__(self, ref_el, scale=None, variant=None): sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() self.base_ref_el = reference_element.default_simplex(sd) - v2 = self.base_ref_el.get_vertices() - self.affine_mappings = [reference_element.make_affine_mapping( - ref_el.get_vertices_of_subcomplex(top[sd][cell]), v2) - for cell in top[sd]] + base_verts = self.base_ref_el.get_vertices() + + def oriented_affine_mapping(cell): + verts = ref_el.get_vertices_of_subcomplex(top[sd][cell]) + A, b = reference_element.make_affine_mapping(verts, base_verts) + # FIXME determine orientation to make Alfeld work + # A, b = reference_element.make_affine_mapping(verts, base_verts[::-1]) + return A, b + + self.affine_mappings = [oriented_affine_mapping(cell) for cell in top[sd]] self._dmats_cache = {} self._cell_node_map_cache = {} if scale is None: @@ -299,21 +305,11 @@ def get_cell_node_map(self, n): cell_node_map = 
polynomial_cell_node_map(self.ref_el, n, self.variant) return self._cell_node_map_cache.setdefault(n, cell_node_map) - def get_cell_point_map(self, pts): - sd = self.ref_el.get_spatial_dimension() - top = self.ref_el.get_topology() - if len(top[sd]) == 1: - if len(pts.shape) == 1: - return (tuple(),) - return (slice(None, None),) - else: - return compute_point_cell_map(self.ref_el, pts) - def _tabulate(self, n, pts, order=0): """A version of tabulate() that also works for a single point. """ phis = [] - cell_point_map = self.get_cell_point_map(pts) + cell_point_map = compute_cell_point_map(self.ref_el, pts) sd = self.ref_el.get_spatial_dimension() for ipts, (A, b) in zip(cell_point_map, self.affine_mappings): @@ -391,7 +387,7 @@ def tabulate(self, n, pts): if len(pts) == 0: return numpy.array([]) results, = self._tabulate(n, numpy.transpose(pts)) - return numpy.array(results) + return numpy.asarray(results) def tabulate_derivatives(self, n, pts): vals, deriv_vals = self._tabulate(n, numpy.transpose(pts), order=1) @@ -530,13 +526,16 @@ def polynomial_cell_node_map(ref_el, n, variant=None): return cell_node_map -def compute_point_cell_map(ref_el, pts, tol=1E-12): +def compute_cell_point_map(ref_el, pts, tol=1E-12): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() + if len(top[sd]) == 1: + point_map = tuple() if len(pts.shape) == 1 else slice(None, None) + return (point_map,) low, high = -tol, 1 + tol bins = [] - ref_vertices = numpy.eye(sd+1, sd) + ref_vertices = reference_element.ufc_simplex(sd).get_vertices() for entity in top[sd]: vertices = ref_el.get_vertices_of_subcomplex(top[sd][entity]) A, b = reference_element.make_affine_mapping(vertices, ref_vertices) @@ -569,9 +568,9 @@ def compute_point_cell_map(ref_el, pts, tol=1E-12): pts.extend(ref_el.make_points(dim, entity, degree)) pt_array = numpy.asarray(pts) - pts_on_cell = pt_array[polynomial_cell_node_map(ref_el, degree, variant)] print("points\n", pt_array) - print("points\n", pts_on_cell) + # pts_on_cell = pt_array[polynomial_cell_node_map(ref_el, degree, variant)] + # print("points\n", pts_on_cell) U = ExpansionSet(ref_el, variant=variant, scale="L2 piola") V = U.tabulate(degree, pts) @@ -581,7 +580,9 @@ def compute_point_cell_map(ref_el, pts, tol=1E-12): V = poly_set.tabulate(pts)[(0,)*dim] print("PolySet\n", V) - istart, iend = 4, 7 + Ktop = K.get_topology() + top = ref_el.get_topology() + istart, iend = len(top[0]), len(top[0]) + len(Ktop[1]) sub = poly_set.take(range(istart, iend)) V = poly_set.tabulate(pts[istart:iend])[(0,)*dim] print(f"PolySet at {pts[istart:iend]}\n", V) diff --git a/FIAT/macro.py b/FIAT/macro.py index cda501336..f5f34b4d7 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -16,15 +16,13 @@ def xy_to_bary(verts, pts, result=None): # verts is (sdim + 1) x sdim so verts[i, :] is i:th vertex # result is [npts, sdim] # bary is [npts, sdim + 1] + verts = numpy.asarray(verts) npts = pts.shape[0] sdim = verts.shape[1] mat = numpy.vstack((verts.T, numpy.ones((1, sdim+1)))) - b = numpy.vstack((pts.T, numpy.ones((1, npts)))) - foo = numpy.linalg.solve(mat, b) - if result is None: return numpy.copy(foo.T) else: From 291d9ffabf78dadfd91d43ffee9b599183d73f10 Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Tue, 26 Mar 2024 22:48:22 -0500 Subject: [PATCH 18/93] First sketch of HCT --- FIAT/hct.py | 51 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 FIAT/hct.py diff --git a/FIAT/hct.py b/FIAT/hct.py new file mode 100644 index 000000000..b0dbb2ef2 --- 
/dev/null +++ b/FIAT/hct.py @@ -0,0 +1,51 @@ +from macro import AlfeldSplit +import dual_set +import functional +from finite_element import CiarletElement + + +# TODO: Need to do constraints with jump functionals, +# null space, etc to get a new polynomial set +def C1subspace(simplicial_complex, deg): + pass + + +class HCTDual(dual_set.DualSet): + + def __init__(self, simplical_complex): + entity_ids = { + 0: {0: [0, 1, 2], + 1: [3, 4, 5], + 2: [6, 7, 8]}, + 1: {0: [9], 1: [10], 2: [11]}, + 2: {0: []}} + T = simplicial_complex.parent + verts = T.get_vertices() + for vid in sorted(T[0]): + v = verts[vid] + nodes.extend( + [functional.PointEvaluation(v), + functional.PointDerivative(v, (1, 0)), + functional.PointDerivative(v, (0, 1))]) + # FIXME: Should be integral moments + # for better transformation theory + for eid in sorted(T[1]): + pt = T.make_points(1, eid, 2)[0] + nodes.append( + functional.PointNormalDerivative(T, eid, pt)) + super(HCTDual, self).__init__(nodes, T, entity_ids) + + + +class HCT(CiarletElement): + + def __init__(self, ref_el): + TA = AlfeldSplit(ref_el) + + P = C1Subspace(TA, 3) + + D = HCTDualSet(TA) + + super(HCT, self).__init__(P, D, 3) + + From e8a279f0373aaea6701d2c2a0c99b4c452c35d7d Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 27 Mar 2024 12:19:01 -0500 Subject: [PATCH 19/93] Support for C0 macroelements - Macro: implement get_cell_connectivity() that preserves ordering - DualSet: merge entity_ids and keep parent cell - Lagrange: Add tests on simplicial complexes --- FIAT/dual_set.py | 19 ++++++++++++ FIAT/expansions.py | 66 ++++++----------------------------------- FIAT/finite_element.py | 5 ++-- FIAT/hct.py | 51 ------------------------------- FIAT/lagrange.py | 8 +++-- FIAT/macro.py | 42 +++++++++++++++----------- test/unit/test_macro.py | 51 +++++++++++++++++++++++++++---- 7 files changed, 107 insertions(+), 135 deletions(-) delete mode 100644 FIAT/hct.py diff --git a/FIAT/dual_set.py b/FIAT/dual_set.py index ddf129491..652952a27 100644 --- a/FIAT/dual_set.py +++ b/FIAT/dual_set.py @@ -13,6 +13,7 @@ class DualSet(object): def __init__(self, nodes, ref_el, entity_ids, entity_permutations=None): + ref_el, entity_ids = merge_entity_ids(ref_el, entity_ids) self.nodes = nodes self.ref_el = ref_el self.entity_ids = entity_ids @@ -193,3 +194,21 @@ def make_entity_closure_ids(ref_el, entity_ids): entity_closure_ids[d][e] = ids return entity_closure_ids + + +def merge_entity_ids(ref_el, entity_ids): + """Collect DOFs from simplicial complex onto facets of parent cell""" + try: + parent_cell = ref_el.parent + except AttributeError: + return ref_el, entity_ids + + parent_top = parent_cell.get_topology() + parent_ids = {dim: {entity: [] for entity in parent_top[dim]} for dim in parent_top} + child_to_parent = ref_el.get_child_to_parent() + for dim in child_to_parent: + for entity in child_to_parent[dim]: + parent_dim, parent_id = child_to_parent[dim][entity] + dofs_cur = entity_ids[dim][entity] + parent_ids[parent_dim][parent_id].extend(dofs_cur) + return parent_cell, parent_ids diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 691ee8645..ed74055fd 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -79,8 +79,8 @@ def C0_basis(dim, n, phi): for j in range(0, n+1-i): phi[idx(0, i, j)] -= phi[idx(1, i-1, j)] icur = idx(0, 0, i) - phi[icur] -= phi[idx(1, 0, i-1)] phi[icur] -= phi[idx(0, 1, i-1)] + phi[icur] -= phi[idx(1, 0, i-1)] # reorder by dimension and entity on the reference simplex dofs = list(range(dim+1)) @@ -275,14 +275,9 @@ def 
__init__(self, ref_el, scale=None, variant=None): self.base_ref_el = reference_element.default_simplex(sd) base_verts = self.base_ref_el.get_vertices() - def oriented_affine_mapping(cell): - verts = ref_el.get_vertices_of_subcomplex(top[sd][cell]) - A, b = reference_element.make_affine_mapping(verts, base_verts) - # FIXME determine orientation to make Alfeld work - # A, b = reference_element.make_affine_mapping(verts, base_verts[::-1]) - return A, b - - self.affine_mappings = [oriented_affine_mapping(cell) for cell in top[sd]] + self.affine_mappings = [reference_element.make_affine_mapping( + ref_el.get_vertices_of_subcomplex(top[sd][cell]), + base_verts) for cell in top[sd]] self._dmats_cache = {} self._cell_node_map_cache = {} if scale is None: @@ -508,19 +503,21 @@ def polynomial_entity_ids(ref_el, n, variant=None): def polynomial_cell_node_map(ref_el, n, variant=None): + assert hasattr(ref_el, "parent") top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() entity_ids = polynomial_entity_ids(ref_el, n, variant) ref_entity_ids = polynomial_entity_ids(ref_el.construct_subelement(sd), n, variant) + num_cells = len(top[sd]) dofs_per_cell = sum(len(ref_entity_ids[dim][entity]) for dim in ref_entity_ids for entity in ref_entity_ids[dim]) cell_node_map = numpy.zeros((num_cells, dofs_per_cell), dtype=int) - conn = ref_el.connectivity + conn = ref_el.get_cell_connectivity() for cell in top[sd]: for dim in top: - for ref_entity, entity in enumerate(conn[(sd, dim)][cell]): + for ref_entity, entity in enumerate(conn[cell][dim]): ref_dofs = ref_entity_ids[dim][ref_entity] cell_node_map[cell, ref_dofs] = entity_ids[dim][entity] return cell_node_map @@ -530,8 +527,7 @@ def compute_cell_point_map(ref_el, pts, tol=1E-12): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() if len(top[sd]) == 1: - point_map = tuple() if len(pts.shape) == 1 else slice(None, None) - return (point_map,) + return (Ellipsis,) low, high = -tol, 1 + tol bins = [] @@ -546,47 +542,3 @@ def compute_cell_point_map(ref_el, pts, tol=1E-12): pts_on_cell = numpy.all(numpy.logical_and(x >= low, x <= high), axis=0) bins.append(numpy.where(pts_on_cell)[0]) return bins - - -if __name__ == "__main__": - from reference_element import ufc_simplex - from macro import AlfeldSplit, UniformSplit - from lagrange import Lagrange - from polynomial_set import ONPolynomialSet - dim = 2 - K = ufc_simplex(dim) - ref_el = AlfeldSplit(K) - ref_el = UniformSplit(K) - - degree = 2 - variant = "integral" - - pts = [] - top = ref_el.get_topology() - for dim in sorted(top): - for entity in sorted(top[dim]): - pts.extend(ref_el.make_points(dim, entity, degree)) - - pt_array = numpy.asarray(pts) - print("points\n", pt_array) - # pts_on_cell = pt_array[polynomial_cell_node_map(ref_el, degree, variant)] - # print("points\n", pts_on_cell) - - U = ExpansionSet(ref_el, variant=variant, scale="L2 piola") - V = U.tabulate(degree, pts) - print("ExpansionSet\n", V) - - poly_set = ONPolynomialSet(ref_el, degree, variant=variant, scale="L2 piola") - V = poly_set.tabulate(pts)[(0,)*dim] - print("PolySet\n", V) - - Ktop = K.get_topology() - top = ref_el.get_topology() - istart, iend = len(top[0]), len(top[0]) + len(Ktop[1]) - sub = poly_set.take(range(istart, iend)) - V = poly_set.tabulate(pts[istart:iend])[(0,)*dim] - print(f"PolySet at {pts[istart:iend]}\n", V) - - fe = Lagrange(ref_el, degree) - phis = fe.tabulate(0, pts)[(0,)*dim] - print("Lagrange\n", phis) diff --git a/FIAT/finite_element.py b/FIAT/finite_element.py index 724f46956..52a316637 
100644 --- a/FIAT/finite_element.py +++ b/FIAT/finite_element.py @@ -118,7 +118,7 @@ class CiarletElement(FiniteElement): """ def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref_el=None): - ref_el = ref_el or poly_set.get_reference_element() + ref_el = ref_el or dual.get_reference_element() super(CiarletElement, self).__init__(ref_el, dual, order, formdegree, mapping) # build generalized Vandermonde matrix @@ -143,7 +143,8 @@ def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref new_shp = new_coeffs_flat.shape[:1] + shp[1:] new_coeffs = new_coeffs_flat.reshape(new_shp) - self.poly_set = PolynomialSet(ref_el, + # dual might advertise the parent cell but poly_set might be on a simplicial complex + self.poly_set = PolynomialSet(poly_set.get_reference_element(), poly_set.get_degree(), poly_set.get_embedded_degree(), poly_set.get_expansion_set(), diff --git a/FIAT/hct.py b/FIAT/hct.py deleted file mode 100644 index b0dbb2ef2..000000000 --- a/FIAT/hct.py +++ /dev/null @@ -1,51 +0,0 @@ -from macro import AlfeldSplit -import dual_set -import functional -from finite_element import CiarletElement - - -# TODO: Need to do constraints with jump functionals, -# null space, etc to get a new polynomial set -def C1subspace(simplicial_complex, deg): - pass - - -class HCTDual(dual_set.DualSet): - - def __init__(self, simplical_complex): - entity_ids = { - 0: {0: [0, 1, 2], - 1: [3, 4, 5], - 2: [6, 7, 8]}, - 1: {0: [9], 1: [10], 2: [11]}, - 2: {0: []}} - T = simplicial_complex.parent - verts = T.get_vertices() - for vid in sorted(T[0]): - v = verts[vid] - nodes.extend( - [functional.PointEvaluation(v), - functional.PointDerivative(v, (1, 0)), - functional.PointDerivative(v, (0, 1))]) - # FIXME: Should be integral moments - # for better transformation theory - for eid in sorted(T[1]): - pt = T.make_points(1, eid, 2)[0] - nodes.append( - functional.PointNormalDerivative(T, eid, pt)) - super(HCTDual, self).__init__(nodes, T, entity_ids) - - - -class HCT(CiarletElement): - - def __init__(self, ref_el): - TA = AlfeldSplit(ref_el) - - P = C1Subspace(TA, 3) - - D = HCTDualSet(TA) - - super(HCT, self).__init__(P, D, 3) - - diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index 796461c64..be7f56b52 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -48,7 +48,9 @@ class Lagrange(finite_element.CiarletElement): def __init__(self, ref_el, degree, variant="equispaced"): dual = LagrangeDualSet(ref_el, degree, variant=variant) - if ref_el.shape == LINE and False: + sd = ref_el.get_spatial_dimension() + num_cells = len(ref_el.get_topology()[sd]) + if ref_el.shape == LINE and num_cells == 1: # In 1D we can use the primal basis as the expansion set, # avoiding any round-off coming from a basis transformation points = [] @@ -56,8 +58,10 @@ def __init__(self, ref_el, degree, variant="equispaced"): # Assert singleton point for each node. 
pt, = node.get_point_dict().keys() points.append(pt) + # FIXME macro-ize LagrangePolynomial set poly_set = LagrangePolynomialSet(ref_el, points) else: - poly_set = polynomial_set.ONPolynomialSet(ref_el, degree, variant="integral", scale="L2 piola") + variant = "integral" if num_cells > 1 else None + poly_set = polynomial_set.ONPolynomialSet(ref_el, degree, variant=variant) formdegree = 0 # 0-form super(Lagrange, self).__init__(poly_set, dual, degree, formdegree) diff --git a/FIAT/macro.py b/FIAT/macro.py index f5f34b4d7..6ae6ee310 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -44,9 +44,9 @@ class SplitSimplicialComplex(SimplicialComplex): """Abstract class to implement a split on a Simplex """ - def __init__(self, ref_el, splits=1): + def __init__(self, ref_el, splits=1, variant=None): self.parent = ref_el - vertices, topology = self.split_topology(ref_el, splits=splits) + vertices, topology = self.split_topology(ref_el, splits=splits, variant=variant) super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology) def split_topology(self, ref_el): @@ -69,6 +69,24 @@ def get_child_to_parent(self): child_to_parent[dim][entity] = (parent_dim, parent_entity) return child_to_parent + def get_cell_connectivity(self): + """Connectitivity from cell in a complex to globally number and + respects the entity numbering on the reference cell. + """ + sd = self.get_spatial_dimension() + top = self.get_topology() + inv_top = invert_cell_topology(top) + parent_top = self.parent.get_topology() + connectivity = {cell: {dim: [] for dim in top} for cell in top[sd]} + for cell in top[sd]: + cell_verts = top[sd][cell] + for dim in top: + for entity in parent_top[dim]: + ref_verts = parent_top[dim][entity] + global_verts = tuple(cell_verts[v] for v in ref_verts) + connectivity[cell][dim].append(inv_top[dim][global_verts]) + return connectivity + def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity specified by subelement dimension. 
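# --- Illustrative aside (not part of the patch series) ----------------------
# A small usage sketch of the get_cell_connectivity() method added above,
# assuming the AlfeldSplit API as it stands at this point in the series.
# For each subcell, conn[cell][dim] lists the global entity numbers of the
# complex in the same order as the reference cell's entities of dimension dim,
# so conn[cell][0] lists the subcell's vertices and conn[cell][sd] is [cell].
from FIAT.reference_element import ufc_simplex
from FIAT.macro import AlfeldSplit

A = AlfeldSplit(ufc_simplex(2))
top = A.get_topology()
conn = A.get_cell_connectivity()
for cell in top[2]:
    assert conn[cell][0] == list(top[2][cell])
    assert conn[cell][2] == [cell]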
@@ -77,14 +95,10 @@ def construct_subelement(self, dimension): """ return self.parent.construct_subelement(dimension) - def get_entity_transform(self, dim, entity): - # This is to trick FiniteElement.tabulate - return self.parent.get_entity_transform(dim, entity) - class AlfeldSplit(SplitSimplicialComplex): - def split_topology(self, ref_el, splits=1): + def split_topology(self, ref_el, splits=1, variant=None): assert splits == 1 sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() @@ -104,13 +118,13 @@ def split_topology(self, ref_el, splits=1): return new_verts, new_topology -class UniformSplit(SplitSimplicialComplex): +class IsoSplit(SplitSimplicialComplex): - def split_topology(self, ref_el, splits=1): + def split_topology(self, ref_el, splits=1, variant=None): depth = splits + 1 sd = ref_el.get_spatial_dimension() old_verts = ref_el.get_vertices() - new_verts = make_lattice(old_verts, depth) + new_verts = make_lattice(old_verts, depth, variant=variant) new_topology = {} new_topology[0] = {i: (i,) for i in range(len(new_verts))} @@ -223,11 +237,3 @@ def __init__(self, finite_element, splitting): print(f"Cell node map:\n{cell_node_map}") print(f"DOFs per facet in reference cell:\n{entity_ids}") - - -if __name__ == "__main__": - from reference_element import ufc_simplex - from lagrange import Lagrange - K = ufc_simplex(2) - L = Lagrange(K, 3) - ML = MacroElement(L, AlfeldSplit) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 0c164504a..25c79a492 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -1,8 +1,9 @@ import numpy import pytest from FIAT.reference_element import ufc_simplex -from FIAT.macro import AlfeldSplit, UniformSplit, MacroQuadratureRule +from FIAT.macro import AlfeldSplit, IsoSplit, MacroQuadratureRule from FIAT.quadrature_schemes import create_quadrature +from FIAT.lagrange import Lagrange @pytest.fixture(params=("T", "S")) @@ -11,7 +12,7 @@ def cell(request): "S": ufc_simplex(3)}[request.param] -@pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) +@pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) def test_split_entity_transform(split, cell): split_cell = split(cell) top = split_cell.get_topology() @@ -27,7 +28,7 @@ def test_split_entity_transform(split, cell): @pytest.mark.parametrize("degree", (4,)) @pytest.mark.parametrize("variant", ("gll", "equispaced")) -@pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) +@pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) def test_split_make_points(split, cell, degree, variant): split_cell = split(cell) top = split_cell.get_topology() @@ -42,7 +43,7 @@ def test_split_make_points(split, cell, degree, variant): assert numpy.allclose(mapped_pts, pts_entity) -@pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) +@pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) def test_split_child_to_parent(split, cell): split_cell = split(cell) mapping = split_cell.get_child_to_parent() @@ -51,7 +52,7 @@ def test_split_child_to_parent(split, cell): print(mapping[dim]) -@pytest.mark.parametrize("split", (AlfeldSplit, UniformSplit)) +@pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) def test_macro_quadrature(split, cell): split_cell = split(cell) @@ -73,3 +74,43 @@ def test_macro_quadrature(split, cell): # ax.scatter(*Q.get_points().T) # ax.axis("equal") # plt.show() + + +@pytest.mark.parametrize("degree", range(1, 5)) +@pytest.mark.parametrize("variant", ("equispaced", "gll")) +@pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) 
+def test_macro_lagrange(variant, degree, split, cell): + ref_el = split(cell) + + fe = Lagrange(ref_el, degree, variant=variant) + poly_set = fe.get_nodal_basis() + + # Test that the poly set is defined on the split and not on the parent cell + assert poly_set.get_reference_element() is ref_el + + # Test that the finite element is defined on the parent cell and not on the split + assert fe.get_reference_element() is cell + + # Test that parent entities are the ones exposed + entity_ids = fe.entity_dofs() + parent_top = ref_el.parent.get_topology() + for dim in parent_top: + assert len(entity_ids[dim]) == len(parent_top[dim]) + + # TODO more thorough checks on entity_ids + + # Test that tabulation onto lattice points gives the identity + sd = ref_el.get_spatial_dimension() + top = ref_el.get_topology() + pts = [] + for dim in sorted(top): + for entity in sorted(top[dim]): + pts.extend(ref_el.make_points(dim, entity, degree, variant=variant)) + + phis = fe.tabulate(0, pts)[(0,)*sd] + assert numpy.allclose(phis, numpy.eye(fe.space_dimension())) + + # Test that we can reproduce the Vandermonde matrix by tabulating the expansion set + U = poly_set.get_expansion_set() + V = U.tabulate(degree, pts).T + assert numpy.allclose(fe.V, V) From caf9f94191eb25eb55a060a42c23d98cb2658f39 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 27 Mar 2024 12:42:34 -0500 Subject: [PATCH 20/93] remove MacroElement class and alfeld.py --- FIAT/alfeld.py | 55 -------------------------------------------------- FIAT/macro.py | 53 ------------------------------------------------ 2 files changed, 108 deletions(-) delete mode 100644 FIAT/alfeld.py diff --git a/FIAT/alfeld.py b/FIAT/alfeld.py deleted file mode 100644 index eee25ae58..000000000 --- a/FIAT/alfeld.py +++ /dev/null @@ -1,55 +0,0 @@ -import copy - -import numpy - -from FIAT.reference_element import SimplicialComplex, ufc_simplex - - -class AlfeldSplit(SimplicialComplex): - def __init__(self, T): - self.parent = T - sdim = T.get_spatial_dimension() - old_vs = T.get_vertices() - - b = numpy.average(old_vs, axis=0) - - new_verts = old_vs + (tuple(b),) - - new_topology = copy.deepcopy(T.topology) - - new_vert_id = len(T.topology[0]) - new_topology[0] = {i: (i,) for i in range(new_vert_id+1)} - new_topology[sdim] = {} - - for dim_cur in range(1, sdim + 1): - start = len(new_topology[dim_cur]) - for eid, vs in T.topology[dim_cur-1].items(): - new_topology[dim_cur][start+eid] = vs + (new_vert_id,) - - super(AlfeldSplit, self).__init__(T.shape, new_verts, new_topology) - - def construct_subelement(self, dimension): - """Constructs the reference element of a cell subentity - specified by subelement dimension. 
- - :arg dimension: subentity dimension (integer) - """ - return self.parent.construct_subelement(dimension) - - -if __name__ == "__main__": - sdim = 2 - - T = ufc_simplex(sdim) - - TA = AlfeldSplit(T) - - TAT = TA.topology - - for i in range(1, sdim+1): - TX = TA.construct_subelement(i) - b = numpy.average(TX.get_vertices(), axis=0) - for entity in TAT[i].keys(): - mapped_bary = TA.get_entity_transform(i, entity)(b) - computed_bary = numpy.average(TA.get_vertices_of_subcomplex(TAT[i][entity]), axis=0) - assert numpy.allclose(mapped_bary, computed_bary) diff --git a/FIAT/macro.py b/FIAT/macro.py index 6ae6ee310..3eb9817d6 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -184,56 +184,3 @@ def __init__(self, cell_complex, Q_ref): pts = tuple(pts) wts = tuple(wts) super(MacroQuadratureRule, self).__init__(ref_el, pts, wts) - - -class MacroElement(): - - def __init__(self, finite_element, splitting): - ref_el = finite_element.ref_el - sdim = ref_el.get_spatial_dimension() - sc = splitting(ref_el) - - ref_eids = finite_element.entity_dofs() - ndofs_per_dim = {d: len(ref_eids[d][0]) for d in ref_eids} - - sc_facet_to_dofs = {} - - sc_t = sc.topology - - # Enumerate dofs, attach to complex facets - dof_cur = 0 - for dim in sc_t: - for facet_id in sc_t[dim]: - facet = sc_t[dim][facet_id] - ndof_cur = ndofs_per_dim[dim] - sc_facet_to_dofs[facet] = (dof_cur, dof_cur + ndof_cur) - dof_cur += ndof_cur - - # cell_node_map - num_cells = len(sc_t[dim]) - dofs_per_cell = finite_element.space_dimension() - - # This is used in evaluation. - cell_node_map = numpy.zeros((num_cells, dofs_per_cell), int) - conn = sc.connectivity - for cid, in conn[(sdim, sdim)]: - for dim in range(sdim+1): - for ref_fid, fid in enumerate(conn[(sdim, dim)][cid]): - facet = sc_t[dim][fid] - dofs = list(range(*sc_facet_to_dofs[facet])) - ref_dofs = ref_eids[dim][ref_fid] - cell_node_map[cid, ref_dofs] = dofs - - # collect dofs from complex onto facets of main cell - # This needs to go into a dual something or other somewhere - c2p = sc.get_child_to_parent() - ref_t = ref_el.topology - entity_ids = {d: {f: [] for f in ref_t[d]} for d in ref_t} - for dim in c2p: - for fid in c2p[dim]: - (parent_dim, parent_id) = c2p[dim][fid] - dofs_cur = list(range(*sc_facet_to_dofs[sc_t[dim][fid]])) - entity_ids[parent_dim][parent_id].extend(dofs_cur) - - print(f"Cell node map:\n{cell_node_map}") - print(f"DOFs per facet in reference cell:\n{entity_ids}") From b50d04b41d4ea490eaa74ead782ff59ebc630971 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 27 Mar 2024 20:00:36 -0500 Subject: [PATCH 21/93] Macro LagrangePolynomialSet --- FIAT/barycentric_interpolation.py | 93 +++++++++++++++++++------------ FIAT/dual_set.py | 5 +- FIAT/expansions.py | 16 +++--- FIAT/lagrange.py | 7 +-- FIAT/macro.py | 3 +- FIAT/polynomial_set.py | 15 +++-- FIAT/reference_element.py | 8 +-- test/unit/test_macro.py | 5 +- 8 files changed, 85 insertions(+), 67 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index 5bb9e10b0..8f3758916 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -11,6 +11,31 @@ from FIAT.functional import index_iterator +def barycentric_interpolation(nodes, wts, dmat, pts, order=0): + """Evaluates a Lagrange basis on a line reference element + via the second barycentric interpolation formula. See Berrut and Trefethen (2004) + https://doi.org/10.1137/S0036144502417715 Eq. 
(4.2) & (9.4) + """ + if pts.dtype == object: + from sympy import simplify + sp_simplify = numpy.vectorize(simplify) + else: + sp_simplify = lambda x: x + phi = numpy.add.outer(-nodes, pts) + with numpy.errstate(divide='ignore', invalid='ignore'): + numpy.reciprocal(phi, out=phi) + numpy.multiply(phi, wts[:, None], out=phi) + numpy.multiply(1.0 / numpy.sum(phi, axis=0), phi, out=phi) + phi[phi != phi] = 1.0 + + phi = sp_simplify(phi) + results = [phi] + for r in range(order): + phi = sp_simplify(numpy.dot(dmat, phi)) + results.append(phi) + return results + + def make_dmat(x): """Returns Lagrange differentiation matrix and barycentric weights associated with x[j].""" @@ -24,14 +49,20 @@ def make_dmat(x): class LagrangeLineExpansionSet(expansions.LineExpansionSet): - """Evaluates a Lagrange basis on a line reference element - via the second barycentric interpolation formula. See Berrut and Trefethen (2004) - https://doi.org/10.1137/S0036144502417715 Eq. (4.2) & (9.4) + """Lagrange polynomial set on the line """ def __init__(self, ref_el, pts): self.points = pts self.x = numpy.array(pts).flatten() - self.dmat, self.weights = make_dmat(self.x) + self.cell_node_map = expansions.compute_cell_point_map(ref_el, numpy.transpose(pts)) + self.dmats = [] + self.weights = [] + for ibfs in self.cell_node_map: + dmat, wts = make_dmat(self.x[ibfs]) + self.dmats.append(dmat) + self.weights.append(wts) + + self.degree = max(len(wts) for wts in self.weights)-1 super(LagrangeLineExpansionSet, self).__init__(ref_el) def get_num_members(self, n): @@ -41,54 +72,44 @@ def get_points(self): return self.points def get_dmats(self, degree): - return [self.dmat.T] - - def tabulate(self, n, pts): - assert n == len(self.points)-1 - results = numpy.add.outer(-self.x, numpy.array(pts).flatten()) - with numpy.errstate(divide='ignore', invalid='ignore'): - numpy.reciprocal(results, out=results) - numpy.multiply(results, self.weights[:, None], out=results) - numpy.multiply(1.0 / numpy.sum(results, axis=0), results, out=results) - - results[results != results] = 1.0 - if results.dtype == object: - from sympy import simplify - results = numpy.vectorize(simplify)(results) - return results + return [dmat.T for dmat in self.dmats] def _tabulate(self, n, pts, order=0): - vals = self.tabulate(n, pts) - results = [vals] - for r in range(order): - vals = numpy.dot(self.dmat, vals) - if vals.dtype == object: - from sympy import simplify - vals = numpy.vectorize(simplify)(vals) - results.append(vals) + num_members = self.get_num_members(n) + cell_node_map = self.cell_node_map + cell_point_map = expansions.compute_cell_point_map(self.ref_el, pts) + pts = numpy.asarray(pts).flatten() + + results = [numpy.zeros((num_members, len(pts)), dtype=pts.dtype) for r in range(order+1)] + for ibfs, ipts, wts, dmat in zip(cell_node_map, cell_point_map, self.weights, self.dmats): + vals = barycentric_interpolation(self.x[ibfs], wts, dmat, pts[ipts], order=order) + indices = Ellipsis if len(cell_node_map) == 1 else numpy.ix_(ibfs, ipts) + for result, val in zip(results, vals): + result[indices] = val + for r in range(order+1): shape = results[r].shape shape = shape[:1] + (1,)*r + shape[1:] results[r] = numpy.reshape(results[r], shape) - return results + return tuple(results) class LagrangePolynomialSet(polynomial_set.PolynomialSet): def __init__(self, ref_el, pts, shape=tuple()): - degree = len(pts) - 1 + if ref_el.get_shape() != reference_element.LINE: + raise ValueError("Invalid reference element type.") + + expansion_set = 
LagrangeLineExpansionSet(ref_el, pts) + degree = expansion_set.degree if shape == tuple(): num_components = 1 else: flat_shape = numpy.ravel(shape) num_components = numpy.prod(flat_shape) - num_exp_functions = expansions.polynomial_dimension(ref_el, degree) + num_exp_functions = expansion_set.get_num_members(degree) num_members = num_components * num_exp_functions embedded_degree = degree - if ref_el.get_shape() == reference_element.LINE: - expansion_set = LagrangeLineExpansionSet(ref_el, pts) - else: - raise ValueError("Invalid reference element type.") # set up coefficients if shape == tuple(): @@ -99,8 +120,8 @@ def __init__(self, ref_el, pts, shape=tuple()): # use functional's index_iterator function cur_bf = 0 for idx in index_iterator(shape): - n = expansions.polynomial_dimension(ref_el, embedded_degree) - for exp_bf in range(n): + n = expansion_set.get_num_members(embedded_degree) + for exp_bf in range(num_exp_functions): cur_idx = (cur_bf, *idx, exp_bf) coeffs[cur_idx] = 1.0 cur_bf += 1 diff --git a/FIAT/dual_set.py b/FIAT/dual_set.py index 652952a27..454f9c481 100644 --- a/FIAT/dual_set.py +++ b/FIAT/dual_set.py @@ -198,9 +198,8 @@ def make_entity_closure_ids(ref_el, entity_ids): def merge_entity_ids(ref_el, entity_ids): """Collect DOFs from simplicial complex onto facets of parent cell""" - try: - parent_cell = ref_el.parent - except AttributeError: + parent_cell = ref_el.parent + if parent_cell is None: return ref_el, entity_ids parent_top = parent_cell.get_topology() diff --git a/FIAT/expansions.py b/FIAT/expansions.py index ed74055fd..0fd45cd10 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -220,6 +220,9 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): scale = math.sqrt(norm2) for result in results: result[icur] *= scale + + if variant == "integral": + results = tuple(C0_basis(dim, n, result) for result in results) return results @@ -309,11 +312,8 @@ def _tabulate(self, n, pts, order=0): sd = self.ref_el.get_spatial_dimension() for ipts, (A, b) in zip(cell_point_map, self.affine_mappings): ref_pts = apply_mapping(A, b, pts[:, ipts]) - cur_phis = dubiner_recurrence(sd, n, order, ref_pts, A, - self.scale, variant=self.variant) - if self.variant == "integral": - cur_phis = tuple(C0_basis(sd, n, cur_phi) for cur_phi in cur_phis) - phis.append(cur_phis) + phis.append(dubiner_recurrence(sd, n, order, ref_pts, A, + self.scale, variant=self.variant)) if len(self.affine_mappings) == 1: return phis[0] @@ -503,7 +503,6 @@ def polynomial_entity_ids(ref_el, n, variant=None): def polynomial_cell_node_map(ref_el, n, variant=None): - assert hasattr(ref_el, "parent") top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() @@ -535,8 +534,9 @@ def compute_cell_point_map(ref_el, pts, tol=1E-12): for entity in top[sd]: vertices = ref_el.get_vertices_of_subcomplex(top[sd][entity]) A, b = reference_element.make_affine_mapping(vertices, ref_vertices) - A = numpy.vstack((A, numpy.sum(A, axis=0))) - b = numpy.hstack((b, numpy.sum(b, axis=0))) + if sd > 1: + A = numpy.vstack((A, numpy.sum(A, axis=0))) + b = numpy.hstack((b, numpy.sum(b, axis=0))) x = numpy.dot(A, pts) + b[:, None] pts_on_cell = numpy.all(numpy.logical_and(x >= low, x <= high), axis=0) diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index be7f56b52..a5d4aa251 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -48,9 +48,7 @@ class Lagrange(finite_element.CiarletElement): def __init__(self, ref_el, degree, variant="equispaced"): dual = LagrangeDualSet(ref_el, degree, variant=variant) 
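# --- Illustrative aside (not part of the patch series) ----------------------
# The per-subcell tabulation above evaluates the second barycentric formula of
# Berrut & Trefethen cited in the docstring.  A numpy-only sketch of that
# formula on its own (the nodes, weights, data and evaluation points below are
# made up for this example):
import numpy as np

nodes = np.array([0.0, 0.5, 1.0])                       # P2 nodes on [0, 1]
wts = np.array([1.0 / np.prod([xj - xk for xk in nodes if xk != xj])
                for xj in nodes])                       # w_j = 1/prod_{k!=j}(x_j - x_k)
f = nodes**2                                            # data sampled from f(x) = x**2

x = np.array([0.25, 0.75])                              # evaluation points off the nodes
c = wts / (x[:, None] - nodes[None, :])                 # w_j / (x - x_j)
p = (c @ f) / c.sum(axis=1)                             # Eq. (4.2): sum_j c_j f_j / sum_j c_j
assert np.allclose(p, x**2)                             # quadratic data is reproduced exactly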
- sd = ref_el.get_spatial_dimension() - num_cells = len(ref_el.get_topology()[sd]) - if ref_el.shape == LINE and num_cells == 1: + if ref_el.shape == LINE: # In 1D we can use the primal basis as the expansion set, # avoiding any round-off coming from a basis transformation points = [] @@ -58,9 +56,10 @@ def __init__(self, ref_el, degree, variant="equispaced"): # Assert singleton point for each node. pt, = node.get_point_dict().keys() points.append(pt) - # FIXME macro-ize LagrangePolynomial set poly_set = LagrangePolynomialSet(ref_el, points) else: + sd = ref_el.get_spatial_dimension() + num_cells = len(ref_el.get_topology()[sd]) variant = "integral" if num_cells > 1 else None poly_set = polynomial_set.ONPolynomialSet(ref_el, degree, variant=variant) formdegree = 0 # 0-form diff --git a/FIAT/macro.py b/FIAT/macro.py index 3eb9817d6..c30c40841 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -45,9 +45,8 @@ class SplitSimplicialComplex(SimplicialComplex): """ def __init__(self, ref_el, splits=1, variant=None): - self.parent = ref_el vertices, topology = self.split_topology(ref_el, splits=splits, variant=variant) - super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology) + super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology, parent=ref_el) def split_topology(self, ref_el): raise NotImplementedError diff --git a/FIAT/polynomial_set.py b/FIAT/polynomial_set.py index 75d82b2c2..60ba9e246 100644 --- a/FIAT/polynomial_set.py +++ b/FIAT/polynomial_set.py @@ -121,17 +121,16 @@ class ONPolynomialSet(PolynomialSet): """ - def __init__(self, ref_el, degree, shape=tuple(), scale=None, variant=None): - + def __init__(self, ref_el, degree, shape=tuple(), **kwargs): + expansion_set = expansions.ExpansionSet(ref_el, **kwargs) if shape == tuple(): num_components = 1 else: flat_shape = numpy.ravel(shape) num_components = numpy.prod(flat_shape) - num_exp_functions = expansions.polynomial_dimension(ref_el, degree, variant) + num_exp_functions = expansion_set.get_num_members(degree) num_members = num_components * num_exp_functions embedded_degree = degree - expansion_set = expansions.ExpansionSet(ref_el, scale=scale, variant=variant) # set up coefficients if shape == tuple(): @@ -142,7 +141,7 @@ def __init__(self, ref_el, degree, shape=tuple(), scale=None, variant=None): # use functional's index_iterator function cur_bf = 0 for idx in index_iterator(shape): - n = expansions.polynomial_dimension(ref_el, embedded_degree, variant) + n = expansion_set.get_num_members(embedded_degree) for exp_bf in range(n): cur_idx = (cur_bf, *idx, exp_bf) coeffs[cur_idx] = 1.0 @@ -212,18 +211,18 @@ class ONSymTensorPolynomialSet(PolynomialSet): """ - def __init__(self, ref_el, degree, size=None, scale=None, variant=None): + def __init__(self, ref_el, degree, size=None, **kwargs): + expansion_set = expansions.ExpansionSet(ref_el, **kwargs) sd = ref_el.get_spatial_dimension() if size is None: size = sd shape = (size, size) - num_exp_functions = expansions.polynomial_dimension(ref_el, degree, variant) + num_exp_functions = expansion_set.get_num_members(degree) num_components = size * (size + 1) // 2 num_members = num_components * num_exp_functions embedded_degree = degree - expansion_set = expansions.ExpansionSet(ref_el, scale=scale, variant=variant) # set up coefficients for symmetric tensors coeffs_shape = (num_members, *shape, num_exp_functions) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 3dce57192..ea2d199c6 100644 --- a/FIAT/reference_element.py +++ 
b/FIAT/reference_element.py @@ -124,8 +124,7 @@ class Cell(object): """Abstract class for a reference cell. Provides accessors for geometry (vertex coordinates) as well as topology (orderings of vertices that make up edges, facecs, etc.""" - - def __init__(self, shape, vertices, topology): + def __init__(self, shape, vertices, topology, parent=None): """The constructor takes a shape code, the physical vertices expressed as a list of tuples of numbers, and the topology of a cell. @@ -136,6 +135,7 @@ def __init__(self, shape, vertices, topology): self.shape = shape self.vertices = vertices self.topology = topology + self.parent = parent # Given the topology, work out for each entity in the cell, # which other entities it contains. @@ -256,14 +256,14 @@ class SimplicialComplex(Cell): This consists of list of vertex locations and a topology map defining facets. """ - def __init__(self, shape, vertices, topology): + def __init__(self, shape, vertices, topology, parent=None): # Make sure that every facet has the right number of vertices to be # a simplex. for dim in topology: for entity in topology[dim]: assert len(topology[dim][entity]) == dim + 1 - super(SimplicialComplex, self).__init__(shape, vertices, topology) + super(SimplicialComplex, self).__init__(shape, vertices, topology, parent=parent) def compute_normal(self, facet_i): """Returns the unit normal vector to facet i of codimension 1.""" diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 25c79a492..b38f85bcc 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -6,9 +6,10 @@ from FIAT.lagrange import Lagrange -@pytest.fixture(params=("T", "S")) +@pytest.fixture(params=("I", "T", "S")) def cell(request): - return {"T": ufc_simplex(2), + return {"I": ufc_simplex(1), + "T": ufc_simplex(2), "S": ufc_simplex(3)}[request.param] From 24d96bef37f08230a34117148ec695ef0453b198 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 27 Mar 2024 20:19:16 -0500 Subject: [PATCH 22/93] flake8 --- FIAT/barycentric_interpolation.py | 1 - 1 file changed, 1 deletion(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index 8f3758916..04f5396c0 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -120,7 +120,6 @@ def __init__(self, ref_el, pts, shape=tuple()): # use functional's index_iterator function cur_bf = 0 for idx in index_iterator(shape): - n = expansion_set.get_num_members(embedded_degree) for exp_bf in range(num_exp_functions): cur_idx = (cur_bf, *idx, exp_bf) coeffs[cur_idx] = 1.0 From 08aa7986967f647e91cfbe8d9ba6ca7714c9da82 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 27 Mar 2024 20:36:43 -0500 Subject: [PATCH 23/93] Test MacroQuadratureRule to compute a diagonal mass matrix --- test/unit/test_macro.py | 39 ++++++++++++++++----------------------- 1 file changed, 16 insertions(+), 23 deletions(-) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index b38f85bcc..0b6f04038 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -3,6 +3,7 @@ from FIAT.reference_element import ufc_simplex from FIAT.macro import AlfeldSplit, IsoSplit, MacroQuadratureRule from FIAT.quadrature_schemes import create_quadrature +from FIAT.polynomial_set import ONPolynomialSet from FIAT.lagrange import Lagrange @@ -48,33 +49,25 @@ def test_split_make_points(split, cell, degree, variant): def test_split_child_to_parent(split, cell): split_cell = split(cell) mapping = split_cell.get_child_to_parent() - print("") - for dim in 
mapping: - print(mapping[dim]) + # TODO @pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) def test_macro_quadrature(split, cell): - split_cell = split(cell) + ref_el = split(cell) + sd = ref_el.get_spatial_dimension() + + degree = 6 + Q_ref = create_quadrature(cell.construct_subelement(sd), 2*degree) + Q = MacroQuadratureRule(ref_el, Q_ref) + pts, wts = Q.get_points(), Q.get_weights() - degree = 12 - Q_ref = create_quadrature(cell.construct_subelement(1), degree) - Q = MacroQuadratureRule(split_cell, Q_ref) - Q.get_points() - - # import matplotlib.pyplot as plt - # fig = plt.figure() - # sdim = cell.get_spatial_dimension() - # if sdim == 3: - # ax = fig.add_subplot(projection='3d') - # else: - # ax = fig.add_subplot() - # for i, vert in enumerate(split_cell.vertices): - # ax.text(*vert, str(i)) - # - # ax.scatter(*Q.get_points().T) - # ax.axis("equal") - # plt.show() + # Test that the mass matrix or an orthogonal basis is diagonal + U = ONPolynomialSet(ref_el, degree) + phis = U.tabulate(pts)[(0,)*sd] + M = numpy.dot(numpy.multiply(phis, wts), phis.T) + M = M - numpy.diag(M.diagonal()) + assert numpy.allclose(M, 0) @pytest.mark.parametrize("degree", range(1, 5)) @@ -86,7 +79,7 @@ def test_macro_lagrange(variant, degree, split, cell): fe = Lagrange(ref_el, degree, variant=variant) poly_set = fe.get_nodal_basis() - # Test that the poly set is defined on the split and not on the parent cell + # Test that the polynomial set is defined on the split and not on the parent cell assert poly_set.get_reference_element() is ref_el # Test that the finite element is defined on the parent cell and not on the split From d34e047a8a1dc930a69874bb27ac5da050df8547 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 00:02:22 -0500 Subject: [PATCH 24/93] PolynomialSet: variant="integral" -> "bubble" --- FIAT/barycentric_interpolation.py | 3 +- FIAT/expansions.py | 153 +++++++++++++++--------------- FIAT/hierarchical.py | 2 +- FIAT/lagrange.py | 7 +- FIAT/polynomial_set.py | 15 +-- test/unit/test_macro.py | 31 ++++-- 6 files changed, 112 insertions(+), 99 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index 04f5396c0..0049cf0e3 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -49,8 +49,7 @@ def make_dmat(x): class LagrangeLineExpansionSet(expansions.LineExpansionSet): - """Lagrange polynomial set on the line - """ + """Lagrange polynomial set for fixed points the line.""" def __init__(self, ref_el, pts): self.points = pts self.x = numpy.array(pts).flatten() diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 0fd45cd10..7b2062107 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -62,55 +62,6 @@ def jacobi_factors(x, y, z, dx, dy, dz): return fa, fb, fc, dfa, dfb, dfc -def C0_basis(dim, n, phi): - idx = (lambda p: p, morton_index2, morton_index3)[dim-1] - # recover facet bubbles - icur = 0 - phi[icur] *= -1 - for inext in range(1, dim+1): - phi[icur] -= phi[inext] - - if dim == 2: - for i in range(2, n+1): - phi[idx(0, i)] -= phi[idx(1, i-1)] - - elif dim == 3: - for i in range(2, n+1): - for j in range(0, n+1-i): - phi[idx(0, i, j)] -= phi[idx(1, i-1, j)] - icur = idx(0, 0, i) - phi[icur] -= phi[idx(0, 1, i-1)] - phi[icur] -= phi[idx(1, 0, i-1)] - - # reorder by dimension and entity on the reference simplex - dofs = list(range(dim+1)) - if dim == 1: - dofs.extend(range(2, n+1)) - elif dim == 2: - dofs.extend(idx(1, i-1) for i in range(2, n+1)) - dofs.extend(idx(0, i) for i in 
range(2, n+1)) - dofs.extend(idx(i, 0) for i in range(2, n+1)) - - dofs.extend(idx(i, j) for j in range(1, n+1) for i in range(2, n-j+1)) - else: - dofs.extend(idx(0, 1, i-1) for i in range(2, n+1)) - dofs.extend(idx(1, 0, i-1) for i in range(2, n+1)) - dofs.extend(idx(1, i-1, 0) for i in range(2, n+1)) - dofs.extend(idx(0, 0, i) for i in range(2, n+1)) - dofs.extend(idx(0, i, 0) for i in range(2, n+1)) - dofs.extend(idx(i, 0, 0) for i in range(2, n+1)) - - dofs.extend(idx(1, i-1, j) for j in range(1, n+1) for i in range(2, n-j+1)) - dofs.extend(idx(0, i, j) for j in range(1, n+1) for i in range(2, n-j+1)) - dofs.extend(idx(i, 0, j) for j in range(1, n+1) for i in range(2, n-j+1)) - dofs.extend(idx(i, j, 0) for j in range(1, n+1) for i in range(2, n-j+1)) - - dofs.extend(idx(i, j, k) for k in range(1, n+1) for j in range(1, n-k+1) for i in range(2, n-j-k+1)) - - result = [phi[i] for i in dofs] - return result - - def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): """Tabulate a Dubiner expansion set using the recurrence from (Kirby 2010). @@ -121,17 +72,17 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): :arg Jinv: The inverse of the Jacobian of the coordinate mapping from the default simplex. :arg scale: A scale factor that sets the first member of expansion set. :arg variant: Choose between the default (None) orthogonal basis, - 'integral' for integrated Jacobi polynomials, + 'bubble' for integrated Jacobi polynomials, or 'dual' for the L2-duals of the integrated Jacobi polynomials. :returns: A tuple with tabulations of the expansion set and its derivatives. """ if order > 2: raise ValueError("Higher order derivatives not supported") - if variant not in [None, "integral", "dual"]: + if variant not in [None, "bubble", "dual"]: raise ValueError(f"Invalid variant {variant}") - if variant == "integral": + if variant == "bubble": scale = -scale if n == 0: # Always return 1 for n=0 to make regression tests pass @@ -157,7 +108,7 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): raise ValueError("Invalid number of spatial dimensions") beta = 1 if variant == "dual" else 0 - coefficients = integrated_jrc if variant == "integral" else jrc + coefficients = integrated_jrc if variant == "bubble" else jrc X = pad_coordinates(ref_pts, pad_dim) idx = (lambda p: p, morton_index2, morton_index3)[dim-1] for codim in range(dim): @@ -169,7 +120,7 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): icur = idx(*sub_index, 0) inext = idx(*sub_index, 1) - if variant == "integral": + if variant == "bubble": alpha = 2 * sum(sub_index) a = b = -0.5 else: @@ -220,12 +171,64 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): scale = math.sqrt(norm2) for result in results: result[icur] *= scale - - if variant == "integral": - results = tuple(C0_basis(dim, n, result) for result in results) return results +def C0_basis(dim, n, tabulations): + """Modify a tabulation of a hierarchical basis to enforce C0-continitity. + + :arg dim: The spatial dimension of the simplex. + :arg n: The polynomial degree. + :arg tabulations: An iterable tabulations of the hierarchical basis. + + :returns: A tuple of tabulations of the C0 basis. 
+ """ + idx = (lambda p: p, morton_index2, morton_index3)[dim-1] + # Recover facet bubbles + for phi in tabulations: + icur = 0 + phi[icur] *= -1 + for inext in range(1, dim+1): + phi[icur] -= phi[inext] + if dim == 2: + for i in range(2, n+1): + phi[idx(0, i)] -= phi[idx(1, i-1)] + elif dim == 3: + for i in range(2, n+1): + for j in range(0, n+1-i): + phi[idx(0, i, j)] -= phi[idx(1, i-1, j)] + icur = idx(0, 0, i) + phi[icur] -= phi[idx(0, 1, i-1)] + phi[icur] -= phi[idx(1, 0, i-1)] + + # Reorder by dimension and entity on the reference simplex + dofs = list(range(dim+1)) + if dim == 1: + dofs.extend(range(2, n+1)) + elif dim == 2: + dofs.extend(idx(1, i-1) for i in range(2, n+1)) + dofs.extend(idx(0, i) for i in range(2, n+1)) + dofs.extend(idx(i, 0) for i in range(2, n+1)) + + dofs.extend(idx(i, j) for j in range(1, n+1) for i in range(2, n-j+1)) + else: + dofs.extend(idx(0, 1, i-1) for i in range(2, n+1)) + dofs.extend(idx(1, 0, i-1) for i in range(2, n+1)) + dofs.extend(idx(1, i-1, 0) for i in range(2, n+1)) + dofs.extend(idx(0, 0, i) for i in range(2, n+1)) + dofs.extend(idx(0, i, 0) for i in range(2, n+1)) + dofs.extend(idx(i, 0, 0) for i in range(2, n+1)) + + dofs.extend(idx(1, i-1, j) for j in range(1, n+1) for i in range(2, n-j+1)) + dofs.extend(idx(0, i, j) for j in range(1, n+1) for i in range(2, n-j+1)) + dofs.extend(idx(i, 0, j) for j in range(1, n+1) for i in range(2, n-j+1)) + dofs.extend(idx(i, j, 0) for j in range(1, n+1) for i in range(2, n-j+1)) + + dofs.extend(idx(i, j, k) for k in range(1, n+1) for j in range(1, n-k+1) for i in range(2, n-j-k+1)) + + return tuple([phi[i] for i in dofs] for phi in tabulations) + + def xi_triangle(eta): """Maps from [-1,1]^2 to the (-1,1) reference triangle.""" eta1, eta2 = eta @@ -281,8 +284,6 @@ def __init__(self, ref_el, scale=None, variant=None): self.affine_mappings = [reference_element.make_affine_mapping( ref_el.get_vertices_of_subcomplex(top[sd][cell]), base_verts) for cell in top[sd]] - self._dmats_cache = {} - self._cell_node_map_cache = {} if scale is None: scale = math.sqrt(1.0 / self.base_ref_el.volume()) elif isinstance(scale, str): @@ -292,15 +293,18 @@ def __init__(self, ref_el, scale=None, variant=None): elif scale == "l2 piola": scale = 1.0 / ref_el.volume() self.scale = scale + self.continuity = "C0" if variant == "bubble" else None + self._dmats_cache = {} + self._cell_node_map_cache = {} def get_num_members(self, n): - return polynomial_dimension(self.ref_el, n, self.variant) + return polynomial_dimension(self.ref_el, n, self.continuity) def get_cell_node_map(self, n): try: return self._cell_node_map_cache[n] except KeyError: - cell_node_map = polynomial_cell_node_map(self.ref_el, n, self.variant) + cell_node_map = polynomial_cell_node_map(self.ref_el, n, self.continuity) return self._cell_node_map_cache.setdefault(n, cell_node_map) def _tabulate(self, n, pts, order=0): @@ -308,12 +312,15 @@ def _tabulate(self, n, pts, order=0): """ phis = [] cell_point_map = compute_cell_point_map(self.ref_el, pts) - sd = self.ref_el.get_spatial_dimension() for ipts, (A, b) in zip(cell_point_map, self.affine_mappings): ref_pts = apply_mapping(A, b, pts[:, ipts]) - phis.append(dubiner_recurrence(sd, n, order, ref_pts, A, - self.scale, variant=self.variant)) + phi = dubiner_recurrence(sd, n, order, ref_pts, A, + self.scale, variant=self.variant) + if self.continuity == "C0": + phi = C0_basis(sd, n, phi) + phis.append(phi) + if len(self.affine_mappings) == 1: return phis[0] @@ -334,15 +341,14 @@ def get_dmats(self, degree): of the 
gradient of each member of the expansion set: d/dx_k phi_j = sum_i dmat[k, j, i] phi_i. """ - cache = self._dmats_cache key = degree + cache = self._dmats_cache try: return cache[key] except KeyError: pass if degree == 0: return cache.setdefault(key, numpy.zeros((self.ref_el.get_spatial_dimension(), 1, 1), "d")) - pts = reference_element.make_lattice(self.ref_el.get_vertices(), degree, variant="gl") v, dv = self._tabulate(degree, numpy.transpose(pts), order=1) dv = numpy.transpose(dv, (1, 2, 0)) @@ -467,7 +473,7 @@ def __init__(self, ref_el, **kwargs): super(TetrahedronExpansionSet, self).__init__(ref_el, **kwargs) -def polynomial_dimension(ref_el, n, variant=None): +def polynomial_dimension(ref_el, n, continuity=None): """Returns the dimension of the space of polynomials of degree no greater than degree on the reference element.""" if ref_el.get_shape() == reference_element.POINT: @@ -475,7 +481,7 @@ def polynomial_dimension(ref_el, n, variant=None): raise ValueError("Only degree zero polynomials supported on point elements.") return 1 top = ref_el.get_topology() - if variant == "integral": + if continuity == "C0": space_dimension = sum(math.comb(n - 1, dim) * len(top[dim]) for dim in top) else: dim = ref_el.get_spatial_dimension() @@ -483,14 +489,13 @@ def polynomial_dimension(ref_el, n, variant=None): return space_dimension -def polynomial_entity_ids(ref_el, n, variant=None): +def polynomial_entity_ids(ref_el, n, continuity=None): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() entity_ids = {} cur = 0 for dim in sorted(top): - if variant == "integral": - # CG numbering + if continuity == "C0": dofs = math.comb(n - 1, dim) else: # DG numbering @@ -502,12 +507,12 @@ def polynomial_entity_ids(ref_el, n, variant=None): return entity_ids -def polynomial_cell_node_map(ref_el, n, variant=None): +def polynomial_cell_node_map(ref_el, n, continuity=None): top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() - entity_ids = polynomial_entity_ids(ref_el, n, variant) - ref_entity_ids = polynomial_entity_ids(ref_el.construct_subelement(sd), n, variant) + entity_ids = polynomial_entity_ids(ref_el, n, continuity) + ref_entity_ids = polynomial_entity_ids(ref_el.construct_subelement(sd), n, continuity) num_cells = len(top[sd]) dofs_per_cell = sum(len(ref_entity_ids[dim][entity]) diff --git a/FIAT/hierarchical.py b/FIAT/hierarchical.py index c9bc01d98..af4400628 100644 --- a/FIAT/hierarchical.py +++ b/FIAT/hierarchical.py @@ -121,7 +121,7 @@ def __init__(self, ref_el, degree, variant=None): if degree < 1: raise ValueError(f"{type(self).__name__} elements only valid for k >= 1") - poly_set = ONPolynomialSet(ref_el, degree, variant="integral") + poly_set = ONPolynomialSet(ref_el, degree, variant="bubble") dual = IntegratedLegendreDual(ref_el, degree) formdegree = 0 # 0-form super(IntegratedLegendre, self).__init__(poly_set, dual, degree, formdegree) diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index a5d4aa251..a0d9eacab 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -58,9 +58,8 @@ def __init__(self, ref_el, degree, variant="equispaced"): points.append(pt) poly_set = LagrangePolynomialSet(ref_el, points) else: - sd = ref_el.get_spatial_dimension() - num_cells = len(ref_el.get_topology()[sd]) - variant = "integral" if num_cells > 1 else None - poly_set = polynomial_set.ONPolynomialSet(ref_el, degree, variant=variant) + num_cells = len(ref_el.get_topology()[ref_el.get_spatial_dimension()]) + poly_variant = "bubble" if num_cells > 1 else None + poly_set = 
polynomial_set.ONPolynomialSet(ref_el, degree, variant=poly_variant) formdegree = 0 # 0-form super(Lagrange, self).__init__(poly_set, dual, degree, formdegree) diff --git a/FIAT/polynomial_set.py b/FIAT/polynomial_set.py index 60ba9e246..c18bd1138 100644 --- a/FIAT/polynomial_set.py +++ b/FIAT/polynomial_set.py @@ -54,7 +54,6 @@ class PolynomialSet(object): empty) tuple giving the index for a vector- or tensor-valued function. """ - def __init__(self, ref_el, degree, embedded_degree, expansion_set, coeffs): self.ref_el = ref_el self.num_members = coeffs.shape[0] @@ -74,8 +73,7 @@ def tabulate(self, pts, jet_order=0): D = self.ref_el.get_spatial_dimension() result = {} for i in range(jet_order + 1): - alphas = mis(D, i) - for alpha in alphas: + for alpha in mis(D, i): result[alpha] = numpy.dot(self.coeffs, base_vals[alpha]) return result @@ -118,9 +116,7 @@ class ONPolynomialSet(PolynomialSet): """Constructs an orthonormal basis out of expansion set by having an identity matrix of coefficients. Can be used to specify ON bases for vector- and tensor-valued sets as well. - """ - def __init__(self, ref_el, degree, shape=tuple(), **kwargs): expansion_set = expansions.ExpansionSet(ref_el, **kwargs) if shape == tuple(): @@ -155,7 +151,6 @@ def project(f, U, Q): """Computes the expansion coefficients of f in terms of the members of a polynomial set U. Numerical integration is performed by quadrature rule Q. - """ pts = Q.get_points() wts = Q.get_weights() @@ -180,7 +175,6 @@ def polynomial_set_union_normalized(A, B): whose span is the same as that of span(A) union span(B). It may not contain any of the same members of the set, as we construct a span via SVD. - """ new_coeffs = numpy.array(list(A.coeffs) + list(B.coeffs)) func_shape = new_coeffs.shape[1:] @@ -208,9 +202,7 @@ def polynomial_set_union_normalized(A, B): class ONSymTensorPolynomialSet(PolynomialSet): """Constructs an orthonormal basis for symmetric-tensor-valued polynomials on a reference element. - """ - def __init__(self, ref_el, degree, size=None, **kwargs): expansion_set = expansions.ExpansionSet(ref_el, **kwargs) @@ -244,11 +236,10 @@ def __init__(self, ref_el, degree, size=None, **kwargs): def make_bubbles(ref_el, degree, shape=()): - """Construct a polynomial set with bubbles up to the given degree. - + """Construct a polynomial set with interior bubbles up to the given degree. 
""" dim = ref_el.get_spatial_dimension() - poly_set = ONPolynomialSet(ref_el, degree, shape=shape, scale="L2 piola", variant="integral") + poly_set = ONPolynomialSet(ref_el, degree, shape=shape, scale="L2 piola", variant="bubble") if dim == 1: # odd / even reordering degrees = chain(range(dim+1, degree+1, 2), range(dim+2, degree+1, 2)) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 0b6f04038..e43d7a8aa 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -45,11 +45,30 @@ def test_split_make_points(split, cell, degree, variant): assert numpy.allclose(mapped_pts, pts_entity) -@pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) -def test_split_child_to_parent(split, cell): - split_cell = split(cell) - mapping = split_cell.get_child_to_parent() - # TODO +def test_split_child_to_parent(cell): + split_cell = IsoSplit(cell) + + dim = cell.get_spatial_dimension() + degree = 2 if dim == 3 else 4 + parent_degree = 2*degree + + top = cell.get_topology() + parent_pts = {dim: {} for dim in top} + for dim in top: + for entity in top[dim]: + parent_pts[dim][entity] = cell.make_points(dim, entity, parent_degree) + + top = split_cell.get_topology() + child_pts = {dim: {} for dim in top} + for dim in top: + for entity in top[dim]: + child_pts[dim][entity] = split_cell.make_points(dim, entity, degree) + + child_to_parent = split_cell.get_child_to_parent() + for dim in top: + for entity in top[dim]: + parent_dim, parent_entity = child_to_parent[dim][entity] + assert set(child_pts[dim][entity]) <= set(parent_pts[parent_dim][parent_entity]) @pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) @@ -62,7 +81,7 @@ def test_macro_quadrature(split, cell): Q = MacroQuadratureRule(ref_el, Q_ref) pts, wts = Q.get_points(), Q.get_weights() - # Test that the mass matrix or an orthogonal basis is diagonal + # Test that the mass matrix for an orthogonal basis is diagonal U = ONPolynomialSet(ref_el, degree) phis = U.tabulate(pts)[(0,)*sd] M = numpy.dot(numpy.multiply(phis, wts), phis.T) From 7cb0fc9c9f3a249c98834396070643b43981d54e Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 07:39:37 -0500 Subject: [PATCH 25/93] ExpansionSet: docstrings --- FIAT/expansions.py | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 7b2062107..f10b16bd5 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -175,7 +175,7 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): def C0_basis(dim, n, tabulations): - """Modify a tabulation of a hierarchical basis to enforce C0-continitity. + """Modify a tabulation of a hierarchical basis to enforce C0-continuity. :arg dim: The spatial dimension of the simplex. :arg n: The polynomial degree. @@ -247,6 +247,7 @@ def xi_tetrahedron(eta): def apply_mapping(A, b, pts): + """Apply an affine mapping to an d-by-npts array of points.""" if isinstance(pts, numpy.ndarray) and len(pts.shape) == 2: return numpy.dot(A, pts) + b[:, None] else: @@ -308,8 +309,7 @@ def get_cell_node_map(self, n): return self._cell_node_map_cache.setdefault(n, cell_node_map) def _tabulate(self, n, pts, order=0): - """A version of tabulate() that also works for a single point. 
- """ + """A version of tabulate() that also works for a single point.""" phis = [] cell_point_map = compute_cell_point_map(self.ref_el, pts) sd = self.ref_el.get_spatial_dimension() @@ -475,7 +475,7 @@ def __init__(self, ref_el, **kwargs): def polynomial_dimension(ref_el, n, continuity=None): """Returns the dimension of the space of polynomials of degree no - greater than degree on the reference element.""" + greater than n on the reference element.""" if ref_el.get_shape() == reference_element.POINT: if n > 0: raise ValueError("Only degree zero polynomials supported on point elements.") @@ -490,6 +490,13 @@ def polynomial_dimension(ref_el, n, continuity=None): def polynomial_entity_ids(ref_el, n, continuity=None): + """Maps facets to members of a polynomial basis. + + :arg ref_el: a SimplicialComplex. + :arg n: the polynomial degree of the expansion set. + :arg continuity: the continuity of the expansion set. + :returns: a dict of dicts mapping dimension and entity id to basis functions. + """ top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() entity_ids = {} @@ -508,6 +515,13 @@ def polynomial_entity_ids(ref_el, n, continuity=None): def polynomial_cell_node_map(ref_el, n, continuity=None): + """Maps cells on a simplicial complex to members of a polynomial basis. + + :arg ref_el: a SimplicialComplex. + :arg n: the polynomial degree of the expansion set. + :arg continuity: the continuity of the expansion set. + :returns: a numpy array mapping cell id to basis functions supported on that cell. + """ top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() @@ -528,6 +542,13 @@ def polynomial_cell_node_map(ref_el, n, continuity=None): def compute_cell_point_map(ref_el, pts, tol=1E-12): + """Maps cells on a simplicial complex to points. + + :arg ref_el: a SimplicialComplex. + :arg pts: a d-by-npts array of physical coordinates. + :kwarg tol: the absolute tolerance. + :returns: a numpy array mapping cell id to points located on that cell. 
+ """ top = ref_el.get_topology() sd = ref_el.get_spatial_dimension() if len(top[sd]) == 1: From 8b918a5f2e65eafdbfb83dffdc978f5b737d54b5 Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 28 Mar 2024 13:48:33 -0500 Subject: [PATCH 26/93] Allow Lagrange to be a macro-element via variant options --- FIAT/check_format_variant.py | 25 +++++++++++++++++++++++++ FIAT/lagrange.py | 26 ++++++++++++++++++++++---- 2 files changed, 47 insertions(+), 4 deletions(-) diff --git a/FIAT/check_format_variant.py b/FIAT/check_format_variant.py index e7db98d0b..495a83cf8 100644 --- a/FIAT/check_format_variant.py +++ b/FIAT/check_format_variant.py @@ -1,4 +1,5 @@ import re +from FIAT.macro import IsoSplit, AlfeldSplit def check_format_variant(variant, degree): @@ -20,3 +21,27 @@ def check_format_variant(variant, degree): 'or variant="integral(q)"') return variant, interpolant_degree + + +def parse_lagrange_variant(variant): + options = variant.replace(" ", "").split(",") + assert len(options) <= 2 + point_variant = "equispaced" + splitting = None + + for pre_opt in options: + opt = pre_opt.lower() + if opt == "alfeld": + splitting = AlfeldSplit + elif opt == "iso": + splitting = IsoSplit + elif opt.startswith("iso"): + match = re.match(r"^iso(?:\((\d+)\))?$", opt) + k, = match.groups() + splitting = lambda T: IsoSplit(T, int(k)) + elif opt in ["equispaced", "gll", "spectral"]: + point_variant = opt + else: + raise ValueError("Illegal variant option") + + return splitting, point_variant diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index a0d9eacab..8e775acea 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -9,6 +9,7 @@ from FIAT.orientation_utils import make_entity_permutations_simplex from FIAT.barycentric_interpolation import LagrangePolynomialSet from FIAT.reference_element import LINE +from FIAT.check_format_variant import parse_lagrange_variant class LagrangeDualSet(dual_set.DualSet): @@ -16,7 +17,7 @@ class LagrangeDualSet(dual_set.DualSet): simplices of any dimension. Nodes are point evaluation at equispaced points.""" - def __init__(self, ref_el, degree, variant="equispaced"): + def __init__(self, ref_el, degree, point_variant="equispaced"): entity_ids = {} nodes = [] entity_permutations = {} @@ -31,7 +32,7 @@ def __init__(self, ref_el, degree, variant="equispaced"): entity_permutations[dim] = {} perms = {0: [0]} if dim == 0 else make_entity_permutations_simplex(dim, degree - dim) for entity in sorted(top[dim]): - pts_cur = ref_el.make_points(dim, entity, degree, variant=variant) + pts_cur = ref_el.make_points(dim, entity, degree, variant=point_variant) nodes_cur = [functional.PointEvaluation(ref_el, x) for x in pts_cur] nnodes_cur = len(nodes_cur) @@ -44,10 +45,27 @@ def __init__(self, ref_el, degree, variant="equispaced"): class Lagrange(finite_element.CiarletElement): - """The Lagrange finite element. It is what it is.""" + """The Lagrange finite element. + + :arg ref_el: The reference element, which could be a standard FIAT simplex or a split complex + :arg degree: The polynomial degree + :arg variant: A comma-separated string that may specify the type of point distribution + and the splitting strategy if a macro element is desired. + Either option may be omitted. The default point type is equispaced + and the default splitting strategy is None. + Example: variant='gll' gives a standard unsplit point distribution with + spectral points. + variant='equispaced,Iso(2)' with degree=1 gives the P2:P1 iso element. 
+ variant='Alfeld' can be used to obtain a barycentrically refined + macroelement for Scott-Vogelius. + """ def __init__(self, ref_el, degree, variant="equispaced"): - dual = LagrangeDualSet(ref_el, degree, variant=variant) + splitting, point_variant = parse_lagrange_variant(variant) + if splitting is not None: + ref_el = splitting(ref_el) + + dual = LagrangeDualSet(ref_el, degree, point_variant=point_variant) if ref_el.shape == LINE: # In 1D we can use the primal basis as the expansion set, # avoiding any round-off coming from a basis transformation From 0661f789b7d09b0057ed66d75ea1147f7707b6a1 Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 28 Mar 2024 14:16:25 -0500 Subject: [PATCH 27/93] Tweak variant checker --- FIAT/check_format_variant.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/FIAT/check_format_variant.py b/FIAT/check_format_variant.py index 495a83cf8..0ba09566d 100644 --- a/FIAT/check_format_variant.py +++ b/FIAT/check_format_variant.py @@ -26,7 +26,9 @@ def check_format_variant(variant, degree): def parse_lagrange_variant(variant): options = variant.replace(" ", "").split(",") assert len(options) <= 2 - point_variant = "equispaced" + supported_point_variants = ["equispaced", "gll", "spectral"] + point_variant = "spectral" + splitting = None for pre_opt in options: @@ -39,7 +41,7 @@ def parse_lagrange_variant(variant): match = re.match(r"^iso(?:\((\d+)\))?$", opt) k, = match.groups() splitting = lambda T: IsoSplit(T, int(k)) - elif opt in ["equispaced", "gll", "spectral"]: + elif opt in supported_point_variants: point_variant = opt else: raise ValueError("Illegal variant option") From 222d1e26002ccad7cfd36e4df6f9a8905132c2aa Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 28 Mar 2024 14:24:54 -0500 Subject: [PATCH 28/93] finite elements know if they are macroelements now --- FIAT/finite_element.py | 3 +++ FIAT/macro.py | 3 +++ FIAT/reference_element.py | 3 +++ 3 files changed, 9 insertions(+) diff --git a/FIAT/finite_element.py b/FIAT/finite_element.py index 52a316637..14142b3e7 100644 --- a/FIAT/finite_element.py +++ b/FIAT/finite_element.py @@ -108,6 +108,9 @@ def is_nodal(): """ return False + def is_macroelement(self): + return self.ref_el.is_macrocell() + class CiarletElement(FiniteElement): """Class implementing Ciarlet's abstraction of a finite element diff --git a/FIAT/macro.py b/FIAT/macro.py index c30c40841..efdd1e586 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -94,6 +94,9 @@ def construct_subelement(self, dimension): """ return self.parent.construct_subelement(dimension) + def is_macrocell(self): + return True + class AlfeldSplit(SplitSimplicialComplex): diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index ea2d199c6..3c2c5706e 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -250,6 +250,9 @@ def cell_orientation_reflection_map(self): """Return the map indicating whether each possible cell orientation causes reflection (``1``) or not (``0``).""" raise NotImplementedError("Should be implemented in a subclass.") + def is_macrocell(self): + return False + class SimplicialComplex(Cell): r"""Abstract class for a simplicial complex. 
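A minimal usage sketch (not part of any patch in this series) of the variant mechanism introduced above, assuming the Lagrange API exactly as it stands after [PATCH 28/93]; the two cases mirror the test_is_macro test added later in the series.

from FIAT.reference_element import ufc_simplex
from FIAT.lagrange import Lagrange

# "Alfeld" requests the barycentric split and "equispaced" the point
# distribution; the resulting element is a macro element defined on the
# split complex but laid out on the parent cell's entities.
P3_alfeld = Lagrange(ufc_simplex(2), 3, variant="Alfeld,equispaced")
assert P3_alfeld.is_macroelement()

# A plain point-distribution variant leaves the reference simplex unsplit.
P2 = Lagrange(ufc_simplex(3), 2, variant="gll")
assert not P2.is_macroelement()
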
From adb22c12b38a82515fd682f23c981243031ba875 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 15:05:46 -0500 Subject: [PATCH 29/93] Macro: docstrings --- FIAT/barycentric_interpolation.py | 21 +++-- FIAT/expansions.py | 6 +- FIAT/macro.py | 137 ++++++++++++++++++++---------- 3 files changed, 111 insertions(+), 53 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index 0049cf0e3..d563e2a44 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -49,7 +49,7 @@ def make_dmat(x): class LagrangeLineExpansionSet(expansions.LineExpansionSet): - """Lagrange polynomial set for fixed points the line.""" + """Lagrange polynomial expansion set for given points the line.""" def __init__(self, ref_el, pts): self.points = pts self.x = numpy.array(pts).flatten() @@ -67,6 +67,9 @@ def __init__(self, ref_el, pts): def get_num_members(self, n): return len(self.points) + def get_cell_node_map(self, n): + return self.cell_node_map + def get_points(self): return self.points @@ -75,16 +78,20 @@ def get_dmats(self, degree): def _tabulate(self, n, pts, order=0): num_members = self.get_num_members(n) - cell_node_map = self.cell_node_map + cell_node_map = self.get_cell_node_map(n) cell_point_map = expansions.compute_cell_point_map(self.ref_el, pts) pts = numpy.asarray(pts).flatten() - - results = [numpy.zeros((num_members, len(pts)), dtype=pts.dtype) for r in range(order+1)] + results = None for ibfs, ipts, wts, dmat in zip(cell_node_map, cell_point_map, self.weights, self.dmats): vals = barycentric_interpolation(self.x[ibfs], wts, dmat, pts[ipts], order=order) - indices = Ellipsis if len(cell_node_map) == 1 else numpy.ix_(ibfs, ipts) - for result, val in zip(results, vals): - result[indices] = val + if len(cell_node_map) == 1: + results = vals + else: + if results is None: + results = [numpy.zeros((num_members, len(pts)), dtype=vals[0].dtype) for r in range(order+1)] + indices = numpy.ix_(ibfs, ipts) + for result, val in zip(results, vals): + result[indices] = val for r in range(order+1): shape = results[r].shape diff --git a/FIAT/expansions.py b/FIAT/expansions.py index f10b16bd5..db4bafdfa 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -67,7 +67,7 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): :arg dim: The spatial dimension of the simplex. :arg n: The polynomial degree. - :arg order: The maximum order of differenation. + :arg order: The maximum order of differentiation. :arg ref_pts: An ``ndarray`` with the coordinates on the default (-1, 1)^d simplex. :arg Jinv: The inverse of the Jacobian of the coordinate mapping from the default simplex. :arg scale: A scale factor that sets the first member of expansion set. @@ -247,7 +247,7 @@ def xi_tetrahedron(eta): def apply_mapping(A, b, pts): - """Apply an affine mapping to an d-by-npts array of points.""" + """Apply an affine mapping to an column-stacked array of points.""" if isinstance(pts, numpy.ndarray) and len(pts.shape) == 2: return numpy.dot(A, pts) + b[:, None] else: @@ -545,7 +545,7 @@ def compute_cell_point_map(ref_el, pts, tol=1E-12): """Maps cells on a simplicial complex to points. :arg ref_el: a SimplicialComplex. - :arg pts: a d-by-npts array of physical coordinates. + :arg pts: a column-stacked array of physical coordinates. :kwarg tol: the absolute tolerance. :returns: a numpy array mapping cell id to points located on that cell. 
""" diff --git a/FIAT/macro.py b/FIAT/macro.py index c30c40841..396686f87 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -6,16 +6,24 @@ def bary_to_xy(verts, bary, result=None): - # verts is (sdim + 1) x sdim so verts[i, :] is i:th vertex - # bary is [npts, sdim + 1] - # result is [npts, sdim] + """Maps barycentric coordinates to physical points. + + :arg verts: A tuple of points. + :arg bary: A row-stacked numpy array of barycentric coordinates. + :arg result: A row-stacked numpy array of physical points. + :returns: result + """ return numpy.dot(bary, verts, out=result) def xy_to_bary(verts, pts, result=None): - # verts is (sdim + 1) x sdim so verts[i, :] is i:th vertex - # result is [npts, sdim] - # bary is [npts, sdim + 1] + """Maps physical points to barycentric coordinates. + + :arg verts: A tuple of points. + :arg ots: A row-stacked numpy array of physical points. + :arg result: A row-stacked numpy array of barycentric coordinates. + :returns: result + """ verts = numpy.asarray(verts) npts = pts.shape[0] sdim = verts.shape[1] @@ -31,60 +39,83 @@ def xy_to_bary(verts, pts, result=None): def facet_support(facet_coords, tol=1.e-12): - # facet_coords is an iterable of tuples (barycentric coordinates) - # return vertex ids where some x is nonzero + """Returns the support of a facet. + + :arg facet_coords: An iterable of tuples (barycentric coordinates) describing the facet. + :returns: A tuple of vertex ids where some coordinate is nonzero. + """ return tuple(sorted(set(i for x in facet_coords for (i, xi) in enumerate(x) if abs(xi) > tol))) def invert_cell_topology(T): + """Returns a dict of dicts mapping dimension x vertices to entity id.""" return {dim: {T[dim][entity]: entity for entity in T[dim]} for dim in T} class SplitSimplicialComplex(SimplicialComplex): """Abstract class to implement a split on a Simplex """ - def __init__(self, ref_el, splits=1, variant=None): vertices, topology = self.split_topology(ref_el, splits=splits, variant=variant) - super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology, parent=ref_el) - def split_topology(self, ref_el): - raise NotImplementedError + bary = xy_to_bary(numpy.asarray(ref_el.get_vertices()), numpy.asarray(vertices)) + parent_top = ref_el.get_topology() + parent_inv_top = invert_cell_topology(parent_top) - def get_child_to_parent(self): - bary = xy_to_bary(numpy.asarray(self.parent.get_vertices()), - numpy.asarray(self.get_vertices())) - top = self.get_topology() - parent_inv_top = invert_cell_topology(self.parent.get_topology()) + # dict mapping child facets to their parent facet child_to_parent = {} - for dim in top: + # dict mapping parent facets to their children + parent_to_children = {dim: {entity: [] for entity in parent_top[dim]} for dim in parent_top} + for dim in topology: child_to_parent[dim] = {} - for entity in top[dim]: - facet_ids = top[dim][entity] + for entity in topology[dim]: + facet_ids = topology[dim][entity] facet_coords = bary[list(facet_ids), :] parent_verts = facet_support(facet_coords) parent_dim = len(parent_verts) - 1 parent_entity = parent_inv_top[parent_dim][parent_verts] child_to_parent[dim][entity] = (parent_dim, parent_entity) - return child_to_parent + parent_to_children[parent_dim][parent_entity].append((dim, entity)) - def get_cell_connectivity(self): - """Connectitivity from cell in a complex to globally number and - respects the entity numbering on the reference cell. 
- """ - sd = self.get_spatial_dimension() - top = self.get_topology() - inv_top = invert_cell_topology(top) - parent_top = self.parent.get_topology() - connectivity = {cell: {dim: [] for dim in top} for cell in top[sd]} - for cell in top[sd]: - cell_verts = top[sd][cell] + self._child_to_parent = child_to_parent + self._parent_to_children = parent_to_children + + sd = ref_el.get_spatial_dimension() + inv_top = invert_cell_topology(topology) + + # dict mapping cells to boundary facets for each dimension, + # while respecting the ordering on the parent simplex + connectivity = {cell: {dim: [] for dim in topology} for cell in topology[sd]} + for cell in topology[sd]: + cell_verts = topology[sd][cell] for dim in top: for entity in parent_top[dim]: ref_verts = parent_top[dim][entity] global_verts = tuple(cell_verts[v] for v in ref_verts) connectivity[cell][dim].append(inv_top[dim][global_verts]) - return connectivity + self._cell_connectivity = connectivity + + super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology, parent=ref_el) + + def split_topology(self, ref_el): + raise NotImplementedError + + def get_child_to_parent(self): + """Maps split complex facet tuple to its parent entity tuple.""" + return self._child_to_parent + + def get_parent_to_children(self): + """Maps parent facet tuple to a list of tuples of entites in the split complex.""" + return self._parent_to_children + + def get_cell_connectivity(self): + """Connectitivity from cell in a complex to global facet ids and + respects the entity numbering on the reference cell. + + N.B. cell_connectivity[cell][dim] has the same contents as + self.connectivity[(sd, dim)][cell], except those are sorted. + """ + return self._cell_connectivity def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity @@ -96,20 +127,25 @@ def construct_subelement(self, dimension): class AlfeldSplit(SplitSimplicialComplex): - + """Alfeld splitting of a simplex. + """ def split_topology(self, ref_el, splits=1, variant=None): assert splits == 1 sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() + # Keep old facets, respecting the old numbering new_topology = copy.deepcopy(top) + # Discard the cell interior new_topology[sd] = {} + # Append the barycenter as the new vertex barycenter = ref_el.make_points(sd, 0, sd+1) old_verts = ref_el.get_vertices() new_verts = old_verts + tuple(barycenter) new_vert_id = len(old_verts) - new_topology[0][new_vert_id] = (new_vert_id,) + + # Append new facets by adding the barycenter to old facets for dim in range(1, sd + 1): offset = len(new_topology[dim]) for entity, ids in top[dim-1].items(): @@ -167,19 +203,34 @@ def split_topology(self, ref_el, splits=1, variant=None): class MacroQuadratureRule(QuadratureRule): + """Composite quadrature rule on parent facets that respects the splitting. + + :arg ref_el: A simplicial complex. + :arg Q_ref: A QuadratureRule on the reference simplex. + :args parent_facets: An iterable of facets of the same dimension as Q_ref, + defaults to all facets. + + :returns: A quadrature rule on the sub entities of the simplicial complex. 
+ """ + def __init__(self, ref_el, Q_ref, parent_facets=None): + parent_dim = Q_ref.ref_el.get_spatial_dimension() + if parent_facets is not None: + parent_cell = ref_el.parent + parent_to_children = parent_cell.get_parent_to_children() + facets = [] + for parent_entity in parent_facets: + children = parent_to_children[parent_dim][parent_entity] + facets.extend(entity for dim, entity in children if dim == parent_dim) + else: + top = ref_el.get_topology() + facets = top[parent_dim] - def __init__(self, cell_complex, Q_ref): pts = [] wts = [] - sd = cell_complex.get_spatial_dimension() - ref_el = cell_complex.construct_subelement(sd) - t = cell_complex.get_topology() - dim = Q_ref.ref_el.get_spatial_dimension() - for entity in t[dim]: - Q_cur = FacetQuadratureRule(cell_complex, dim, entity, Q_ref) + for entity in facets: + Q_cur = FacetQuadratureRule(ref_el, parent_dim, entity, Q_ref) pts.extend(Q_cur.pts) wts.extend(Q_cur.wts) - pts = tuple(pts) wts = tuple(wts) super(MacroQuadratureRule, self).__init__(ref_el, pts, wts) From cf6d7e9f4c15738b13e435b599ede87ad6efb0f7 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 15:55:45 -0500 Subject: [PATCH 30/93] Cell: expose get_parent() and keep parent private --- FIAT/dual_set.py | 2 +- FIAT/macro.py | 10 +++++++--- FIAT/reference_element.py | 10 ++++++---- test/unit/test_macro.py | 2 +- 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/FIAT/dual_set.py b/FIAT/dual_set.py index 454f9c481..f0576d8d5 100644 --- a/FIAT/dual_set.py +++ b/FIAT/dual_set.py @@ -198,7 +198,7 @@ def make_entity_closure_ids(ref_el, entity_ids): def merge_entity_ids(ref_el, entity_ids): """Collect DOFs from simplicial complex onto facets of parent cell""" - parent_cell = ref_el.parent + parent_cell = ref_el.get_parent() if parent_cell is None: return ref_el, entity_ids diff --git a/FIAT/macro.py b/FIAT/macro.py index 78e02f705..9d59cf980 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -56,6 +56,7 @@ class SplitSimplicialComplex(SimplicialComplex): """Abstract class to implement a split on a Simplex """ def __init__(self, ref_el, splits=1, variant=None): + self._parent = ref_el vertices, topology = self.split_topology(ref_el, splits=splits, variant=variant) bary = xy_to_bary(numpy.asarray(ref_el.get_vertices()), numpy.asarray(vertices)) @@ -88,14 +89,14 @@ def __init__(self, ref_el, splits=1, variant=None): connectivity = {cell: {dim: [] for dim in topology} for cell in topology[sd]} for cell in topology[sd]: cell_verts = topology[sd][cell] - for dim in top: + for dim in parent_top: for entity in parent_top[dim]: ref_verts = parent_top[dim][entity] global_verts = tuple(cell_verts[v] for v in ref_verts) connectivity[cell][dim].append(inv_top[dim][global_verts]) self._cell_connectivity = connectivity - super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology, parent=ref_el) + super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology) def split_topology(self, ref_el): raise NotImplementedError @@ -123,11 +124,14 @@ def construct_subelement(self, dimension): :arg dimension: subentity dimension (integer) """ - return self.parent.construct_subelement(dimension) + return self.get_parent().construct_subelement(dimension) def is_macrocell(self): return True + def get_parent(self): + return self._parent + class AlfeldSplit(SplitSimplicialComplex): """Alfeld splitting of a simplex. 
diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 3c2c5706e..59d8d6354 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -124,7 +124,7 @@ class Cell(object): """Abstract class for a reference cell. Provides accessors for geometry (vertex coordinates) as well as topology (orderings of vertices that make up edges, facecs, etc.""" - def __init__(self, shape, vertices, topology, parent=None): + def __init__(self, shape, vertices, topology): """The constructor takes a shape code, the physical vertices expressed as a list of tuples of numbers, and the topology of a cell. @@ -135,7 +135,6 @@ def __init__(self, shape, vertices, topology, parent=None): self.shape = shape self.vertices = vertices self.topology = topology - self.parent = parent # Given the topology, work out for each entity in the cell, # which other entities it contains. @@ -253,20 +252,23 @@ def cell_orientation_reflection_map(self): def is_macrocell(self): return False + def get_parent(self): + return None + class SimplicialComplex(Cell): r"""Abstract class for a simplicial complex. This consists of list of vertex locations and a topology map defining facets. """ - def __init__(self, shape, vertices, topology, parent=None): + def __init__(self, shape, vertices, topology): # Make sure that every facet has the right number of vertices to be # a simplex. for dim in topology: for entity in topology[dim]: assert len(topology[dim][entity]) == dim + 1 - super(SimplicialComplex, self).__init__(shape, vertices, topology, parent=parent) + super(SimplicialComplex, self).__init__(shape, vertices, topology) def compute_normal(self, facet_i): """Returns the unit normal vector to facet i of codimension 1.""" diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index e43d7a8aa..1ee213a81 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -106,7 +106,7 @@ def test_macro_lagrange(variant, degree, split, cell): # Test that parent entities are the ones exposed entity_ids = fe.entity_dofs() - parent_top = ref_el.parent.get_topology() + parent_top = ref_el.get_parent().get_topology() for dim in parent_top: assert len(entity_ids[dim]) == len(parent_top[dim]) From 17cdec4e4eaa2292f213b7441e7bcdeb2cacecce Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 28 Mar 2024 16:03:16 -0500 Subject: [PATCH 31/93] plumb in reference complex idea for supporting quadrature and macro element determination --- FIAT/check_format_variant.py | 8 +++++--- FIAT/finite_element.py | 18 ++++++++++++++---- FIAT/lagrange.py | 3 +-- FIAT/macro.py | 3 --- FIAT/reference_element.py | 3 --- test/unit/test_macro.py | 7 +++++++ 6 files changed, 27 insertions(+), 15 deletions(-) diff --git a/FIAT/check_format_variant.py b/FIAT/check_format_variant.py index 0ba09566d..bf9b2a012 100644 --- a/FIAT/check_format_variant.py +++ b/FIAT/check_format_variant.py @@ -26,8 +26,10 @@ def check_format_variant(variant, degree): def parse_lagrange_variant(variant): options = variant.replace(" ", "").split(",") assert len(options) <= 2 - supported_point_variants = ["equispaced", "gll", "spectral"] - point_variant = "spectral" + supported_point_variants = {"equispaced": "equispaced", + "gll": "gll", + "spectral": "gll"} + point_variant = "gll" splitting = None @@ -42,7 +44,7 @@ def parse_lagrange_variant(variant): k, = match.groups() splitting = lambda T: IsoSplit(T, int(k)) elif opt in supported_point_variants: - point_variant = opt + point_variant = supported_point_variants[opt] else: raise ValueError("Illegal variant 
option") diff --git a/FIAT/finite_element.py b/FIAT/finite_element.py index 14142b3e7..a0114ad87 100644 --- a/FIAT/finite_element.py +++ b/FIAT/finite_element.py @@ -23,7 +23,7 @@ class FiniteElement(object): this class are non-nodal unless they are CiarletElement subclasses. """ - def __init__(self, ref_el, dual, order, formdegree=None, mapping="affine"): + def __init__(self, ref_el, dual, order, formdegree=None, mapping="affine", ref_complex=None): # Relevant attributes that do not necessarily depend on a PolynomialSet object: # The order (degree) of the polynomial basis self.order = order @@ -33,6 +33,10 @@ def __init__(self, ref_el, dual, order, formdegree=None, mapping="affine"): self.ref_el = ref_el self.dual = dual + # when the finite element is a macro-element, the basis will be defined over + # a partition of the reference element into some complex. + self.ref_complex = ref_complex or ref_el + # The appropriate mapping for the finite element space self._mapping = mapping @@ -40,6 +44,11 @@ def get_reference_element(self): """Return the reference element for the finite element.""" return self.ref_el + def get_reference_complex(self): + """Return the reference element complex, which is either the reference element or + its subdivision in the case of a macro element.""" + return self.ref_complex + def get_dual_set(self): """Return the dual for the finite element.""" return self.dual @@ -109,7 +118,7 @@ def is_nodal(): return False def is_macroelement(self): - return self.ref_el.is_macrocell() + return self.ref_el is not self.ref_complex class CiarletElement(FiniteElement): @@ -120,9 +129,10 @@ class CiarletElement(FiniteElement): basis generated from polynomials encoded in a `PolynomialSet`. """ - def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref_el=None): + def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref_el=None, ref_complex=None): ref_el = ref_el or dual.get_reference_element() - super(CiarletElement, self).__init__(ref_el, dual, order, formdegree, mapping) + ref_complex = ref_complex or ref_el + super(CiarletElement, self).__init__(ref_el, dual, order, formdegree, mapping, ref_complex) # build generalized Vandermonde matrix old_coeffs = poly_set.get_coeffs() diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index 8e775acea..7e0d738a6 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -64,7 +64,6 @@ def __init__(self, ref_el, degree, variant="equispaced"): splitting, point_variant = parse_lagrange_variant(variant) if splitting is not None: ref_el = splitting(ref_el) - dual = LagrangeDualSet(ref_el, degree, point_variant=point_variant) if ref_el.shape == LINE: # In 1D we can use the primal basis as the expansion set, @@ -80,4 +79,4 @@ def __init__(self, ref_el, degree, variant="equispaced"): poly_variant = "bubble" if num_cells > 1 else None poly_set = polynomial_set.ONPolynomialSet(ref_el, degree, variant=poly_variant) formdegree = 0 # 0-form - super(Lagrange, self).__init__(poly_set, dual, degree, formdegree) + super(Lagrange, self).__init__(poly_set, dual, degree, formdegree, ref_complex=ref_el) diff --git a/FIAT/macro.py b/FIAT/macro.py index efdd1e586..c30c40841 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -94,9 +94,6 @@ def construct_subelement(self, dimension): """ return self.parent.construct_subelement(dimension) - def is_macrocell(self): - return True - class AlfeldSplit(SplitSimplicialComplex): diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 3c2c5706e..ea2d199c6 100644 --- 
a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -250,9 +250,6 @@ def cell_orientation_reflection_map(self): """Return the map indicating whether each possible cell orientation causes reflection (``1``) or not (``0``).""" raise NotImplementedError("Should be implemented in a subclass.") - def is_macrocell(self): - return False - class SimplicialComplex(Cell): r"""Abstract class for a simplicial complex. diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index e43d7a8aa..b730c91a3 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -127,3 +127,10 @@ def test_macro_lagrange(variant, degree, split, cell): U = poly_set.get_expansion_set() V = U.tabulate(degree, pts).T assert numpy.allclose(fe.V, V) + + +def test_is_macro(): + assert Lagrange(ufc_simplex(2), 3, "Alfeld,equispaced").is_macroelement() + assert not Lagrange(ufc_simplex(3), 2, "gll").is_macroelement() + + From 33659ffa3f6b49c5c51184b23c5d92e618f46bc8 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 16:30:03 -0500 Subject: [PATCH 32/93] Macro: more docstrings --- FIAT/macro.py | 47 ++++++++++++++++++++++----------------- FIAT/quadrature.py | 4 ++-- FIAT/reference_element.py | 11 +++++---- 3 files changed, 33 insertions(+), 29 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index 9d59cf980..01462acf0 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -53,14 +53,17 @@ def invert_cell_topology(T): class SplitSimplicialComplex(SimplicialComplex): - """Abstract class to implement a split on a Simplex + """Abstract class to implement a split on a Simplex. + + :arg parent: The parent Simplex to split. + :arg vertices: The vertices of the simplicial complex. + :arg topology: The topology of the simplicial complex. """ - def __init__(self, ref_el, splits=1, variant=None): - self._parent = ref_el - vertices, topology = self.split_topology(ref_el, splits=splits, variant=variant) + def __init__(self, parent, vertices, topology): + self._parent = parent - bary = xy_to_bary(numpy.asarray(ref_el.get_vertices()), numpy.asarray(vertices)) - parent_top = ref_el.get_topology() + bary = xy_to_bary(numpy.asarray(parent.get_vertices()), numpy.asarray(vertices)) + parent_top = parent.get_topology() parent_inv_top = invert_cell_topology(parent_top) # dict mapping child facets to their parent facet @@ -81,10 +84,10 @@ def __init__(self, ref_el, splits=1, variant=None): self._child_to_parent = child_to_parent self._parent_to_children = parent_to_children - sd = ref_el.get_spatial_dimension() + sd = parent.get_spatial_dimension() inv_top = invert_cell_topology(topology) - # dict mapping cells to boundary facets for each dimension, + # dict mapping cells to their boundary facets for each dimension, # while respecting the ordering on the parent simplex connectivity = {cell: {dim: [] for dim in topology} for cell in topology[sd]} for cell in topology[sd]: @@ -96,10 +99,7 @@ def __init__(self, ref_el, splits=1, variant=None): connectivity[cell][dim].append(inv_top[dim][global_verts]) self._cell_connectivity = connectivity - super(SplitSimplicialComplex, self).__init__(ref_el.shape, vertices, topology) - - def split_topology(self, ref_el): - raise NotImplementedError + super(SplitSimplicialComplex, self).__init__(parent.shape, vertices, topology) def get_child_to_parent(self): """Maps split complex facet tuple to its parent entity tuple.""" @@ -124,7 +124,7 @@ def construct_subelement(self, dimension): :arg dimension: subentity dimension (integer) """ - return 
self.get_parent().construct_subelement(dimension) + return self._parent.construct_subelement(dimension) def is_macrocell(self): return True @@ -134,10 +134,10 @@ def get_parent(self): class AlfeldSplit(SplitSimplicialComplex): - """Alfeld splitting of a simplex. + """Splits a simplex into the simplicial complex obtained by + connecting vertices to barycenter. """ - def split_topology(self, ref_el, splits=1, variant=None): - assert splits == 1 + def __init__(self, ref_el): sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() # Keep old facets, respecting the old numbering @@ -157,13 +157,18 @@ def split_topology(self, ref_el, splits=1, variant=None): offset = len(new_topology[dim]) for entity, ids in top[dim-1].items(): new_topology[dim][offset+entity] = ids + (new_vert_id,) - return new_verts, new_topology + super(AlfeldSplit, self).__init__(ref_el, new_verts, new_topology) class IsoSplit(SplitSimplicialComplex): + """Splits simplex into the simplicial complex obtained by + connecting points on a regular lattice. - def split_topology(self, ref_el, splits=1, variant=None): - depth = splits + 1 + :arg ref_el: The parent Simplex to split. + :kwarg depth: The number of subdivisions along each edge of the simplex. + :kwarg variant: The point distribution variant. + """ + def __init__(self, ref_el, depth=2, variant=None): sd = ref_el.get_spatial_dimension() old_verts = ref_el.get_vertices() new_verts = make_lattice(old_verts, depth, variant=variant) @@ -188,7 +193,7 @@ def split_topology(self, ref_el, splits=1, variant=None): if sd == 3: # Cut the octahedron # FIXME do this more generically - assert splits == 1 + assert depth == 2 new_topology[1][cur] = (1, 8) # Get an adjacency list for each vertex @@ -206,7 +211,7 @@ def split_topology(self, ref_el, splits=1, variant=None): if set(facet) < adjacency[v]: entities.append((v,) + facet) new_topology[dim] = dict(enumerate(entities)) - return new_verts, new_topology + super(IsoSplit, self).__init__(ref_el, new_verts, new_topology) class MacroQuadratureRule(QuadratureRule): diff --git a/FIAT/quadrature.py b/FIAT/quadrature.py index f6f237d39..58079686c 100644 --- a/FIAT/quadrature.py +++ b/FIAT/quadrature.py @@ -36,8 +36,8 @@ def map_quadrature(pts_ref, wts_ref, source_cell, target_cell, jacobian=False): class QuadratureRule(object): """General class that models integration over a reference element - as the weighted sum of a function evaluated at a set of points.""" - + as the weighted sum of a function evaluated at a set of points. + """ def __init__(self, ref_el, pts, wts): if len(wts) != len(pts): raise ValueError("Have %d weights, but %d points" % (len(wts), len(pts))) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 59d8d6354..cddd0fd79 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -280,10 +280,9 @@ def compute_normal(self, facet_i): # Find a subcell of which facet_i is on the boundary # Note: this is trivial and vastly overengineered for the single-cell # case. - for k, facets in enumerate(self.connectivity[(sd, sd-1)]): - if facet_i in facets: - break - vertices = self.get_vertices_of_subcomplex(t[sd][k]) + cell = next(k for k, facets in enumerate(self.connectivity[(sd, sd-1)]) + if facet_i in facets) + vertices = self.get_vertices_of_subcomplex(t[sd][cell]) # Interval case if self.get_shape() == LINE: @@ -294,7 +293,7 @@ def compute_normal(self, facet_i): # vectors from vertex 0 to each other vertex. 
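A minimal usage sketch of the split machinery above (illustrative only, assuming the AlfeldSplit constructor and the complex-aware compute_normal from this patch):

    import numpy
    from FIAT.reference_element import ufc_simplex
    from FIAT.macro import AlfeldSplit

    K = AlfeldSplit(ufc_simplex(2))
    top = K.get_topology()
    # connecting the barycenter to the vertices gives 4 vertices, 6 edges, 3 cells
    assert (len(top[0]), len(top[1]), len(top[2])) == (4, 6, 3)
    # the barycenter (child vertex 3) is interior, so it maps to the parent cell
    assert K.get_child_to_parent()[0][3] == (2, 0)
    # interior facet normals are found from a subcell containing the facet;
    # edge 3 joins vertex 0 to the barycenter, so its normal is orthogonal to it
    verts = numpy.asarray(K.get_vertices())
    n = K.compute_normal(3)
    assert abs(numpy.dot(n, verts[3] - verts[0])) < 1e-12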
vert_vecs = numpy.asarray(vertices) - vert_vecs_from_v0 = vert_vecs[1:, :] - vert_vecs[0][None, :] + vert_vecs_from_v0 = vert_vecs[1:, :] - vert_vecs[:1, :] (u, s, _) = numpy.linalg.svd(vert_vecs_from_v0) rank = len([si for si in s if si > 1.e-10]) @@ -328,7 +327,7 @@ def compute_normal(self, facet_i): nfoo = foo[:, 0] # what is the vertex not in the facet? - verts_set = set(t[sd][k]) + verts_set = set(t[sd][cell]) verts_facet = set(t[sd - 1][facet_i]) verts_diff = verts_set.difference(verts_facet) if len(verts_diff) != 1: From b2bdf7f176fa81fc17f8ddffa0c624bc82a3b699 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 16:36:45 -0500 Subject: [PATCH 33/93] cleanup --- FIAT/reference_element.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index cddd0fd79..8ff91bbb4 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -282,18 +282,16 @@ def compute_normal(self, facet_i): # case. cell = next(k for k, facets in enumerate(self.connectivity[(sd, sd-1)]) if facet_i in facets) - vertices = self.get_vertices_of_subcomplex(t[sd][cell]) + verts = numpy.asarray(self.get_vertices_of_subcomplex(t[sd][cell])) # Interval case if self.get_shape() == LINE: - verts = numpy.asarray(vertices) v_i, = self.get_topology()[0][facet_i] n = verts[v_i] - verts[[1, 0][v_i]] return n / numpy.linalg.norm(n) # vectors from vertex 0 to each other vertex. - vert_vecs = numpy.asarray(vertices) - vert_vecs_from_v0 = vert_vecs[1:, :] - vert_vecs[:1, :] + vert_vecs_from_v0 = verts[1:, :] - verts[:1, :] (u, s, _) = numpy.linalg.svd(vert_vecs_from_v0) rank = len([si for si in s if si > 1.e-10]) @@ -306,7 +304,7 @@ def compute_normal(self, facet_i): # now I find everything normal to the facet. vcf = numpy.asarray(vert_coords_of_facet) - facet_span = vcf[1:, :] - vcf[0][None, :] + facet_span = vcf[1:, :] - vcf[:1, :] (_, sf, vft) = numpy.linalg.svd(facet_span) # now get the null space from vft From 61210ae85e6982316a5ddf09cd1895f1fbfbcf5b Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 17:12:20 -0500 Subject: [PATCH 34/93] CiarletElement: always extract ref_complex from poly_set --- FIAT/finite_element.py | 5 ++--- FIAT/lagrange.py | 2 +- test/unit/test_macro.py | 19 +++++++++++-------- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/FIAT/finite_element.py b/FIAT/finite_element.py index a0114ad87..d8d5b7072 100644 --- a/FIAT/finite_element.py +++ b/FIAT/finite_element.py @@ -129,9 +129,9 @@ class CiarletElement(FiniteElement): basis generated from polynomials encoded in a `PolynomialSet`. 
""" - def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref_el=None, ref_complex=None): + def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref_el=None): ref_el = ref_el or dual.get_reference_element() - ref_complex = ref_complex or ref_el + ref_complex = poly_set.get_reference_element() super(CiarletElement, self).__init__(ref_el, dual, order, formdegree, mapping, ref_complex) # build generalized Vandermonde matrix @@ -156,7 +156,6 @@ def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref new_shp = new_coeffs_flat.shape[:1] + shp[1:] new_coeffs = new_coeffs_flat.reshape(new_shp) - # dual might advertise the parent cell but poly_set might be on a simplicial complex self.poly_set = PolynomialSet(poly_set.get_reference_element(), poly_set.get_degree(), poly_set.get_embedded_degree(), diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index 7e0d738a6..eba10b97f 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -79,4 +79,4 @@ def __init__(self, ref_el, degree, variant="equispaced"): poly_variant = "bubble" if num_cells > 1 else None poly_set = polynomial_set.ONPolynomialSet(ref_el, degree, variant=poly_variant) formdegree = 0 # 0-form - super(Lagrange, self).__init__(poly_set, dual, degree, formdegree, ref_complex=ref_el) + super(Lagrange, self).__init__(poly_set, dual, degree, formdegree) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 8cdc0fa4f..7c4001948 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -9,9 +9,8 @@ @pytest.fixture(params=("I", "T", "S")) def cell(request): - return {"I": ufc_simplex(1), - "T": ufc_simplex(2), - "S": ufc_simplex(3)}[request.param] + dim = {"I": 1, "T": 2, "S": 3}[request.param] + return ufc_simplex(dim) @pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) @@ -129,8 +128,12 @@ def test_macro_lagrange(variant, degree, split, cell): assert numpy.allclose(fe.V, V) -def test_is_macro(): - assert Lagrange(ufc_simplex(2), 3, "Alfeld,equispaced").is_macroelement() - assert not Lagrange(ufc_simplex(3), 2, "gll").is_macroelement() - - +@pytest.mark.parametrize("variant", ("gll", "Alfeld,equispaced", "gll,iso")) +def test_is_macro(variant): + is_macro = "alfeld" in variant.lower() or "iso" in variant.lower() + + fe = Lagrange(ufc_simplex(2), 2, variant) + assert not fe.get_reference_element().is_macrocell() + assert fe.is_macroelement() == is_macro + assert fe.get_reference_complex().is_macrocell() == is_macro + assert fe.get_nodal_basis().get_reference_element().is_macrocell() == is_macro From b8cb9e33f90c20294c2561b8224fd38e6a022e42 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 18:00:31 -0500 Subject: [PATCH 35/93] create_quadrature returns a MacroQuadratureRule on macro cells --- FIAT/expansions.py | 15 ++++++++++----- FIAT/lagrange.py | 3 +-- FIAT/quadrature_schemes.py | 7 +++++++ test/unit/test_macro.py | 8 ++++---- 4 files changed, 22 insertions(+), 11 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index db4bafdfa..6c109c8bf 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -295,6 +295,7 @@ def __init__(self, ref_el, scale=None, variant=None): scale = 1.0 / ref_el.volume() self.scale = scale self.continuity = "C0" if variant == "bubble" else None + self.tabulate_order = 2 self._dmats_cache = {} self._cell_node_map_cache = {} @@ -357,11 +358,15 @@ def get_dmats(self, degree): def _tabulate_jet(self, degree, pts, order=0): from FIAT.polynomial_set import mis + try: + vals = 
self._tabulate(degree, numpy.transpose(pts), order=order) + except ValueError: + vals = self._tabulate(degree, numpy.transpose(pts), order=2) + + lorder = len(vals) D = self.ref_el.get_spatial_dimension() - lorder = min(2, order) - vals = self._tabulate(degree, numpy.transpose(pts), order=lorder) result = {(0,) * D: numpy.array(vals[0])} - for r in range(1, 1+lorder): + for r in range(1, lorder): vr = numpy.transpose(vals[r], tuple(range(1, r+1)) + (0, r+1)) for indices in numpy.ndindex(vr.shape[:r]): alpha = tuple(map(indices.count, range(D))) @@ -371,8 +376,8 @@ def _tabulate_jet(self, degree, pts, order=0): def distance(alpha, beta): return sum(ai != bi for ai, bi in zip(alpha, beta)) - # Only use dmats if order > lorder - for i in range(lorder + 1, order + 1): + # Only use dmats if tabulate failed + for i in range(lorder, order + 1): dmats = self.get_dmats(degree) alphas = mis(D, i) for alpha in alphas: diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index eba10b97f..e7ca969a9 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -75,8 +75,7 @@ def __init__(self, ref_el, degree, variant="equispaced"): points.append(pt) poly_set = LagrangePolynomialSet(ref_el, points) else: - num_cells = len(ref_el.get_topology()[ref_el.get_spatial_dimension()]) - poly_variant = "bubble" if num_cells > 1 else None + poly_variant = "bubble" if ref_el.is_macrocell() else None poly_set = polynomial_set.ONPolynomialSet(ref_el, degree, variant=poly_variant) formdegree = 0 # 0-form super(Lagrange, self).__init__(poly_set, dual, degree, formdegree) diff --git a/FIAT/quadrature_schemes.py b/FIAT/quadrature_schemes.py index 114a94af8..5be757e37 100644 --- a/FIAT/quadrature_schemes.py +++ b/FIAT/quadrature_schemes.py @@ -36,6 +36,7 @@ from FIAT.reference_element import (HEXAHEDRON, QUADRILATERAL, TENSORPRODUCT, TETRAHEDRON, TRIANGLE, UFCTetrahedron, UFCTriangle, symmetric_simplex) +from FIAT.macro import MacroQuadratureRule def create_quadrature(ref_el, degree, scheme="default"): @@ -52,6 +53,12 @@ def create_quadrature(ref_el, degree, scheme="default"): :arg degree: The degree of polynomial that the rule should integrate exactly. 
""" + if ref_el.is_macrocell(): + sd = ref_el.get_spatial_dimension() + cell = ref_el.construct_subelement(sd) + Q_ref = create_quadrature(cell, degree, scheme=scheme) + return MacroQuadratureRule(ref_el, Q_ref) + if ref_el.get_shape() == TENSORPRODUCT: try: degree = tuple(degree) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 7c4001948..61ea85df5 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -5,6 +5,7 @@ from FIAT.quadrature_schemes import create_quadrature from FIAT.polynomial_set import ONPolynomialSet from FIAT.lagrange import Lagrange +from FIAT.hierarchical import Legendre @pytest.fixture(params=("I", "T", "S")) @@ -76,13 +77,12 @@ def test_macro_quadrature(split, cell): sd = ref_el.get_spatial_dimension() degree = 6 - Q_ref = create_quadrature(cell.construct_subelement(sd), 2*degree) - Q = MacroQuadratureRule(ref_el, Q_ref) + Q = create_quadrature(ref_el, 2*degree) pts, wts = Q.get_points(), Q.get_weights() # Test that the mass matrix for an orthogonal basis is diagonal - U = ONPolynomialSet(ref_el, degree) - phis = U.tabulate(pts)[(0,)*sd] + fe = Legendre(ref_el, degree) + phis = fe.tabulate(0, pts)[(0,)*sd] M = numpy.dot(numpy.multiply(phis, wts), phis.T) M = M - numpy.diag(M.diagonal()) assert numpy.allclose(M, 0) From 80831f1c494bf9f6d90b2de31075d78d945ff526 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 18:10:09 -0500 Subject: [PATCH 36/93] flake8 --- FIAT/macro.py | 2 +- test/unit/test_macro.py | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index 01462acf0..17950f974 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -227,7 +227,7 @@ class MacroQuadratureRule(QuadratureRule): def __init__(self, ref_el, Q_ref, parent_facets=None): parent_dim = Q_ref.ref_el.get_spatial_dimension() if parent_facets is not None: - parent_cell = ref_el.parent + parent_cell = ref_el.get_parent() parent_to_children = parent_cell.get_parent_to_children() facets = [] for parent_entity in parent_facets: diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 61ea85df5..77bf999e0 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -1,9 +1,8 @@ import numpy import pytest from FIAT.reference_element import ufc_simplex -from FIAT.macro import AlfeldSplit, IsoSplit, MacroQuadratureRule +from FIAT.macro import AlfeldSplit, IsoSplit from FIAT.quadrature_schemes import create_quadrature -from FIAT.polynomial_set import ONPolynomialSet from FIAT.lagrange import Lagrange from FIAT.hierarchical import Legendre @@ -76,7 +75,7 @@ def test_macro_quadrature(split, cell): ref_el = split(cell) sd = ref_el.get_spatial_dimension() - degree = 6 + degree = 3 Q = create_quadrature(ref_el, 2*degree) pts, wts = Q.get_points(), Q.get_weights() From c8e26b9a6888b6a8eba5625c612ededbd07fa38c Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 28 Mar 2024 18:40:14 -0500 Subject: [PATCH 37/93] Add complex comparison --- FIAT/macro.py | 9 ++++++--- FIAT/reference_element.py | 12 ++++++++++++ test/unit/test_macro.py | 6 +++--- 3 files changed, 21 insertions(+), 6 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index 01462acf0..6be06c3e0 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -1,8 +1,11 @@ import copy -import numpy from itertools import chain -from FIAT.reference_element import make_lattice, lattice_iter, SimplicialComplex -from FIAT.quadrature import QuadratureRule, FacetQuadratureRule + +import numpy + +from FIAT.quadrature import FacetQuadratureRule, QuadratureRule 
+from FIAT.reference_element import (SimplicialComplex, lattice_iter, + make_lattice) def bary_to_xy(verts, bary, result=None): diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 8ff91bbb4..57037ae6a 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -255,6 +255,18 @@ def is_macrocell(self): def get_parent(self): return None + def __gt__(self, other): + return self.get_parent() == other + + def __lt__(self, other): + return self == other.get_parent() + + def __ge__(self, other): + return self > other or self == other + + def __le__(self, other): + return self < other or self == other + class SimplicialComplex(Cell): r"""Abstract class for a simplicial complex. diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 7c4001948..0e1762fa8 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -1,10 +1,10 @@ import numpy import pytest -from FIAT.reference_element import ufc_simplex +from FIAT.lagrange import Lagrange from FIAT.macro import AlfeldSplit, IsoSplit, MacroQuadratureRule -from FIAT.quadrature_schemes import create_quadrature from FIAT.polynomial_set import ONPolynomialSet -from FIAT.lagrange import Lagrange +from FIAT.quadrature_schemes import create_quadrature +from FIAT.reference_element import ufc_simplex @pytest.fixture(params=("I", "T", "S")) From ca62451140587e72a2110043013d86aef8552552 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 20:38:28 -0500 Subject: [PATCH 38/93] fix tabulation of derivatives --- FIAT/expansions.py | 18 +++++++----------- test/unit/test_macro.py | 3 +-- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 6c109c8bf..bd5e367ca 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -295,7 +295,7 @@ def __init__(self, ref_el, scale=None, variant=None): scale = 1.0 / ref_el.volume() self.scale = scale self.continuity = "C0" if variant == "bubble" else None - self.tabulate_order = 2 + self.recurrence_order = 2 self._dmats_cache = {} self._cell_node_map_cache = {} @@ -328,12 +328,12 @@ def _tabulate(self, n, pts, order=0): results = [] num_phis = self.get_num_members(n) cell_node_map = self.get_cell_node_map(n) - for k in range(order+1): - result = numpy.zeros((num_phis,) + (sd,)*k + pts.shape[1:]) - shape_slices = (slice(None, None),)*k + for r in range(order+1): + result = numpy.zeros((num_phis,) + (sd,)*r + pts.shape[1:]) for ibfs, ipts, phi in zip(cell_node_map, cell_point_map, phis): - indices = (ibfs,) + shape_slices + (ipts,) - result[numpy.ix_(*indices)] = phi[k] + shape_indices = tuple(range(sd) for _ in range(r)) + indices = (ibfs,) + shape_indices + (ipts,) + result[numpy.ix_(*indices)] = phi[r] results.append(result) return tuple(results) @@ -358,11 +358,7 @@ def get_dmats(self, degree): def _tabulate_jet(self, degree, pts, order=0): from FIAT.polynomial_set import mis - try: - vals = self._tabulate(degree, numpy.transpose(pts), order=order) - except ValueError: - vals = self._tabulate(degree, numpy.transpose(pts), order=2) - + vals = self._tabulate(degree, numpy.transpose(pts), order=min(order, self.recurrence_order)) lorder = len(vals) D = self.ref_el.get_spatial_dimension() result = {(0,) * D: numpy.array(vals[0])} diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index bd2457f0b..e146a1e1a 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -2,8 +2,7 @@ import pytest from FIAT.hierarchical import Legendre from FIAT.lagrange import Lagrange -from FIAT.macro 
import AlfeldSplit, IsoSplit, MacroQuadratureRule -from FIAT.polynomial_set import ONPolynomialSet +from FIAT.macro import AlfeldSplit, IsoSplit from FIAT.quadrature_schemes import create_quadrature from FIAT.reference_element import ufc_simplex From 12b7b5cd04e4ea74af1218aea7793f91bd13173a Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 28 Mar 2024 20:40:27 -0500 Subject: [PATCH 39/93] test that derivative tabulation does not break --- FIAT/barycentric_interpolation.py | 1 + FIAT/dual_set.py | 22 ++++++++++++++++++---- FIAT/lagrange.py | 1 - test/unit/test_macro.py | 4 ++-- 4 files changed, 21 insertions(+), 7 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index d563e2a44..a2d43c2e0 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -62,6 +62,7 @@ def __init__(self, ref_el, pts): self.weights.append(wts) self.degree = max(len(wts) for wts in self.weights)-1 + self.recurrence_order = self.degree + 1 super(LagrangeLineExpansionSet, self).__init__(ref_el) def get_num_members(self, n): diff --git a/FIAT/dual_set.py b/FIAT/dual_set.py index f0576d8d5..6a63d65fd 100644 --- a/FIAT/dual_set.py +++ b/FIAT/dual_set.py @@ -13,7 +13,7 @@ class DualSet(object): def __init__(self, nodes, ref_el, entity_ids, entity_permutations=None): - ref_el, entity_ids = merge_entity_ids(ref_el, entity_ids) + ref_el, entity_ids, entity_permutations = merge_entities(ref_el, entity_ids, entity_permutations) self.nodes = nodes self.ref_el = ref_el self.entity_ids = entity_ids @@ -196,11 +196,12 @@ def make_entity_closure_ids(ref_el, entity_ids): return entity_closure_ids -def merge_entity_ids(ref_el, entity_ids): +def merge_entities(ref_el, entity_ids, entity_permutations): """Collect DOFs from simplicial complex onto facets of parent cell""" + from FIAT.orientation_utils import make_entity_permutations_simplex parent_cell = ref_el.get_parent() if parent_cell is None: - return ref_el, entity_ids + return ref_el, entity_ids, entity_permutations parent_top = parent_cell.get_topology() parent_ids = {dim: {entity: [] for entity in parent_top[dim]} for dim in parent_top} @@ -210,4 +211,17 @@ def merge_entity_ids(ref_el, entity_ids): parent_dim, parent_id = child_to_parent[dim][entity] dofs_cur = entity_ids[dim][entity] parent_ids[parent_dim][parent_id].extend(dofs_cur) - return parent_cell, parent_ids + + if entity_permutations is None: + parent_permutations = None + else: + parent_permutations = {} + npoints = 0 + for dim in sorted(parent_top): + if dim <= 1: + npoints = len(parent_ids[dim][0]) + perms = make_entity_permutations_simplex(dim, npoints) + parent_permutations[dim] = {entity: perms for entity in parent_top[dim]} + npoints -= 1 + + return parent_cell, parent_ids, parent_permutations diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index e7ca969a9..da2b8d2bc 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -40,7 +40,6 @@ def __init__(self, ref_el, degree, point_variant="equispaced"): entity_ids[dim][entity] = list(range(cur, cur + nnodes_cur)) cur += nnodes_cur entity_permutations[dim][entity] = perms - super(LagrangeDualSet, self).__init__(nodes, ref_el, entity_ids, entity_permutations) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index e146a1e1a..3cfb425ba 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -118,8 +118,8 @@ def test_macro_lagrange(variant, degree, split, cell): for entity in sorted(top[dim]): pts.extend(ref_el.make_points(dim, entity, degree, 
variant=variant)) - phis = fe.tabulate(0, pts)[(0,)*sd] - assert numpy.allclose(phis, numpy.eye(fe.space_dimension())) + phis = fe.tabulate(2, pts) + assert numpy.allclose(phis[(0,)*sd], numpy.eye(fe.space_dimension())) # Test that we can reproduce the Vandermonde matrix by tabulating the expansion set U = poly_set.get_expansion_set() From 58abc3441605139ad53150108e3a12ffcc993826 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 29 Mar 2024 10:16:29 -0500 Subject: [PATCH 40/93] Cell: define is_simplex() --- FIAT/expansions.py | 3 +-- FIAT/reference_element.py | 6 ++++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index bd5e367ca..61be733b0 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -332,8 +332,7 @@ def _tabulate(self, n, pts, order=0): result = numpy.zeros((num_phis,) + (sd,)*r + pts.shape[1:]) for ibfs, ipts, phi in zip(cell_node_map, cell_point_map, phis): shape_indices = tuple(range(sd) for _ in range(r)) - indices = (ibfs,) + shape_indices + (ipts,) - result[numpy.ix_(*indices)] = phi[r] + result[numpy.ix_(ibfs, *shape_indices, ipts)] = phi[r] results.append(result) return tuple(results) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 57037ae6a..eb08e85fb 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -249,6 +249,9 @@ def cell_orientation_reflection_map(self): """Return the map indicating whether each possible cell orientation causes reflection (``1``) or not (``0``).""" raise NotImplementedError("Should be implemented in a subclass.") + def is_simplex(self): + return False + def is_macrocell(self): return False @@ -539,6 +542,9 @@ class Simplex(SimplicialComplex): o = (1 * 2!) + (1 * 1!) + (0 * 0!) = 3 """ + def is_simplex(self): + return True + def symmetry_group_size(self, dim): return numpy.math.factorial(dim + 1) From f7a0f0843a3e96d9a05263f96f85220dc228b7d0 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 29 Mar 2024 20:56:36 -0500 Subject: [PATCH 41/93] parse discontinuous Lagrange variant --- FIAT/discontinuous_lagrange.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/FIAT/discontinuous_lagrange.py b/FIAT/discontinuous_lagrange.py index 9cf5d6d0c..d5f93c29f 100644 --- a/FIAT/discontinuous_lagrange.py +++ b/FIAT/discontinuous_lagrange.py @@ -9,6 +9,7 @@ import numpy as np from FIAT import finite_element, polynomial_set, dual_set, functional, P0 from FIAT.polynomial_set import mis +from FIAT.check_format_variant import parse_lagrange_variant def make_entity_permutations(dim, npoints): @@ -145,7 +146,7 @@ class DiscontinuousLagrangeDualSet(dual_set.DualSet): equispaced points. 
This is the discontinuous version where all nodes are topologically associated with the cell itself""" - def __init__(self, ref_el, degree): + def __init__(self, ref_el, degree, point_variant="equispaced"): entity_ids = {} nodes = [] entity_permutations = {} @@ -160,7 +161,7 @@ def __init__(self, ref_el, degree): entity_permutations[dim] = {} perms = make_entity_permutations(dim, degree + 1 if dim == len(top) - 1 else -1) for entity in sorted(top[dim]): - pts_cur = ref_el.make_points(dim, entity, degree) + pts_cur = ref_el.make_points(dim, entity, degree, variant=point_variant) nodes_cur = [functional.PointEvaluation(ref_el, x) for x in pts_cur] nnodes_cur = len(nodes_cur) @@ -174,17 +175,20 @@ def __init__(self, ref_el, degree): class HigherOrderDiscontinuousLagrange(finite_element.CiarletElement): - """The discontinuous Lagrange finite element. It is what it is.""" + """The discontinuous Lagrange finite element.""" - def __init__(self, ref_el, degree): + def __init__(self, ref_el, degree, variant="equispaced"): + splitting, point_variant = parse_lagrange_variant(variant) + if splitting is not None: + ref_el = splitting(ref_el) + dual = DiscontinuousLagrangeDualSet(ref_el, degree, point_variant=point_variant) poly_set = polynomial_set.ONPolynomialSet(ref_el, degree) - dual = DiscontinuousLagrangeDualSet(ref_el, degree) formdegree = ref_el.get_spatial_dimension() # n-form super(HigherOrderDiscontinuousLagrange, self).__init__(poly_set, dual, degree, formdegree) -def DiscontinuousLagrange(ref_el, degree): +def DiscontinuousLagrange(ref_el, degree, variant="equispaced"): if degree == 0: return P0.P0(ref_el) else: - return HigherOrderDiscontinuousLagrange(ref_el, degree) + return HigherOrderDiscontinuousLagrange(ref_el, degree, variant) From 674a19d054689cf6bc24e3a47476762a5d370564 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 29 Mar 2024 20:57:00 -0500 Subject: [PATCH 42/93] codim bubbles --- FIAT/polynomial_set.py | 21 ++++++------ test/unit/test_fiat.py | 43 ------------------------- test/unit/test_macro.py | 71 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 83 insertions(+), 52 deletions(-) diff --git a/FIAT/polynomial_set.py b/FIAT/polynomial_set.py index c18bd1138..5bee07b88 100644 --- a/FIAT/polynomial_set.py +++ b/FIAT/polynomial_set.py @@ -235,19 +235,22 @@ def __init__(self, ref_el, degree, size=None, **kwargs): expansion_set, coeffs) -def make_bubbles(ref_el, degree, shape=()): - """Construct a polynomial set with interior bubbles up to the given degree. +def make_bubbles(ref_el, degree, codim=0, shape=()): + """Construct a polynomial set with codim bubbles up to the given degree. 
""" - dim = ref_el.get_spatial_dimension() poly_set = ONPolynomialSet(ref_el, degree, shape=shape, scale="L2 piola", variant="bubble") + entity_ids = expansions.polynomial_entity_ids(ref_el, degree, continuity="C0") + sd = ref_el.get_spatial_dimension() + dim = sd - codim if dim == 1: - # odd / even reordering - degrees = chain(range(dim+1, degree+1, 2), range(dim+2, degree+1, 2)) - indices = list(degrees) + # Apply even / odd reordering on edge bubbles + indices = [] + for entity in entity_ids[dim]: + ids = entity_ids[dim][entity] + indices.extend(ids[::2]) + indices.extend(ids[1::2]) else: - idofs = expansions.polynomial_dimension(ref_el, degree-dim-1) - ndofs = poly_set.get_num_members() - indices = list(range(ndofs-idofs, ndofs)) + indices = list(chain(*entity_ids[dim].values())) if shape != (): ncomp = numpy.prod(shape) diff --git a/test/unit/test_fiat.py b/test/unit/test_fiat.py index 5f0b0f28c..6c9df1efc 100644 --- a/test/unit/test_fiat.py +++ b/test/unit/test_fiat.py @@ -603,49 +603,6 @@ def eval_basis(f, pt): assert np.allclose(uh, exact, atol=1E-14) -@pytest.mark.parametrize('cell', [I, T, S]) -def test_make_bubbles(cell): - from FIAT.quadrature_schemes import create_quadrature - from FIAT.expansions import polynomial_dimension - from FIAT.polynomial_set import make_bubbles, PolynomialSet, ONPolynomialSet - - degree = 10 - B = make_bubbles(cell, degree) - - # basic tests - sd = cell.get_spatial_dimension() - assert isinstance(B, PolynomialSet) - assert B.degree == degree - assert B.get_num_members() == polynomial_dimension(cell, degree - sd - 1) - - # test values on the boundary - top = cell.get_topology() - points = [] - for dim in range(len(top)-1): - for entity in range(len(top[dim])): - points.extend(cell.make_points(dim, entity, degree)) - values = B.tabulate(points)[(0,) * sd] - assert np.allclose(values, 0, atol=1E-12) - - # test linear independence - m = B.get_num_members() - points = cell.make_points(sd, 0, degree) - values = B.tabulate(points)[(0,) * sd] - assert values.shape == (m, m) - assert np.linalg.matrix_rank(values.T, tol=1E-12) == m - - # test that B does not have components in span(P_{degree+2} \ P_{degree}) - P = ONPolynomialSet(cell, degree + 2) - P = P.take(list(range(polynomial_dimension(cell, degree), - P.get_num_members()))) - - Q = create_quadrature(cell, P.degree + B.degree) - qpts, qwts = Q.get_points(), Q.get_weights() - P_at_qpts = P.tabulate(qpts)[(0,) * sd] - B_at_qpts = B.tabulate(qpts)[(0,) * sd] - assert np.allclose(np.dot(np.multiply(P_at_qpts, qwts), B_at_qpts.T), 0.0) - - @pytest.mark.parametrize('cell', [I, T, S]) def test_bubble_duality(cell): from FIAT.polynomial_set import make_bubbles diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 3cfb425ba..930f22f4c 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -1,3 +1,4 @@ +import math import numpy import pytest from FIAT.hierarchical import Legendre @@ -5,6 +6,8 @@ from FIAT.macro import AlfeldSplit, IsoSplit from FIAT.quadrature_schemes import create_quadrature from FIAT.reference_element import ufc_simplex +from FIAT.expansions import polynomial_entity_ids +from FIAT.polynomial_set import make_bubbles, PolynomialSet, ONPolynomialSet @pytest.fixture(params=("I", "T", "S")) @@ -136,3 +139,71 @@ def test_is_macro(variant): assert fe.is_macroelement() == is_macro assert fe.get_reference_complex().is_macrocell() == is_macro assert fe.get_nodal_basis().get_reference_element().is_macrocell() == is_macro + + +@pytest.mark.parametrize('split', [None, 
AlfeldSplit]) +@pytest.mark.parametrize('codim', range(3)) +def test_make_bubbles(cell, split, codim): + sd = cell.get_spatial_dimension() + if codim > sd: + return + degree = 5 + if split is not None: + cell = split(cell) + B = make_bubbles(cell, degree, codim=codim) + + # basic tests + assert isinstance(B, PolynomialSet) + assert B.degree == degree + num_members = B.get_num_members() + top = cell.get_topology() + assert num_members == math.comb(degree-1, sd-codim) * len(top[sd - codim]) + + # tabulate onto a lattice + points = [] + for dim in range(sd+1-codim): + for entity in sorted(top[dim]): + points.extend(cell.make_points(dim, entity, degree)) + values = B.tabulate(points)[(0,) * sd] + + # test that bubbles vanish on the boundary + num_pts_on_facet = len(points) - num_members + facet_values = values[:, :num_pts_on_facet] + assert numpy.allclose(facet_values, 0, atol=1E-12) + + # test linear independence + interior_values = values[:, num_pts_on_facet:] + assert numpy.linalg.matrix_rank(interior_values.T, tol=1E-12) == num_members + + # test trace similarity + dim = sd - codim + nfacets = len(top[dim]) + if nfacets > 1 and dim > 0: + ref_facet = cell.construct_subelement(dim) + ref_bubbles = make_bubbles(ref_facet, degree) + ref_points = ref_facet.make_points(dim, 0, degree) + ref_values = ref_bubbles.tabulate(ref_points)[(0,) * dim] + + bubbles_per_entity = ref_bubbles.get_num_members() + cur = 0 + for entity in sorted(top[dim]): + indices = list(range(cur, cur + bubbles_per_entity)) + cur_values = interior_values[numpy.ix_(indices, indices)] + scale = numpy.max(abs(cur_values)) / numpy.max(abs(ref_values)) + assert numpy.allclose(ref_values * scale, cur_values) + cur += bubbles_per_entity + + # test that bubbles do not have components in span(P_{degree+2} \ P_{degree}) + Pkdim = math.comb(degree + sd, sd) + entity_ids = polynomial_entity_ids(cell, degree + 2) + indices = [] + for entity in top[sd]: + indices.extend(entity_ids[sd][entity][Pkdim:]) + P = ONPolynomialSet(cell, degree + 2) + P = P.take(indices) + + Q = create_quadrature(cell, P.degree + B.degree) + qpts, qwts = Q.get_points(), Q.get_weights() + P_at_qpts = P.tabulate(qpts)[(0,) * sd] + B_at_qpts = B.tabulate(qpts)[(0,) * sd] + assert numpy.allclose(numpy.dot(numpy.multiply(P_at_qpts, qwts), B_at_qpts.T), 0.0) From 779785f98c4a8bf04a559dd4a1820682e7ccd244 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sat, 30 Mar 2024 16:00:43 -0500 Subject: [PATCH 43/93] Macro-ize DiscontinuousLagrange, Legendre, and IntegratedLegendre --- FIAT/check_format_variant.py | 10 +++-- FIAT/discontinuous_lagrange.py | 70 ++++++++++++++++++++++++++-------- FIAT/dual_set.py | 14 +------ FIAT/gauss_legendre.py | 49 ++---------------------- FIAT/gauss_lobatto_legendre.py | 2 +- FIAT/hierarchical.py | 39 +++++++++++-------- 6 files changed, 91 insertions(+), 93 deletions(-) diff --git a/FIAT/check_format_variant.py b/FIAT/check_format_variant.py index bf9b2a012..2191be112 100644 --- a/FIAT/check_format_variant.py +++ b/FIAT/check_format_variant.py @@ -23,13 +23,17 @@ def check_format_variant(variant, degree): return variant, interpolant_degree -def parse_lagrange_variant(variant): +def parse_lagrange_variant(variant, discontinuous=False): options = variant.replace(" ", "").split(",") assert len(options) <= 2 + spectral = "gl" if discontinuous else "gll" supported_point_variants = {"equispaced": "equispaced", "gll": "gll", - "spectral": "gll"} - point_variant = "gll" + "spectral": spectral} + if discontinuous: + supported_point_variants["gl"] = 
"gl" + + point_variant = spectral splitting = None diff --git a/FIAT/discontinuous_lagrange.py b/FIAT/discontinuous_lagrange.py index d5f93c29f..098c322ea 100644 --- a/FIAT/discontinuous_lagrange.py +++ b/FIAT/discontinuous_lagrange.py @@ -8,6 +8,9 @@ import itertools import numpy as np from FIAT import finite_element, polynomial_set, dual_set, functional, P0 +from FIAT.reference_element import LINE, make_lattice +from FIAT.orientation_utils import make_entity_permutations_simplex +from FIAT.barycentric_interpolation import LagrangePolynomialSet from FIAT.polynomial_set import mis from FIAT.check_format_variant import parse_lagrange_variant @@ -140,10 +143,10 @@ def make_entity_permutations(dim, npoints): return perms -class DiscontinuousLagrangeDualSet(dual_set.DualSet): +class BrokenLagrangeDualSet(dual_set.DualSet): """The dual basis for Lagrange elements. This class works for simplices of any dimension. Nodes are point evaluation at - equispaced points. This is the discontinuous version where + equispaced points. This is the broken version where all nodes are topologically associated with the cell itself""" def __init__(self, ref_el, degree, point_variant="equispaced"): @@ -171,24 +174,61 @@ def __init__(self, ref_el, degree, point_variant="equispaced"): entity_permutations[dim][entity] = perms entity_ids[dim][0] = list(range(len(nodes))) + super(BrokenLagrangeDualSet, self).__init__(nodes, ref_el, entity_ids, entity_permutations) + + +class DiscontinuousLagrangeDualSet(dual_set.DualSet): + """The dual basis for discontinuous elements with nodes at recursively-defined points.""" + + def __init__(self, ref_el, degree, point_variant="equispaced"): + nodes = [] + entity_ids = {} + entity_permutations = {} + sd = ref_el.get_spatial_dimension() + top = ref_el.get_topology() + for dim in sorted(top): + entity_ids[dim] = {} + entity_permutations[dim] = {} + perms = make_entity_permutations_simplex(dim, degree + 1 if dim == sd else -1) + for entity in sorted(top[dim]): + entity_ids[dim][entity] = [] + entity_permutations[dim][entity] = perms + + # make nodes by getting points + for entity in top[sd]: + cur = len(nodes) + pts = make_lattice(ref_el.get_vertices_of_subcomplex(top[sd][entity]), + degree, variant=point_variant) + nodes.extend(functional.PointEvaluation(ref_el, x) for x in pts) + entity_ids[dim][entity] = list(range(cur, len(nodes))) super(DiscontinuousLagrangeDualSet, self).__init__(nodes, ref_el, entity_ids, entity_permutations) -class HigherOrderDiscontinuousLagrange(finite_element.CiarletElement): - """The discontinuous Lagrange finite element.""" +class DiscontinuousLagrange(finite_element.CiarletElement): + """Simplicial discontinuous element with nodes at the (recursive) Gauss-Legendre points.""" + def __new__(cls, ref_el, degree, variant="equsipaced"): + if degree == 0: + return P0.P0(ref_el) + return super(DiscontinuousLagrange, cls).__new__(cls) def __init__(self, ref_el, degree, variant="equispaced"): - splitting, point_variant = parse_lagrange_variant(variant) + splitting, point_variant = parse_lagrange_variant(variant, discontinuous=True) if splitting is not None: ref_el = splitting(ref_el) - dual = DiscontinuousLagrangeDualSet(ref_el, degree, point_variant=point_variant) - poly_set = polynomial_set.ONPolynomialSet(ref_el, degree) + if point_variant in ["equispaced", "gll"]: + dual = BrokenLagrangeDualSet(ref_el, degree, point_variant) + else: + dual = DiscontinuousLagrangeDualSet(ref_el, degree, point_variant) + if ref_el.shape == LINE: + # In 1D we can use the primal 
basis as the expansion set, + # avoiding any round-off coming from a basis transformation + points = [] + for node in dual.nodes: + # Assert singleton point for each node. + pt, = node.get_point_dict().keys() + points.append(pt) + poly_set = LagrangePolynomialSet(ref_el, points) + else: + poly_set = polynomial_set.ONPolynomialSet(ref_el, degree) formdegree = ref_el.get_spatial_dimension() # n-form - super(HigherOrderDiscontinuousLagrange, self).__init__(poly_set, dual, degree, formdegree) - - -def DiscontinuousLagrange(ref_el, degree, variant="equispaced"): - if degree == 0: - return P0.P0(ref_el) - else: - return HigherOrderDiscontinuousLagrange(ref_el, degree, variant) + super(DiscontinuousLagrange, self).__init__(poly_set, dual, degree, formdegree) diff --git a/FIAT/dual_set.py b/FIAT/dual_set.py index 6a63d65fd..801c1855f 100644 --- a/FIAT/dual_set.py +++ b/FIAT/dual_set.py @@ -198,7 +198,6 @@ def make_entity_closure_ids(ref_el, entity_ids): def merge_entities(ref_el, entity_ids, entity_permutations): """Collect DOFs from simplicial complex onto facets of parent cell""" - from FIAT.orientation_utils import make_entity_permutations_simplex parent_cell = ref_el.get_parent() if parent_cell is None: return ref_el, entity_ids, entity_permutations @@ -212,16 +211,5 @@ def merge_entities(ref_el, entity_ids, entity_permutations): dofs_cur = entity_ids[dim][entity] parent_ids[parent_dim][parent_id].extend(dofs_cur) - if entity_permutations is None: - parent_permutations = None - else: - parent_permutations = {} - npoints = 0 - for dim in sorted(parent_top): - if dim <= 1: - npoints = len(parent_ids[dim][0]) - perms = make_entity_permutations_simplex(dim, npoints) - parent_permutations[dim] = {entity: perms for entity in parent_top[dim]} - npoints -= 1 - + parent_permutations = None return parent_cell, parent_ids, parent_permutations diff --git a/FIAT/gauss_legendre.py b/FIAT/gauss_legendre.py index 18efa22de..7962918f6 100644 --- a/FIAT/gauss_legendre.py +++ b/FIAT/gauss_legendre.py @@ -8,53 +8,10 @@ # # Modified by Pablo D. 
Brubeck (brubeck@protonmail.com), 2021 -from FIAT import finite_element, polynomial_set, dual_set, functional -from FIAT.reference_element import POINT, LINE, TRIANGLE, TETRAHEDRON -from FIAT.orientation_utils import make_entity_permutations_simplex -from FIAT.barycentric_interpolation import LagrangePolynomialSet -from FIAT.reference_element import make_lattice +from FIAT import discontinuous_lagrange -class GaussLegendreDualSet(dual_set.DualSet): - """The dual basis for discontinuous elements with nodes at the - (recursive) Gauss-Legendre points.""" - - def __init__(self, ref_el, degree): - entity_ids = {} - entity_permutations = {} - top = ref_el.get_topology() - for dim in sorted(top): - entity_ids[dim] = {} - entity_permutations[dim] = {} - perms = make_entity_permutations_simplex(dim, degree + 1 if dim == len(top)-1 else -1) - for entity in sorted(top[dim]): - entity_ids[dim][entity] = [] - entity_permutations[dim][entity] = perms - - # make nodes by getting points - dim = ref_el.get_spatial_dimension() - pts = make_lattice(ref_el.get_vertices(), degree, variant="gl") - nodes = [functional.PointEvaluation(ref_el, x) for x in pts] - entity_ids[dim][0] = list(range(len(nodes))) - super(GaussLegendreDualSet, self).__init__(nodes, ref_el, entity_ids, entity_permutations) - - -class GaussLegendre(finite_element.CiarletElement): +class GaussLegendre(discontinuous_lagrange.DiscontinuousLagrange): """Simplicial discontinuous element with nodes at the (recursive) Gauss-Legendre points.""" def __init__(self, ref_el, degree): - if ref_el.shape not in {POINT, LINE, TRIANGLE, TETRAHEDRON}: - raise ValueError("Gauss-Legendre elements are only defined on simplices.") - dual = GaussLegendreDualSet(ref_el, degree) - if ref_el.shape == LINE: - # In 1D we can use the primal basis as the expansion set, - # avoiding any round-off coming from a basis transformation - points = [] - for node in dual.nodes: - # Assert singleton point for each node. 
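Since GaussLegendre now simply delegates to DiscontinuousLagrange with the "gl" point variant, a quick sanity sketch (illustrative, on the unit interval) is that the two constructions tabulate identically:

    import numpy
    from FIAT import DiscontinuousLagrange, GaussLegendre
    from FIAT.reference_element import ufc_simplex

    T = ufc_simplex(1)
    pts = [(0.25,), (0.75,)]
    gl = GaussLegendre(T, 3)
    dg = DiscontinuousLagrange(T, 3, variant="gl")
    assert numpy.allclose(gl.tabulate(0, pts)[(0,)], dg.tabulate(0, pts)[(0,)])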
- pt, = node.get_point_dict().keys() - points.append(pt) - poly_set = LagrangePolynomialSet(ref_el, points) - else: - poly_set = polynomial_set.ONPolynomialSet(ref_el, degree) - formdegree = ref_el.get_spatial_dimension() # n-form - super(GaussLegendre, self).__init__(poly_set, dual, degree, formdegree) + super(GaussLegendre, self).__init__(ref_el, degree, variant="gl") diff --git a/FIAT/gauss_lobatto_legendre.py b/FIAT/gauss_lobatto_legendre.py index 45e1ddedc..d4589da0e 100644 --- a/FIAT/gauss_lobatto_legendre.py +++ b/FIAT/gauss_lobatto_legendre.py @@ -12,6 +12,6 @@ class GaussLobattoLegendre(lagrange.Lagrange): - """Simplicial continuous element with nodes at the (recursive) Gauss-Lobatto points.""" + """Simplicial continuous element with nodes at the (recursive) Gauss-Lobatto-Legendre points.""" def __init__(self, ref_el, degree): super(GaussLobattoLegendre, self).__init__(ref_el, degree, variant="gll") diff --git a/FIAT/hierarchical.py b/FIAT/hierarchical.py index af4400628..5101b4065 100644 --- a/FIAT/hierarchical.py +++ b/FIAT/hierarchical.py @@ -20,24 +20,35 @@ class LegendreDual(dual_set.DualSet): """The dual basis for Legendre elements.""" - def __init__(self, ref_el, degree, poly_set): + def __init__(self, ref_el, degree, codim=0): + nodes = [] entity_ids = {} entity_permutations = {} + + sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() for dim in sorted(top): - entity_ids[dim] = {} + npoints = degree + 1 if dim == sd - codim else 0 + perms = make_entity_permutations_simplex(dim, npoints) entity_permutations[dim] = {} - perms = make_entity_permutations_simplex(dim, degree + 1 if dim == len(top)-1 else -1) + entity_ids[dim] = {} + if npoints == 0: + for entity in sorted(top[dim]): + entity_ids[dim][entity] = [] + entity_permutations[dim][entity] = perms + continue + + ref_facet = ref_el.construct_subelement(dim) + poly_set = ONPolynomialSet(ref_facet, degree) + Q_ref = create_quadrature(ref_facet, 2 * degree) + phis = poly_set.tabulate(Q_ref.get_points())[(0,) * dim] for entity in sorted(top[dim]): - entity_ids[dim][entity] = [] + cur = len(nodes) + Q_facet = FacetQuadratureRule(ref_el, dim, entity, Q_ref) + nodes.extend(functional.IntegralMoment(ref_el, Q_facet, phi) for phi in phis) + entity_ids[dim][entity] = list(range(cur, len(nodes))) entity_permutations[dim][entity] = perms - dim = ref_el.get_spatial_dimension() - Q = create_quadrature(ref_el, 2 * degree) - phis = poly_set.tabulate(Q.get_points())[(0,) * dim] - nodes = [functional.IntegralMoment(ref_el, Q, phi) for phi in phis] - entity_ids[dim][0] = list(range(len(nodes))) - super(LegendreDual, self).__init__(nodes, ref_el, entity_ids, entity_permutations) @@ -48,7 +59,7 @@ def __init__(self, ref_el, degree): if ref_el.shape not in {POINT, LINE, TRIANGLE, TETRAHEDRON}: raise ValueError("%s is only defined on simplices." 
% type(self)) poly_set = ONPolynomialSet(ref_el, degree) - dual = LegendreDual(ref_el, degree, poly_set) + dual = LegendreDual(ref_el, degree) formdegree = ref_el.get_spatial_dimension() # n-form super(Legendre, self).__init__(poly_set, dual, degree, formdegree) @@ -56,8 +67,6 @@ def __init__(self, ref_el, degree): class IntegratedLegendreDual(dual_set.DualSet): """The dual basis for integrated Legendre elements.""" def __init__(self, ref_el, degree): - duals = self._beuchler_integral_duals - nodes = [] entity_ids = {} entity_permutations = {} @@ -77,7 +86,7 @@ def __init__(self, ref_el, degree): continue ref_facet = symmetric_simplex(dim) - Q_ref, phis = duals(ref_facet, degree) + Q_ref, phis = self.make_reference_duals(ref_facet, degree) for entity in sorted(top[dim]): cur = len(nodes) Q_facet = FacetQuadratureRule(ref_el, dim, entity, Q_ref) @@ -92,7 +101,7 @@ def __init__(self, ref_el, degree): super(IntegratedLegendreDual, self).__init__(nodes, ref_el, entity_ids, entity_permutations) - def _beuchler_integral_duals(self, ref_el, degree): + def make_reference_duals(self, ref_el, degree): Q = create_quadrature(ref_el, 2 * degree) qpts, qwts = Q.get_points(), Q.get_weights() inner = lambda v, u: numpy.dot(numpy.multiply(v, qwts), u.T) From e965c52e231b67b14983ec7626383e3432aa6454 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sat, 30 Mar 2024 22:26:20 -0500 Subject: [PATCH 44/93] handle macro variants --- FIAT/P0.py | 28 +++++++++------------- FIAT/__init__.py | 4 +++- FIAT/check_format_variant.py | 34 +++++++++++++++++++------- FIAT/discontinuous_lagrange.py | 44 +++++++++++++++++++++------------- FIAT/hierarchical.py | 27 +++++++++++++-------- FIAT/lagrange.py | 5 ++-- FIAT/reference_element.py | 13 ++++++---- test/unit/test_macro.py | 19 ++++++++++++--- 8 files changed, 110 insertions(+), 64 deletions(-) diff --git a/FIAT/P0.py b/FIAT/P0.py index 721f40992..4ffedfc2f 100644 --- a/FIAT/P0.py +++ b/FIAT/P0.py @@ -17,34 +17,28 @@ class P0Dual(dual_set.DualSet): def __init__(self, ref_el): entity_ids = {} - nodes = [] entity_permutations = {} - vs = numpy.array(ref_el.get_vertices()) - if ref_el.get_dimension() == 0: - bary = () - else: - bary = tuple(numpy.average(vs, 0)) - - nodes = [functional.PointEvaluation(ref_el, bary)] - entity_ids = {} + sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() + if sd == 0: + pts = [tuple() for entity in sorted(top[sd])] + else: + pts = [tuple(numpy.average(ref_el.get_vertices_of_subcomplex(top[sd][entity]), 0)) + for entity in sorted(top[sd])] + nodes = [functional.PointEvaluation(ref_el, pt) for pt in pts] for dim in sorted(top): entity_ids[dim] = {} entity_permutations[dim] = {} sym_size = ref_el.symmetry_group_size(dim) + perm = [0] if dim == sd else [] if isinstance(dim, tuple): assert isinstance(sym_size, tuple) - perms = {o: [] for o in numpy.ndindex(sym_size)} + perms = {o: perm for o in numpy.ndindex(sym_size)} else: - perms = {o: [] for o in range(sym_size)} + perms = {o: perm for o in range(sym_size)} for entity in sorted(top[dim]): - entity_ids[dim][entity] = [] + entity_ids[dim][entity] = [entity] if dim == sd else [] entity_permutations[dim][entity] = perms - entity_ids[dim] = {0: [0]} - if isinstance(dim, tuple): - entity_permutations[dim][0] = {o: [0] for o in numpy.ndindex(sym_size)} - else: - entity_permutations[dim][0] = {o: [0] for o in range(sym_size)} super(P0Dual, self).__init__(nodes, ref_el, entity_ids, entity_permutations) diff --git a/FIAT/__init__.py b/FIAT/__init__.py index ae68cfd72..9e95b394b 100644 --- 
a/FIAT/__init__.py +++ b/FIAT/__init__.py @@ -30,6 +30,7 @@ from FIAT.morley import Morley from FIAT.nedelec import Nedelec from FIAT.nedelec_second_kind import NedelecSecondKind +from FIAT.hierarchical import Legendre, IntegratedLegendre from FIAT.P0 import P0 from FIAT.raviart_thomas import RaviartThomas from FIAT.crouzeix_raviart import CrouzeixRaviart @@ -48,7 +49,6 @@ from FIAT.restricted import RestrictedElement # noqa: F401 from FIAT.quadrature_element import QuadratureElement # noqa: F401 from FIAT.kong_mulder_veldhuizen import KongMulderVeldhuizen # noqa: F401 -from FIAT.hierarchical import Legendre, IntegratedLegendre # noqa: F401 from FIAT.fdm_element import FDMLagrange, FDMDiscontinuousLagrange, FDMQuadrature, FDMBrokenH1, FDMBrokenL2, FDMHermite # noqa: F401 # Important functionality @@ -81,6 +81,8 @@ "Gauss-Lobatto-Legendre": GaussLobattoLegendre, "Gauss-Legendre": GaussLegendre, "Gauss-Radau": GaussRadau, + "Legendre": Legendre, + "Integrated Legendre": IntegratedLegendre, "Morley": Morley, "Nedelec 1st kind H(curl)": Nedelec, "Nedelec 2nd kind H(curl)": NedelecSecondKind, diff --git a/FIAT/check_format_variant.py b/FIAT/check_format_variant.py index 2191be112..15fb44ad4 100644 --- a/FIAT/check_format_variant.py +++ b/FIAT/check_format_variant.py @@ -1,5 +1,20 @@ import re -from FIAT.macro import IsoSplit, AlfeldSplit +from FIAT.macro import AlfeldSplit, IsoSplit + +# dicts mapping Lagrange variant names to recursivenodes family names +supported_cg_variants = { + "spectral": "gll", + "chebyshev": "lgc", + "equispaced": "equispaced", + "gll": "gll"} + +supported_dg_variants = { + "spectral": "gl", + "chebyshev": "gc", + "equispaced": "equispaced", + "equispaced_interior": "equispaced_interior", + "gll": "gll", + "gl": "gl"} def check_format_variant(variant, degree): @@ -24,18 +39,19 @@ def check_format_variant(variant, degree): def parse_lagrange_variant(variant, discontinuous=False): + if variant is None: + variant = "equispaced" options = variant.replace(" ", "").split(",") assert len(options) <= 2 - spectral = "gl" if discontinuous else "gll" - supported_point_variants = {"equispaced": "equispaced", - "gll": "gll", - "spectral": spectral} - if discontinuous: - supported_point_variants["gl"] = "gl" - point_variant = spectral + if discontinuous: + supported_point_variants = supported_dg_variants + else: + supported_point_variants = supported_cg_variants + # defaults splitting = None + point_variant = supported_point_variants["spectral"] for pre_opt in options: opt = pre_opt.lower() @@ -52,4 +68,6 @@ def parse_lagrange_variant(variant, discontinuous=False): else: raise ValueError("Illegal variant option") + if discontinuous and splitting is not None and point_variant in supported_cg_variants.values(): + raise ValueError("Illegal variant. 
DG macroelements with DOFs on subcell boundaries are not unisolvent.") return splitting, point_variant diff --git a/FIAT/discontinuous_lagrange.py b/FIAT/discontinuous_lagrange.py index 098c322ea..8909e08e1 100644 --- a/FIAT/discontinuous_lagrange.py +++ b/FIAT/discontinuous_lagrange.py @@ -150,27 +150,21 @@ class BrokenLagrangeDualSet(dual_set.DualSet): all nodes are topologically associated with the cell itself""" def __init__(self, ref_el, degree, point_variant="equispaced"): - entity_ids = {} nodes = [] + entity_ids = {} entity_permutations = {} # make nodes by getting points # need to do this dimension-by-dimension, facet-by-facet top = ref_el.get_topology() - - cur = 0 for dim in sorted(top): entity_ids[dim] = {} entity_permutations[dim] = {} perms = make_entity_permutations(dim, degree + 1 if dim == len(top) - 1 else -1) for entity in sorted(top[dim]): pts_cur = ref_el.make_points(dim, entity, degree, variant=point_variant) - nodes_cur = [functional.PointEvaluation(ref_el, x) - for x in pts_cur] - nnodes_cur = len(nodes_cur) - nodes += nodes_cur + nodes.extend(functional.PointEvaluation(ref_el, x) for x in pts_cur) entity_ids[dim][entity] = [] - cur += nnodes_cur entity_permutations[dim][entity] = perms entity_ids[dim][0] = list(range(len(nodes))) @@ -178,8 +172,8 @@ def __init__(self, ref_el, degree, point_variant="equispaced"): class DiscontinuousLagrangeDualSet(dual_set.DualSet): - """The dual basis for discontinuous elements with nodes at recursively-defined points.""" - + """The dual basis for discontinuous elements with nodes at recursively-defined points. + """ def __init__(self, ref_el, degree, point_variant="equispaced"): nodes = [] entity_ids = {} @@ -194,7 +188,7 @@ def __init__(self, ref_el, degree, point_variant="equispaced"): entity_ids[dim][entity] = [] entity_permutations[dim][entity] = perms - # make nodes by getting points + # make nodes by getting points on the interior facets for entity in top[sd]: cur = len(nodes) pts = make_lattice(ref_el.get_vertices_of_subcomplex(top[sd][entity]), @@ -205,20 +199,36 @@ def __init__(self, ref_el, degree, point_variant="equispaced"): class DiscontinuousLagrange(finite_element.CiarletElement): - """Simplicial discontinuous element with nodes at the (recursive) Gauss-Legendre points.""" - def __new__(cls, ref_el, degree, variant="equsipaced"): + """The discontinuous Lagrange finite element. + + :arg ref_el: The reference element, which could be a standard FIAT simplex or a split complex + :arg degree: The polynomial degree + :arg variant: A comma-separated string that may specify the type of point distribution + and the splitting strategy if a macro element is desired. + Either option may be omitted. The default point type is equispaced + and the default splitting strategy is None. + Example: variant='gl' gives a standard unsplit point distribution with + spectral points. + variant='equispaced,Iso(2)' with degree=1 gives the P2:P1 iso element. + variant='Alfeld' can be used to obtain a barycentrically refined + macroelement for Scott-Vogelius. 
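A short usage sketch of the variant strings described above (illustrative only; the dimension counts assume the UFC triangle and its Alfeld split into 3 subcells):

    from FIAT import DiscontinuousLagrange, Lagrange
    from FIAT.reference_element import ufc_simplex

    T = ufc_simplex(2)
    # continuous P2 on the Alfeld split: one point per vertex and edge
    # of the split complex, 4 + 6 = 10 degrees of freedom
    cg = Lagrange(T, 2, variant="Alfeld,equispaced")
    assert cg.is_macroelement() and cg.space_dimension() == 10
    # discontinuous P2 on the Alfeld split with interior equispaced points:
    # 6 points on each of the 3 subcells
    dg = DiscontinuousLagrange(T, 2, variant="Alfeld,equispaced_interior")
    assert dg.is_macroelement() and dg.space_dimension() == 18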
+ """ + def __new__(cls, ref_el, degree, variant="equispaced"): if degree == 0: - return P0.P0(ref_el) + splitting, _ = parse_lagrange_variant(variant, discontinuous=True) + if splitting is None: + # FIXME P0 on the split requires implementing SplitSimplicialComplex.symmetry_group_size() + return P0.P0(ref_el) return super(DiscontinuousLagrange, cls).__new__(cls) def __init__(self, ref_el, degree, variant="equispaced"): splitting, point_variant = parse_lagrange_variant(variant, discontinuous=True) if splitting is not None: ref_el = splitting(ref_el) - if point_variant in ["equispaced", "gll"]: - dual = BrokenLagrangeDualSet(ref_el, degree, point_variant) + if point_variant in ("equispaced", "gll", "lgc"): + dual = BrokenLagrangeDualSet(ref_el, degree, point_variant=point_variant) else: - dual = DiscontinuousLagrangeDualSet(ref_el, degree, point_variant) + dual = DiscontinuousLagrangeDualSet(ref_el, degree, point_variant=point_variant) if ref_el.shape == LINE: # In 1D we can use the primal basis as the expansion set, # avoiding any round-off coming from a basis transformation diff --git a/FIAT/hierarchical.py b/FIAT/hierarchical.py index 5101b4065..dd193b470 100644 --- a/FIAT/hierarchical.py +++ b/FIAT/hierarchical.py @@ -9,13 +9,13 @@ import numpy import scipy -from FIAT import finite_element, dual_set, functional -from FIAT.reference_element import (POINT, LINE, TRIANGLE, TETRAHEDRON, - symmetric_simplex) +from FIAT import finite_element, dual_set, functional, P0 +from FIAT.reference_element import symmetric_simplex from FIAT.orientation_utils import make_entity_permutations_simplex from FIAT.quadrature import FacetQuadratureRule from FIAT.quadrature_schemes import create_quadrature from FIAT.polynomial_set import ONPolynomialSet, make_bubbles +from FIAT.check_format_variant import parse_lagrange_variant class LegendreDual(dual_set.DualSet): @@ -54,10 +54,18 @@ def __init__(self, ref_el, degree, codim=0): class Legendre(finite_element.CiarletElement): """Simplicial discontinuous element with Legendre polynomials.""" + def __new__(cls, ref_el, degree, variant=None): + if degree == 0: + splitting, _ = parse_lagrange_variant(variant, discontinuous=True) + if splitting is None: + # FIXME P0 on the split requires implementing SplitSimplicialComplex.symmetry_group_size() + return P0.P0(ref_el) + return super(Legendre, cls).__new__(cls) - def __init__(self, ref_el, degree): - if ref_el.shape not in {POINT, LINE, TRIANGLE, TETRAHEDRON}: - raise ValueError("%s is only defined on simplices." % type(self)) + def __init__(self, ref_el, degree, variant=None): + splitting, _ = parse_lagrange_variant(variant, discontinuous=True) + if splitting is not None: + ref_el = splitting(ref_el) poly_set = ONPolynomialSet(ref_el, degree) dual = LegendreDual(ref_el, degree) formdegree = ref_el.get_spatial_dimension() # n-form @@ -123,13 +131,12 @@ def make_reference_duals(self, ref_el, degree): class IntegratedLegendre(finite_element.CiarletElement): """Simplicial continuous element with integrated Legendre polynomials.""" - def __init__(self, ref_el, degree, variant=None): - if ref_el.shape not in {POINT, LINE, TRIANGLE, TETRAHEDRON}: - raise ValueError("%s is only defined on simplices." 
% type(self)) + splitting, _ = parse_lagrange_variant(variant) + if splitting is not None: + ref_el = splitting(ref_el) if degree < 1: raise ValueError(f"{type(self).__name__} elements only valid for k >= 1") - poly_set = ONPolynomialSet(ref_el, degree, variant="bubble") dual = IntegratedLegendreDual(ref_el, degree) formdegree = 0 # 0-form diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index da2b8d2bc..ee5d5d51d 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -15,8 +15,8 @@ class LagrangeDualSet(dual_set.DualSet): """The dual basis for Lagrange elements. This class works for simplices of any dimension. Nodes are point evaluation at - equispaced points.""" - + recursively-defined points. + """ def __init__(self, ref_el, degree, point_variant="equispaced"): entity_ids = {} nodes = [] @@ -58,7 +58,6 @@ class Lagrange(finite_element.CiarletElement): variant='Alfeld' can be used to obtain a barycentrically refined macroelement for Scott-Vogelius. """ - def __init__(self, ref_el, degree, variant="equispaced"): splitting, point_variant = parse_lagrange_variant(variant) if splitting is not None: diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index eb08e85fb..7200dc1f6 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -78,11 +78,14 @@ def make_lattice(verts, n, interior=0, variant=None): and interior = 0, this function will return the vertices and midpoint, but with interior = 1, it will only return the midpoint.""" - if variant is None or variant == "equispaced": - variant = "equi" - elif variant == "gll": - variant = "lgl" - family = _decode_family(variant) + if variant is None: + variant = "equispaced" + recursivenodes_families = { + "equispaced": "equi", + "equispaced_interior": "equi_interior", + "gll": "lgl"} + family = recursivenodes_families.get(variant, variant) + family = _decode_family(family) D = len(verts) X = numpy.array(verts) get_point = lambda alpha: tuple(numpy.dot(_recursive(D - 1, n, alpha, family), X)) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 930f22f4c..d3f54c9d0 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -1,8 +1,7 @@ import math import numpy import pytest -from FIAT.hierarchical import Legendre -from FIAT.lagrange import Lagrange +from FIAT import DiscontinuousLagrange, Lagrange, Legendre, P0 from FIAT.macro import AlfeldSplit, IsoSplit from FIAT.quadrature_schemes import create_quadrature from FIAT.reference_element import ufc_simplex @@ -131,7 +130,7 @@ def test_macro_lagrange(variant, degree, split, cell): @pytest.mark.parametrize("variant", ("gll", "Alfeld,equispaced", "gll,iso")) -def test_is_macro(variant): +def test_is_macro_lagrange(variant): is_macro = "alfeld" in variant.lower() or "iso" in variant.lower() fe = Lagrange(ufc_simplex(2), 2, variant) @@ -141,6 +140,20 @@ def test_is_macro(variant): assert fe.get_nodal_basis().get_reference_element().is_macrocell() == is_macro +@pytest.mark.parametrize("variant", ("gl", "Alfeld,equispaced_interior", "chebyshev,iso")) +@pytest.mark.parametrize("degree", (0, 2)) +def test_is_macro_discontinuous_lagrange(degree, variant): + is_macro = "alfeld" in variant.lower() or "iso" in variant.lower() + + fe = DiscontinuousLagrange(ufc_simplex(2), degree, variant) + if degree == 0 and not is_macro: + assert isinstance(fe, P0) + assert not fe.get_reference_element().is_macrocell() + assert fe.is_macroelement() == is_macro + assert fe.get_reference_complex().is_macrocell() == is_macro + assert 
fe.get_nodal_basis().get_reference_element().is_macrocell() == is_macro + + @pytest.mark.parametrize('split', [None, AlfeldSplit]) @pytest.mark.parametrize('codim', range(3)) def test_make_bubbles(cell, split, codim): From 31465ab1af25dec1472b678c7da4e1cc5dd9f994 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sun, 31 Mar 2024 17:14:34 -0500 Subject: [PATCH 45/93] merge entity_dofs in sorted order --- FIAT/dual_set.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/FIAT/dual_set.py b/FIAT/dual_set.py index 801c1855f..b941da0b6 100644 --- a/FIAT/dual_set.py +++ b/FIAT/dual_set.py @@ -205,8 +205,8 @@ def merge_entities(ref_el, entity_ids, entity_permutations): parent_top = parent_cell.get_topology() parent_ids = {dim: {entity: [] for entity in parent_top[dim]} for dim in parent_top} child_to_parent = ref_el.get_child_to_parent() - for dim in child_to_parent: - for entity in child_to_parent[dim]: + for dim in sorted(child_to_parent): + for entity in sorted(child_to_parent[dim]): parent_dim, parent_id = child_to_parent[dim][entity] dofs_cur = entity_ids[dim][entity] parent_ids[parent_dim][parent_id].extend(dofs_cur) From 12e2195e9b9d3002724ad68c7742687646036705 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sun, 31 Mar 2024 17:15:08 -0500 Subject: [PATCH 46/93] P0: use get_dimension to handle tensor product cells --- FIAT/P0.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/FIAT/P0.py b/FIAT/P0.py index 4ffedfc2f..c91dd171c 100644 --- a/FIAT/P0.py +++ b/FIAT/P0.py @@ -18,7 +18,7 @@ class P0Dual(dual_set.DualSet): def __init__(self, ref_el): entity_ids = {} entity_permutations = {} - sd = ref_el.get_spatial_dimension() + sd = ref_el.get_dimension() top = ref_el.get_topology() if sd == 0: pts = [tuple() for entity in sorted(top[sd])] From c8a1ae5ff715dc938f37f976c23a94728092f795 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sun, 31 Mar 2024 17:17:48 -0500 Subject: [PATCH 47/93] DG: use get_dimension to handle tensor product cells --- FIAT/discontinuous_lagrange.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/FIAT/discontinuous_lagrange.py b/FIAT/discontinuous_lagrange.py index 8909e08e1..e9bfdf954 100644 --- a/FIAT/discontinuous_lagrange.py +++ b/FIAT/discontinuous_lagrange.py @@ -178,7 +178,7 @@ def __init__(self, ref_el, degree, point_variant="equispaced"): nodes = [] entity_ids = {} entity_permutations = {} - sd = ref_el.get_spatial_dimension() + sd = ref_el.get_dimension() top = ref_el.get_topology() for dim in sorted(top): entity_ids[dim] = {} From daa07982615f1484a208735eeee4bbf94fe97cf9 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Mon, 1 Apr 2024 14:23:44 -0500 Subject: [PATCH 48/93] Macro: reorder ISOSplit to match P2 --- FIAT/lagrange.py | 1 + FIAT/macro.py | 51 +++++++++++++++++++++++++++++------------------- 2 files changed, 32 insertions(+), 20 deletions(-) diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index ee5d5d51d..b6266c701 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -40,6 +40,7 @@ def __init__(self, ref_el, degree, point_variant="equispaced"): entity_ids[dim][entity] = list(range(cur, cur + nnodes_cur)) cur += nnodes_cur entity_permutations[dim][entity] = perms + super(LagrangeDualSet, self).__init__(nodes, ref_el, entity_ids, entity_permutations) diff --git a/FIAT/macro.py b/FIAT/macro.py index db19f4f95..ffd7ff61f 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -4,8 +4,7 @@ import numpy from FIAT.quadrature import FacetQuadratureRule, QuadratureRule -from FIAT.reference_element 
import (SimplicialComplex, lattice_iter, - make_lattice) +from FIAT.reference_element import SimplicialComplex, lattice_iter def bary_to_xy(verts, bary, result=None): @@ -23,11 +22,12 @@ def xy_to_bary(verts, pts, result=None): """Maps physical points to barycentric coordinates. :arg verts: A tuple of points. - :arg ots: A row-stacked numpy array of physical points. + :arg pts: A row-stacked numpy array of physical points. :arg result: A row-stacked numpy array of barycentric coordinates. :returns: result """ verts = numpy.asarray(verts) + pts = numpy.asarray(pts) npts = pts.shape[0] sdim = verts.shape[1] @@ -171,33 +171,44 @@ class IsoSplit(SplitSimplicialComplex): :kwarg depth: The number of subdivisions along each edge of the simplex. :kwarg variant: The point distribution variant. """ - def __init__(self, ref_el, depth=2, variant=None): + def __init__(self, ref_el, degree=2, variant=None): + # Construct new vertices entity-by-entity sd = ref_el.get_spatial_dimension() - old_verts = ref_el.get_vertices() - new_verts = make_lattice(old_verts, depth, variant=variant) + top = ref_el.get_topology() + new_verts = [] + ref_lattice = [] + for dim in top: + for entity in top[dim]: + new_verts.extend(ref_el.make_points(dim, entity, degree, variant=variant)) + ref_lattice.extend(ref_el.make_points(dim, entity, degree)) + + bary = xy_to_bary(ref_el.vertices, ref_lattice) + bary *= degree + alphas = numpy.rint(bary[:, :-1]) + flat_index = {tuple(alpha): i for i, alpha in enumerate(alphas)} new_topology = {} new_topology[0] = {i: (i,) for i in range(len(new_verts))} new_topology[1] = {} - - # Loop through vertex pairs - # Edges are oriented from low vertex id to high vertex id to avoid duplicates - # Place a new edge when the two lattice multiindices are at Manhattan distance < 3, - # this connects the midpoints of edges within a face - # Only include diagonal edges that are parallel to the simplex edges, - # we take the diagonal that goes through vertices at the same depth + # Loop through degree k-1 vertices + # Construct a P1 simplex by connecting edges between this vertex and each of its neighbors by shifing each coordinate up by 1, + # forming a P1 simplex cur = 0 - distance = lambda x, y: sum(abs(b-a) for a, b in zip(x, y)) - for j, v1 in enumerate(lattice_iter(0, depth+1, sd)): - for i, v0 in enumerate(lattice_iter(0, depth+1, sd)): - if i < j and distance(v0, v1) < 3 and sum(v1) - sum(v0) <= 1: - new_topology[1][cur] = (i, j) + for alpha in lattice_iter(0, degree, sd): + simplex = [] + for beta in lattice_iter(0, 2, sd): + v1 = flat_index[tuple(a+b for a, b in zip(alpha, beta))] + for v0 in simplex: + new_topology[1][cur] = tuple(sorted((v0, v1))) cur = cur + 1 + simplex.append(v1) + if sd == 3: # Cut the octahedron # FIXME do this more generically - assert depth == 2 - new_topology[1][cur] = (1, 8) + assert degree == 2 + v0, v1 = flat_index[(1, 0, 0)], flat_index[(0, 1, 1)] + new_topology[1][cur] = tuple(sorted((v0, v1))) # Get an adjacency list for each vertex edges = new_topology[1].values() From f76e95a8287603acb23a937f82044470afde83d5 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Mon, 1 Apr 2024 14:23:44 -0500 Subject: [PATCH 49/93] Macro: reorder ISOSplit to match P2 --- FIAT/lagrange.py | 1 + FIAT/macro.py | 59 ++++++++++++++++++++++++----------------- test/unit/test_macro.py | 37 +++++++++++++++++++++++++- 3 files changed, 72 insertions(+), 25 deletions(-) diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index ee5d5d51d..b6266c701 100644 --- a/FIAT/lagrange.py +++ 
b/FIAT/lagrange.py @@ -40,6 +40,7 @@ def __init__(self, ref_el, degree, point_variant="equispaced"): entity_ids[dim][entity] = list(range(cur, cur + nnodes_cur)) cur += nnodes_cur entity_permutations[dim][entity] = perms + super(LagrangeDualSet, self).__init__(nodes, ref_el, entity_ids, entity_permutations) diff --git a/FIAT/macro.py b/FIAT/macro.py index db19f4f95..b85119cf6 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -4,8 +4,7 @@ import numpy from FIAT.quadrature import FacetQuadratureRule, QuadratureRule -from FIAT.reference_element import (SimplicialComplex, lattice_iter, - make_lattice) +from FIAT.reference_element import SimplicialComplex, lattice_iter def bary_to_xy(verts, bary, result=None): @@ -23,11 +22,12 @@ def xy_to_bary(verts, pts, result=None): """Maps physical points to barycentric coordinates. :arg verts: A tuple of points. - :arg ots: A row-stacked numpy array of physical points. + :arg pts: A row-stacked numpy array of physical points. :arg result: A row-stacked numpy array of barycentric coordinates. :returns: result """ verts = numpy.asarray(verts) + pts = numpy.asarray(pts) npts = pts.shape[0] sdim = verts.shape[1] @@ -168,36 +168,47 @@ class IsoSplit(SplitSimplicialComplex): connecting points on a regular lattice. :arg ref_el: The parent Simplex to split. - :kwarg depth: The number of subdivisions along each edge of the simplex. + :kwarg degree: The number of subdivisions along each edge of the simplex. :kwarg variant: The point distribution variant. """ - def __init__(self, ref_el, depth=2, variant=None): + def __init__(self, ref_el, degree=2, variant=None): + # Construct new vertices entity-by-entity sd = ref_el.get_spatial_dimension() - old_verts = ref_el.get_vertices() - new_verts = make_lattice(old_verts, depth, variant=variant) + top = ref_el.get_topology() + new_verts = [] + ref_lattice = [] + for dim in top: + for entity in top[dim]: + new_verts.extend(ref_el.make_points(dim, entity, degree, variant=variant)) + ref_lattice.extend(ref_el.make_points(dim, entity, degree)) + + bary = xy_to_bary(ref_el.get_vertices(), ref_lattice) + bary *= degree + alphas = numpy.rint(bary[:, :-1]) + flat_index = {tuple(alpha): i for i, alpha in enumerate(alphas)} new_topology = {} new_topology[0] = {i: (i,) for i in range(len(new_verts))} - new_topology[1] = {} - - # Loop through vertex pairs - # Edges are oriented from low vertex id to high vertex id to avoid duplicates - # Place a new edge when the two lattice multiindices are at Manhattan distance < 3, - # this connects the midpoints of edges within a face - # Only include diagonal edges that are parallel to the simplex edges, - # we take the diagonal that goes through vertices at the same depth - cur = 0 - distance = lambda x, y: sum(abs(b-a) for a, b in zip(x, y)) - for j, v1 in enumerate(lattice_iter(0, depth+1, sd)): - for i, v0 in enumerate(lattice_iter(0, depth+1, sd)): - if i < j and distance(v0, v1) < 3 and sum(v1) - sum(v0) <= 1: - new_topology[1][cur] = (i, j) - cur = cur + 1 + # Loop through degree k-1 vertices + # Construct a P1 simplex by connecting edges between a vertex and + # its neighbors obtained by shifting each coordinate up by 1, forming a P1 simplex + edges = [] + for alpha in lattice_iter(0, degree, sd): + simplex = [] + for beta in lattice_iter(0, 2, sd): + v1 = flat_index[tuple(a+b for a, b in zip(alpha, beta))] + for v0 in simplex: + edges.append(tuple(sorted((v0, v1)))) + simplex.append(v1) + if sd == 3: # Cut the octahedron # FIXME do this more generically - assert depth == 2 - 
new_topology[1][cur] = (1, 8) + assert degree == 2 + v0, v1 = flat_index[(1, 0, 0)], flat_index[(0, 1, 1)] + edges.append(tuple(sorted((v0, v1)))) + + new_topology[1] = dict(enumerate(sorted(edges))) # Get an adjacency list for each vertex edges = new_topology[1].values() diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index d3f54c9d0..fa94e96ea 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -5,7 +5,7 @@ from FIAT.macro import AlfeldSplit, IsoSplit from FIAT.quadrature_schemes import create_quadrature from FIAT.reference_element import ufc_simplex -from FIAT.expansions import polynomial_entity_ids +from FIAT.expansions import polynomial_entity_ids, polynomial_cell_node_map from FIAT.polynomial_set import make_bubbles, PolynomialSet, ONPolynomialSet @@ -220,3 +220,38 @@ def test_make_bubbles(cell, split, codim): P_at_qpts = P.tabulate(qpts)[(0,) * sd] B_at_qpts = B.tabulate(qpts)[(0,) * sd] assert numpy.allclose(numpy.dot(numpy.multiply(P_at_qpts, qwts), B_at_qpts.T), 0.0) + + +@pytest.mark.parametrize("degree", (4,)) +@pytest.mark.parametrize("variant", (None, "bubble")) +@pytest.mark.parametrize("split", (AlfeldSplit, IsoSplit)) +def test_macro_expansion(cell, split, variant, degree): + ref_complex = split(cell) + top = ref_complex.get_topology() + sd = ref_complex.get_spatial_dimension() + P = ONPolynomialSet(ref_complex, degree, variant=variant, scale=1) + + npoints = degree + sd + 1 + cell_point_map = [] + pts = [] + for cell in top[sd]: + cur = len(pts) + pts.extend(ref_complex.make_points(sd, cell, npoints)) + cell_point_map.append(list(range(cur, len(pts)))) + + order = 2 + values = P.tabulate(pts, order) + cell_node_map = polynomial_cell_node_map(ref_complex, degree, continuity=P.expansion_set.continuity) + for cell in top[sd]: + sub_el = ref_complex.construct_subelement(sd) + sub_el.vertices = ref_complex.get_vertices_of_subcomplex(top[sd][cell]) + Pcell = ONPolynomialSet(sub_el, degree, variant=variant, scale=1) + + cell_pts = sub_el.make_points(sd, 0, npoints) + cell_values = Pcell.tabulate(cell_pts, order) + + ibfs = cell_node_map[cell] + ipts = cell_point_map[cell] + indices = numpy.ix_(ibfs, ipts) + for alpha in values: + assert numpy.allclose(cell_values[alpha], values[alpha][indices]) From b3974ff1a532e24a725184bc8d6e3423914b84bd Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Tue, 2 Apr 2024 22:48:59 -0500 Subject: [PATCH 50/93] WIP (formatting, doc string) --- FIAT/quadrature_schemes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/FIAT/quadrature_schemes.py b/FIAT/quadrature_schemes.py index 5be757e37..610eef687 100644 --- a/FIAT/quadrature_schemes.py +++ b/FIAT/quadrature_schemes.py @@ -30,13 +30,13 @@ # NumPy import numpy +from FIAT.macro import MacroQuadratureRule from FIAT.quadrature import (QuadratureRule, make_quadrature, make_tensor_product_quadrature, map_quadrature) # FIAT from FIAT.reference_element import (HEXAHEDRON, QUADRILATERAL, TENSORPRODUCT, TETRAHEDRON, TRIANGLE, UFCTetrahedron, UFCTriangle, symmetric_simplex) -from FIAT.macro import MacroQuadratureRule def create_quadrature(ref_el, degree, scheme="default"): @@ -49,7 +49,7 @@ def create_quadrature(ref_el, degree, scheme="default"): Gauss scheme on simplices. On tensor-product cells, it is a tensor-product quadrature rule of the subcells. - :arg cell: The FIAT cell to create the quadrature for. + :arg ref_el: The FIAT cell to create the quadrature for. :arg degree: The degree of polynomial that the rule should integrate exactly. 
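A sketch of the macro-cell path this function gains in the series (an illustration, not part of the patch): on a split complex, create_quadrature builds the single-cell rule and wraps it in a MacroQuadratureRule, so the composite weights should still sum to the measure of the parent cell.

from FIAT.macro import AlfeldSplit
from FIAT.quadrature_schemes import create_quadrature
from FIAT.reference_element import ufc_simplex

T = ufc_simplex(2)
Q = create_quadrature(AlfeldSplit(T), 4)
# one copy of the reference rule per subcell; weights add up to the area of T
assert abs(sum(Q.get_weights()) - 0.5) < 1e-12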
""" From 0d9422b18d60261f9f43cd2a956e8e9dd45f03be Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 3 Apr 2024 13:12:21 -0500 Subject: [PATCH 51/93] Cell: implement construct_subcomplex --- FIAT/macro.py | 18 ++++++++++++++++++ FIAT/quadrature_schemes.py | 6 +++--- FIAT/reference_element.py | 19 +++++++++++++++++++ 3 files changed, 40 insertions(+), 3 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index b85119cf6..4bf43325a 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -162,6 +162,12 @@ def __init__(self, ref_el): new_topology[dim][offset+entity] = ids + (new_vert_id,) super(AlfeldSplit, self).__init__(ref_el, new_verts, new_topology) + def construct_subcomplex(self, dimension): + if dimension == self.get_dimension(): + return self + # Alfed on facets is just a simplex + return self.construct_subelement(dimension) + class IsoSplit(SplitSimplicialComplex): """Splits simplex into the simplicial complex obtained by @@ -172,6 +178,8 @@ class IsoSplit(SplitSimplicialComplex): :kwarg variant: The point distribution variant. """ def __init__(self, ref_el, degree=2, variant=None): + self.degree = degree + self.variant = variant # Construct new vertices entity-by-entity sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() @@ -227,6 +235,16 @@ def __init__(self, ref_el, degree=2, variant=None): new_topology[dim] = dict(enumerate(entities)) super(IsoSplit, self).__init__(ref_el, new_verts, new_topology) + def construct_subcomplex(self, dimension): + if dimension == self.get_dimension(): + return self + ref_el = self.construct_subelement(dimension) + if dimension == 0: + return ref_el + else: + # Iso on facets is Iso + return IsoSplit(ref_el, self.degree, self.variant) + class MacroQuadratureRule(QuadratureRule): """Composite quadrature rule on parent facets that respects the splitting. diff --git a/FIAT/quadrature_schemes.py b/FIAT/quadrature_schemes.py index 610eef687..578c9800a 100644 --- a/FIAT/quadrature_schemes.py +++ b/FIAT/quadrature_schemes.py @@ -54,9 +54,9 @@ def create_quadrature(ref_el, degree, scheme="default"): integrate exactly. """ if ref_el.is_macrocell(): - sd = ref_el.get_spatial_dimension() - cell = ref_el.construct_subelement(sd) - Q_ref = create_quadrature(cell, degree, scheme=scheme) + dimension = ref_el.get_dimension() + sub_el = ref_el.construct_subelement(dimension) + Q_ref = create_quadrature(sub_el, degree, scheme=scheme) return MacroQuadratureRule(ref_el, Q_ref) if ref_el.get_shape() == TENSORPRODUCT: diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 7200dc1f6..8bbc4f339 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -234,6 +234,16 @@ def construct_subelement(self, dimension): """ raise NotImplementedError("Should be implemented in a subclass.") + def construct_subcomplex(self, dimension): + """Constructs the reference subcomplex of the parent cell subentity + specified by subcomplex dimension. + + :arg dimension: `tuple` for tensor product cells, `int` otherwise + """ + if self.get_parent() is None: + return self.construct_subelement(dimension) + raise NotImplementedError("Should be implemented in a subclass.") + def get_entity_transform(self, dim, entity_i): """Returns a mapping of point coordinates from the `entity_i`-th subentity of dimension `dim` to the cell. 
@@ -994,6 +1004,15 @@ def construct_subelement(self, dimension): return TensorProductCell(*[c.construct_subelement(d) for c, d in zip(self.cells, dimension)]) + def construct_subcomplex(self, dimension): + """Constructs the reference subcomplex of the parent cell subentity + specified by subcomplex dimension. + + :arg dimension: dimension in each "direction" (tuple) + """ + return TensorProductCell(*[c.construct_subcomplex(d) + for c, d in zip(self.cells, dimension)]) + def get_entity_transform(self, dim, entity_i): """Returns a mapping of point coordinates from the `entity_i`-th subentity of dimension `dim` to the cell. From e27d346e92e9da03df3a376780b0ec17113ce278 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 3 Apr 2024 15:54:39 -0500 Subject: [PATCH 52/93] Macro: sort children lexicographically --- FIAT/dual_set.py | 29 +++++++++++++------------- FIAT/macro.py | 46 ++++++++++++++++++++++++----------------- test/unit/test_macro.py | 23 +++++++++++++++++++-- 3 files changed, 63 insertions(+), 35 deletions(-) diff --git a/FIAT/dual_set.py b/FIAT/dual_set.py index b941da0b6..f111f5343 100644 --- a/FIAT/dual_set.py +++ b/FIAT/dual_set.py @@ -13,7 +13,7 @@ class DualSet(object): def __init__(self, nodes, ref_el, entity_ids, entity_permutations=None): - ref_el, entity_ids, entity_permutations = merge_entities(ref_el, entity_ids, entity_permutations) + nodes, ref_el, entity_ids, entity_permutations = merge_entities(nodes, ref_el, entity_ids, entity_permutations) self.nodes = nodes self.ref_el = ref_el self.entity_ids = entity_ids @@ -196,20 +196,21 @@ def make_entity_closure_ids(ref_el, entity_ids): return entity_closure_ids -def merge_entities(ref_el, entity_ids, entity_permutations): +def merge_entities(nodes, ref_el, entity_ids, entity_permutations): """Collect DOFs from simplicial complex onto facets of parent cell""" parent_cell = ref_el.get_parent() if parent_cell is None: - return ref_el, entity_ids, entity_permutations - - parent_top = parent_cell.get_topology() - parent_ids = {dim: {entity: [] for entity in parent_top[dim]} for dim in parent_top} - child_to_parent = ref_el.get_child_to_parent() - for dim in sorted(child_to_parent): - for entity in sorted(child_to_parent[dim]): - parent_dim, parent_id = child_to_parent[dim][entity] - dofs_cur = entity_ids[dim][entity] - parent_ids[parent_dim][parent_id].extend(dofs_cur) - + return nodes, ref_el, entity_ids, entity_permutations + parent_nodes = [] + parent_ids = {} parent_permutations = None - return parent_cell, parent_ids, parent_permutations + + parent_to_children = ref_el.get_parent_to_children() + for dim in sorted(parent_to_children): + parent_ids[dim] = {} + for entity in sorted(parent_to_children[dim]): + cur = len(parent_nodes) + for child_dim, child_entity in parent_to_children[dim][entity]: + parent_nodes.extend(nodes[i] for i in entity_ids[child_dim][child_entity]) + parent_ids[dim][entity] = list(range(cur, len(parent_nodes))) + return parent_nodes, parent_cell, parent_ids, parent_permutations diff --git a/FIAT/macro.py b/FIAT/macro.py index 4bf43325a..62f08da56 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -4,7 +4,7 @@ import numpy from FIAT.quadrature import FacetQuadratureRule, QuadratureRule -from FIAT.reference_element import SimplicialComplex, lattice_iter +from FIAT.reference_element import SimplicialComplex, lattice_iter, make_lattice def bary_to_xy(verts, bary, result=None): @@ -65,7 +65,7 @@ class SplitSimplicialComplex(SimplicialComplex): def __init__(self, parent, vertices, topology): self._parent = 
parent - bary = xy_to_bary(numpy.asarray(parent.get_vertices()), numpy.asarray(vertices)) + bary = xy_to_bary(parent.get_vertices(), vertices) parent_top = parent.get_topology() parent_inv_top = invert_cell_topology(parent_top) @@ -84,6 +84,24 @@ def __init__(self, parent, vertices, topology): child_to_parent[dim][entity] = (parent_dim, parent_entity) parent_to_children[parent_dim][parent_entity].append((dim, entity)) + for dim in parent_to_children: + for entity in parent_to_children[dim]: + children = parent_to_children[dim][entity] + if len(children) > 1: + # sort children lexicographically + parent_verts = parent.get_vertices_of_subcomplex(parent_top[dim][entity]) + children_verts = [tuple(numpy.average([vertices[i] for i in topology[cdim][centity]], 0)) + for cdim, centity in children] + + B = numpy.transpose(children_verts) + A = numpy.transpose(parent_verts[::-1]) + B = B - A[:, -1:] + A = A[:, :-1] - A[:, -1:] + coords = numpy.linalg.solve(numpy.dot(A.T, A), numpy.dot(A.T, B)).T + coords = list(map(tuple, coords)) + children = (c for _, c in sorted(zip(coords, children))) + parent_to_children[dim][entity] = tuple(children) + self._child_to_parent = child_to_parent self._parent_to_children = parent_to_children @@ -180,33 +198,23 @@ class IsoSplit(SplitSimplicialComplex): def __init__(self, ref_el, degree=2, variant=None): self.degree = degree self.variant = variant - # Construct new vertices entity-by-entity + # Place new vertices on a lattice sd = ref_el.get_spatial_dimension() - top = ref_el.get_topology() - new_verts = [] - ref_lattice = [] - for dim in top: - for entity in top[dim]: - new_verts.extend(ref_el.make_points(dim, entity, degree, variant=variant)) - ref_lattice.extend(ref_el.make_points(dim, entity, degree)) - - bary = xy_to_bary(ref_el.get_vertices(), ref_lattice) - bary *= degree - alphas = numpy.rint(bary[:, :-1]) - flat_index = {tuple(alpha): i for i, alpha in enumerate(alphas)} + new_verts = make_lattice(ref_el.vertices, degree, variant=variant) + flat_index = {tuple(alpha): i for i, alpha in enumerate(lattice_iter(0, degree+1, sd))} new_topology = {} new_topology[0] = {i: (i,) for i in range(len(new_verts))} - # Loop through degree k-1 vertices + # Loop through degree-1 vertices # Construct a P1 simplex by connecting edges between a vertex and - # its neighbors obtained by shifting each coordinate up by 1, forming a P1 simplex + # its neighbors obtained by shifting each coordinate up by 1 edges = [] for alpha in lattice_iter(0, degree, sd): simplex = [] for beta in lattice_iter(0, 2, sd): v1 = flat_index[tuple(a+b for a, b in zip(alpha, beta))] for v0 in simplex: - edges.append(tuple(sorted((v0, v1)))) + edges.append((v0, v1)) simplex.append(v1) if sd == 3: @@ -216,7 +224,7 @@ def __init__(self, ref_el, degree=2, variant=None): v0, v1 = flat_index[(1, 0, 0)], flat_index[(0, 1, 1)] edges.append(tuple(sorted((v0, v1)))) - new_topology[1] = dict(enumerate(sorted(edges))) + new_topology[1] = dict(enumerate(edges)) # Get an adjacency list for each vertex edges = new_topology[1].values() diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index fa94e96ea..01483d03c 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -110,8 +110,6 @@ def test_macro_lagrange(variant, degree, split, cell): for dim in parent_top: assert len(entity_ids[dim]) == len(parent_top[dim]) - # TODO more thorough checks on entity_ids - # Test that tabulation onto lattice points gives the identity sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() @@ -129,6 
+127,27 @@ def test_macro_lagrange(variant, degree, split, cell): assert numpy.allclose(fe.V, V) +@pytest.mark.parametrize("degree", (1, 2,)) +def test_lagrange_iso_duals(cell, degree): + iso = Lagrange(IsoSplit(cell), degree, variant="equispaced") + P2 = Lagrange(cell, 2*degree, variant="equispaced") + + def get_points(fe): + points = [] + for node in fe.dual_basis(): + pt, = node.get_point_dict() + points.append(pt) + return points + + assert numpy.allclose(get_points(iso), get_points(P2)) + + iso_ids = iso.entity_dofs() + P2_ids = P2.entity_dofs() + for dim in iso_ids: + for entity in iso_ids[dim]: + assert iso_ids[dim][entity] == P2_ids[dim][entity] + + @pytest.mark.parametrize("variant", ("gll", "Alfeld,equispaced", "gll,iso")) def test_is_macro_lagrange(variant): is_macro = "alfeld" in variant.lower() or "iso" in variant.lower() From d51b436e06b2352e249d140e46a8c3076797edbc Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 3 Apr 2024 16:03:39 -0500 Subject: [PATCH 53/93] Macro: test Lagrange IsoSplit with to_riesz --- test/unit/test_macro.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 01483d03c..2fa7d744c 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -147,6 +147,11 @@ def get_points(fe): for entity in iso_ids[dim]: assert iso_ids[dim][entity] == P2_ids[dim][entity] + poly_set = iso.get_nodal_basis() + assert numpy.allclose(numpy.eye(iso.space_dimension()), + numpy.dot(P2.get_dual_set().to_riesz(poly_set), + poly_set.get_coeffs().T)) + @pytest.mark.parametrize("variant", ("gll", "Alfeld,equispaced", "gll,iso")) def test_is_macro_lagrange(variant): From 20fe7d724bf104821641faca905b6557e763cc19 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 3 Apr 2024 19:04:34 -0500 Subject: [PATCH 54/93] order points according to parent entities to fix test --- FIAT/discontinuous_pc.py | 4 ++-- FIAT/finite_element.py | 6 +++--- FIAT/macro.py | 1 - test/unit/test_macro.py | 9 +++++---- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/FIAT/discontinuous_pc.py b/FIAT/discontinuous_pc.py index 11c628563..af02f8861 100644 --- a/FIAT/discontinuous_pc.py +++ b/FIAT/discontinuous_pc.py @@ -42,7 +42,7 @@ def __init__(self, ref_el): super(DPC0, self).__init__(poly_set=poly_set, dual=dual, order=degree, - ref_el=ref_el, + ref_complex=ref_el, formdegree=formdegree) @@ -108,7 +108,7 @@ def __init__(self, ref_el, degree): super(HigherOrderDPC, self).__init__(poly_set=poly_set, dual=dual, order=degree, - ref_el=ref_el, + ref_complex=ref_el, formdegree=formdegree) diff --git a/FIAT/finite_element.py b/FIAT/finite_element.py index d8d5b7072..61aa75db7 100644 --- a/FIAT/finite_element.py +++ b/FIAT/finite_element.py @@ -129,9 +129,9 @@ class CiarletElement(FiniteElement): basis generated from polynomials encoded in a `PolynomialSet`. 
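With the keyword change below, a macroelement carries two cells: the dual set lives on the parent cell while the polynomial set lives on the split complex. A small illustration, reusing the Lagrange macro variant from the tests (not part of this patch):

from FIAT import Lagrange
from FIAT.reference_element import ufc_simplex

fe = Lagrange(ufc_simplex(2), 2, variant="Alfeld,equispaced")
assert not fe.get_reference_element().is_macrocell()  # parent triangle
assert fe.get_reference_complex().is_macrocell()      # Alfeld complex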
""" - def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref_el=None): - ref_el = ref_el or dual.get_reference_element() - ref_complex = poly_set.get_reference_element() + def __init__(self, poly_set, dual, order, formdegree=None, mapping="affine", ref_complex=None): + ref_el = dual.get_reference_element() + ref_complex = ref_complex or poly_set.get_reference_element() super(CiarletElement, self).__init__(ref_el, dual, order, formdegree, mapping, ref_complex) # build generalized Vandermonde matrix diff --git a/FIAT/macro.py b/FIAT/macro.py index 62f08da56..18f19f9ca 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -227,7 +227,6 @@ def __init__(self, ref_el, degree=2, variant=None): new_topology[1] = dict(enumerate(edges)) # Get an adjacency list for each vertex - edges = new_topology[1].values() adjacency = {v: set(chain.from_iterable(verts for verts in edges if v in verts)) for v in new_topology[0]} diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 2fa7d744c..97b1a55bc 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -112,11 +112,12 @@ def test_macro_lagrange(variant, degree, split, cell): # Test that tabulation onto lattice points gives the identity sd = ref_el.get_spatial_dimension() - top = ref_el.get_topology() + parent_to_children = ref_el.get_parent_to_children() pts = [] - for dim in sorted(top): - for entity in sorted(top[dim]): - pts.extend(ref_el.make_points(dim, entity, degree, variant=variant)) + for dim in sorted(parent_to_children): + for entity in sorted(parent_to_children[dim]): + for cdim, centity in parent_to_children[dim][entity]: + pts.extend(ref_el.make_points(cdim, centity, degree, variant=variant)) phis = fe.tabulate(2, pts) assert numpy.allclose(phis[(0,)*sd], numpy.eye(fe.space_dimension())) From 59d75748cc5e8939cfee6ed2c14bdf9488a37ef3 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 3 Apr 2024 21:59:08 -0500 Subject: [PATCH 55/93] handle integral variants --- FIAT/check_format_variant.py | 11 +++++++---- FIAT/hierarchical.py | 4 ++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/FIAT/check_format_variant.py b/FIAT/check_format_variant.py index 15fb44ad4..979443c91 100644 --- a/FIAT/check_format_variant.py +++ b/FIAT/check_format_variant.py @@ -38,20 +38,23 @@ def check_format_variant(variant, degree): return variant, interpolant_degree -def parse_lagrange_variant(variant, discontinuous=False): +def parse_lagrange_variant(variant, discontinuous=False, integral=False): if variant is None: - variant = "equispaced" + variant = "integral" if integral else "equispaced" options = variant.replace(" ", "").split(",") assert len(options) <= 2 - if discontinuous: + default = "integral" if integral else "spectral" + if integral: + supported_point_variants = {"integral": None} + elif discontinuous: supported_point_variants = supported_dg_variants else: supported_point_variants = supported_cg_variants # defaults splitting = None - point_variant = supported_point_variants["spectral"] + point_variant = supported_point_variants[default] for pre_opt in options: opt = pre_opt.lower() diff --git a/FIAT/hierarchical.py b/FIAT/hierarchical.py index dd193b470..a93b9fd42 100644 --- a/FIAT/hierarchical.py +++ b/FIAT/hierarchical.py @@ -63,7 +63,7 @@ def __new__(cls, ref_el, degree, variant=None): return super(Legendre, cls).__new__(cls) def __init__(self, ref_el, degree, variant=None): - splitting, _ = parse_lagrange_variant(variant, discontinuous=True) + splitting, _ = 
parse_lagrange_variant(variant, integral=True) if splitting is not None: ref_el = splitting(ref_el) poly_set = ONPolynomialSet(ref_el, degree) @@ -132,7 +132,7 @@ def make_reference_duals(self, ref_el, degree): class IntegratedLegendre(finite_element.CiarletElement): """Simplicial continuous element with integrated Legendre polynomials.""" def __init__(self, ref_el, degree, variant=None): - splitting, _ = parse_lagrange_variant(variant) + splitting, _ = parse_lagrange_variant(variant, integral=True) if splitting is not None: ref_el = splitting(ref_el) if degree < 1: From 856220107a3cfb76c68e3d5f4fd1dc025af11d2a Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Thu, 4 Apr 2024 17:25:48 -0500 Subject: [PATCH 56/93] add logic to find maximal complex in a list --- FIAT/reference_element.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 8bbc4f339..f5b01e360 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -1529,3 +1529,11 @@ def compute_unflattening_map(topology_dict): unflattening_map[(flat_dim, flat_entity)] = (dim, entity) return unflattening_map + + +def max_complex(complexes): + max_cell = max(complexes) + if all(max_cell >= b for b in complexes): + return max_cell + else: + return None From 29f6a2b01472459ac8b9983f2eb7665d87d4c829 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 4 Apr 2024 17:33:07 -0500 Subject: [PATCH 57/93] comparison of TP cells --- FIAT/check_format_variant.py | 6 +++++- FIAT/reference_element.py | 12 ++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/FIAT/check_format_variant.py b/FIAT/check_format_variant.py index 979443c91..299aca65f 100644 --- a/FIAT/check_format_variant.py +++ b/FIAT/check_format_variant.py @@ -54,6 +54,7 @@ def parse_lagrange_variant(variant, discontinuous=False, integral=False): # defaults splitting = None + splitting_args = tuple() point_variant = supported_point_variants[default] for pre_opt in options: @@ -65,7 +66,8 @@ def parse_lagrange_variant(variant, discontinuous=False, integral=False): elif opt.startswith("iso"): match = re.match(r"^iso(?:\((\d+)\))?$", opt) k, = match.groups() - splitting = lambda T: IsoSplit(T, int(k)) + call_split = IsoSplit + splitting_args = (int(k),) elif opt in supported_point_variants: point_variant = supported_point_variants[opt] else: @@ -73,4 +75,6 @@ def parse_lagrange_variant(variant, discontinuous=False, integral=False): if discontinuous and splitting is not None and point_variant in supported_cg_variants.values(): raise ValueError("Illegal variant. 
DG macroelements with DOFs on subcell boundaries are not unisolvent.") + if len(splitting_args) > 0: + splitting = lambda T: call_split(T, *splitting_args, point_variant or "gll") return splitting, point_variant diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 8bbc4f339..30cad835f 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -1101,6 +1101,18 @@ def cell_orientation_reflection_map(self): """Return the map indicating whether each possible cell orientation causes reflection (``1``) or not (``0``).""" return make_cell_orientation_reflection_map_tensorproduct(self.cells) + def __gt__(self, other): + return all(a > b for a, b in zip(self.cells, other.cells)) + + def __lt__(self, other): + return all(a < b for a, b in zip(self.cells, other.cells)) + + def __ge__(self, other): + return all(a >= b for a, b in zip(self.cells, other.cells)) + + def __le__(self, other): + return all(a <= b for a, b in zip(self.cells, other.cells)) + class UFCQuadrilateral(Cell): r"""This is the reference quadrilateral with vertices From db6010159c352667481abada086865f50cd1cdb3 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 4 Apr 2024 18:42:49 -0500 Subject: [PATCH 58/93] Compare TP cell and flattened cell --- FIAT/reference_element.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 98ba7b03b..90a7b3bf1 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -1101,17 +1101,25 @@ def cell_orientation_reflection_map(self): """Return the map indicating whether each possible cell orientation causes reflection (``1``) or not (``0``).""" return make_cell_orientation_reflection_map_tensorproduct(self.cells) + def compare(self, op, other): + if hasattr(other, "product"): + other = other.product + if isinstance(other, type(self)): + return all(op(a, b) for a, b in zip(self.cells, other.cells)) + else: + return op(self, other) + def __gt__(self, other): - return all(a > b for a, b in zip(self.cells, other.cells)) + return self.compare(operator.gt, other) def __lt__(self, other): - return all(a < b for a, b in zip(self.cells, other.cells)) + return self.compare(operator.lt, other) def __ge__(self, other): - return all(a >= b for a, b in zip(self.cells, other.cells)) + return self.compare(operator.ge, other) def __le__(self, other): - return all(a <= b for a, b in zip(self.cells, other.cells)) + return self.compare(operator.le, other) class UFCQuadrilateral(Cell): From d2a85a4daee323bc86983e4bc420459677099b2f Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 4 Apr 2024 19:00:21 -0500 Subject: [PATCH 59/93] Compare UFCQuadrilateral and UFCHexahedron --- FIAT/reference_element.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index 90a7b3bf1..a611191a8 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -1252,6 +1252,18 @@ def cell_orientation_reflection_map(self): """Return the map indicating whether each possible cell orientation causes reflection (``1``) or not (``0``).""" return self.product.cell_orientation_reflection_map() + def __gt__(self, other): + return self.product > other + + def __lt__(self, other): + return self.product < other + + def __ge__(self, other): + return self.product >= other + + def __le__(self, other): + return self.product <= other + class UFCHexahedron(Cell): """This is the reference hexahedron with vertices @@ -1345,6 
+1357,18 @@ def cell_orientation_reflection_map(self): """Return the map indicating whether each possible cell orientation causes reflection (``1``) or not (``0``).""" return self.product.cell_orientation_reflection_map() + def __gt__(self, other): + return self.product > other + + def __lt__(self, other): + return self.product < other + + def __ge__(self, other): + return self.product >= other + + def __le__(self, other): + return self.product <= other + def make_affine_mapping(xs, ys): """Constructs (A,b) such that x --> A * x + b is the affine From ede527edda285103a6b79ffba55a3b6abb605f2a Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 4 Apr 2024 20:43:01 -0500 Subject: [PATCH 60/93] Raise a ValueError if unable to find the maximal complex --- FIAT/hierarchical.py | 2 +- FIAT/reference_element.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/FIAT/hierarchical.py b/FIAT/hierarchical.py index a93b9fd42..4120106e8 100644 --- a/FIAT/hierarchical.py +++ b/FIAT/hierarchical.py @@ -56,7 +56,7 @@ class Legendre(finite_element.CiarletElement): """Simplicial discontinuous element with Legendre polynomials.""" def __new__(cls, ref_el, degree, variant=None): if degree == 0: - splitting, _ = parse_lagrange_variant(variant, discontinuous=True) + splitting, _ = parse_lagrange_variant(variant, integral=True) if splitting is None: # FIXME P0 on the split requires implementing SplitSimplicialComplex.symmetry_group_size() return P0.P0(ref_el) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index a611191a8..a27640e89 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -1580,4 +1580,4 @@ def max_complex(complexes): if all(max_cell >= b for b in complexes): return max_cell else: - return None + raise ValueError("Cannot find the maximal complex") From 9a2a91ddfaa263c67c8e860dd323445a827d07fe Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 5 Apr 2024 15:56:51 -0500 Subject: [PATCH 61/93] C1PolynomialSet --- FIAT/expansions.py | 45 +++++++++++++++++++++++++++++---------- FIAT/macro.py | 35 ++++++++++++++++++++++++++++++ FIAT/reference_element.py | 10 ++++----- test/unit/test_macro.py | 9 +++++++- 4 files changed, 82 insertions(+), 17 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 61be733b0..513cfbfaa 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -309,22 +309,27 @@ def get_cell_node_map(self, n): cell_node_map = polynomial_cell_node_map(self.ref_el, n, self.continuity) return self._cell_node_map_cache.setdefault(n, cell_node_map) + def _tabulate_on_cell(self, n, pts, order, cell=0, direction=None): + A, b = self.affine_mappings[cell] + ref_pts = apply_mapping(A, b, pts) + Jinv = A if direction is None else numpy.dot(A, direction)[:, None] + sd = self.ref_el.get_spatial_dimension() + phi = dubiner_recurrence(sd, n, order, ref_pts, Jinv, + self.scale, variant=self.variant) + if self.continuity == "C0": + phi = C0_basis(sd, n, phi) + return phi + def _tabulate(self, n, pts, order=0): """A version of tabulate() that also works for a single point.""" - phis = [] cell_point_map = compute_cell_point_map(self.ref_el, pts) - sd = self.ref_el.get_spatial_dimension() - for ipts, (A, b) in zip(cell_point_map, self.affine_mappings): - ref_pts = apply_mapping(A, b, pts[:, ipts]) - phi = dubiner_recurrence(sd, n, order, ref_pts, A, - self.scale, variant=self.variant) - if self.continuity == "C0": - phi = C0_basis(sd, n, phi) - phis.append(phi) - - if len(self.affine_mappings) == 1: + phis = 
[self._tabulate_on_cell(n, pts[:, ipts], order, cell=k) + for k, ipts in enumerate(cell_point_map)] + + if len(phis) == 1: return phis[0] + sd = self.ref_el.get_spatial_dimension() results = [] num_phis = self.get_num_members(n) cell_node_map = self.get_cell_node_map(n) @@ -336,6 +341,24 @@ def _tabulate(self, n, pts, order=0): results.append(result) return tuple(results) + def tabulate_normal_derivative_jump(self, n, ref_pts, facet, order=1): + """Tabulate the normal derivative jump on refernece points on a facet""" + assert order == 1 + sd = self.ref_el.get_spatial_dimension() + transform = self.ref_el.get_entity_transform(sd-1, facet) + pts = numpy.transpose(list(map(transform, ref_pts))) + cell_point_map = compute_cell_point_map(self.ref_el, pts) + cell_node_map = self.get_cell_node_map(n) + + num_phis = self.get_num_members(n) + result = numpy.zeros((num_phis,) + pts.shape[1:]) + for k, (ibfs, ipts) in enumerate(zip(cell_node_map, cell_point_map)): + if len(ipts) > 0: + normal = self.ref_el.compute_normal(facet, cell=k) + phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k, direction=normal) + result[numpy.ix_(ibfs, ipts)] += numpy.asarray(phi[order])[:, 0, :] + return result + def get_dmats(self, degree): """Returns a numpy array with the expansion coefficients dmat[k, j, i] of the gradient of each member of the expansion set: diff --git a/FIAT/macro.py b/FIAT/macro.py index 18f19f9ca..d84eed9a7 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -5,6 +5,7 @@ from FIAT.quadrature import FacetQuadratureRule, QuadratureRule from FIAT.reference_element import SimplicialComplex, lattice_iter, make_lattice +from FIAT import expansions, polynomial_set def bary_to_xy(verts, bary, result=None): @@ -285,3 +286,37 @@ def __init__(self, ref_el, Q_ref, parent_facets=None): pts = tuple(pts) wts = tuple(wts) super(MacroQuadratureRule, self).__init__(ref_el, pts, wts) + + +class C1PolynomialSet(polynomial_set.PolynomialSet): + """Constructs a C1-continuous PolynomialSet on a simplicial complex. + + :arg ref_el: The simplicial complex. + :arg degree: The polynomial degree. 
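For orientation, a sanity check one would expect from this construction (an assumption about the finished series, not part of the patch): on the Alfeld split of a triangle the C1 cubic space is the 12-dimensional Clough-Tocher space used by the HCT element introduced a few commits later.

from FIAT.macro import AlfeldSplit, C1PolynomialSet
from FIAT.reference_element import ufc_simplex

P = C1PolynomialSet(AlfeldSplit(ufc_simplex(2)), 3)
# 9 vertex jet DOFs + 3 edge normal-derivative moments
assert P.get_num_members() == 12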
+ """ + def __init__(self, ref_el, degree): + from FIAT.quadrature_schemes import create_quadrature + expansion_set = expansions.ExpansionSet(ref_el, variant="bubble") + + sd = ref_el.get_spatial_dimension() + facet_el = ref_el.construct_subelement(sd-1) + + phi_deg = 0 if sd == 1 else degree-1 + phi = polynomial_set.ONPolynomialSet(facet_el, phi_deg) + Q = create_quadrature(facet_el, 2 * phi_deg) + qpts, qwts = Q.get_points(), Q.get_weights() + phi_at_qpts = phi.tabulate(qpts)[(0,) * (sd-1)] + weights = numpy.multiply(phi_at_qpts, qwts) + + rows = [] + child_to_parent = ref_el.get_child_to_parent() + for facet in child_to_parent[sd-1]: + if child_to_parent[sd-1][facet][0] == sd: + jumps = expansion_set.tabulate_normal_derivative_jump(degree, qpts, facet) + rows.append(numpy.dot(weights, jumps.T)) + + dual_mat = numpy.row_stack(rows) + _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) + num_sv = len([s for s in sig if abs(s) > 1.e-10]) + coeffs = vt[num_sv:] + super(C1PolynomialSet, self).__init__(ref_el, degree, degree, expansion_set, coeffs) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index a27640e89..de48d6501 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -298,7 +298,7 @@ def __init__(self, shape, vertices, topology): super(SimplicialComplex, self).__init__(shape, vertices, topology) - def compute_normal(self, facet_i): + def compute_normal(self, facet_i, cell=None): """Returns the unit normal vector to facet i of codimension 1.""" t = self.get_topology() @@ -308,13 +308,13 @@ def compute_normal(self, facet_i): # Find a subcell of which facet_i is on the boundary # Note: this is trivial and vastly overengineered for the single-cell # case. - cell = next(k for k, facets in enumerate(self.connectivity[(sd, sd-1)]) - if facet_i in facets) + if cell is None: + cell = next(k for k, facets in enumerate(self.connectivity[(sd, sd-1)]) + if facet_i in facets) verts = numpy.asarray(self.get_vertices_of_subcomplex(t[sd][cell])) - # Interval case if self.get_shape() == LINE: - v_i, = self.get_topology()[0][facet_i] + v_i = t[1][cell].index(t[0][facet_i][0]) n = verts[v_i] - verts[[1, 0][v_i]] return n / numpy.linalg.norm(n) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 97b1a55bc..c24f762c6 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -2,7 +2,7 @@ import numpy import pytest from FIAT import DiscontinuousLagrange, Lagrange, Legendre, P0 -from FIAT.macro import AlfeldSplit, IsoSplit +from FIAT.macro import AlfeldSplit, IsoSplit, C1PolynomialSet from FIAT.quadrature_schemes import create_quadrature from FIAT.reference_element import ufc_simplex from FIAT.expansions import polynomial_entity_ids, polynomial_cell_node_map @@ -280,3 +280,10 @@ def test_macro_expansion(cell, split, variant, degree): indices = numpy.ix_(ibfs, ipts) for alpha in values: assert numpy.allclose(cell_values[alpha], values[alpha][indices]) + + +def test_C1_basis(cell): + degree = 3 + ref_el = AlfeldSplit(cell) + P = C1PolynomialSet(ref_el, degree) + print(P.expansion_set.get_num_members(degree), P.get_num_members()) From aa41d98685da22fde0a05058fce6bdc9edc08f9d Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sun, 7 Apr 2024 18:04:28 -0500 Subject: [PATCH 62/93] Implement HCT element --- FIAT/hsieh_clough_tocher.py | 47 +++++++++++++++++++++++++++++++++++++ test/unit/test_macro.py | 23 ++++++++++++++++++ 2 files changed, 70 insertions(+) create mode 100644 FIAT/hsieh_clough_tocher.py diff --git 
a/FIAT/hsieh_clough_tocher.py b/FIAT/hsieh_clough_tocher.py new file mode 100644 index 000000000..a8f50be4f --- /dev/null +++ b/FIAT/hsieh_clough_tocher.py @@ -0,0 +1,47 @@ +from FIAT.functional import PointEvaluation, PointDerivative, IntegralMomentOfNormalDerivative +from FIAT import finite_element, dual_set, macro, polynomial_set +from FIAT.jacobi import eval_jacobi +from FIAT.quadrature_schemes import create_quadrature + + +class HCTDualSet(dual_set.DualSet): + def __init__(self, ref_el, degree): + if degree != 3: + raise ValueError("HCT elements only defined for degree=3") + top = ref_el.get_topology() + verts = ref_el.get_vertices() + sd = ref_el.get_spatial_dimension() + entity_ids = {dim: {entity: [] for entity in sorted(top[dim])} for dim in sorted(top)} + + # get first order jet at each vertex + alphas = polynomial_set.mis(sd, 1) + nodes = [] + for v in sorted(top[0]): + pt = verts[v] + cur = len(nodes) + nodes.append(PointEvaluation(ref_el, pt)) + nodes.extend(PointDerivative(ref_el, pt, alpha) for alpha in alphas) + entity_ids[0][v].extend(range(cur, len(nodes))) + + rline = ref_el.construct_subelement(1) + rline_verts = rline.get_vertices() + x0, = rline_verts[0] + x1, = rline_verts[1] + Q = create_quadrature(rline, 2*(degree-1)) + qpts = Q.get_points() + leg2_at_qpts = eval_jacobi(0, 0, degree-1, 2.0*(qpts-x0)/(x1-x0) - 1) + for e in sorted(top[1]): + cur = len(nodes) + nodes.append(IntegralMomentOfNormalDerivative(ref_el, e, Q, leg2_at_qpts)) + entity_ids[1][e].extend(range(cur, len(nodes))) + + return super(HCTDualSet, self).__init__(nodes, ref_el, entity_ids) + + +class HsiehCloughTocher(finite_element.CiarletElement): + """The HCT finite element.""" + + def __init__(self, ref_el, degree=3): + dual = HCTDualSet(ref_el, degree) + poly_set = macro.C1PolynomialSet(macro.AlfeldSplit(ref_el), degree) + super(HsiehCloughTocher, self).__init__(poly_set, dual, degree) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index c24f762c6..0e3009af5 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -7,6 +7,7 @@ from FIAT.reference_element import ufc_simplex from FIAT.expansions import polynomial_entity_ids, polynomial_cell_node_map from FIAT.polynomial_set import make_bubbles, PolynomialSet, ONPolynomialSet +from FIAT.hsieh_clough_tocher import HsiehCloughTocher @pytest.fixture(params=("I", "T", "S")) @@ -287,3 +288,25 @@ def test_C1_basis(cell): ref_el = AlfeldSplit(cell) P = C1PolynomialSet(ref_el, degree) print(P.expansion_set.get_num_members(degree), P.get_num_members()) + + child_to_parent = ref_el.get_child_to_parent() + sd = ref_el.get_spatial_dimension() + for facet in child_to_parent[sd-1]: + if child_to_parent[sd-1][facet][0] == sd: + pass + # TODO + + +def test_HCT(): + # FIXME move this test to its own file + ref_el = ufc_simplex(2) + fe = HsiehCloughTocher(ref_el) + degree = fe.degree() + assert degree == 3 + assert fe.is_macroelement() + assert fe.space_dimension() == 12 + + order = 2 + Q = create_quadrature(ref_el, 2*(degree-order)) + pts = Q.get_points() + fe.tabulate(order, pts) From 8c08c76271bc0981883179ae284bc2d58c3d7145 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Mon, 8 Apr 2024 00:28:26 -0500 Subject: [PATCH 63/93] Add HCT to __init__ --- FIAT/__init__.py | 2 ++ FIAT/{hsieh_clough_tocher.py => hct.py} | 8 +++----- test/unit/test_macro.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) rename FIAT/{hsieh_clough_tocher.py => hct.py} (88%) diff --git a/FIAT/__init__.py b/FIAT/__init__.py index 9e95b394b..f631c4eeb 100644 
--- a/FIAT/__init__.py +++ b/FIAT/__init__.py @@ -7,6 +7,7 @@ # Import finite element classes from FIAT.finite_element import FiniteElement, CiarletElement # noqa: F401 from FIAT.argyris import Argyris +from FIAT.hct import HsiehCloughTocher from FIAT.bernstein import Bernstein from FIAT.bell import Bell from FIAT.argyris import QuinticArgyris @@ -61,6 +62,7 @@ # List of supported elements and mapping to element classes supported_elements = {"Argyris": Argyris, + "HsiehCloughTocher": HsiehCloughTocher, "Bell": Bell, "Bernstein": Bernstein, "Brezzi-Douglas-Marini": BrezziDouglasMarini, diff --git a/FIAT/hsieh_clough_tocher.py b/FIAT/hct.py similarity index 88% rename from FIAT/hsieh_clough_tocher.py rename to FIAT/hct.py index a8f50be4f..95740c050 100644 --- a/FIAT/hsieh_clough_tocher.py +++ b/FIAT/hct.py @@ -1,5 +1,6 @@ from FIAT.functional import PointEvaluation, PointDerivative, IntegralMomentOfNormalDerivative from FIAT import finite_element, dual_set, macro, polynomial_set +from FIAT.reference_element import ufc_simplex from FIAT.jacobi import eval_jacobi from FIAT.quadrature_schemes import create_quadrature @@ -23,13 +24,10 @@ def __init__(self, ref_el, degree): nodes.extend(PointDerivative(ref_el, pt, alpha) for alpha in alphas) entity_ids[0][v].extend(range(cur, len(nodes))) - rline = ref_el.construct_subelement(1) - rline_verts = rline.get_vertices() - x0, = rline_verts[0] - x1, = rline_verts[1] + rline = ufc_simplex(1) Q = create_quadrature(rline, 2*(degree-1)) qpts = Q.get_points() - leg2_at_qpts = eval_jacobi(0, 0, degree-1, 2.0*(qpts-x0)/(x1-x0) - 1) + leg2_at_qpts = eval_jacobi(0, 0, degree-1, 2.0*qpts - 1) for e in sorted(top[1]): cur = len(nodes) nodes.append(IntegralMomentOfNormalDerivative(ref_el, e, Q, leg2_at_qpts)) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 0e3009af5..10ea83c2e 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -7,7 +7,7 @@ from FIAT.reference_element import ufc_simplex from FIAT.expansions import polynomial_entity_ids, polynomial_cell_node_map from FIAT.polynomial_set import make_bubbles, PolynomialSet, ONPolynomialSet -from FIAT.hsieh_clough_tocher import HsiehCloughTocher +from FIAT.hct import HsiehCloughTocher @pytest.fixture(params=("I", "T", "S")) From 5c95c23318c9e915e30f115ca4fbd4859600cf78 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 9 Apr 2024 11:16:56 -0500 Subject: [PATCH 64/93] Test that Alfeld interface dofs agree with the non-macro Lagrange --- test/unit/test_macro.py | 44 +++++++++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 10 deletions(-) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 10ea83c2e..11d17f52a 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -129,22 +129,46 @@ def test_macro_lagrange(variant, degree, split, cell): assert numpy.allclose(fe.V, V) +def get_lagrange_points(fe): + points = [] + for node in fe.dual_basis(): + pt, = node.get_point_dict() + points.append(pt) + return points + + +@pytest.mark.parametrize("degree", (1, 4,)) +@pytest.mark.parametrize("variant", ("equispaced", "gll")) +def test_lagrange_alfeld_duals(cell, degree, variant): + Pk = Lagrange(cell, degree, variant=variant) + alfeld = Lagrange(AlfeldSplit(cell), degree, variant=variant) + + Pk_dofs = Pk.entity_dofs() + alfeld_dofs = alfeld.entity_dofs() + + Pk_pts = numpy.asarray(get_lagrange_points(Pk)) + alfeld_pts = numpy.asarray(get_lagrange_points(alfeld)) + + sd = cell.get_dimension() + top = cell.get_topology() + for dim in 
sorted(top): + if dim == sd: + continue + for entity in sorted(top[dim]): + assert alfeld_dofs[dim][entity] == Pk_dofs[dim][entity] + assert numpy.allclose(Pk_pts[Pk_dofs[dim][entity]], + alfeld_pts[alfeld_dofs[dim][entity]]) + + @pytest.mark.parametrize("degree", (1, 2,)) def test_lagrange_iso_duals(cell, degree): - iso = Lagrange(IsoSplit(cell), degree, variant="equispaced") P2 = Lagrange(cell, 2*degree, variant="equispaced") + iso = Lagrange(IsoSplit(cell), degree, variant="equispaced") - def get_points(fe): - points = [] - for node in fe.dual_basis(): - pt, = node.get_point_dict() - points.append(pt) - return points - - assert numpy.allclose(get_points(iso), get_points(P2)) + assert numpy.allclose(get_lagrange_points(iso), get_lagrange_points(P2)) - iso_ids = iso.entity_dofs() P2_ids = P2.entity_dofs() + iso_ids = iso.entity_dofs() for dim in iso_ids: for entity in iso_ids[dim]: assert iso_ids[dim][entity] == P2_ids[dim][entity] From c04298aada4364b190d8a9f3547663731e103516 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 9 Apr 2024 11:21:05 -0500 Subject: [PATCH 65/93] Define HCT duals with normal derivative averages --- FIAT/hct.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/FIAT/hct.py b/FIAT/hct.py index 95740c050..928011ac8 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -3,6 +3,7 @@ from FIAT.reference_element import ufc_simplex from FIAT.jacobi import eval_jacobi from FIAT.quadrature_schemes import create_quadrature +import numpy class HCTDualSet(dual_set.DualSet): @@ -27,10 +28,10 @@ def __init__(self, ref_el, degree): rline = ufc_simplex(1) Q = create_quadrature(rline, 2*(degree-1)) qpts = Q.get_points() - leg2_at_qpts = eval_jacobi(0, 0, degree-1, 2.0*qpts - 1) + scale = numpy.ones(qpts.shape) for e in sorted(top[1]): cur = len(nodes) - nodes.append(IntegralMomentOfNormalDerivative(ref_el, e, Q, leg2_at_qpts)) + nodes.append(IntegralMomentOfNormalDerivative(ref_el, e, Q, scale)) entity_ids[1][e].extend(range(cur, len(nodes))) return super(HCTDualSet, self).__init__(nodes, ref_el, entity_ids) From 1447a9c80d11967a8c10e7e1e57418b864b2cb6f Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 9 Apr 2024 19:33:28 -0500 Subject: [PATCH 66/93] Fix bubble trace-similarity --- FIAT/expansions.py | 5 ++--- test/unit/test_macro.py | 29 +++++++++++++++++++++++++++-- 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 513cfbfaa..5f462c74a 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -162,10 +162,9 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): if variant is not None: p = index[-1] + shift alpha = 2 * (sum(index[:-1]) + d * shift) - 1 - norm2 = 1.0 + norm2 = (2*d+1) / (2*d) if p > 0 and p + alpha > 0: - norm2 = (p + alpha) * (2*p + alpha) / p - norm2 *= (2*d+1) / (2*d) + norm2 *= (p + alpha) * (2*p + alpha) / p else: norm2 = (2*sum(index) + d) / d scale = math.sqrt(norm2) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 11d17f52a..c4f908f65 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -137,7 +137,16 @@ def get_lagrange_points(fe): return points -@pytest.mark.parametrize("degree", (1, 4,)) +def make_mass_matrix(fe, order=0): + sd = fe.ref_el.get_spatial_dimension() + Q = create_quadrature(fe.ref_complex, 2*fe.degree()) + qpts, qwts = Q.get_points(), Q.get_weights() + phi = fe.tabulate(order, qpts)[(0,) * sd] + M = numpy.dot(numpy.multiply(phi, qwts), phi.T) + return M + + +@pytest.mark.parametrize("degree", (1, 
2, 4,)) @pytest.mark.parametrize("variant", ("equispaced", "gll")) def test_lagrange_alfeld_duals(cell, degree, variant): Pk = Lagrange(cell, degree, variant=variant) @@ -159,6 +168,12 @@ def test_lagrange_alfeld_duals(cell, degree, variant): assert numpy.allclose(Pk_pts[Pk_dofs[dim][entity]], alfeld_pts[alfeld_dofs[dim][entity]]) + phi = Pk.tabulate(0, alfeld_pts)[(0,) * sd] + M_Pk = make_mass_matrix(Pk) + M_alfeld = make_mass_matrix(alfeld) + M_galerkin = numpy.dot(numpy.dot(phi, M_alfeld), phi.T) + assert numpy.allclose(M_Pk, M_galerkin) + @pytest.mark.parametrize("degree", (1, 2,)) def test_lagrange_iso_duals(cell, degree): @@ -238,6 +253,14 @@ def test_make_bubbles(cell, split, codim): interior_values = values[:, num_pts_on_facet:] assert numpy.linalg.matrix_rank(interior_values.T, tol=1E-12) == num_members + # test block diagonal tabulation + bubbles_per_entity = num_members // len(top[sd-codim]) + for entity in top[sd-codim]: + i0 = entity * bubbles_per_entity + i1 = (entity+1) * bubbles_per_entity + assert numpy.allclose(interior_values[i0:i1, :i0], 0, atol=1E-12) + assert numpy.allclose(interior_values[i0:i1, i1:], 0, atol=1E-12) + # test trace similarity dim = sd - codim nfacets = len(top[dim]) @@ -247,12 +270,14 @@ def test_make_bubbles(cell, split, codim): ref_points = ref_facet.make_points(dim, 0, degree) ref_values = ref_bubbles.tabulate(ref_points)[(0,) * dim] + scale = None bubbles_per_entity = ref_bubbles.get_num_members() cur = 0 for entity in sorted(top[dim]): indices = list(range(cur, cur + bubbles_per_entity)) cur_values = interior_values[numpy.ix_(indices, indices)] - scale = numpy.max(abs(cur_values)) / numpy.max(abs(ref_values)) + if scale is None: + scale = numpy.max(abs(cur_values)) / numpy.max(abs(ref_values)) assert numpy.allclose(ref_values * scale, cur_values) cur += bubbles_per_entity From f34da1aab569fab8e6a7c5eb56426f5a84f28704 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 9 Apr 2024 19:50:55 -0500 Subject: [PATCH 67/93] flake8 --- FIAT/hct.py | 1 - 1 file changed, 1 deletion(-) diff --git a/FIAT/hct.py b/FIAT/hct.py index 928011ac8..79019aab9 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -1,7 +1,6 @@ from FIAT.functional import PointEvaluation, PointDerivative, IntegralMomentOfNormalDerivative from FIAT import finite_element, dual_set, macro, polynomial_set from FIAT.reference_element import ufc_simplex -from FIAT.jacobi import eval_jacobi from FIAT.quadrature_schemes import create_quadrature import numpy From cee3c83d5e5a998b75e851119f4dbc12d7b3fbc4 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 10 Apr 2024 15:06:14 -0500 Subject: [PATCH 68/93] HCT: fix quadrature degree --- FIAT/hct.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/FIAT/hct.py b/FIAT/hct.py index 79019aab9..fedf33145 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -25,7 +25,7 @@ def __init__(self, ref_el, degree): entity_ids[0][v].extend(range(cur, len(nodes))) rline = ufc_simplex(1) - Q = create_quadrature(rline, 2*(degree-1)) + Q = create_quadrature(rline, degree-1) qpts = Q.get_points() scale = numpy.ones(qpts.shape) for e in sorted(top[1]): From fc40cb2dc81fda99d65d7d8cbef66a1d3dd1a50e Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Thu, 11 Apr 2024 20:22:42 -0500 Subject: [PATCH 69/93] HCT: test that tabulations are C1 --- FIAT/expansions.py | 16 +++++++++++++--- FIAT/hct.py | 3 ++- FIAT/macro.py | 23 +++++++++++++++++------ FIAT/morley.py | 15 +++++++++++---- test/unit/test_macro.py | 18 +----------------- 5 files changed, 44 insertions(+), 31 
deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 5f462c74a..cd3ef0e21 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -342,7 +342,6 @@ def _tabulate(self, n, pts, order=0): def tabulate_normal_derivative_jump(self, n, ref_pts, facet, order=1): """Tabulate the normal derivative jump on refernece points on a facet""" - assert order == 1 sd = self.ref_el.get_spatial_dimension() transform = self.ref_el.get_entity_transform(sd-1, facet) pts = numpy.transpose(list(map(transform, ref_pts))) @@ -355,7 +354,10 @@ def tabulate_normal_derivative_jump(self, n, ref_pts, facet, order=1): if len(ipts) > 0: normal = self.ref_el.compute_normal(facet, cell=k) phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k, direction=normal) - result[numpy.ix_(ibfs, ipts)] += numpy.asarray(phi[order])[:, 0, :] + V = numpy.asarray(phi[order]).reshape((len(ibfs), len(ipts))) + if order % 2 == 0: + V *= numpy.dot(normal, self.ref_el.compute_normal(facet)) + result[numpy.ix_(ibfs, ipts)] += V return result def get_dmats(self, degree): @@ -371,7 +373,15 @@ def get_dmats(self, degree): pass if degree == 0: return cache.setdefault(key, numpy.zeros((self.ref_el.get_spatial_dimension(), 1, 1), "d")) - pts = reference_element.make_lattice(self.ref_el.get_vertices(), degree, variant="gl") + + if self.ref_el.is_macrocell() and self.continuity is not None: + raise ValueError("Cannot create a differenation matrix on a continuous macroelement.") + sd = self.ref_el.get_spatial_dimension() + top = self.ref_el.get_topology() + pts = [] + for cell in top[sd]: + verts = self.ref_el.get_vertices_of_subcomplex(top[sd][cell]) + pts.extend(reference_element.make_lattice(verts, degree, variant="gl")) v, dv = self._tabulate(degree, numpy.transpose(pts), order=1) dv = numpy.transpose(dv, (1, 2, 0)) dmats = numpy.linalg.solve(numpy.transpose(v), dv) diff --git a/FIAT/hct.py b/FIAT/hct.py index fedf33145..20a7c1d02 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -1,4 +1,5 @@ -from FIAT.functional import PointEvaluation, PointDerivative, IntegralMomentOfNormalDerivative +from FIAT.functional import (PointEvaluation, PointDerivative, + IntegralMomentOfNormalDerivative) from FIAT import finite_element, dual_set, macro, polynomial_set from FIAT.reference_element import ufc_simplex from FIAT.quadrature_schemes import create_quadrature diff --git a/FIAT/macro.py b/FIAT/macro.py index d84eed9a7..9615b6936 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -154,6 +154,13 @@ def is_macrocell(self): def get_parent(self): return self._parent + def get_interior_facets(self, dim): + sd = self.get_spatial_dimension() + child_to_parent = self.get_child_to_parent() + interior_facets = [facet for facet in child_to_parent[dim] + if child_to_parent[dim][facet][0] == sd] + return interior_facets + class AlfeldSplit(SplitSimplicialComplex): """Splits a simplex into the simplicial complex obtained by @@ -294,7 +301,7 @@ class C1PolynomialSet(polynomial_set.PolynomialSet): :arg ref_el: The simplicial complex. :arg degree: The polynomial degree. 
""" - def __init__(self, ref_el, degree): + def __init__(self, ref_el, degree, shape=()): from FIAT.quadrature_schemes import create_quadrature expansion_set = expansions.ExpansionSet(ref_el, variant="bubble") @@ -309,14 +316,18 @@ def __init__(self, ref_el, degree): weights = numpy.multiply(phi_at_qpts, qwts) rows = [] - child_to_parent = ref_el.get_child_to_parent() - for facet in child_to_parent[sd-1]: - if child_to_parent[sd-1][facet][0] == sd: - jumps = expansion_set.tabulate_normal_derivative_jump(degree, qpts, facet) - rows.append(numpy.dot(weights, jumps.T)) + for facet in ref_el.get_interior_facets(sd-1): + jumps = expansion_set.tabulate_normal_derivative_jump(degree, qpts, facet) + rows.append(numpy.dot(weights, jumps.T)) dual_mat = numpy.row_stack(rows) _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) num_sv = len([s for s in sig if abs(s) > 1.e-10]) coeffs = vt[num_sv:] + + if shape != tuple(): + m, n = coeffs.shape + coeffs = coeffs.reshape((m,) + (1,)*len(shape) + (n,)) + coeffs = numpy.tile(coeffs, (1,) + shape + (1,)) + super(C1PolynomialSet, self).__init__(ref_el, degree, degree, expansion_set, coeffs) diff --git a/FIAT/morley.py b/FIAT/morley.py index 8db46f55a..67896426d 100644 --- a/FIAT/morley.py +++ b/FIAT/morley.py @@ -5,7 +5,9 @@ # SPDX-License-Identifier: LGPL-3.0-or-later from FIAT import finite_element, polynomial_set, dual_set, functional -from FIAT.reference_element import TRIANGLE +from FIAT.reference_element import TRIANGLE, ufc_simplex +from FIAT.quadrature_schemes import create_quadrature +import numpy class MorleyDualSet(dual_set.DualSet): @@ -34,11 +36,16 @@ def __init__(self, ref_el): entity_ids[0][v] = [cur] cur += 1 - # edge dof -- normal at each edge midpoint + # edge dof -- average of normal derivative at each edge + rline = ufc_simplex(1) + degree = 2 + Q = create_quadrature(rline, degree-1) + qpts = Q.get_points() + scale = numpy.ones(qpts.shape) + entity_ids[1] = {} for e in sorted(top[1]): - pt = ref_el.make_points(1, e, 2)[0] - n = functional.PointNormalDerivative(ref_el, e, pt) + n = functional.IntegralMomentOfNormalDerivative(ref_el, e, Q, scale) nodes.append(n) entity_ids[1][e] = [cur] cur += 1 diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index c4f908f65..d0f639bf9 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -7,7 +7,6 @@ from FIAT.reference_element import ufc_simplex from FIAT.expansions import polynomial_entity_ids, polynomial_cell_node_map from FIAT.polynomial_set import make_bubbles, PolynomialSet, ONPolynomialSet -from FIAT.hct import HsiehCloughTocher @pytest.fixture(params=("I", "T", "S")) @@ -335,7 +334,7 @@ def test_macro_expansion(cell, split, variant, degree): def test_C1_basis(cell): degree = 3 ref_el = AlfeldSplit(cell) - P = C1PolynomialSet(ref_el, degree) + P = C1PolynomialSet(ref_el, degree, shape=(2,)) print(P.expansion_set.get_num_members(degree), P.get_num_members()) child_to_parent = ref_el.get_child_to_parent() @@ -344,18 +343,3 @@ def test_C1_basis(cell): if child_to_parent[sd-1][facet][0] == sd: pass # TODO - - -def test_HCT(): - # FIXME move this test to its own file - ref_el = ufc_simplex(2) - fe = HsiehCloughTocher(ref_el) - degree = fe.degree() - assert degree == 3 - assert fe.is_macroelement() - assert fe.space_dimension() == 12 - - order = 2 - Q = create_quadrature(ref_el, 2*(degree-order)) - pts = Q.get_points() - fe.tabulate(order, pts) From 44bbce7272df1b4a83ca0c4503a1b215d951c8ab Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 11:24:13 
-0500 Subject: [PATCH 70/93] Macro: unique bins for tabulation --- FIAT/expansions.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index cd3ef0e21..004ba4dab 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -345,7 +345,7 @@ def tabulate_normal_derivative_jump(self, n, ref_pts, facet, order=1): sd = self.ref_el.get_spatial_dimension() transform = self.ref_el.get_entity_transform(sd-1, facet) pts = numpy.transpose(list(map(transform, ref_pts))) - cell_point_map = compute_cell_point_map(self.ref_el, pts) + cell_point_map = compute_cell_point_map(self.ref_el, pts, unique=False) cell_node_map = self.get_cell_node_map(n) num_phis = self.get_num_members(n) @@ -354,7 +354,7 @@ def tabulate_normal_derivative_jump(self, n, ref_pts, facet, order=1): if len(ipts) > 0: normal = self.ref_el.compute_normal(facet, cell=k) phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k, direction=normal) - V = numpy.asarray(phi[order]).reshape((len(ibfs), len(ipts))) + V = numpy.reshape(phi[order], (len(ibfs), len(ipts))) if order % 2 == 0: V *= numpy.dot(normal, self.ref_el.compute_normal(facet)) result[numpy.ix_(ibfs, ipts)] += V @@ -573,11 +573,12 @@ def polynomial_cell_node_map(ref_el, n, continuity=None): return cell_node_map -def compute_cell_point_map(ref_el, pts, tol=1E-12): +def compute_cell_point_map(ref_el, pts, unique=True, tol=1E-12): """Maps cells on a simplicial complex to points. :arg ref_el: a SimplicialComplex. :arg pts: a column-stacked array of physical coordinates. + :kwarg unique: Do we want facet points to have a unique bin? :kwarg tol: the absolute tolerance. :returns: a numpy array mapping cell id to points located on that cell. """ @@ -586,17 +587,22 @@ def compute_cell_point_map(ref_el, pts, tol=1E-12): if len(top[sd]) == 1: return (Ellipsis,) + binned_pts = [] low, high = -tol, 1 + tol bins = [] ref_vertices = reference_element.ufc_simplex(sd).get_vertices() - for entity in top[sd]: - vertices = ref_el.get_vertices_of_subcomplex(top[sd][entity]) - A, b = reference_element.make_affine_mapping(vertices, ref_vertices) + for cell in top[sd]: + verts = ref_el.get_vertices_of_subcomplex(top[sd][cell]) + A, b = reference_element.make_affine_mapping(verts, ref_vertices) if sd > 1: A = numpy.vstack((A, numpy.sum(A, axis=0))) b = numpy.hstack((b, numpy.sum(b, axis=0))) x = numpy.dot(A, pts) + b[:, None] pts_on_cell = numpy.all(numpy.logical_and(x >= low, x <= high), axis=0) - bins.append(numpy.where(pts_on_cell)[0]) + ipts = numpy.where(pts_on_cell)[0] + if unique: + ipts = numpy.setdiff1d(ipts, binned_pts) + binned_pts.extend(ipts) + bins.append(ipts) return bins From 4a40a882aaca1357c7364d815f25210eafc9ac05 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 11:25:24 -0500 Subject: [PATCH 71/93] Construct C1PolynomialSet from either C0 or DG --- FIAT/hct.py | 2 +- FIAT/macro.py | 14 ++++++++------ test/unit/test_macro.py | 11 ++++------- 3 files changed, 13 insertions(+), 14 deletions(-) diff --git a/FIAT/hct.py b/FIAT/hct.py index 20a7c1d02..65b88b77e 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -42,5 +42,5 @@ class HsiehCloughTocher(finite_element.CiarletElement): def __init__(self, ref_el, degree=3): dual = HCTDualSet(ref_el, degree) - poly_set = macro.C1PolynomialSet(macro.AlfeldSplit(ref_el), degree) + poly_set = macro.C1PolynomialSet(macro.AlfeldSplit(ref_el), degree, variant="bubble") super(HsiehCloughTocher, self).__init__(poly_set, dual, degree) diff --git a/FIAT/macro.py 
b/FIAT/macro.py index 9615b6936..c6a71bd07 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -301,14 +301,15 @@ class C1PolynomialSet(polynomial_set.PolynomialSet): :arg ref_el: The simplicial complex. :arg degree: The polynomial degree. """ - def __init__(self, ref_el, degree, shape=()): + def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): from FIAT.quadrature_schemes import create_quadrature - expansion_set = expansions.ExpansionSet(ref_el, variant="bubble") + expansion_set = expansions.ExpansionSet(ref_el, **kwargs) + k = 1 if expansion_set.continuity == "C0" else 0 sd = ref_el.get_spatial_dimension() facet_el = ref_el.construct_subelement(sd-1) - phi_deg = 0 if sd == 1 else degree-1 + phi_deg = 0 if sd == 1 else degree - k phi = polynomial_set.ONPolynomialSet(facet_el, phi_deg) Q = create_quadrature(facet_el, 2 * phi_deg) qpts, qwts = Q.get_points(), Q.get_weights() @@ -316,9 +317,10 @@ def __init__(self, ref_el, degree, shape=()): weights = numpy.multiply(phi_at_qpts, qwts) rows = [] - for facet in ref_el.get_interior_facets(sd-1): - jumps = expansion_set.tabulate_normal_derivative_jump(degree, qpts, facet) - rows.append(numpy.dot(weights, jumps.T)) + for r in range(k, order+1): + for facet in ref_el.get_interior_facets(sd-1): + jumps = expansion_set.tabulate_normal_derivative_jump(degree, qpts, facet, order=r) + rows.append(numpy.dot(weights, jumps.T)) dual_mat = numpy.row_stack(rows) _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index d0f639bf9..0d75a156f 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -334,12 +334,9 @@ def test_macro_expansion(cell, split, variant, degree): def test_C1_basis(cell): degree = 3 ref_el = AlfeldSplit(cell) - P = C1PolynomialSet(ref_el, degree, shape=(2,)) + P = C1PolynomialSet(ref_el, degree) print(P.expansion_set.get_num_members(degree), P.get_num_members()) - child_to_parent = ref_el.get_child_to_parent() - sd = ref_el.get_spatial_dimension() - for facet in child_to_parent[sd-1]: - if child_to_parent[sd-1][facet][0] == sd: - pass - # TODO + for facet in ref_el.get_interior_facets(): + pass + # TODO From f91d2007e50049a517977260f00866382b17e5ca Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 11:49:21 -0500 Subject: [PATCH 72/93] LagrangeExpansionSet: get cell node map with point binning allowing repetitions --- FIAT/barycentric_interpolation.py | 2 +- test/unit/test_macro.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index a2d43c2e0..4a6a5ac8b 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -53,7 +53,7 @@ class LagrangeLineExpansionSet(expansions.LineExpansionSet): def __init__(self, ref_el, pts): self.points = pts self.x = numpy.array(pts).flatten() - self.cell_node_map = expansions.compute_cell_point_map(ref_el, numpy.transpose(pts)) + self.cell_node_map = expansions.compute_cell_point_map(ref_el, numpy.transpose(pts), unique=False) self.dmats = [] self.weights = [] for ibfs in self.cell_node_map: diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 0d75a156f..11218b655 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -337,6 +337,7 @@ def test_C1_basis(cell): P = C1PolynomialSet(ref_el, degree) print(P.expansion_set.get_num_members(degree), P.get_num_members()) - for facet in ref_el.get_interior_facets(): + sd = ref_el.get_spatial_dimension() + 
for facet in ref_el.get_interior_facets(sd-1): pass # TODO From a485a3155842a2f668ffb56243510e076e434fbc Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 14:30:40 -0500 Subject: [PATCH 73/93] CkPolynomialSet with arbitrary order of continuity --- FIAT/expansions.py | 30 +++++++++++++++++++++--------- FIAT/hct.py | 2 +- FIAT/macro.py | 19 ++++++++++++------- test/unit/test_macro.py | 4 ++-- 4 files changed, 36 insertions(+), 19 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 004ba4dab..ec88b945a 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -340,25 +340,37 @@ def _tabulate(self, n, pts, order=0): results.append(result) return tuple(results) - def tabulate_normal_derivative_jump(self, n, ref_pts, facet, order=1): - """Tabulate the normal derivative jump on refernece points on a facet""" + def tabulate_normal_jumps(self, n, ref_pts, facet, order=0): + """Tabulates the normal derivative jumps on reference points on a facet. + + :arg n: the polynomial degree. + :arg ref_pts: an iterable of points on the reference facet. + :arg facet: the facet id. + :kwarg order: the order of differentiation. + + :returns: a numpy array of tabulations of normal derivative jumps. + """ sd = self.ref_el.get_spatial_dimension() transform = self.ref_el.get_entity_transform(sd-1, facet) pts = numpy.transpose(list(map(transform, ref_pts))) cell_point_map = compute_cell_point_map(self.ref_el, pts, unique=False) cell_node_map = self.get_cell_node_map(n) + num_jumps = order + 1 num_phis = self.get_num_members(n) - result = numpy.zeros((num_phis,) + pts.shape[1:]) + results = numpy.zeros((num_jumps, num_phis) + pts.shape[1:]) for k, (ibfs, ipts) in enumerate(zip(cell_node_map, cell_point_map)): if len(ipts) > 0: normal = self.ref_el.compute_normal(facet, cell=k) + side = numpy.dot(normal, self.ref_el.compute_normal(facet)) phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k, direction=normal) - V = numpy.reshape(phi[order], (len(ibfs), len(ipts))) - if order % 2 == 0: - V *= numpy.dot(normal, self.ref_el.compute_normal(facet)) - result[numpy.ix_(ibfs, ipts)] += V - return result + for r in range(order+1): + V = numpy.reshape(phi[r], (len(ibfs), len(ipts))) + if r % 2 == 0 and side < 0: + results[r][numpy.ix_(ibfs, ipts)] -= V + else: + results[r][numpy.ix_(ibfs, ipts)] += V + return results def get_dmats(self, degree): """Returns a numpy array with the expansion coefficients dmat[k, j, i] @@ -375,7 +387,7 @@ def get_dmats(self, degree): return cache.setdefault(key, numpy.zeros((self.ref_el.get_spatial_dimension(), 1, 1), "d")) if self.ref_el.is_macrocell() and self.continuity is not None: - raise ValueError("Cannot create a differenation matrix on a continuous macroelement.") + raise ValueError("Cannot create a differentiation matrix on a continuous macroelement.") sd = self.ref_el.get_spatial_dimension() top = self.ref_el.get_topology() pts = [] diff --git a/FIAT/hct.py b/FIAT/hct.py index 65b88b77e..67bc3fb16 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -42,5 +42,5 @@ class HsiehCloughTocher(finite_element.CiarletElement): def __init__(self, ref_el, degree=3): dual = HCTDualSet(ref_el, degree) - poly_set = macro.C1PolynomialSet(macro.AlfeldSplit(ref_el), degree, variant="bubble") + poly_set = macro.CkPolynomialSet(macro.AlfeldSplit(ref_el), degree, variant=None) super(HsiehCloughTocher, self).__init__(poly_set, dual, degree) diff --git a/FIAT/macro.py b/FIAT/macro.py index c6a71bd07..c0f059d8a 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -295,11 
+295,15 @@ def __init__(self, ref_el, Q_ref, parent_facets=None): super(MacroQuadratureRule, self).__init__(ref_el, pts, wts) -class C1PolynomialSet(polynomial_set.PolynomialSet): - """Constructs a C1-continuous PolynomialSet on a simplicial complex. +class CkPolynomialSet(polynomial_set.PolynomialSet): + """Constructs a C^k-continuous PolynomialSet on a simplicial complex. :arg ref_el: The simplicial complex. :arg degree: The polynomial degree. + :kwarg order: The differentiation order of continuity across subcells. + :kwarg shape: The value shape. + :kwarg variant: The variant for the underlying ExpansionSet. + :kwarg scale: The scale for the underlying ExpansionSet. """ def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): from FIAT.quadrature_schemes import create_quadrature @@ -317,10 +321,11 @@ def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): weights = numpy.multiply(phi_at_qpts, qwts) rows = [] - for r in range(k, order+1): - for facet in ref_el.get_interior_facets(sd-1): - jumps = expansion_set.tabulate_normal_derivative_jump(degree, qpts, facet, order=r) - rows.append(numpy.dot(weights, jumps.T)) + for facet in ref_el.get_interior_facets(sd-1): + jumps = expansion_set.tabulate_normal_jumps(degree, qpts, facet, order=order) + for r in range(k, order+1): + dimPk = 1 if sd == 1 else expansions.polynomial_dimension(facet_el, degree - r) + rows.append(numpy.dot(weights[:dimPk], jumps[r].T)) dual_mat = numpy.row_stack(rows) _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) @@ -332,4 +337,4 @@ def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): coeffs = coeffs.reshape((m,) + (1,)*len(shape) + (n,)) coeffs = numpy.tile(coeffs, (1,) + shape + (1,)) - super(C1PolynomialSet, self).__init__(ref_el, degree, degree, expansion_set, coeffs) + super(CkPolynomialSet, self).__init__(ref_el, degree, degree, expansion_set, coeffs) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 11218b655..df89c270e 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -2,7 +2,7 @@ import numpy import pytest from FIAT import DiscontinuousLagrange, Lagrange, Legendre, P0 -from FIAT.macro import AlfeldSplit, IsoSplit, C1PolynomialSet +from FIAT.macro import AlfeldSplit, IsoSplit, CkPolynomialSet from FIAT.quadrature_schemes import create_quadrature from FIAT.reference_element import ufc_simplex from FIAT.expansions import polynomial_entity_ids, polynomial_cell_node_map @@ -334,7 +334,7 @@ def test_macro_expansion(cell, split, variant, degree): def test_C1_basis(cell): degree = 3 ref_el = AlfeldSplit(cell) - P = C1PolynomialSet(ref_el, degree) + P = CkPolynomialSet(ref_el, degree) print(P.expansion_set.get_num_members(degree), P.get_num_members()) sd = ref_el.get_spatial_dimension() From cdde0b54ab849554b4a31619dae1ae77e756cf55 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 14:54:51 -0500 Subject: [PATCH 74/93] docstrings --- FIAT/expansions.py | 4 ++-- FIAT/macro.py | 22 +++++++++++++++------- FIAT/reference_element.py | 1 + 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index ec88b945a..7464fcdcd 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -519,7 +519,7 @@ def __init__(self, ref_el, **kwargs): def polynomial_dimension(ref_el, n, continuity=None): """Returns the dimension of the space of polynomials of degree no - greater than n on the reference element.""" + greater than n on the reference complex.""" if ref_el.get_shape() == 
reference_element.POINT: if n > 0: raise ValueError("Only degree zero polynomials supported on point elements.") @@ -534,7 +534,7 @@ def polynomial_dimension(ref_el, n, continuity=None): def polynomial_entity_ids(ref_el, n, continuity=None): - """Maps facets to members of a polynomial basis. + """Maps entites of a cell complex to members of a polynomial basis. :arg ref_el: a SimplicialComplex. :arg n: the polynomial degree of the expansion set. diff --git a/FIAT/macro.py b/FIAT/macro.py index c0f059d8a..365cfffe4 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -140,6 +140,15 @@ def get_cell_connectivity(self): """ return self._cell_connectivity + def get_interior_facets(self, dim): + """Returns the dim-dimensional facets supported on the parent's interior. + """ + sd = self.get_spatial_dimension() + child_to_parent = self.get_child_to_parent() + interior_facets = [facet for facet in child_to_parent[dim] + if child_to_parent[dim][facet][0] == sd] + return interior_facets + def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity specified by subelement dimension. @@ -154,13 +163,6 @@ def is_macrocell(self): def get_parent(self): return self._parent - def get_interior_facets(self, dim): - sd = self.get_spatial_dimension() - child_to_parent = self.get_child_to_parent() - interior_facets = [facet for facet in child_to_parent[dim] - if child_to_parent[dim][facet][0] == sd] - return interior_facets - class AlfeldSplit(SplitSimplicialComplex): """Splits a simplex into the simplicial complex obtained by @@ -189,6 +191,9 @@ def __init__(self, ref_el): super(AlfeldSplit, self).__init__(ref_el, new_verts, new_topology) def construct_subcomplex(self, dimension): + """Constructs the reference subcomplex of the parent cell subentity + specified by subcomplex dimension. + """ if dimension == self.get_dimension(): return self # Alfed on facets is just a simplex @@ -251,6 +256,9 @@ def __init__(self, ref_el, degree=2, variant=None): super(IsoSplit, self).__init__(ref_el, new_verts, new_topology) def construct_subcomplex(self, dimension): + """Constructs the reference subcomplex of the parent cell subentity + specified by subcomplex dimension. 
+ """ if dimension == self.get_dimension(): return self ref_el = self.construct_subelement(dimension) diff --git a/FIAT/reference_element.py b/FIAT/reference_element.py index de48d6501..8ad5bd1a5 100644 --- a/FIAT/reference_element.py +++ b/FIAT/reference_element.py @@ -269,6 +269,7 @@ def is_macrocell(self): return False def get_parent(self): + """Return the parent cell if this cell is a split and None otherwise.""" return None def __gt__(self, other): From 4183e80b0d509f7b6e41eec81209f05bb0ed0bf5 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 15:36:04 -0500 Subject: [PATCH 75/93] Test CkPolynomialSet --- test/unit/test_macro.py | 33 ++++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index df89c270e..a234ce068 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -331,13 +331,28 @@ def test_macro_expansion(cell, split, variant, degree): assert numpy.allclose(cell_values[alpha], values[alpha][indices]) -def test_C1_basis(cell): - degree = 3 - ref_el = AlfeldSplit(cell) - P = CkPolynomialSet(ref_el, degree) - print(P.expansion_set.get_num_members(degree), P.get_num_members()) +@pytest.mark.parametrize("variant", (None, "bubble")) +@pytest.mark.parametrize("degree", (1, 4)) +@pytest.mark.parametrize("order", (0, 1)) +def test_Ck_basis(cell, order, degree, variant): + # Test that we can correctly tabulate on points on facets. + # This breaks if we were binning points into more than one cell. + # It suffices to tabulate on the vertices of the simplicial complex. + A = AlfeldSplit(cell) + Ck = CkPolynomialSet(A, degree, order=order) + U = Ck.get_expansion_set() + + sd = A.get_spatial_dimension() + top = A.get_topology() + coeffs = Ck.get_coeffs() + coeffs = coeffs.reshape((Ck.get_num_members(), len(top[sd]), -1)) + + phis = Ck.tabulate(A.get_vertices())[(0,)*sd] - sd = ref_el.get_spatial_dimension() - for facet in ref_el.get_interior_facets(sd-1): - pass - # TODO + for cell in top[sd]: + ipts = list(top[sd][cell]) + verts = A.get_vertices_of_subcomplex(top[sd][cell]) + pts = numpy.transpose(verts) + Uvals, = U._tabulate_on_cell(degree, pts, 0, cell=cell) + local_phis = numpy.dot(coeffs[:, cell, :], Uvals) + assert numpy.allclose(local_phis, phis[:, ipts]) From a6f92aed32c3edb52cc4bdac59e84d0397a397dd Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 15:36:31 -0500 Subject: [PATCH 76/93] Test CkPolynomialSet --- test/unit/test_macro.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index a234ce068..ca77b057e 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -339,7 +339,7 @@ def test_Ck_basis(cell, order, degree, variant): # This breaks if we were binning points into more than one cell. # It suffices to tabulate on the vertices of the simplicial complex. 
A = AlfeldSplit(cell) - Ck = CkPolynomialSet(A, degree, order=order) + Ck = CkPolynomialSet(A, degree, order=order, variant=variant) U = Ck.get_expansion_set() sd = A.get_spatial_dimension() From 3d6bb7d401ba70e4e6f2bb4206f7df4840fe82df Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 15:44:09 -0500 Subject: [PATCH 77/93] Support C0 expansion set with bubble variant --- FIAT/macro.py | 11 +++++++---- test/unit/test_macro.py | 7 +++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index 365cfffe4..f4810b831 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -335,10 +335,13 @@ def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): dimPk = 1 if sd == 1 else expansions.polynomial_dimension(facet_el, degree - r) rows.append(numpy.dot(weights[:dimPk], jumps[r].T)) - dual_mat = numpy.row_stack(rows) - _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) - num_sv = len([s for s in sig if abs(s) > 1.e-10]) - coeffs = vt[num_sv:] + if len(rows) == 0: + coeffs = numpy.eye(expansion_set.get_num_members(degree)) + else: + dual_mat = numpy.row_stack(rows) + _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) + num_sv = len([s for s in sig if abs(s) > 1.e-10]) + coeffs = vt[num_sv:] if shape != tuple(): m, n = coeffs.shape diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index ca77b057e..5e6ee5a0a 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -331,9 +331,9 @@ def test_macro_expansion(cell, split, variant, degree): assert numpy.allclose(cell_values[alpha], values[alpha][indices]) +@pytest.mark.parametrize("order", (0, 1)) @pytest.mark.parametrize("variant", (None, "bubble")) @pytest.mark.parametrize("degree", (1, 4)) -@pytest.mark.parametrize("order", (0, 1)) def test_Ck_basis(cell, order, degree, variant): # Test that we can correctly tabulate on points on facets. # This breaks if we were binning points into more than one cell. 
@@ -341,12 +341,11 @@ def test_Ck_basis(cell, order, degree, variant): A = AlfeldSplit(cell) Ck = CkPolynomialSet(A, degree, order=order, variant=variant) U = Ck.get_expansion_set() + cell_node_map = U.get_cell_node_map(degree) sd = A.get_spatial_dimension() top = A.get_topology() coeffs = Ck.get_coeffs() - coeffs = coeffs.reshape((Ck.get_num_members(), len(top[sd]), -1)) - phis = Ck.tabulate(A.get_vertices())[(0,)*sd] for cell in top[sd]: @@ -354,5 +353,5 @@ def test_Ck_basis(cell, order, degree, variant): verts = A.get_vertices_of_subcomplex(top[sd][cell]) pts = numpy.transpose(verts) Uvals, = U._tabulate_on_cell(degree, pts, 0, cell=cell) - local_phis = numpy.dot(coeffs[:, cell, :], Uvals) + local_phis = numpy.dot(coeffs[:, cell_node_map[cell]], Uvals) assert numpy.allclose(local_phis, phis[:, ipts]) From 0cf74c4903e1f5a84680c7416e9c3dd802ede743 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 12 Apr 2024 23:09:41 -0500 Subject: [PATCH 78/93] cleanup --- FIAT/expansions.py | 41 +++++++++++++++++++---------------------- FIAT/macro.py | 19 ++++++++++++------- 2 files changed, 31 insertions(+), 29 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 7464fcdcd..a37f7b5aa 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -162,7 +162,7 @@ def dubiner_recurrence(dim, n, order, ref_pts, Jinv, scale, variant=None): if variant is not None: p = index[-1] + shift alpha = 2 * (sum(index[:-1]) + d * shift) - 1 - norm2 = (2*d+1) / (2*d) + norm2 = (0.5 + d) / d if p > 0 and p + alpha > 0: norm2 *= (p + alpha) * (2*p + alpha) / p else: @@ -246,7 +246,7 @@ def xi_tetrahedron(eta): def apply_mapping(A, b, pts): - """Apply an affine mapping to an column-stacked array of points.""" + """Apply an affine mapping to a column-stacked array of points.""" if isinstance(pts, numpy.ndarray) and len(pts.shape) == 2: return numpy.dot(A, pts) + b[:, None] else: @@ -308,7 +308,7 @@ def get_cell_node_map(self, n): cell_node_map = polynomial_cell_node_map(self.ref_el, n, self.continuity) return self._cell_node_map_cache.setdefault(n, cell_node_map) - def _tabulate_on_cell(self, n, pts, order, cell=0, direction=None): + def _tabulate_on_cell(self, n, pts, order=0, cell=0, direction=None): A, b = self.affine_mappings[cell] ref_pts = apply_mapping(A, b, pts) Jinv = A if direction is None else numpy.dot(A, direction)[:, None] @@ -356,20 +356,20 @@ def tabulate_normal_jumps(self, n, ref_pts, facet, order=0): cell_point_map = compute_cell_point_map(self.ref_el, pts, unique=False) cell_node_map = self.get_cell_node_map(n) - num_jumps = order + 1 num_phis = self.get_num_members(n) - results = numpy.zeros((num_jumps, num_phis) + pts.shape[1:]) + results = numpy.zeros((order+1, num_phis) + pts.shape[1:]) for k, (ibfs, ipts) in enumerate(zip(cell_node_map, cell_point_map)): if len(ipts) > 0: normal = self.ref_el.compute_normal(facet, cell=k) side = numpy.dot(normal, self.ref_el.compute_normal(facet)) phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k, direction=normal) + indices = numpy.ix_(ibfs, ipts) for r in range(order+1): V = numpy.reshape(phi[r], (len(ibfs), len(ipts))) if r % 2 == 0 and side < 0: - results[r][numpy.ix_(ibfs, ipts)] -= V + results[r][indices] -= V else: - results[r][numpy.ix_(ibfs, ipts)] += V + results[r][indices] += V return results def get_dmats(self, degree): @@ -418,8 +418,7 @@ def distance(alpha, beta): # Only use dmats if tabulate failed for i in range(lorder, order + 1): dmats = self.get_dmats(degree) - alphas = mis(D, i) - for alpha in alphas: + for alpha 
in mis(D, i): base_alpha = next(a for a in result if sum(a) == i-1 and distance(alpha, a) == 1) vals = result[base_alpha] for dmat, start, end in zip(dmats, base_alpha, alpha): @@ -590,7 +589,7 @@ def compute_cell_point_map(ref_el, pts, unique=True, tol=1E-12): :arg ref_el: a SimplicialComplex. :arg pts: a column-stacked array of physical coordinates. - :kwarg unique: Do we want facet points to have a unique bin? + :kwarg unique: Are we assigning a unique cell to points on facets? :kwarg tol: the absolute tolerance. :returns: a numpy array mapping cell id to points located on that cell. """ @@ -599,22 +598,20 @@ def compute_cell_point_map(ref_el, pts, unique=True, tol=1E-12): if len(top[sd]) == 1: return (Ellipsis,) - binned_pts = [] - low, high = -tol, 1 + tol - bins = [] + cell_point_map = [] ref_vertices = reference_element.ufc_simplex(sd).get_vertices() for cell in top[sd]: verts = ref_el.get_vertices_of_subcomplex(top[sd][cell]) A, b = reference_element.make_affine_mapping(verts, ref_vertices) - if sd > 1: - A = numpy.vstack((A, numpy.sum(A, axis=0))) - b = numpy.hstack((b, numpy.sum(b, axis=0))) + A = numpy.vstack((A, -numpy.sum(A, axis=0))) + b = numpy.hstack((b, 1-numpy.sum(b, axis=0))) x = numpy.dot(A, pts) + b[:, None] - pts_on_cell = numpy.all(numpy.logical_and(x >= low, x <= high), axis=0) - ipts = numpy.where(pts_on_cell)[0] + # Bin points based on l1 distance + pts_on_cell = abs(numpy.sum(abs(x) - x, axis=0)) < 2*tol if unique: - ipts = numpy.setdiff1d(ipts, binned_pts) - binned_pts.extend(ipts) - bins.append(ipts) - return bins + for other in cell_point_map: + pts_on_cell[other] = False + ipts = numpy.where(pts_on_cell)[0] + cell_point_map.append(ipts) + return cell_point_map diff --git a/FIAT/macro.py b/FIAT/macro.py index f4810b831..3270c8b51 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -121,6 +121,12 @@ def __init__(self, parent, vertices, topology): connectivity[cell][dim].append(inv_top[dim][global_verts]) self._cell_connectivity = connectivity + # dict mapping subentity dimension to interior facets + interior_facets = {dim: [entity for entity in child_to_parent[dim] + if child_to_parent[dim][entity][0] == sd] + for dim in sorted(child_to_parent)} + self._interior_facets = interior_facets + super(SplitSimplicialComplex, self).__init__(parent.shape, vertices, topology) def get_child_to_parent(self): @@ -140,14 +146,13 @@ def get_cell_connectivity(self): """ return self._cell_connectivity - def get_interior_facets(self, dim): - """Returns the dim-dimensional facets supported on the parent's interior. + def get_interior_facets(self, dimension): + """Returns the list of entities of the given dimension that are + supported on the parent's interior. 
+ + :arg dimension: subentity dimension (integer) """ - sd = self.get_spatial_dimension() - child_to_parent = self.get_child_to_parent() - interior_facets = [facet for facet in child_to_parent[dim] - if child_to_parent[dim][facet][0] == sd] - return interior_facets + return self._interior_facets[dimension] def construct_subelement(self, dimension): """Constructs the reference element of a cell subentity From cf039040ab712262ff60c715c51acea6568fda85 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sun, 14 Apr 2024 12:16:44 -0500 Subject: [PATCH 79/93] AlfeldSplit: split general complexes --- FIAT/macro.py | 47 +++++++++++++++++++++++++++-------------------- 1 file changed, 27 insertions(+), 20 deletions(-) diff --git a/FIAT/macro.py b/FIAT/macro.py index 3270c8b51..ed27da38d 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -170,30 +170,35 @@ def get_parent(self): class AlfeldSplit(SplitSimplicialComplex): - """Splits a simplex into the simplicial complex obtained by - connecting vertices to barycenter. + """Splits a simplicial complex by connecting subcell vertices to their + barycenter. """ def __init__(self, ref_el): sd = ref_el.get_spatial_dimension() top = ref_el.get_topology() # Keep old facets, respecting the old numbering new_topology = copy.deepcopy(top) - # Discard the cell interior + # Discard the cell interiors new_topology[sd] = {} - - # Append the barycenter as the new vertex - barycenter = ref_el.make_points(sd, 0, sd+1) - old_verts = ref_el.get_vertices() - new_verts = old_verts + tuple(barycenter) - new_vert_id = len(old_verts) - new_topology[0][new_vert_id] = (new_vert_id,) - - # Append new facets by adding the barycenter to old facets - for dim in range(1, sd + 1): - offset = len(new_topology[dim]) - for entity, ids in top[dim-1].items(): - new_topology[dim][offset+entity] = ids + (new_vert_id,) - super(AlfeldSplit, self).__init__(ref_el, new_verts, new_topology) + new_verts = tuple(ref_el.get_vertices()) + + for cell in top[sd]: + # Append the barycenter as the new vertex + barycenter = ref_el.make_points(sd, cell, sd+1) + new_verts += tuple(barycenter) + new_vert_id = len(new_topology[0]) + new_topology[0][new_vert_id] = (new_vert_id,) + + # Append new facets by adding the barycenter to old facets + for dim in range(1, sd + 1): + cur = len(new_topology[dim]) + for entity, ids in top[dim-1].items(): + if set(ids) < set(top[sd][cell]): + new_topology[dim][cur] = ids + (new_vert_id,) + cur = cur + 1 + + parent = ref_el.get_parent() or ref_el + super(AlfeldSplit, self).__init__(parent, new_verts, new_topology) def construct_subcomplex(self, dimension): """Constructs the reference subcomplex of the parent cell subentity @@ -201,8 +206,8 @@ def construct_subcomplex(self, dimension): """ if dimension == self.get_dimension(): return self - # Alfed on facets is just a simplex - return self.construct_subelement(dimension) + # Alfeld on facets is just the parent subcomplex + return self._parent.construct_subcomplex(dimension) class IsoSplit(SplitSimplicialComplex): @@ -258,7 +263,9 @@ def __init__(self, ref_el, degree=2, variant=None): if set(facet) < adjacency[v]: entities.append((v,) + facet) new_topology[dim] = dict(enumerate(entities)) - super(IsoSplit, self).__init__(ref_el, new_verts, new_topology) + + parent = ref_el.get_parent() or ref_el + super(IsoSplit, self).__init__(parent, new_verts, new_topology) def construct_subcomplex(self, dimension): """Constructs the reference subcomplex of the parent cell subentity From 2d2df6db32ef1936d2e8e6d94c775900027c9db5 Mon Sep 17 
00:00:00 2001 From: Pablo Brubeck Date: Mon, 15 Apr 2024 21:33:07 -0600 Subject: [PATCH 80/93] Attempt to correctly construct a C2 poly set --- FIAT/expansions.py | 20 ++++++++++++-------- FIAT/macro.py | 6 ++++-- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index a37f7b5aa..cedf914ed 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -357,19 +357,23 @@ def tabulate_normal_jumps(self, n, ref_pts, facet, order=0): cell_node_map = self.get_cell_node_map(n) num_phis = self.get_num_members(n) - results = numpy.zeros((order+1, num_phis) + pts.shape[1:]) + results = [numpy.zeros((num_phis,) + (sd,) * (r-1) + pts.shape[1:]) + for r in range(order+1)] + for k, (ibfs, ipts) in enumerate(zip(cell_node_map, cell_point_map)): if len(ipts) > 0: normal = self.ref_el.compute_normal(facet, cell=k) side = numpy.dot(normal, self.ref_el.compute_normal(facet)) - phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k, direction=normal) - indices = numpy.ix_(ibfs, ipts) - for r in range(order+1): - V = numpy.reshape(phi[r], (len(ibfs), len(ipts))) - if r % 2 == 0 and side < 0: - results[r][indices] -= V + phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k) + for r, vr in enumerate(phi): + shape_indices = tuple(range(sd) for _ in range(r-1)) + indices = numpy.ix_(ibfs, *shape_indices, ipts) + if r > 0: + vr = numpy.tensordot(normal, vr, axes=(0, 1)) + if r == 0 and side < 0: + results[r][indices] -= vr else: - results[r][indices] += V + results[r][indices] += vr return results def get_dmats(self, degree): diff --git a/FIAT/macro.py b/FIAT/macro.py index ed27da38d..f409a4e35 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -328,6 +328,7 @@ class CkPolynomialSet(polynomial_set.PolynomialSet): def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): from FIAT.quadrature_schemes import create_quadrature expansion_set = expansions.ExpansionSet(ref_el, **kwargs) + num_members = expansion_set.get_num_members(degree) k = 1 if expansion_set.continuity == "C0" else 0 sd = ref_el.get_spatial_dimension() @@ -345,10 +346,11 @@ def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): jumps = expansion_set.tabulate_normal_jumps(degree, qpts, facet, order=order) for r in range(k, order+1): dimPk = 1 if sd == 1 else expansions.polynomial_dimension(facet_el, degree - r) - rows.append(numpy.dot(weights[:dimPk], jumps[r].T)) + rows.append(numpy.tensordot(weights[:dimPk], jumps[r].T, + axes=(-1, 0)).reshape((-1, num_members))) if len(rows) == 0: - coeffs = numpy.eye(expansion_set.get_num_members(degree)) + coeffs = numpy.eye(num_members) else: dual_mat = numpy.row_stack(rows) _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) From f5fc86517038c934e0b861ab054c4c941f66fc09 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 16 Apr 2024 17:40:33 -0600 Subject: [PATCH 81/93] Reduced HCT extended dual set --- FIAT/hct.py | 19 +++++++++++-------- FIAT/jacobi.py | 3 ++- test/unit/test_hct.py | 33 +++++++++++++++++++++++++++++++++ 3 files changed, 46 insertions(+), 9 deletions(-) create mode 100644 test/unit/test_hct.py diff --git a/FIAT/hct.py b/FIAT/hct.py index 67bc3fb16..1d19e5c1a 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -1,15 +1,17 @@ from FIAT.functional import (PointEvaluation, PointDerivative, IntegralMomentOfNormalDerivative) from FIAT import finite_element, dual_set, macro, polynomial_set -from FIAT.reference_element import ufc_simplex +from FIAT.reference_element import TRIANGLE, ufc_simplex from 
FIAT.quadrature_schemes import create_quadrature -import numpy +from FIAT.jacobi import eval_jacobi class HCTDualSet(dual_set.DualSet): - def __init__(self, ref_el, degree): + def __init__(self, ref_el, degree, reduced=False): if degree != 3: - raise ValueError("HCT elements only defined for degree=3") + raise ValueError("HCT only defined for degree=3") + if ref_el.get_shape() != TRIANGLE: + raise ValueError("HCT only defined on triangles") top = ref_el.get_topology() verts = ref_el.get_vertices() sd = ref_el.get_spatial_dimension() @@ -28,10 +30,11 @@ def __init__(self, ref_el, degree): rline = ufc_simplex(1) Q = create_quadrature(rline, degree-1) qpts = Q.get_points() - scale = numpy.ones(qpts.shape) + k = 2 if reduced else 0 + f_at_qpts = eval_jacobi(0, 0, k, 2.0*qpts - 1) for e in sorted(top[1]): cur = len(nodes) - nodes.append(IntegralMomentOfNormalDerivative(ref_el, e, Q, scale)) + nodes.append(IntegralMomentOfNormalDerivative(ref_el, e, Q, f_at_qpts)) entity_ids[1][e].extend(range(cur, len(nodes))) return super(HCTDualSet, self).__init__(nodes, ref_el, entity_ids) @@ -40,7 +43,7 @@ def __init__(self, ref_el, degree): class HsiehCloughTocher(finite_element.CiarletElement): """The HCT finite element.""" - def __init__(self, ref_el, degree=3): - dual = HCTDualSet(ref_el, degree) + def __init__(self, ref_el, degree=3, reduced=False): + dual = HCTDualSet(ref_el, degree, reduced=reduced) poly_set = macro.CkPolynomialSet(macro.AlfeldSplit(ref_el), degree, variant=None) super(HsiehCloughTocher, self).__init__(poly_set, dual, degree) diff --git a/FIAT/jacobi.py b/FIAT/jacobi.py index d167ca74f..5c251537f 100644 --- a/FIAT/jacobi.py +++ b/FIAT/jacobi.py @@ -18,7 +18,8 @@ def eval_jacobi(a, b, n, x): given in Karniadakis and Sherwin, Appendix B""" if 0 == n: - return 1.0 + # Get zeros of the right shape + return 0.0 * x + 1.0 elif 1 == n: return 0.5 * (a - b + (a + b + 2.0) * x) else: # 2 <= n diff --git a/test/unit/test_hct.py b/test/unit/test_hct.py new file mode 100644 index 000000000..27b67560d --- /dev/null +++ b/test/unit/test_hct.py @@ -0,0 +1,33 @@ +import pytest +import numpy + +from FIAT import HsiehCloughTocher as HCT +from FIAT.reference_element import ufc_simplex, make_lattice +from FIAT.functional import PointEvaluation + + +@pytest.fixture +def cell(): + return ufc_simplex(2) + + +@pytest.mark.parametrize("reduced", (False, True)) +def test_hct_constant(cell, reduced): + # Test that bfs associated with point evaluation sum up to 1 + fe = HCT(cell, reduced=reduced) + + pts = make_lattice(cell.get_vertices(), 3) + tab = fe.tabulate(2, pts) + + coefs = numpy.zeros((fe.space_dimension(),)) + nodes = fe.dual_basis() + entity_dofs = fe.entity_dofs() + for v in entity_dofs[0]: + for k in entity_dofs[0][v]: + if isinstance(nodes[k], PointEvaluation): + coefs[k] = 1.0 + + for alpha in tab: + expected = 1 if sum(alpha) == 0 else 0 + vals = numpy.dot(coefs, tab[alpha]) + assert numpy.allclose(vals, expected) From 7d1cf9e4b144d9226e6a2fb2a065eb15c7e6264f Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 17 Apr 2024 14:22:32 -0600 Subject: [PATCH 82/93] Fix quadrature degree --- FIAT/hct.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/FIAT/hct.py b/FIAT/hct.py index 1d19e5c1a..877b1ba69 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -28,9 +28,9 @@ def __init__(self, ref_el, degree, reduced=False): entity_ids[0][v].extend(range(cur, len(nodes))) rline = ufc_simplex(1) - Q = create_quadrature(rline, degree-1) - qpts = Q.get_points() k = 2 if reduced else 0 + Q = 
create_quadrature(rline, degree-1+k) + qpts = Q.get_points() f_at_qpts = eval_jacobi(0, 0, k, 2.0*qpts - 1) for e in sorted(top[1]): cur = len(nodes) From c0d13ee334b3b5ff2fcaf4296d7e8d7abbeca1a9 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 19 Apr 2024 18:12:37 -0600 Subject: [PATCH 83/93] Support dmats for C0 macroelements --- FIAT/barycentric_interpolation.py | 12 +-- FIAT/expansions.py | 119 ++++++++++++++---------------- FIAT/polynomial_set.py | 16 ++-- test/unit/test_macro.py | 2 +- 4 files changed, 67 insertions(+), 82 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index 4a6a5ac8b..f1919cacd 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -74,8 +74,8 @@ def get_cell_node_map(self, n): def get_points(self): return self.points - def get_dmats(self, degree): - return [dmat.T for dmat in self.dmats] + def get_dmats(self, degree, cell=0): + return self.dmats[cell].T def _tabulate(self, n, pts, order=0): num_members = self.get_num_members(n) @@ -93,12 +93,8 @@ def _tabulate(self, n, pts, order=0): indices = numpy.ix_(ibfs, ipts) for result, val in zip(results, vals): result[indices] = val - - for r in range(order+1): - shape = results[r].shape - shape = shape[:1] + (1,)*r + shape[1:] - results[r] = numpy.reshape(results[r], shape) - return tuple(results) + tabulations = {(r,): results[r] for r in range(order+1)} + return tabulations class LagrangePolynomialSet(polynomial_set.PolynomialSet): diff --git a/FIAT/expansions.py b/FIAT/expansions.py index cedf914ed..ed3b6511a 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -309,15 +309,40 @@ def get_cell_node_map(self, n): return self._cell_node_map_cache.setdefault(n, cell_node_map) def _tabulate_on_cell(self, n, pts, order=0, cell=0, direction=None): + from FIAT.polynomial_set import mis + lorder = min(order, self.recurrence_order) A, b = self.affine_mappings[cell] ref_pts = apply_mapping(A, b, pts) Jinv = A if direction is None else numpy.dot(A, direction)[:, None] sd = self.ref_el.get_spatial_dimension() - phi = dubiner_recurrence(sd, n, order, ref_pts, Jinv, + phi = dubiner_recurrence(sd, n, lorder, ref_pts, Jinv, self.scale, variant=self.variant) if self.continuity == "C0": phi = C0_basis(sd, n, phi) - return phi + + # Pack linearly independent components into a dictionary + result = {(0,) * sd: numpy.asarray(phi[0])} + for r in range(1, len(phi)): + vr = numpy.transpose(phi[r], tuple(range(1, r+1)) + (0, r+1)) + for indices in numpy.ndindex(vr.shape[:r]): + alpha = tuple(map(indices.count, range(sd))) + if alpha not in result: + result[alpha] = vr[indices] + + def distance(alpha, beta): + return sum(ai != bi for ai, bi in zip(alpha, beta)) + + # Only use dmats if tabulate failed + for i in range(len(phi), order + 1): + dmats = self.get_dmats(n, cell=cell) + for alpha in mis(sd, i): + base_alpha = next(a for a in result if sum(a) == i-1 and distance(alpha, a) == 1) + vals = result[base_alpha] + for dmat, start, end in zip(dmats, base_alpha, alpha): + for j in range(start, end): + vals = numpy.dot(dmat.T, vals) + result[alpha] = vals + return result def _tabulate(self, n, pts, order=0): """A version of tabulate() that also works for a single point.""" @@ -328,17 +353,14 @@ def _tabulate(self, n, pts, order=0): if len(phis) == 1: return phis[0] - sd = self.ref_el.get_spatial_dimension() - results = [] num_phis = self.get_num_members(n) cell_node_map = self.get_cell_node_map(n) - for r in range(order+1): - result = 
numpy.zeros((num_phis,) + (sd,)*r + pts.shape[1:]) + result = {} + for alpha in phis[0]: + result[alpha] = numpy.zeros((num_phis,) + pts.shape[1:]) for ibfs, ipts, phi in zip(cell_node_map, cell_point_map, phis): - shape_indices = tuple(range(sd) for _ in range(r)) - result[numpy.ix_(ibfs, *shape_indices, ipts)] = phi[r] - results.append(result) - return tuple(results) + result[alpha][numpy.ix_(ibfs, ipts)] = phi[alpha] + return result def tabulate_normal_jumps(self, n, ref_pts, facet, order=0): """Tabulates the normal derivative jumps on reference points on a facet. @@ -365,23 +387,29 @@ def tabulate_normal_jumps(self, n, ref_pts, facet, order=0): normal = self.ref_el.compute_normal(facet, cell=k) side = numpy.dot(normal, self.ref_el.compute_normal(facet)) phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k) - for r, vr in enumerate(phi): + v0 = phi[(0,)*sd] + for r in range(order+1): + vr = numpy.zeros((sd,)*r + v0.shape, dtype=v0.dtype) + for index in numpy.ndindex(vr.shape[:r]): + vr[index] = phi[tuple(map(index.count, range(sd)))] + if r > 0: + vr = numpy.tensordot(normal, vr, axes=(0, 0)) + shape_indices = tuple(range(sd) for _ in range(r-1)) indices = numpy.ix_(ibfs, *shape_indices, ipts) - if r > 0: - vr = numpy.tensordot(normal, vr, axes=(0, 1)) if r == 0 and side < 0: results[r][indices] -= vr else: results[r][indices] += vr return results - def get_dmats(self, degree): + def get_dmats(self, degree, cell=0): """Returns a numpy array with the expansion coefficients dmat[k, j, i] of the gradient of each member of the expansion set: d/dx_k phi_j = sum_i dmat[k, j, i] phi_i. """ - key = degree + from FIAT.polynomial_set import mis + key = (degree, cell) cache = self._dmats_cache try: return cache[key] @@ -390,64 +418,33 @@ def get_dmats(self, degree): if degree == 0: return cache.setdefault(key, numpy.zeros((self.ref_el.get_spatial_dimension(), 1, 1), "d")) - if self.ref_el.is_macrocell() and self.continuity is not None: - raise ValueError("Cannot create a differentiation matrix on a continuous macroelement.") sd = self.ref_el.get_spatial_dimension() top = self.ref_el.get_topology() - pts = [] - for cell in top[sd]: - verts = self.ref_el.get_vertices_of_subcomplex(top[sd][cell]) - pts.extend(reference_element.make_lattice(verts, degree, variant="gl")) - v, dv = self._tabulate(degree, numpy.transpose(pts), order=1) - dv = numpy.transpose(dv, (1, 2, 0)) - dmats = numpy.linalg.solve(numpy.transpose(v), dv) + verts = self.ref_el.get_vertices_of_subcomplex(top[sd][cell]) + pts = reference_element.make_lattice(verts, degree, variant="gl") + v = self._tabulate_on_cell(degree, numpy.transpose(pts), order=1, cell=cell) + dv = [numpy.transpose(v[alpha]) for alpha in mis(sd, 1)] + dmats = numpy.linalg.solve(numpy.transpose(v[(0,)*sd]), dv) return cache.setdefault(key, dmats) - def _tabulate_jet(self, degree, pts, order=0): - from FIAT.polynomial_set import mis - vals = self._tabulate(degree, numpy.transpose(pts), order=min(order, self.recurrence_order)) - lorder = len(vals) - D = self.ref_el.get_spatial_dimension() - result = {(0,) * D: numpy.array(vals[0])} - for r in range(1, lorder): - vr = numpy.transpose(vals[r], tuple(range(1, r+1)) + (0, r+1)) - for indices in numpy.ndindex(vr.shape[:r]): - alpha = tuple(map(indices.count, range(D))) - if alpha not in result: - result[alpha] = vr[indices] - - def distance(alpha, beta): - return sum(ai != bi for ai, bi in zip(alpha, beta)) - - # Only use dmats if tabulate failed - for i in range(lorder, order + 1): - dmats = self.get_dmats(degree) 
- for alpha in mis(D, i): - base_alpha = next(a for a in result if sum(a) == i-1 and distance(alpha, a) == 1) - vals = result[base_alpha] - for dmat, start, end in zip(dmats, base_alpha, alpha): - for j in range(start, end): - vals = numpy.dot(dmat.T, vals) - result[alpha] = vals - return result - def tabulate(self, n, pts): if len(pts) == 0: return numpy.array([]) - results, = self._tabulate(n, numpy.transpose(pts)) - return numpy.asarray(results) + sd = self.ref_el.get_spatial_dimension() + return self._tabulate(n, numpy.transpose(pts))[(0,)*sd] def tabulate_derivatives(self, n, pts): - vals, deriv_vals = self._tabulate(n, numpy.transpose(pts), order=1) + from FIAT.polynomial_set import mis + vals = self._tabulate(n, numpy.transpose(pts), order=1) # Create the ordinary data structure. D = self.ref_el.get_spatial_dimension() - data = [[(vals[i][j], [deriv_vals[i][r][j] for r in range(D)]) + data = [[(vals[(0,) * D][i, j], [vals[alpha][i, j] for alpha in mis(D, 1)]) for j in range(len(vals[0]))] for i in range(len(vals))] return data def tabulate_jet(self, n, pts, order=1): - vals = self._tabulate_jet(n, pts, order=order) + vals = self._tabulate(n, pts, order=order) # Create the ordinary data structure. D = self.ref_el.get_spatial_dimension() v0 = vals[(0,)*D] @@ -488,7 +485,7 @@ def _tabulate(self, n, pts, order=0): A, b = self.affine_mappings[0] xs = apply_mapping(A, b, pts).T - results = [] + results = {} scale = self.scale * numpy.sqrt(2 * numpy.arange(n+1) + 1) for k in range(order+1): v = numpy.zeros((n + 1, len(xs)), xs.dtype) @@ -497,10 +494,8 @@ def _tabulate(self, n, pts, order=0): for p in range(n + 1): v[p] *= scale[p] scale[p] *= 0.5 * (p + k + 1) * A[0, 0] - shape = v.shape - shape = shape[:1] + (1,) * k + shape[1:] - results.append(v.reshape(shape)) - return tuple(results) + results[(k,)] = v + return results class TriangleExpansionSet(ExpansionSet): diff --git a/FIAT/polynomial_set.py b/FIAT/polynomial_set.py index 5bee07b88..8887be13b 100644 --- a/FIAT/polynomial_set.py +++ b/FIAT/polynomial_set.py @@ -61,7 +61,6 @@ def __init__(self, ref_el, degree, embedded_degree, expansion_set, coeffs): self.embedded_degree = embedded_degree self.expansion_set = expansion_set self.coeffs = coeffs - self.dmats = [] def tabulate_new(self, pts): return numpy.dot(self.coeffs, @@ -69,12 +68,9 @@ def tabulate_new(self, pts): def tabulate(self, pts, jet_order=0): """Returns the values of the polynomial set.""" - base_vals = self.expansion_set._tabulate_jet(self.embedded_degree, pts, order=jet_order) - D = self.ref_el.get_spatial_dimension() - result = {} - for i in range(jet_order + 1): - for alpha in mis(D, i): - result[alpha] = numpy.dot(self.coeffs, base_vals[alpha]) + base_vals = self.expansion_set._tabulate(self.embedded_degree, numpy.transpose(pts), order=jet_order) + + result = {alpha: numpy.dot(self.coeffs, base_vals[alpha]) for alpha in base_vals} return result def get_expansion_set(self): @@ -92,10 +88,8 @@ def get_degree(self): def get_embedded_degree(self): return self.embedded_degree - def get_dmats(self): - if len(self.dmats) == 0: - self.dmats = self.expansion_set.get_dmats(self.embedded_degree) - return self.dmats + def get_dmats(self, cell=0): + return self.expansion_set.get_dmats(self.embedded_degree, cell=cell) def get_reference_element(self): return self.ref_el diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 5e6ee5a0a..ec0af5445 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -352,6 +352,6 @@ def test_Ck_basis(cell, order, degree, 
variant): ipts = list(top[sd][cell]) verts = A.get_vertices_of_subcomplex(top[sd][cell]) pts = numpy.transpose(verts) - Uvals, = U._tabulate_on_cell(degree, pts, 0, cell=cell) + Uvals = U._tabulate_on_cell(degree, pts, 0, cell=cell)[(0,)*sd] local_phis = numpy.dot(coeffs[:, cell_node_map[cell]], Uvals) assert numpy.allclose(local_phis, phis[:, ipts]) From 73fb402c1630a302f375c329cd9b5090293b2915 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sun, 21 Apr 2024 11:36:24 +0100 Subject: [PATCH 84/93] fix regression tests --- FIAT/expansions.py | 14 ++++++++------ test/regression/test_regression.py | 4 ++-- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index ed3b6511a..0cab4147f 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -430,21 +430,23 @@ def get_dmats(self, degree, cell=0): def tabulate(self, n, pts): if len(pts) == 0: return numpy.array([]) - sd = self.ref_el.get_spatial_dimension() - return self._tabulate(n, numpy.transpose(pts))[(0,)*sd] + D = self.ref_el.get_spatial_dimension() + return self._tabulate(n, numpy.transpose(pts))[(0,) * D] def tabulate_derivatives(self, n, pts): from FIAT.polynomial_set import mis vals = self._tabulate(n, numpy.transpose(pts), order=1) # Create the ordinary data structure. D = self.ref_el.get_spatial_dimension() - data = [[(vals[(0,) * D][i, j], [vals[alpha][i, j] for alpha in mis(D, 1)]) - for j in range(len(vals[0]))] - for i in range(len(vals))] + v = vals[(0,) * D] + dv = [vals[alpha] for alpha in mis(D, 1)] + data = [[(v[i, j], [vi[i, j] for vi in dv]) + for j in range(v.shape[1])] + for i in range(v.shape[0])] return data def tabulate_jet(self, n, pts, order=1): - vals = self._tabulate(n, pts, order=order) + vals = self._tabulate(n, numpy.transpose(pts), order=order) # Create the ordinary data structure. 
D = self.ref_el.get_spatial_dimension() v0 = vals[(0,)*D] diff --git a/test/regression/test_regression.py b/test/regression/test_regression.py index 3e6bf77f3..af91ebded 100644 --- a/test/regression/test_regression.py +++ b/test/regression/test_regression.py @@ -84,7 +84,7 @@ def create_data(): ref_el=reference_element.DefaultTetrahedron(), degree=3 ) - return ps.dmats + return ps.get_dmats() # Try reading reference values filename = os.path.join(ref_path, "reference-polynomials.json") @@ -102,7 +102,7 @@ def create_data(): ref_el=reference_element.DefaultLine(), degree=3 ) - return ps.dmats + return ps.get_dmats() # Try reading reference values filename = os.path.join(ref_path, "reference-polynomials_1D.json") From 8c62d18ebf1e4895ebc0394a3fd5852345023e1d Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sun, 21 Apr 2024 12:10:04 +0100 Subject: [PATCH 85/93] refactor numpy.transpose(pts) --- FIAT/barycentric_interpolation.py | 2 +- FIAT/expansions.py | 29 +++++++++++++++-------------- FIAT/polynomial_set.py | 2 +- test/unit/test_macro.py | 3 +-- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index f1919cacd..3bd4b8744 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -53,7 +53,7 @@ class LagrangeLineExpansionSet(expansions.LineExpansionSet): def __init__(self, ref_el, pts): self.points = pts self.x = numpy.array(pts).flatten() - self.cell_node_map = expansions.compute_cell_point_map(ref_el, numpy.transpose(pts), unique=False) + self.cell_node_map = expansions.compute_cell_point_map(ref_el, pts, unique=False) self.dmats = [] self.weights = [] for ibfs in self.cell_node_map: diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 0cab4147f..b526e99dd 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -312,7 +312,7 @@ def _tabulate_on_cell(self, n, pts, order=0, cell=0, direction=None): from FIAT.polynomial_set import mis lorder = min(order, self.recurrence_order) A, b = self.affine_mappings[cell] - ref_pts = apply_mapping(A, b, pts) + ref_pts = apply_mapping(A, b, numpy.transpose(pts)) Jinv = A if direction is None else numpy.dot(A, direction)[:, None] sd = self.ref_el.get_spatial_dimension() phi = dubiner_recurrence(sd, n, lorder, ref_pts, Jinv, @@ -346,8 +346,9 @@ def distance(alpha, beta): def _tabulate(self, n, pts, order=0): """A version of tabulate() that also works for a single point.""" + pts = numpy.asarray(pts) cell_point_map = compute_cell_point_map(self.ref_el, pts) - phis = [self._tabulate_on_cell(n, pts[:, ipts], order, cell=k) + phis = [self._tabulate_on_cell(n, pts[ipts], order, cell=k) for k, ipts in enumerate(cell_point_map)] if len(phis) == 1: @@ -357,7 +358,7 @@ def _tabulate(self, n, pts, order=0): cell_node_map = self.get_cell_node_map(n) result = {} for alpha in phis[0]: - result[alpha] = numpy.zeros((num_phis,) + pts.shape[1:]) + result[alpha] = numpy.zeros((num_phis, len(pts))) for ibfs, ipts, phi in zip(cell_node_map, cell_point_map, phis): result[alpha][numpy.ix_(ibfs, ipts)] = phi[alpha] return result @@ -374,19 +375,19 @@ def tabulate_normal_jumps(self, n, ref_pts, facet, order=0): """ sd = self.ref_el.get_spatial_dimension() transform = self.ref_el.get_entity_transform(sd-1, facet) - pts = numpy.transpose(list(map(transform, ref_pts))) + pts = numpy.array(list(map(transform, ref_pts))) cell_point_map = compute_cell_point_map(self.ref_el, pts, unique=False) cell_node_map = self.get_cell_node_map(n) num_phis = 
self.get_num_members(n) - results = [numpy.zeros((num_phis,) + (sd,) * (r-1) + pts.shape[1:]) + results = [numpy.zeros((num_phis,) + (sd,) * (r-1) + (len(pts),)) for r in range(order+1)] for k, (ibfs, ipts) in enumerate(zip(cell_node_map, cell_point_map)): if len(ipts) > 0: normal = self.ref_el.compute_normal(facet, cell=k) side = numpy.dot(normal, self.ref_el.compute_normal(facet)) - phi = self._tabulate_on_cell(n, pts[:, ipts], order, cell=k) + phi = self._tabulate_on_cell(n, pts[ipts], order, cell=k) v0 = phi[(0,)*sd] for r in range(order+1): vr = numpy.zeros((sd,)*r + v0.shape, dtype=v0.dtype) @@ -422,7 +423,7 @@ def get_dmats(self, degree, cell=0): top = self.ref_el.get_topology() verts = self.ref_el.get_vertices_of_subcomplex(top[sd][cell]) pts = reference_element.make_lattice(verts, degree, variant="gl") - v = self._tabulate_on_cell(degree, numpy.transpose(pts), order=1, cell=cell) + v = self._tabulate_on_cell(degree, pts, order=1, cell=cell) dv = [numpy.transpose(v[alpha]) for alpha in mis(sd, 1)] dmats = numpy.linalg.solve(numpy.transpose(v[(0,)*sd]), dv) return cache.setdefault(key, dmats) @@ -431,11 +432,11 @@ def tabulate(self, n, pts): if len(pts) == 0: return numpy.array([]) D = self.ref_el.get_spatial_dimension() - return self._tabulate(n, numpy.transpose(pts))[(0,) * D] + return self._tabulate(n, pts)[(0,) * D] def tabulate_derivatives(self, n, pts): from FIAT.polynomial_set import mis - vals = self._tabulate(n, numpy.transpose(pts), order=1) + vals = self._tabulate(n, pts, order=1) # Create the ordinary data structure. D = self.ref_el.get_spatial_dimension() v = vals[(0,) * D] @@ -446,7 +447,7 @@ def tabulate_derivatives(self, n, pts): return data def tabulate_jet(self, n, pts, order=1): - vals = self._tabulate(n, numpy.transpose(pts), order=order) + vals = self._tabulate(n, pts, order=order) # Create the ordinary data structure. D = self.ref_el.get_spatial_dimension() v0 = vals[(0,)*D] @@ -486,7 +487,7 @@ def _tabulate(self, n, pts, order=0): return super(LineExpansionSet, self)._tabulate(n, pts, order=order) A, b = self.affine_mappings[0] - xs = apply_mapping(A, b, pts).T + xs = apply_mapping(A, b, numpy.transpose(pts)).T results = {} scale = self.scale * numpy.sqrt(2 * numpy.arange(n+1) + 1) for k in range(order+1): @@ -589,7 +590,7 @@ def compute_cell_point_map(ref_el, pts, unique=True, tol=1E-12): """Maps cells on a simplicial complex to points. :arg ref_el: a SimplicialComplex. - :arg pts: a column-stacked array of physical coordinates. + :arg pts: an iterable of physical points on the complex. :kwarg unique: Are we assigning a unique cell to points on facets? :kwarg tol: the absolute tolerance. :returns: a numpy array mapping cell id to points located on that cell. 
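For reference, a minimal usage sketch of compute_cell_point_map under the row-stacked point convention adopted in this patch (illustrative only; it assumes AlfeldSplit is importable from FIAT.macro as in the later patches of this series):

    import numpy
    from FIAT.expansions import compute_cell_point_map
    from FIAT.macro import AlfeldSplit
    from FIAT.reference_element import ufc_simplex

    # Alfeld split of the UFC triangle: three subcells meeting at the barycenter
    ref_complex = AlfeldSplit(ufc_simplex(2))
    pts = numpy.asarray([(0.1, 0.1), (0.6, 0.3), (0.2, 0.6)])
    # one index array per subcell, selecting the points located on that subcell
    cell_point_map = compute_cell_point_map(ref_complex, pts)
    for k, ipts in enumerate(cell_point_map):
        print(k, pts[ipts])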
@@ -606,10 +607,10 @@ def compute_cell_point_map(ref_el, pts, unique=True, tol=1E-12): A, b = reference_element.make_affine_mapping(verts, ref_vertices) A = numpy.vstack((A, -numpy.sum(A, axis=0))) b = numpy.hstack((b, 1-numpy.sum(b, axis=0))) - x = numpy.dot(A, pts) + b[:, None] + x = numpy.dot(pts, A.T) + b[None, :] # Bin points based on l1 distance - pts_on_cell = abs(numpy.sum(abs(x) - x, axis=0)) < 2*tol + pts_on_cell = abs(numpy.sum(abs(x) - x, axis=1)) < 2*tol if unique: for other in cell_point_map: pts_on_cell[other] = False diff --git a/FIAT/polynomial_set.py b/FIAT/polynomial_set.py index 8887be13b..4638e9a8b 100644 --- a/FIAT/polynomial_set.py +++ b/FIAT/polynomial_set.py @@ -68,7 +68,7 @@ def tabulate_new(self, pts): def tabulate(self, pts, jet_order=0): """Returns the values of the polynomial set.""" - base_vals = self.expansion_set._tabulate(self.embedded_degree, numpy.transpose(pts), order=jet_order) + base_vals = self.expansion_set._tabulate(self.embedded_degree, pts, order=jet_order) result = {alpha: numpy.dot(self.coeffs, base_vals[alpha]) for alpha in base_vals} return result diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index ec0af5445..1e9c6ec4f 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -351,7 +351,6 @@ def test_Ck_basis(cell, order, degree, variant): for cell in top[sd]: ipts = list(top[sd][cell]) verts = A.get_vertices_of_subcomplex(top[sd][cell]) - pts = numpy.transpose(verts) - Uvals = U._tabulate_on_cell(degree, pts, 0, cell=cell)[(0,)*sd] + Uvals = U._tabulate_on_cell(degree, verts, 0, cell=cell)[(0,)*sd] local_phis = numpy.dot(coeffs[:, cell_node_map[cell]], Uvals) assert numpy.allclose(local_phis, phis[:, ipts]) From 56c5af02c81bd9b68b7ee3beb18cfebde7a73b19 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Mon, 22 Apr 2024 21:56:38 +0100 Subject: [PATCH 86/93] fix tabulation shape --- FIAT/expansions.py | 5 +++-- FIAT/polynomial_set.py | 1 - 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index b526e99dd..d830c469d 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -358,7 +358,7 @@ def _tabulate(self, n, pts, order=0): cell_node_map = self.get_cell_node_map(n) result = {} for alpha in phis[0]: - result[alpha] = numpy.zeros((num_phis, len(pts))) + result[alpha] = numpy.zeros((num_phis,) + pts.shape[:-1]) for ibfs, ipts, phi in zip(cell_node_map, cell_point_map, phis): result[alpha][numpy.ix_(ibfs, ipts)] = phi[alpha] return result @@ -380,7 +380,7 @@ def tabulate_normal_jumps(self, n, ref_pts, facet, order=0): cell_node_map = self.get_cell_node_map(n) num_phis = self.get_num_members(n) - results = [numpy.zeros((num_phis,) + (sd,) * (r-1) + (len(pts),)) + results = [numpy.zeros((num_phis,) + (sd,) * (r-1) + pts.shape[:-1]) for r in range(order+1)] for k, (ibfs, ipts) in enumerate(zip(cell_node_map, cell_point_map)): @@ -395,6 +395,7 @@ def tabulate_normal_jumps(self, n, ref_pts, facet, order=0): vr[index] = phi[tuple(map(index.count, range(sd)))] if r > 0: vr = numpy.tensordot(normal, vr, axes=(0, 0)) + vr = vr.transpose((-2, *tuple(range(r-1)), -1)) shape_indices = tuple(range(sd) for _ in range(r-1)) indices = numpy.ix_(ibfs, *shape_indices, ipts) diff --git a/FIAT/polynomial_set.py b/FIAT/polynomial_set.py index 4638e9a8b..732d03892 100644 --- a/FIAT/polynomial_set.py +++ b/FIAT/polynomial_set.py @@ -69,7 +69,6 @@ def tabulate_new(self, pts): def tabulate(self, pts, jet_order=0): """Returns the values of the polynomial set.""" base_vals = 
self.expansion_set._tabulate(self.embedded_degree, pts, order=jet_order) - result = {alpha: numpy.dot(self.coeffs, base_vals[alpha]) for alpha in base_vals} return result From cc136bd90764db8f103de9401b246397c1838742 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Tue, 23 Apr 2024 13:21:52 +0100 Subject: [PATCH 87/93] Barycentric interpolation: Return list of single dmat --- FIAT/barycentric_interpolation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index 3bd4b8744..918ec1f5d 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -75,7 +75,7 @@ def get_points(self): return self.points def get_dmats(self, degree, cell=0): - return self.dmats[cell].T + return [self.dmats[cell].T] def _tabulate(self, n, pts, order=0): num_members = self.get_num_members(n) From 58870a02b19eaab511687f159cacaa244f3b0643 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 24 Apr 2024 11:07:23 +0100 Subject: [PATCH 88/93] P0: fix permutations --- FIAT/P0.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/FIAT/P0.py b/FIAT/P0.py index c91dd171c..1a88ee2d5 100644 --- a/FIAT/P0.py +++ b/FIAT/P0.py @@ -30,12 +30,12 @@ def __init__(self, ref_el): entity_ids[dim] = {} entity_permutations[dim] = {} sym_size = ref_el.symmetry_group_size(dim) - perm = [0] if dim == sd else [] + num_points = 1 if dim == sd else 0 if isinstance(dim, tuple): assert isinstance(sym_size, tuple) - perms = {o: perm for o in numpy.ndindex(sym_size)} + perms = {o: list(range(num_points)) for o in numpy.ndindex(sym_size)} else: - perms = {o: perm for o in range(sym_size)} + perms = {o: list(range(num_points)) for o in range(sym_size)} for entity in sorted(top[dim]): entity_ids[dim][entity] = [entity] if dim == sd else [] entity_permutations[dim][entity] = perms From 8d7f49a8ab1d97f46060739a957adca001dab095 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Wed, 24 Apr 2024 13:43:16 +0100 Subject: [PATCH 89/93] DualSet can now be iterated --- FIAT/barycentric_interpolation.py | 9 +++++++++ FIAT/discontinuous_lagrange.py | 8 ++------ FIAT/dual_set.py | 3 +++ FIAT/lagrange.py | 8 ++------ test/unit/test_macro.py | 15 ++++----------- 5 files changed, 20 insertions(+), 23 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index 918ec1f5d..7f3dc5ace 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -11,6 +11,15 @@ from FIAT.functional import index_iterator +def get_lagrange_points(nodes): + """Extract singleton point for each node.""" + points = [] + for node in nodes: + pt, = node.get_point_dict() + points.append(pt) + return points + + def barycentric_interpolation(nodes, wts, dmat, pts, order=0): """Evaluates a Lagrange basis on a line reference element via the second barycentric interpolation formula. 
See Berrut and Trefethen (2004) diff --git a/FIAT/discontinuous_lagrange.py b/FIAT/discontinuous_lagrange.py index e9bfdf954..85907b56b 100644 --- a/FIAT/discontinuous_lagrange.py +++ b/FIAT/discontinuous_lagrange.py @@ -10,7 +10,7 @@ from FIAT import finite_element, polynomial_set, dual_set, functional, P0 from FIAT.reference_element import LINE, make_lattice from FIAT.orientation_utils import make_entity_permutations_simplex -from FIAT.barycentric_interpolation import LagrangePolynomialSet +from FIAT.barycentric_interpolation import LagrangePolynomialSet, get_lagrange_points from FIAT.polynomial_set import mis from FIAT.check_format_variant import parse_lagrange_variant @@ -232,11 +232,7 @@ def __init__(self, ref_el, degree, variant="equispaced"): if ref_el.shape == LINE: # In 1D we can use the primal basis as the expansion set, # avoiding any round-off coming from a basis transformation - points = [] - for node in dual.nodes: - # Assert singleton point for each node. - pt, = node.get_point_dict().keys() - points.append(pt) + points = get_lagrange_points(dual) poly_set = LagrangePolynomialSet(ref_el, points) else: poly_set = polynomial_set.ONPolynomialSet(ref_el, degree) diff --git a/FIAT/dual_set.py b/FIAT/dual_set.py index f111f5343..d7015996a 100644 --- a/FIAT/dual_set.py +++ b/FIAT/dual_set.py @@ -32,6 +32,9 @@ def __init__(self, nodes, ref_el, entity_ids, entity_permutations=None): ids.sort() self.entity_closure_ids[d][e] = ids + def __iter__(self): + return iter(self.nodes) + def get_nodes(self): return self.nodes diff --git a/FIAT/lagrange.py b/FIAT/lagrange.py index b6266c701..1201369c9 100644 --- a/FIAT/lagrange.py +++ b/FIAT/lagrange.py @@ -7,7 +7,7 @@ from FIAT import finite_element, polynomial_set, dual_set, functional from FIAT.orientation_utils import make_entity_permutations_simplex -from FIAT.barycentric_interpolation import LagrangePolynomialSet +from FIAT.barycentric_interpolation import LagrangePolynomialSet, get_lagrange_points from FIAT.reference_element import LINE from FIAT.check_format_variant import parse_lagrange_variant @@ -67,11 +67,7 @@ def __init__(self, ref_el, degree, variant="equispaced"): if ref_el.shape == LINE: # In 1D we can use the primal basis as the expansion set, # avoiding any round-off coming from a basis transformation - points = [] - for node in dual.nodes: - # Assert singleton point for each node. 
- pt, = node.get_point_dict().keys() - points.append(pt) + points = get_lagrange_points(dual) poly_set = LagrangePolynomialSet(ref_el, points) else: poly_variant = "bubble" if ref_el.is_macrocell() else None diff --git a/test/unit/test_macro.py b/test/unit/test_macro.py index 1e9c6ec4f..65975c6bc 100644 --- a/test/unit/test_macro.py +++ b/test/unit/test_macro.py @@ -7,6 +7,7 @@ from FIAT.reference_element import ufc_simplex from FIAT.expansions import polynomial_entity_ids, polynomial_cell_node_map from FIAT.polynomial_set import make_bubbles, PolynomialSet, ONPolynomialSet +from FIAT.barycentric_interpolation import get_lagrange_points @pytest.fixture(params=("I", "T", "S")) @@ -128,14 +129,6 @@ def test_macro_lagrange(variant, degree, split, cell): assert numpy.allclose(fe.V, V) -def get_lagrange_points(fe): - points = [] - for node in fe.dual_basis(): - pt, = node.get_point_dict() - points.append(pt) - return points - - def make_mass_matrix(fe, order=0): sd = fe.ref_el.get_spatial_dimension() Q = create_quadrature(fe.ref_complex, 2*fe.degree()) @@ -154,8 +147,8 @@ def test_lagrange_alfeld_duals(cell, degree, variant): Pk_dofs = Pk.entity_dofs() alfeld_dofs = alfeld.entity_dofs() - Pk_pts = numpy.asarray(get_lagrange_points(Pk)) - alfeld_pts = numpy.asarray(get_lagrange_points(alfeld)) + Pk_pts = numpy.asarray(get_lagrange_points(Pk.dual_basis())) + alfeld_pts = numpy.asarray(get_lagrange_points(alfeld.dual_basis())) sd = cell.get_dimension() top = cell.get_topology() @@ -179,7 +172,7 @@ def test_lagrange_iso_duals(cell, degree): P2 = Lagrange(cell, 2*degree, variant="equispaced") iso = Lagrange(IsoSplit(cell), degree, variant="equispaced") - assert numpy.allclose(get_lagrange_points(iso), get_lagrange_points(P2)) + assert numpy.allclose(get_lagrange_points(iso.dual_basis()), get_lagrange_points(P2.dual_basis())) P2_ids = P2.entity_dofs() iso_ids = iso.entity_dofs() From ea9a7947a506070964379a5e0233ebfce3614f63 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Fri, 26 Apr 2024 11:43:15 +0100 Subject: [PATCH 90/93] Remove most of macro logic from barycentric_interpolation --- FIAT/barycentric_interpolation.py | 37 ++++++++++--------------------- FIAT/expansions.py | 13 ++++++----- 2 files changed, 19 insertions(+), 31 deletions(-) diff --git a/FIAT/barycentric_interpolation.py b/FIAT/barycentric_interpolation.py index 7f3dc5ace..dcb3a6fa4 100644 --- a/FIAT/barycentric_interpolation.py +++ b/FIAT/barycentric_interpolation.py @@ -30,7 +30,7 @@ def barycentric_interpolation(nodes, wts, dmat, pts, order=0): sp_simplify = numpy.vectorize(simplify) else: sp_simplify = lambda x: x - phi = numpy.add.outer(-nodes, pts) + phi = numpy.add.outer(-nodes, pts.flatten()) with numpy.errstate(divide='ignore', invalid='ignore'): numpy.reciprocal(phi, out=phi) numpy.multiply(phi, wts[:, None], out=phi) @@ -38,10 +38,10 @@ def barycentric_interpolation(nodes, wts, dmat, pts, order=0): phi[phi != phi] = 1.0 phi = sp_simplify(phi) - results = [phi] - for r in range(order): + results = {(0,): phi} + for r in range(1, order+1): phi = sp_simplify(numpy.dot(dmat, phi)) - results.append(phi) + results[(r,)] = phi return results @@ -61,14 +61,17 @@ class LagrangeLineExpansionSet(expansions.LineExpansionSet): """Lagrange polynomial expansion set for given points the line.""" def __init__(self, ref_el, pts): self.points = pts - self.x = numpy.array(pts).flatten() + self.x = numpy.array(pts, dtype="d").flatten() self.cell_node_map = expansions.compute_cell_point_map(ref_el, pts, unique=False) self.dmats = [] 
self.weights = [] + self.nodes = [] for ibfs in self.cell_node_map: - dmat, wts = make_dmat(self.x[ibfs]) + nodes = self.x[ibfs] + dmat, wts = make_dmat(nodes) self.dmats.append(dmat) self.weights.append(wts) + self.nodes.append(nodes) self.degree = max(len(wts) for wts in self.weights)-1 self.recurrence_order = self.degree + 1 @@ -86,24 +89,8 @@ def get_points(self): def get_dmats(self, degree, cell=0): return [self.dmats[cell].T] - def _tabulate(self, n, pts, order=0): - num_members = self.get_num_members(n) - cell_node_map = self.get_cell_node_map(n) - cell_point_map = expansions.compute_cell_point_map(self.ref_el, pts) - pts = numpy.asarray(pts).flatten() - results = None - for ibfs, ipts, wts, dmat in zip(cell_node_map, cell_point_map, self.weights, self.dmats): - vals = barycentric_interpolation(self.x[ibfs], wts, dmat, pts[ipts], order=order) - if len(cell_node_map) == 1: - results = vals - else: - if results is None: - results = [numpy.zeros((num_members, len(pts)), dtype=vals[0].dtype) for r in range(order+1)] - indices = numpy.ix_(ibfs, ipts) - for result, val in zip(results, vals): - result[indices] = val - tabulations = {(r,): results[r] for r in range(order+1)} - return tabulations + def _tabulate_on_cell(self, n, pts, order=0, cell=0, direction=None): + return barycentric_interpolation(self.nodes[cell], self.weights[cell], self.dmats[cell], pts, order=order) class LagrangePolynomialSet(polynomial_set.PolynomialSet): @@ -125,7 +112,7 @@ def __init__(self, ref_el, pts, shape=tuple()): # set up coefficients if shape == tuple(): - coeffs = numpy.eye(num_members) + coeffs = numpy.eye(num_members, dtype="d") else: coeffs_shape = (num_members, *shape, num_exp_functions) coeffs = numpy.zeros(coeffs_shape, "d") diff --git a/FIAT/expansions.py b/FIAT/expansions.py index d830c469d..33b1f4630 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -481,23 +481,24 @@ def __init__(self, ref_el, **kwargs): raise Exception("Must have a line") super(LineExpansionSet, self).__init__(ref_el, **kwargs) - def _tabulate(self, n, pts, order=0): + def _tabulate_on_cell(self, n, pts, order=0, cell=0, direction=None): """Returns a tuple of (vals, derivs) such that vals[i,j] = phi_i(pts[j]), derivs[i,j] = D vals[i,j].""" - if self.variant is not None or len(self.affine_mappings) > 1: - return super(LineExpansionSet, self)._tabulate(n, pts, order=order) + if self.variant is not None: + return super(LineExpansionSet, self)._tabulate_on_cell(n, pts, order=order, cell=cell, direction=direction) - A, b = self.affine_mappings[0] + A, b = self.affine_mappings[cell] + Jinv = A[0, 0] if direction is None else numpy.dot(A, direction) xs = apply_mapping(A, b, numpy.transpose(pts)).T results = {} scale = self.scale * numpy.sqrt(2 * numpy.arange(n+1) + 1) for k in range(order+1): - v = numpy.zeros((n + 1, len(xs)), xs.dtype) + v = numpy.zeros((n + 1, len(xs)), "d") if n >= k: v[k:] = jacobi.eval_jacobi_batch(k, k, n-k, xs) for p in range(n + 1): v[p] *= scale[p] - scale[p] *= 0.5 * (p + k + 1) * A[0, 0] + scale[p] *= 0.5 * (p + k + 1) * Jinv results[(k,)] = v return results From 025671cd664fc20383ad3a6fc08c1912cf50747e Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sat, 27 Apr 2024 19:01:42 +0100 Subject: [PATCH 91/93] refactoring --- FIAT/expansions.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/FIAT/expansions.py b/FIAT/expansions.py index 33b1f4630..c3f2fd2cc 100644 --- a/FIAT/expansions.py +++ b/FIAT/expansions.py @@ -420,28 +420,28 @@ def 
get_dmats(self, degree, cell=0): if degree == 0: return cache.setdefault(key, numpy.zeros((self.ref_el.get_spatial_dimension(), 1, 1), "d")) - sd = self.ref_el.get_spatial_dimension() + D = self.ref_el.get_dimension() top = self.ref_el.get_topology() - verts = self.ref_el.get_vertices_of_subcomplex(top[sd][cell]) + verts = self.ref_el.get_vertices_of_subcomplex(top[D][cell]) pts = reference_element.make_lattice(verts, degree, variant="gl") v = self._tabulate_on_cell(degree, pts, order=1, cell=cell) - dv = [numpy.transpose(v[alpha]) for alpha in mis(sd, 1)] - dmats = numpy.linalg.solve(numpy.transpose(v[(0,)*sd]), dv) + dv = [numpy.transpose(v[alpha]) for alpha in mis(D, 1)] + dmats = numpy.linalg.solve(numpy.transpose(v[(0,) * D]), dv) return cache.setdefault(key, dmats) def tabulate(self, n, pts): if len(pts) == 0: return numpy.array([]) - D = self.ref_el.get_spatial_dimension() - return self._tabulate(n, pts)[(0,) * D] + sd = self.ref_el.get_spatial_dimension() + return self._tabulate(n, pts)[(0,) * sd] def tabulate_derivatives(self, n, pts): from FIAT.polynomial_set import mis vals = self._tabulate(n, pts, order=1) # Create the ordinary data structure. - D = self.ref_el.get_spatial_dimension() - v = vals[(0,) * D] - dv = [vals[alpha] for alpha in mis(D, 1)] + sd = self.ref_el.get_spatial_dimension() + v = vals[(0,) * sd] + dv = [vals[alpha] for alpha in mis(sd, 1)] data = [[(v[i, j], [vi[i, j] for vi in dv]) for j in range(v.shape[1])] for i in range(v.shape[0])] @@ -450,13 +450,13 @@ def tabulate_derivatives(self, n, pts): def tabulate_jet(self, n, pts, order=1): vals = self._tabulate(n, pts, order=order) # Create the ordinary data structure. - D = self.ref_el.get_spatial_dimension() - v0 = vals[(0,)*D] + sd = self.ref_el.get_spatial_dimension() + v0 = vals[(0,) * sd] data = [v0] for r in range(1, order+1): - vr = numpy.zeros((D,)*r + v0.shape, dtype=v0.dtype) + vr = numpy.zeros((sd,) * r + v0.shape, dtype=v0.dtype) for index in numpy.ndindex(vr.shape[:r]): - vr[index] = vals[tuple(map(index.count, range(D)))] + vr[index] = vals[tuple(map(index.count, range(sd)))] data.append(vr.transpose((r, r+1) + tuple(range(r)))) return data From ed6e147246bfc111a4d077c6cf5d5bc75914a074 Mon Sep 17 00:00:00 2001 From: Pablo Brubeck Date: Sat, 27 Apr 2024 21:11:50 +0100 Subject: [PATCH 92/93] HDivSymPolynomialSet --- FIAT/hct.py | 2 +- FIAT/macro.py | 61 ++++++++++++++++++++++++++++++++++++++++++++------- 2 files changed, 54 insertions(+), 9 deletions(-) diff --git a/FIAT/hct.py b/FIAT/hct.py index 877b1ba69..00668653d 100644 --- a/FIAT/hct.py +++ b/FIAT/hct.py @@ -37,7 +37,7 @@ def __init__(self, ref_el, degree, reduced=False): nodes.append(IntegralMomentOfNormalDerivative(ref_el, e, Q, f_at_qpts)) entity_ids[1][e].extend(range(cur, len(nodes))) - return super(HCTDualSet, self).__init__(nodes, ref_el, entity_ids) + super(HCTDualSet, self).__init__(nodes, ref_el, entity_ids) class HsiehCloughTocher(finite_element.CiarletElement): diff --git a/FIAT/macro.py b/FIAT/macro.py index f409a4e35..29f6eabda 100644 --- a/FIAT/macro.py +++ b/FIAT/macro.py @@ -320,7 +320,7 @@ class CkPolynomialSet(polynomial_set.PolynomialSet): :arg ref_el: The simplicial complex. :arg degree: The polynomial degree. - :kwarg order: The differentiation order of continuity across subcells. + :kwarg order: The order of continuity across subcells. :kwarg shape: The value shape. :kwarg variant: The variant for the underlying ExpansionSet. :kwarg scale: The scale for the underlying ExpansionSet. 
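As a rough illustration of the interface documented above, one might build a C^1 set on a split cell like this (a sketch only; the degree, split, and printed quantity are arbitrary choices):

    from FIAT.macro import AlfeldSplit, CkPolynomialSet
    from FIAT.reference_element import ufc_simplex

    ref_complex = AlfeldSplit(ufc_simplex(2))
    # C^1 piecewise cubics on the Alfeld split
    C1 = CkPolynomialSet(ref_complex, 3, order=1)
    # tabulations are keyed by derivative multi-index
    vals = C1.tabulate(ref_complex.get_vertices())
    print(vals[(0, 0)].shape)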
@@ -328,7 +328,6 @@ class CkPolynomialSet(polynomial_set.PolynomialSet): def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): from FIAT.quadrature_schemes import create_quadrature expansion_set = expansions.ExpansionSet(ref_el, **kwargs) - num_members = expansion_set.get_num_members(degree) k = 1 if expansion_set.continuity == "C0" else 0 sd = ref_el.get_spatial_dimension() @@ -345,17 +344,16 @@ def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): for facet in ref_el.get_interior_facets(sd-1): jumps = expansion_set.tabulate_normal_jumps(degree, qpts, facet, order=order) for r in range(k, order+1): - dimPk = 1 if sd == 1 else expansions.polynomial_dimension(facet_el, degree - r) - rows.append(numpy.tensordot(weights[:dimPk], jumps[r].T, - axes=(-1, 0)).reshape((-1, num_members))) + num_wt = 1 if sd == 1 else expansions.polynomial_dimension(facet_el, degree-r) + rows.append(numpy.tensordot(weights[:num_wt], jumps[r], axes=(-1, -1)).reshape(-1, jumps[r].shape[0])) - if len(rows) == 0: - coeffs = numpy.eye(num_members) - else: + if len(rows) > 0: dual_mat = numpy.row_stack(rows) _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) num_sv = len([s for s in sig if abs(s) > 1.e-10]) coeffs = vt[num_sv:] + else: + coeffs = numpy.eye(expansion_set.get_num_members(degree)) if shape != tuple(): m, n = coeffs.shape @@ -363,3 +361,50 @@ def __init__(self, ref_el, degree, order=1, shape=(), **kwargs): coeffs = numpy.tile(coeffs, (1,) + shape + (1,)) super(CkPolynomialSet, self).__init__(ref_el, degree, degree, expansion_set, coeffs) + + +class HDivSymPolynomialSet(polynomial_set.PolynomialSet): + """Constructs a symmetric tensor-valued PolynomialSet with continuous + normal components on a simplicial complex. + + :arg ref_el: The simplicial complex. + :arg degree: The polynomial degree. + :kwarg order: The order of continuity across subcells. + :kwarg variant: The variant for the underlying ExpansionSet. + :kwarg scale: The scale for the underlying ExpansionSet. 
+ """ + def __init__(self, ref_el, degree, order=0, **kwargs): + from FIAT.quadrature_schemes import create_quadrature + U = polynomial_set.ONSymTensorPolynomialSet(ref_el, degree, **kwargs) + coeffs = U.get_coeffs() + expansion_set = U.get_expansion_set() + k = 1 if expansion_set.continuity == "C0" else 0 + + sd = ref_el.get_spatial_dimension() + facet_el = ref_el.construct_subelement(sd-1) + + phi_deg = 0 if sd == 1 else degree - k + phi = polynomial_set.ONPolynomialSet(facet_el, phi_deg, shape=(sd,)) + Q = create_quadrature(facet_el, 2 * phi_deg) + qpts, qwts = Q.get_points(), Q.get_weights() + phi_at_qpts = phi.tabulate(qpts)[(0,) * (sd-1)] + weights = numpy.multiply(phi_at_qpts, qwts) + + rows = [] + for facet in ref_el.get_interior_facets(sd-1): + normal = ref_el.compute_normal(facet) + jumps = expansion_set.tabulate_normal_jumps(degree, qpts, facet, order=order) + for r in range(k, order+1): + jump = numpy.dot(coeffs, jumps[r]) + # num_wt = 1 if sd == 1 else expansions.polynomial_dimension(facet_el, degree-r) + wn = weights[:, :, None, :] * normal[None, None, :, None] + ax = tuple(range(1, len(wn.shape))) + rows.append(numpy.tensordot(wn, jump, axes=(ax, ax))) + + if len(rows) > 0: + dual_mat = numpy.row_stack(rows) + _, sig, vt = numpy.linalg.svd(dual_mat, full_matrices=True) + num_sv = len([s for s in sig if abs(s) > 1.e-10]) + coeffs = numpy.tensordot(vt[num_sv:], coeffs, axes=(1, 0)) + + super(HDivSymPolynomialSet, self).__init__(ref_el, degree, degree, expansion_set, coeffs) From 23ad19a2a3306798f6fd22a9adde62a6873a89fd Mon Sep 17 00:00:00 2001 From: Rob Kirby Date: Tue, 30 Apr 2024 22:25:31 -0500 Subject: [PATCH 93/93] doc strings --- FIAT/check_format_variant.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/FIAT/check_format_variant.py b/FIAT/check_format_variant.py index 299aca65f..b42c01ac6 100644 --- a/FIAT/check_format_variant.py +++ b/FIAT/check_format_variant.py @@ -1,4 +1,5 @@ import re + from FIAT.macro import AlfeldSplit, IsoSplit # dicts mapping Lagrange variant names to recursivenodes family names @@ -39,6 +40,12 @@ def check_format_variant(variant, degree): def parse_lagrange_variant(variant, discontinuous=False, integral=False): + """Parses variant options for Lagrange elements. + + variant may be a single option or comma-separated pair + indicating the dof type (integral, equispaced, spectral, etc) + and the type of splitting to give a macro-element (Alfeld, iso) + """ if variant is None: variant = "integral" if integral else "equispaced" options = variant.replace(" ", "").split(",")
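A sketch of the variant strings this parser is meant to accept, as seen from the Lagrange constructor (the splitting keyword spellings "alfeld" and "iso" are assumed here from the docstring, not taken verbatim from the parser body):

    from FIAT import Lagrange
    from FIAT.reference_element import ufc_simplex

    T = ufc_simplex(2)
    # dof variant only
    P2 = Lagrange(T, 2, variant="equispaced")
    # dof variant plus a macro-element splitting
    P2_macro = Lagrange(T, 2, variant="equispaced,alfeld")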