Skip to content

Commit fec9eab

Browse files
authored
Merge branch 'main' into stokes_biharmonic2
2 parents f388502 + 879ad35 commit fec9eab

File tree

5 files changed

+221
-48
lines changed

5 files changed

+221
-48
lines changed
Lines changed: 125 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,125 @@
1+
import numpy as np

from meshmode.array_context import PyOpenCLArrayContext
from meshmode.discretization import Discretization
from meshmode.discretization.poly_element import (
        InterpolatoryQuadratureSimplexGroupFactory)

from pytential import bind, sym
from pytential.target import PointsTarget

# {{{ set some constants for use below

nelements = 20                  # number of curve elements in the boundary mesh
bdry_quad_order = 4             # boundary quadrature order
mesh_order = bdry_quad_order    # polynomial order of the mesh geometry
qbx_order = bdry_quad_order     # QBX expansion order
bdry_ovsmp_quad_order = 4*bdry_quad_order   # oversampled quadrature order
fmm_order = False               # presumably disables FMM acceleration -- confirm
k = 3                           # NOTE(review): wavenumber leftover from the
                                # Helmholtz example; unused by the Laplace kernel

# }}}
22+
23+
24+
def main(mesh_name="starfish", visualize=False):
    """Solve an exterior Laplace Dirichlet problem on a 2D curve.

    Sets up a double-layer-potential boundary integral equation on the
    chosen curve, solves it with GMRES, and evaluates the resulting
    potential on a volume grid of targets.

    :arg mesh_name: ``"ellipse"`` or ``"starfish"``.
    :arg visualize: if *True*, write the volume potential to
        ``laplace-dirichlet-potential.vts``.
    :raises ValueError: if *mesh_name* is not recognized.
    """
    import logging
    logging.basicConfig(level=logging.INFO)

    import pyopencl as cl
    cl_ctx = cl.create_some_context()
    queue = cl.CommandQueue(cl_ctx)
    actx = PyOpenCLArrayContext(queue, force_device_scalars=True)

    from meshmode.mesh.generation import ellipse, make_curve_mesh, starfish
    from functools import partial

    if mesh_name == "ellipse":
        mesh = make_curve_mesh(
                partial(ellipse, 1),
                np.linspace(0, 1, nelements+1),
                mesh_order)
    elif mesh_name == "starfish":
        mesh = make_curve_mesh(
                starfish,
                np.linspace(0, 1, nelements+1),
                mesh_order)
    else:
        raise ValueError(f"unknown mesh name: {mesh_name}")

    pre_density_discr = Discretization(
            actx, mesh,
            InterpolatoryQuadratureSimplexGroupFactory(bdry_quad_order))

    from pytential.qbx import QBXLayerPotentialSource
    qbx = QBXLayerPotentialSource(
            pre_density_discr, fine_order=bdry_ovsmp_quad_order,
            qbx_order=qbx_order,
            fmm_order=fmm_order
            )

    from sumpy.visualization import FieldPlotter
    fplot = FieldPlotter(np.zeros(2), extent=5, npoints=500)
    targets = actx.from_numpy(fplot.points)

    from pytential import GeometryCollection
    places = GeometryCollection({
        "qbx": qbx,
        # looser target association for the volume evaluation, so that
        # targets near the boundary still get associated to QBX centers
        "qbx_high_target_assoc_tol": qbx.copy(target_association_tolerance=0.05),
        "targets": PointsTarget(targets)
        }, auto_where="qbx")
    density_discr = places.get_discretization("qbx")

    # {{{ describe bvp

    from sumpy.kernel import LaplaceKernel
    kernel = LaplaceKernel(2)

    sigma_sym = sym.var("sigma")

    # -1 for interior Dirichlet
    # +1 for exterior Dirichlet
    loc_sign = +1

    # double-layer representation: jump relation gives the -loc_sign*0.5 term
    bdry_op_sym = (-loc_sign*0.5*sigma_sym
            - sym.D(kernel, sigma_sym, qbx_forced_limit="avg"))

    # }}}

    bound_op = bind(places, bdry_op_sym)

    # {{{ fix rhs and solve

    nodes = actx.thaw(density_discr.nodes())
    bvp_rhs = actx.np.sin(nodes[0])

    from pytential.linalg.gmres import gmres
    gmres_result = gmres(
            bound_op.scipy_op(actx, sigma_sym.name, dtype=np.float64),
            bvp_rhs, tol=1e-8, progress=True,
            stall_iterations=0,
            hard_failure=True)

    # }}}

    # {{{ postprocess/visualize

    repr_kwargs = dict(
            source="qbx_high_target_assoc_tol",
            target="targets",
            qbx_forced_limit=None)
    representation_sym = (
            - sym.D(kernel, sigma_sym, **repr_kwargs))

    # BUG FIX: the original passed "k=k" here, a vestigial Helmholtz
    # wavenumber left over from the example this file was adapted from.
    # LaplaceKernel takes no wavenumber, so the argument is dropped.
    fld_in_vol = actx.to_numpy(
            bind(places, representation_sym)(
                actx, sigma=gmres_result.solution))

    if visualize:
        fplot.write_vtk_file("laplace-dirichlet-potential.vts", [
            ("potential", fld_in_vol),
            ])

    # }}}


if __name__ == "__main__":
    main(visualize=True)

pytential/qbx/__init__.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -643,7 +643,9 @@ def exec_compute_potential_insn_fmm(self, actx: PyOpenCLArrayContext,
643643
self.qbx_order,
644644
self.fmm_level_to_order,
645645
source_extra_kwargs=source_extra_kwargs,
646-
kernel_extra_kwargs=kernel_extra_kwargs)
646+
kernel_extra_kwargs=kernel_extra_kwargs,
647+
_use_target_specific_qbx=self._use_target_specific_qbx,
648+
)
647649

648650
from pytential.qbx.geometry import target_state
649651
if actx.to_numpy(actx.np.any(

pytential/qbx/fmm.py

Lines changed: 24 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -112,13 +112,24 @@ class QBXExpansionWrangler(SumpyExpansionWrangler):
112112
def __init__(self, tree_indep, geo_data, dtype,
113113
qbx_order, fmm_level_to_order,
114114
source_extra_kwargs, kernel_extra_kwargs,
115+
translation_classes_data=None,
115116
_use_target_specific_qbx=None):
116117
if _use_target_specific_qbx:
117118
raise ValueError("TSQBX is not implemented in sumpy")
118119

120+
base_kernel = tree_indep.get_base_kernel()
121+
if translation_classes_data is None and base_kernel.is_translation_invariant:
122+
from pytential.qbx.fmm import translation_classes_builder
123+
traversal = geo_data.traversal()
124+
actx = geo_data._setup_actx
125+
126+
translation_classes_data, _ = translation_classes_builder(actx)(
127+
actx.queue, traversal, traversal.tree, is_translation_per_level=True)
128+
119129
super().__init__(
120-
tree_indep, geo_data.traversal(),
121-
dtype, fmm_level_to_order, source_extra_kwargs, kernel_extra_kwargs)
130+
tree_indep, traversal,
131+
dtype, fmm_level_to_order, source_extra_kwargs, kernel_extra_kwargs,
132+
translation_classes_data=translation_classes_data)
122133

123134
self.qbx_order = qbx_order
124135
self.geo_data = geo_data
@@ -375,6 +386,17 @@ def eval_target_specific_qbx_locals(self, src_weight_vecs):
375386

376387
# }}}
377388

389+
390+
def translation_classes_builder(actx):
    """Return a :class:`boxtree.translation_classes.TranslationClassesBuilder`
    for *actx*'s CL context, memoized on the array context so repeated calls
    reuse the same builder instance.
    """
    from pytools import memoize_in

    # memoization key deliberately matches the original: keyed on the
    # wrangler class together with this function
    @memoize_in(actx, (QBXExpansionWrangler, translation_classes_builder))
    def get_builder():
        from boxtree.translation_classes import TranslationClassesBuilder
        return TranslationClassesBuilder(actx.context)

    return get_builder()
399+
378400
# }}}
379401

380402

pytential/symbolic/execution.py

Lines changed: 24 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -124,29 +124,31 @@ def map_node_min(self, expr):
124124
def _map_elementwise_reduction(self, reduction_name, expr):
125125
import loopy as lp
126126
from arraycontext import make_loopy_program
127-
from meshmode.transform_metadata import (
128-
ConcurrentElementInameTag, ConcurrentDOFInameTag)
127+
from meshmode.transform_metadata import ConcurrentElementInameTag
128+
actx = self.array_context
129129

130-
@memoize_in(self.places, "elementwise_node_"+reduction_name)
130+
@memoize_in(actx, (
131+
EvaluationMapperBase._map_elementwise_reduction,
132+
f"elementwise_node_{reduction_name}"))
131133
def node_knl():
132134
t_unit = make_loopy_program(
133135
"""{[iel, idof, jdof]:
134136
0<=iel<nelements and
135137
0<=idof, jdof<ndofs}""",
136138
"""
137-
result[iel, idof] = %s(jdof, operand[iel, jdof])
139+
<> el_result = %s(jdof, operand[iel, jdof])
140+
result[iel, idof] = el_result
138141
""" % reduction_name,
139-
name="nodewise_reduce")
142+
name=f"elementwise_node_{reduction_name}")
140143

141144
return lp.tag_inames(t_unit, {
142145
"iel": ConcurrentElementInameTag(),
143-
"idof": ConcurrentDOFInameTag(),
144146
})
145147

146-
@memoize_in(self.places, "elementwise_"+reduction_name)
148+
@memoize_in(actx, (
149+
EvaluationMapperBase._map_elementwise_reduction,
150+
f"elementwise_element_{reduction_name}"))
147151
def element_knl():
148-
# FIXME: This computes the reduction value redundantly for each
149-
# output DOF.
150152
t_unit = make_loopy_program(
151153
"""{[iel, jdof]:
152154
0<=iel<nelements and
@@ -155,37 +157,27 @@ def element_knl():
155157
"""
156158
result[iel, 0] = %s(jdof, operand[iel, jdof])
157159
""" % reduction_name,
158-
name="elementwise_reduce")
160+
name=f"elementwise_element_{reduction_name}")
159161

160162
return lp.tag_inames(t_unit, {
161163
"iel": ConcurrentElementInameTag(),
162164
})
163165

164-
discr = self.places.get_discretization(
165-
expr.dofdesc.geometry, expr.dofdesc.discr_stage)
166+
dofdesc = expr.dofdesc
166167
operand = self.rec(expr.operand)
167-
assert operand.shape == (len(discr.groups),)
168-
169-
def _reduce(knl, result):
170-
for g_operand, g_result in zip(operand, result):
171-
self.array_context.call_loopy(
172-
knl, operand=g_operand, result=g_result)
173-
174-
return result
175-
176-
dtype = operand.entry_dtype
177-
granularity = expr.dofdesc.granularity
178-
if granularity is sym.GRANULARITY_NODE:
179-
return _reduce(node_knl(),
180-
discr.empty(self.array_context, dtype=dtype))
181-
elif granularity is sym.GRANULARITY_ELEMENT:
182-
result = DOFArray(self.array_context, tuple([
183-
self.array_context.empty((grp.nelements, 1), dtype=dtype)
184-
for grp in discr.groups
168+
169+
if dofdesc.granularity is sym.GRANULARITY_NODE:
170+
return type(operand)(actx, tuple([
171+
actx.call_loopy(node_knl(), operand=operand_i)["result"]
172+
for operand_i in operand
173+
]))
174+
elif dofdesc.granularity is sym.GRANULARITY_ELEMENT:
175+
return type(operand)(actx, tuple([
176+
actx.call_loopy(element_knl(), operand=operand_i)["result"]
177+
for operand_i in operand
185178
]))
186-
return _reduce(element_knl(), result)
187179
else:
188-
raise ValueError(f"unsupported granularity: {granularity}")
180+
raise ValueError(f"unsupported granularity: {dofdesc.granularity}")
189181

190182
def map_elementwise_sum(self, expr):
191183
return self._map_elementwise_reduction("sum", expr)

test/test_symbolic.py

Lines changed: 45 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -306,26 +306,58 @@ def test_node_reduction(actx_factory):
306306

307307
# {{{ test
308308

309-
# create a shuffled [1, nelements + 1] array
310-
ary = []
311-
el_nr_base = 0
312-
for grp in discr.groups:
313-
x = 1 + np.arange(el_nr_base, grp.nelements)
314-
np.random.shuffle(x)
309+
# create a shuffled [1, ndofs + 1] array
310+
rng = np.random.default_rng(seed=42)
315311

316-
ary.append(actx.freeze(actx.from_numpy(x.reshape(-1, 1))))
317-
el_nr_base += grp.nelements
312+
def randrange_like(xi, offset):
313+
x = offset + np.arange(1, xi.size + 1)
314+
rng.shuffle(x)
315+
316+
return actx.from_numpy(x.reshape(xi.shape))
318317

319318
from meshmode.dof_array import DOFArray
320-
ary = DOFArray(actx, tuple(ary))
319+
base_node_nrs = np.cumsum([0] + [grp.ndofs for grp in discr.groups])
320+
ary = DOFArray(actx, tuple([
321+
randrange_like(xi, offset)
322+
for xi, offset in zip(discr.nodes()[0], base_node_nrs)
323+
]))
321324

325+
n = discr.ndofs
322326
for func, expected in [
323-
(sym.NodeSum, nelements * (nelements + 1) // 2),
324-
(sym.NodeMax, nelements),
327+
(sym.NodeSum, n * (n + 1) // 2),
328+
(sym.NodeMax, n),
325329
(sym.NodeMin, 1),
326330
]:
327-
r = bind(discr, func(sym.var("x")))(actx, x=ary)
328-
assert abs(actx.to_numpy(r) - expected) < 1.0e-15, r
331+
r = actx.to_numpy(
332+
bind(discr, func(sym.var("x")))(actx, x=ary)
333+
)
334+
assert r == expected, (r, expected)
335+
336+
arys = tuple([rng.random(size=xi.shape) for xi in ary])
337+
x = DOFArray(actx, tuple([actx.from_numpy(xi) for xi in arys]))
338+
339+
from meshmode.dof_array import flat_norm
340+
for func, np_func in [
341+
(sym.ElementwiseSum, np.sum),
342+
(sym.ElementwiseMax, np.max)
343+
]:
344+
expected = DOFArray(actx, tuple([
345+
actx.from_numpy(np.tile(np_func(xi, axis=1, keepdims=True), xi.shape[1]))
346+
for xi in arys
347+
]))
348+
r = bind(
349+
discr, func(sym.var("x"), dofdesc=sym.GRANULARITY_NODE)
350+
)(actx, x=x)
351+
assert actx.to_numpy(flat_norm(r - expected)) < 1.0e-15
352+
353+
expected = DOFArray(actx, tuple([
354+
actx.from_numpy(np_func(xi, axis=1, keepdims=True))
355+
for xi in arys
356+
]))
357+
r = bind(
358+
discr, func(sym.var("x"), dofdesc=sym.GRANULARITY_ELEMENT)
359+
)(actx, x=x)
360+
assert actx.to_numpy(flat_norm(r - expected)) < 1.0e-15
329361

330362
# }}}
331363

0 commit comments

Comments
 (0)