from __future__ import absolute_import
from __future__ import print_function

import itertools
import sys
from collections import Counter, defaultdict
from sympy.utilities.iterables import partitions

# pylint does not know sage
from sage.structure.sage_object import SageObject # pylint: disable=import-error
from sage.matrix.constructor import matrix # pylint: disable=import-error
from sage.misc.flatten import flatten # pylint: disable=import-error
from sage.misc.cachefunc import cached_method # pylint: disable=import-error
from sage.structure.formal_sum import FormalSum, FormalSums # pylint: disable=import-error
from sage.rings.rational_field import QQ # pylint: disable=import-error
from sage.arith.functions import lcm # pylint: disable=import-error
from sage.functions.other import factorial # pylint: disable=import-error
from sage.symbolic.ring import SR # pylint: disable=import-error
from sage.combinat.integer_vector_weighted import WeightedIntegerVectors # pylint: disable=import-error
from sage.functions.other import binomial # pylint: disable=import-error

import sage.misc.persist

from copy import deepcopy

import admcycles.admcycles

import admcycles.diffstrata.levelgraph
import admcycles.diffstrata.bic
import admcycles.diffstrata.sig
import admcycles.stratarecursion
import admcycles.diffstrata.stratatautring

from .cache import ADM_EVALS, TOP_XIS

#######################################################################
#######################################################################
###### Recursive Calculations and Degeneration Graphs
#######################################################################
## The idea is to do all calculations recursively.
## In particular, the Degeneration Graph is itself a recursive object.
##
## The key observation is that:
##  * Each 3-level graph arises by either clutching a top component of
##      a BIC to a BIC of its bottom component or a BIC of the top
##      component to the bottom component.
##  * On the other hand, each 3-level graph is the intersection of
##      two (different) BICs of the Stratum.
##  * Therefore, for each BIC B of the Stratum, every BIC Bt in the top
##      component corresponds to a unique BIC B' of the stratum, so
##      that the 3-level graph (Bt clutched to the bottom component of B)
##      is B*B' (i.e. delta_1 of this graph is B', delta_2 is B).
##      The same is true for the BICs of the bottom component.
##  * We thus obtain two maps, for each BIC B of the stratum:
##      * top_to_bic mapping BICs of the top component to BICs of
##          the stratum
##      * bot_to_bic mapping BICs of the bottom component to BICs of
##          the stratum
##  * These maps have disjoint images.
##  * These maps fail to be embeddings precisely when the intersection
##      of two BICs is not irreducible (i.e. clutching different BICs
##      to a component results in the intersection with the same divisor)
##      or when there are automorphisms involved (i.e. several ways of
##      undegenerating to the same BIC).
## We can thereby express the clutching of a product of BICs in the top
## and bottom components of a BIC in our stratum as a product of BICs of
## our stratum. Hence the procedure is recursive.
##
## Therefore, the GenDegenerationGraph needs to remember only the BICs
## together with, for each BIC, top and bottom components and the two maps.
##
## More precisely, the Degeneration Graph of a GeneralisedStratum
## consists of the following information:
##  * The BICs inside the Stratum.
##  * For each BIC, its top and bottom component (GeneralisedStratum
##      together with a dictionary Stratum points -> LevelGraph points)
##  * For each BIC, the BICs of its top and bottom component, together
##      with the maps top_to_bic and bot_to_bic.
##
## We can now calculate the GenDegenerationGraph:
##  * Step 1: Calculate all BICs in a GeneralisedStratum.
##  * Step 2: Separate these into top and bottom components.
##  * Step 3: Calculate all BICs in every top and bottom component.
##  * Step 4: Calculate top_to_bic and bot_to_bic for each BIC in the
##      Stratum (as dictionaries: index of BIC in top/bottom ->
##      index of BIC in stratum)
##
## In particular, this implies the following recursive algorithm for
## the EmbeddedLevelGraph of an arbitrary product of BICs in the stratum:
## INPUT: Product of BICs.
## OUTPUT: EmbeddedLevelGraph.
##  * Step 1: Choose a BIC B from the product (e.g. the first).
##  * Step 2: Find the preimages of the other BICs in the product under
##      top_to_bic and bot_to_bic of B.
##  * This gives (possibly multiple) products of BICs in the top and bottom
##      stratum of B.
##  * Step 3: Apply this recursively to the products in the top and bottom
##      stratum to obtain two EmbeddedLevelGraphs.
##  * Step 4: Return the clutching of the top and bottom graph.
##
## Moreover, we can generate the "lookup list", consisting of the non-empty
## products of BICs in each stratum.
## For this, we record all intersections that give 3-level graphs in each
## GenDegenerationGraph (i.e. (2,1) means that there exists a 3-level graph
## C such that delta(1) of C is bics[2] and delta(2) of C is bics[1]).
## Note that this is equivalent to 2 being in the image of top_to_bic(1).
##
## The key observation here is that any profile (i_1,...,i_n) can be
## written as a "domino" of 3-level graphs, i.e. (i_1,i_2)(i_2,i_3)...(i_{n-1},i_n).
##
## However, for the recursive generation of the lookup list, it is enough
## to take a profile and add the top degenerations of the first bic and the
## bottom degenerations of the last bic to obtain a profile with length
## increased by one (see the implementation below for more details).
##
#######################################################################
#######################################################################

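# Illustrative sketch (not part of the original file): a doctest-style look at
# the recursive data described above, using only methods documented in the
# docstrings of this class.  Concrete BIC indices and profile numbering are
# session-dependent (cf. the notes in lookup_list and lookup_graph), so the
# contents of top_to_bic are deliberately not asserted here.
#
#   sage: from admcycles.diffstrata import *
#   sage: X = GeneralisedStratum([Signature((4,))])
#   sage: len(X.bics)                       # the two-level graphs (BICs) of X
#   8
#   sage: ttb = X.DG.top_to_bic(0)          # BICs of the top component of X.bics[0],
#   sage: set(ttb.values()) <= set(range(len(X.bics)))   # recorded as indices of BICs of X
#   True
#   sage: all(len(p) == 2 for p in X.lookup_list[2])     # length-2 profiles are the 3-level graphs
#   True
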
class GeneralisedStratum(SageObject):
119
"""
120
A union of (meromorphic) strata with residue conditions.
121
122
A GeneralisedStratum is uniquely identified by the following information:
123
124
* sig_list : list of signatures [sig_1,...,sig_n], where sig_i is the Signature
125
of the component i,
126
127
* res_cond : list of residue conditions, i.e. [R_1,...,R_n] where each R_l is
128
a list of tuples (i,j), corresponding to the j-th component of sig_i, that
129
share a residue condition (i.e. the residues at these poles add up to 0).
130
Note that the residue theorem for each component will be added automatically.
131
"""
132
def __init__(self,sig_list,res_cond=None):
133
self._h0 = len(sig_list)
134
self._sig_list = sig_list
135
self._n = sum([sig.n for sig in sig_list]) # total number of points
136
self._g = [sig.g for sig in sig_list]
137
# remember poles as (i,j) where i is the component and j is the index in sig_i
138
self._polelist = [(i,j) for i,sig in enumerate(sig_list) for j in sig.pole_ind]
139
self._p = len(self._polelist)
140
if res_cond is None:
141
res_cond = []
142
self._res_cond = res_cond
143
self.init_more()
144
145
def init_more(self):
146
self._bics = None
147
self._smooth_LG = None
148
self._all_graphs = None
149
self._lookup_list = None
150
self._lookup = {}
151
if not self.is_empty():
152
self.DG = admcycles.diffstrata.stratatautring.GenDegenerationGraph(self)
153
# cache AdditiveGenerators:
154
self._AGs = {}
155
# tautological class of self:
156
one = self.additive_generator((tuple(),0))
157
self.ONE = one.as_taut()
158
# tautological class of zero:
159
self.ZERO = ELGTautClass(self,[])
160
161
def __repr__(self):
162
return "GeneralisedStratum(sig_list=%r,res_cond=%r)" % (self._sig_list,self._res_cond)
163
def __str__(self):
164
rep = ''
165
if self._h0 > 1:
166
rep += 'Product of Strata:\n'
167
else:
168
rep += 'Stratum: '
169
for sig in self._sig_list:
170
rep += repr(sig.sig) + '\n'
171
rep += 'with residue conditions: '
172
if not self._res_cond:
173
rep += repr([]) + '\n'
174
for res in self._res_cond:
175
rep += repr(res) + '\n'
176
return rep
177
178
def info(self):
179
"""
180
Print facts about self.
181
182
        This calculates everything, so it could take a long time(!)
183
184
EXAMPLES ::
185
186
sage: from admcycles.diffstrata import *
187
sage: X=GeneralisedStratum([Signature((1,1))])
188
sage: X.info()
189
Stratum: (1, 1)
190
with residue conditions: []
191
Genus: [2]
192
Dimension: 4
193
Boundary Graphs (without horizontal edges):
194
Codimension 0: 1 graph
195
Codimension 1: 4 graphs
196
Codimension 2: 4 graphs
197
Codimension 3: 1 graph
198
Total graphs: 10
199
200
sage: X=GeneralisedStratum([Signature((4,))])
201
sage: X.info()
202
Stratum: (4,)
203
with residue conditions: []
204
Genus: [3]
205
Dimension: 5
206
Boundary Graphs (without horizontal edges):
207
Codimension 0: 1 graph
208
Codimension 1: 8 graphs
209
Codimension 2: 19 graphs
210
Codimension 3: 16 graphs
211
Codimension 4: 4 graphs
212
Total graphs: 48
213
"""
214
print(self)
215
print("Genus: %s" % self._g)
216
print("Dimension: %s" % self.dim())
217
print("Boundary Graphs (without horizontal edges):")
218
tot = 0
219
for c, graphs in enumerate(self.all_graphs):
220
n = len(graphs)
221
print("Codimension %s: %s %s" % (c,n,_graph_word(n)))
222
tot += n
223
print("Total graphs: %s" % tot)
224
225
def additive_generator(self,enh_profile,leg_dict=None):
226
"""
227
The AdditiveGenerator for the psi-polynomial given by leg_dict on enh_profile.
228
229
For example, if psi_2 is the psi-class at leg 2 of enh_profile,
230
the polynomial psi_2^3 would be encoded by the leg_dict {2 : 3}.
231
232
This method should always be used instead of generating AdditiveGenerators
233
directly, as the objects are cached here, i.e. the _same_ object is returned
234
on every call.
235
236
Args:
237
enh_profile (tuple): enhanced profile
238
leg_dict (dict, optional): Dictionary mapping legs of the underlying
239
graph of enh_profile to positive integers, corresponding to
240
the power of the psi class associated to this leg. Defaults to None.
241
242
Returns:
243
AdditiveGenerator: the (cached) AdditiveGenerator
244
245
EXAMPLES ::
246
247
sage: from admcycles.diffstrata import *
248
sage: X=Stratum((2,))
249
sage: a = X.additive_generator(((0,),0))
250
sage: a is X.additive_generator(((0,),0))
251
True
252
sage: a is AdditiveGenerator(X,((0,),0))
253
False
254
"""
255
ag_hash = hash_AG(leg_dict, enh_profile)
256
return self.additive_generator_from_hash(ag_hash)
257
258
def additive_generator_from_hash(self,ag_hash):
259
        if ag_hash not in self._AGs:
260
self._AGs[ag_hash] = AdditiveGenerator.from_hash(self,ag_hash)
261
return self._AGs[ag_hash]
262
263
def simple_poles(self):
264
simple_poles = [p for p in self._polelist if self.stratum_point_order(p) == -1]
265
return simple_poles
266
267
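    # Illustrative sketch for simple_poles() above (not part of the original
    # file): for a stratum with a simple pole, e.g. Stratum((1,-1)), the pole
    # is the stratum point (0, 1) (component 0, signature index 1):
    #   sage: from admcycles.diffstrata import *
    #   sage: Stratum((1,-1)).simple_poles()
    #   [(0, 1)]
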
@cached_method
268
def is_empty(self):
269
"""
270
Checks if self fails to exist for residue reasons (simple pole with residue forced zero).
271
272
Returns:
273
bool: existence of simple pole with residue zero.
274
275
EXAMPLES ::
276
277
sage: from admcycles.diffstrata import *
278
sage: X=Stratum((1,-1))
279
sage: X.is_empty()
280
True
281
"""
282
for p in self.simple_poles():
283
if self.smooth_LG.residue_zero(p):
284
return True
285
return False
286
287
def is_disconnected(self):
288
return self._h0 > 1
289
290
def stratum_point_order(self,p):
291
"""
292
The pole order at the stratum point p.
293
294
Args:
295
p (tuple): Point (i,j) of self.
296
297
Returns:
298
int: Pole order of p.
299
"""
300
i, j = p
301
return self._sig_list[i].sig[j]
302
303
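    # Illustrative sketch for stratum_point_order() above (not part of the
    # original file): the order of the point (i, j) is the j-th entry of the
    # i-th signature, e.g.
    #   sage: from admcycles.diffstrata import *
    #   sage: X = GeneralisedStratum([Signature((2,-2,-2)),Signature((2,-2,-2))])
    #   sage: X.stratum_point_order((1,2))
    #   -2
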
@property
304
def bics(self):
305
"""
306
Initialise BIC list on first call.
307
308
Note that _bics is a list of tuples of EmbeddedLevelGraphs
309
(each tuple consists of one EmbeddedLevelGraph for every
310
connected component).
311
"""
312
if self.is_empty():
313
return []
314
if self._bics is None:
315
return self.gen_bic()
316
return self._bics
317
318
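    # Illustrative sketch for the bics property above (not part of the original
    # file): for the minimal stratum (2,) there are exactly two BICs, matching
    # lookup_list[1] in the doctest of lookup_list below:
    #   sage: from admcycles.diffstrata import *
    #   sage: len(Stratum((2,)).bics)
    #   2
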
@property
319
def res_cond(self):
320
return self._res_cond
321
322
@property
323
def lookup_list(self):
324
"""
325
The list of all (ordered) profiles.
326
327
Note that starting with SAGE 9.0 profile numbering is no longer deterministic.
328
329
Returns:
330
list: Nested list of tuples.
331
332
EXAMPLES ::
333
334
sage: from admcycles.diffstrata import *
335
sage: X=Stratum((2,))
336
sage: assert len(X.lookup_list) == 3
337
sage: X.lookup_list[0]
338
[()]
339
sage: X.lookup_list[1]
340
[(0,), (1,)]
341
sage: assert len(X.lookup_list[2]) == 1
342
"""
343
if self.is_empty():
344
return []
345
if self._lookup_list is None:
346
# First, we build the "lookup-list", i.e. the list of all profiles:
347
# the non-empty profiles can be found recursively:
348
# given a profile, we create new profiles by adding top and bottom
349
            # degenerations of the corresponding bic to the beginning and end.
350
self._lookup_list = [[tuple()]] # only one with 0 levels
351
n = len(self.bics)
352
self._lookup_list += [[(i,) for i in range(n)]] # bics
353
new_profiles = n
354
while new_profiles:
355
# we temporarily work with a set to avoid duplicates
356
self._lookup_list.append(set())
357
for profile in self._lookup_list[-2]:
358
first = profile[0]
359
for i in self.DG.top_to_bic(first).values():
360
self._lookup_list[-1].add((i,)+profile)
361
if len(profile) > 1:
362
last = profile[-1]
363
for i in self.DG.bot_to_bic(last).values():
364
self._lookup_list[-1].add(profile+(i,))
365
self._lookup_list[-1] = list(self._lookup_list[-1])
366
new_profiles = len(self._lookup_list[-1])
367
self._lookup_list.pop()
368
return self._lookup_list
369
370
@property
371
def all_graphs(self):
372
"""
373
Nested list of all EmbeddedLevelGraphs in self.
374
375
This list is built on first call.
376
377
EXAMPLES ::
378
379
sage: from admcycles.diffstrata import *
380
sage: X=GeneralisedStratum([Signature((1,1))])
381
sage: assert comp_list(X.all_graphs[0], [EmbeddedLevelGraph(X, LG=LevelGraph([2],[[1, 2]],[],{1: 1, 2: 1},[0],True),dmp={1: (0, 0), 2: (0, 1)},dlevels={0: 0})])
382
sage: assert comp_list(X.all_graphs[1], \
383
[EmbeddedLevelGraph(X, LG=LevelGraph([1, 0],[[1, 2], [3, 4, 5, 6]],[(1, 5), (2, 6)],{1: 0, 2: 0, 3: 1, 4: 1, 5: -2, 6: -2},[0, -1],True),dmp={3: (0, 0), 4: (0, 1)},dlevels={0: 0, -1: -1}),\
384
EmbeddedLevelGraph(X, LG=LevelGraph([1, 1, 0],[[1], [2], [3, 4, 5, 6]],[(2, 5), (1, 6)],{1: 0, 2: 0, 3: 1, 4: 1, 5: -2, 6: -2},[0, 0, -1],True),dmp={3: (0, 0), 4: (0, 1)},dlevels={0: 0, -1: -1}),\
385
EmbeddedLevelGraph(X, LG=LevelGraph([1, 1],[[1], [2, 3, 4]],[(1, 4)],{1: 0, 2: 1, 3: 1, 4: -2},[0, -1],True),dmp={2: (0, 0), 3: (0, 1)},dlevels={0: 0, -1: -1}),\
386
EmbeddedLevelGraph(X, LG=LevelGraph([2, 0],[[1], [2, 3, 4]],[(1, 4)],{1: 2, 2: 1, 3: 1, 4: -4},[0, -1],True),dmp={2: (0, 0), 3: (0, 1)},dlevels={0: 0, -1: -1})])
387
sage: assert comp_list(X.all_graphs[2],\
388
[EmbeddedLevelGraph(X, LG=LevelGraph([1, 0, 0],[[1], [2, 3, 4], [5, 6, 7, 8]],[(1, 4), (3, 8), (2, 7)],{1: 0, 2: 0, 3: 0, 4: -2, 5: 1, 6: 1, 7: -2, 8: -2},[0, -1, -2],True),dmp={5: (0, 0), 6: (0, 1)},dlevels={0: 0, -2: -2, -1: -1}),\
389
EmbeddedLevelGraph(X, LG=LevelGraph([1, 0, 0],[[1, 2], [3, 4, 5], [6, 7, 8]],[(1, 4), (2, 5), (3, 8)],{1: 0, 2: 0, 3: 2, 4: -2, 5: -2, 6: 1, 7: 1, 8: -4},[0, -1, -2],True),dmp={6: (0, 0), 7: (0, 1)},dlevels={0: 0, -2: -2, -1: -1}),\
390
EmbeddedLevelGraph(X, LG=LevelGraph([1, 1, 0],[[1], [2, 3], [4, 5, 6]],[(1, 3), (2, 6)],{1: 0, 2: 2, 3: -2, 4: 1, 5: 1, 6: -4},[0, -1, -2],True),dmp={4: (0, 0), 5: (0, 1)},dlevels={0: 0, -2: -2, -1: -1}),\
391
EmbeddedLevelGraph(X, LG=LevelGraph([1, 1, 0],[[1], [2], [3, 4, 5, 6]],[(2, 5), (1, 6)],{1: 0, 2: 0, 3: 1, 4: 1, 5: -2, 6: -2},[0, -1, -2],True),dmp={3: (0, 0), 4: (0, 1)},dlevels={0: 0, -2: -2, -1: -1})])
392
sage: assert comp_list(X.all_graphs[2], [EmbeddedLevelGraph(X, LG=LevelGraph([1, 0, 0, 0],[[1], [2, 3, 4], [5, 6, 7], [8, 9, 10]],[(1, 4), (3, 7), (2, 6), (5, 10)],{1: 0, 2: 0, 3: 0, 4: -2, 5: 2, 6: -2, 7: -2, 8: 1, 9: 1, 10: -4},[0, -1, -2, -3],True),dmp={8: (0, 0), 9: (0, 1)},dlevels={0: 0, -2: -2, -1: -1, -3: -3})])
393
"""
394
if self.is_empty():
395
return []
396
if self._all_graphs is None:
397
# We build the graph list from the lookup list:
398
# Note that lookup returns a list of graphs.
399
self._all_graphs = []
400
for l in self.lookup_list:
401
self._all_graphs.append(
402
list(itertools.chain.from_iterable(self.lookup(g)
403
for g in l))
404
)
405
# Ensure that degenerations of top and bottom match up:
406
assert all(k in self.DG.bot_to_bic(j).values()
407
for k in range(self.DG.n)
408
for j in self.DG.top_to_bic(k).values())
409
return self._all_graphs
410
411
@property
412
def smooth_LG(self):
413
"""
414
The smooth EmbeddedLevelGraph inside a LevelStratum.
415
416
Note that the graph might be disconnected!
417
418
EXAMPLES ::
419
420
sage: from admcycles.diffstrata import *
421
sage: X=GeneralisedStratum([Signature((1,1))])
422
sage: assert X.smooth_LG.is_isomorphic(EmbeddedLevelGraph(X,LG=LevelGraph([2],[[1, 2]],[],{1: 1, 2: 1},[0],True),dmp={1: (0, 0), 2: (0, 1)},dlevels={0: 0}))
423
424
Note that we get a single disconnected graph if the stratum is
425
disconnected.
426
427
sage: X=GeneralisedStratum([Signature((0,)), Signature((0,))])
428
sage: X.smooth_LG
429
EmbeddedLevelGraph(LG=LevelGraph([1, 1],[[1], [2]],[],{1: 0, 2: 0},[0, 0],True),dmp={1: (0, 0), 2: (1, 0)},dlevels={0: 0})
430
431
Returns:
432
EmbeddedLevelGraph: The output of unite_embedded_graphs applied to
433
the (embedded) smooth_LGs of each component of self.
434
"""
435
if not self._smooth_LG:
436
graph_list = []
437
for i,sig in enumerate(self._sig_list):
438
g = admcycles.diffstrata.levelgraph.smooth_LG(sig)
439
dmp = {j:(i,j-1) for j in range(1,sig.n+1)}
440
graph_list.append((self,g,dmp,{0:0}))
441
self._smooth_LG = unite_embedded_graphs(tuple(graph_list))
442
return self._smooth_LG
443
444
@cached_method
445
def residue_matrix(self):
446
"""
447
Calculate the matrix associated to the residue space,
448
i.e. a matrix with a line for every residue condition and a column for every pole of self.
449
450
The residue conditions consist ONLY of the ones coming from the GRC (in _res_cond)
451
For inclusion of the residue theorem on each component, use smooth_LG.full_residue_matrix!
452
"""
453
return self.matrix_from_res_conditions(self._res_cond)
454
455
def matrix_from_res_conditions(self,res_conds):
456
"""
457
Calculate the matrix for a list of residue conditions, i.e.
458
a matrix with one line for every residue condition and a column for each pole of self.
459
460
Args:
461
res_conds (list): list of residue conditions, i.e. a nested list R
462
R = [R_1,R_2,...] where each R_i is a list of poles (stratum points)
463
whose residues add up to zero.
464
465
Returns:
466
SAGE matrix: residue matrix (with QQ coefficients)
467
468
EXAMPLES ::
469
470
sage: from admcycles.diffstrata import *
471
sage: X=GeneralisedStratum([Signature((2,-2,-2)),Signature((2,-2,-2))])
472
sage: X.matrix_from_res_conditions([[(0,1),(0,2),(1,2)],[(0,1),(1,1)],[(1,1),(1,2)]])
473
[1 1 0 1]
474
[1 0 1 0]
475
[0 0 1 1]
476
"""
477
res_vec = []
478
for res_c in res_conds:
479
# note: int(True)=1, int(False)=0
480
res_vec += [[int(p in res_c) for p in self._polelist]]
481
return matrix(QQ,res_vec)
482
483
@cached_method
484
def residue_matrix_rank(self):
485
return self.residue_matrix().rank()
486
487
@cached_method
488
def dim(self):
489
"""
490
        Return the dimension of the stratum, i.e. the sum of 2g_i + n_i - 1 over
        the connected components, minus the number of residue conditions, minus
        1 for the projectivisation.
491
492
The residue conditions are given by the rank of the (full!) residue matrix.
493
494
Empty strata return -1.
495
496
EXAMPLES ::
497
498
sage: from admcycles.diffstrata import *
499
sage: X=GeneralisedStratum([Signature((4,))])
500
sage: all(B.top.dim() + B.bot.dim() == X.dim()-1 for B in X.bics)
501
True
502
"""
503
if self.is_empty():
504
return -1
505
# add residue conditions from RT for every connected component:
506
M = self.smooth_LG.full_residue_matrix
507
return (sum([2*sig.g + sig.n - 1 for sig in self._sig_list])
508
- M.rank() - 1)
509
510
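    # Illustrative sketch for dim() above (not part of the original file): the
    # values agree with the dimensions printed by info() in its docstring, e.g.
    #   sage: from admcycles.diffstrata import *
    #   sage: GeneralisedStratum([Signature((1,1))]).dim()
    #   4
    #   sage: GeneralisedStratum([Signature((4,))]).dim()
    #   5
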
def gen_bic(self):
511
"""
512
Generates all BICs (using bic) as EmbeddedLevelGraphs.
513
514
Returns:
515
list: self._bics i.e. a list of (possibly disconnected)
516
EmbeddedLevelGraphs.
517
(More precisely, each one is a tuple consisting of one
518
EmbeddedLevelGraph for every connected component that has
519
been fed to unite_embedded_graphs).
520
"""
521
self._bics = []
522
if self.is_empty():
523
return
524
## The BICs are the products of BICs for each connected component
525
## (satisfying the residue condition).
526
## Moreover, if there are several connected components, we also need
527
## to include the smooth stratum on each level.
528
emb_bic_list = []
529
530
## First, we establish the dictionaries for the EmbeddedLevelGraphs:
531
## * the marked points of the stratum are numbered (i,j) where (i,j)
532
## is the j-th point on the i-th connected component.
533
## Note that j is the index in sig, i.e. starts at 0.
534
## * on each BIC, the i-th point of the signature is the point i+1
535
## mp_dict maps the points on the BIC to the points of the stratum
536
for i, sig in enumerate(self._sig_list):
537
mp_dict = {j : (i,j-1) for j in range(1,sig.n+1)}
538
# We can't build the EmbeddedLevelGraph until we have the data for all
539
# components (otherwise we mess up the legality check, etc.)
540
            # So for now, we just store the generating info for each connected component separately.
541
emb_bic_list_cur = []
542
for g in admcycles.diffstrata.bic.bic_alt_noiso(sig.sig): # TODO: Adjust for Signature!!
543
level_dict = {g.internal_level_number(0): 0,
544
g.internal_level_number(-1): -1}
545
EG = (self,g,mp_dict,level_dict)
546
emb_bic_list_cur.append(EG)
547
if self._h0 > 1:
548
# we need the smooth component on each level
549
for l in [0,-1]:
550
emb_bic_list_cur.append((self, admcycles.diffstrata.levelgraph.smooth_LG(sig), mp_dict,
551
{0 : l}, # one for each level
552
)
553
)
554
emb_bic_list.append(emb_bic_list_cur)
555
# The elements of _bics are now products of the (embedded) bics of the components
556
# Careful: The only ones that are not allowed are those, where all
557
# components are on one level!!
558
prod_bics = itertools.product(*emb_bic_list)
559
for prod_graph in prod_bics:
560
# levels are stored in values of the dict in component 3 of each tuple:
561
if (any(0 in g[3].values() for g in prod_graph) and
562
any(-1 in g[3].values() for g in prod_graph)
563
):
564
# NOTE: This actually builds the EmbeddedLevelGraphs!
565
pg = unite_embedded_graphs(prod_graph)
566
if pg.is_legal(): # check r-GRC
567
self._bics.append(pg)
568
# isomorphism classes: (possibly figure out how to check earlier?)
569
self._bics = admcycles.diffstrata.bic.isom_rep(self._bics)
570
return self._bics
571
572
#### Ideally, we could always work with enhanced profiles, never with graphs.
573
#### Then edge maps could work like this:
574
## Def: A leg is a tuple consisting of:
575
## * an enhanced profile (of a levelgraph)
576
## * the levelstratum inside the profile (e.g. for the profile (1,2,3) this would
577
## be either 1^top, 3^bot, (12) or (23)). These were computed for the stratum anyway.
578
## * an orbit of a marked point of this gen levelstratum, which corresponds to an edge
579
## of the corresponding graph
580
## i.e. an ordered tuple of marked points equivalent by automorphisms of the corresponding
581
## BIC or 3-level graph (which should be also an automorphism of the full graph??!!)
582
##
583
## Then:
584
## INPUT: (leg, enhanced profile)
585
## The enhanced profile should be that of a degeneration of the graph of leg (!)
586
##
587
## OUTPUT: leg (on the second profile)
588
##
589
## Case 1:
590
## The levelstratum of the leg is unchanged by the degeneration.
591
## i.e.: (1,2) and (1,2,3) for an edge on (1,2).
592
## In this case the output is trivially the same edge embedded into (1,2,3)
593
## (because (1,2) is still a level of (1,2,3)).
594
##
595
## Case 2:
596
## The levelstratum is degenerated,
597
    ## i.e.: (1,2) and (1,3,2) for a leg e on (1,2).
598
## In this case we know that e (by checking the sign of the order) is either
599
## a leg on 1^bot or 2^top and the degeneration is given by top_to_bic_inv (or
600
## bot_to_bic_inv) of 3, where we can then track the marked point associated to e.
601
####
602
603
# TODO: This should work "smarter", see above.
604
@cached_method
605
def explicit_leg_maps(self,enh_profile,enh_deg_profile,only_one=False):
606
"""
607
Provide explicit leg maps (as list of dictionaries: legs of LG to legs of LG), from
608
the graph associated to enh_profile to the one associated to enh_deg_profile.
609
610
If enh_deg_profile is not a degeneration (on the level of profiles), None is
611
returned.
612
613
Args:
614
enh_profile (enhanced profile): tuple (profile, index).
615
enh_deg_profile (enhanced profile): tuple (profile, index).
616
only_one (bool, optional): Give only one (the 'first') map (or None if none exist).
617
Defaults to False.
618
619
Raises:
620
RuntimeError: If enh_profile is empty.
621
UserWarning: If there are no degenerations in the appropriate profile.
622
623
Returns:
624
list of dicts: List of the leg isomorphisms, None if not a degeneration,
625
only one dict if only_one=True.
626
627
EXAMPLES ::
628
629
"""
630
profile = enh_profile[0]
631
deg_profile = enh_deg_profile[0]
632
# check if deg_profile is actually a (profile) degeneration:
633
if not set(profile).issubset(set(deg_profile)):
634
return None
635
g = self.lookup_graph(*enh_profile)
636
degen = self.lookup_graph(*enh_deg_profile)
637
if not degen:
638
raise RuntimeError ("%r is not a graph in %r!" % (enh_deg_profile,self))
639
# To obtain g, we have to squish degen at the places in the profile
640
# of degen that are not in the profile of g.
641
# We work from right to left to avoid confusion with the level numbering.
642
degen_squish = degen
643
for level, bic_index in list(enumerate(deg_profile))[::-1]:
644
if bic_index in profile:
645
continue
646
degen_squish = degen_squish.squish_vertical(level)
647
isoms = (l_iso for v_iso, l_iso in g.isomorphisms(degen_squish))
648
try:
649
first_isom = next(isoms)
650
except StopIteration:
651
# No isomorphisms found
652
raise UserWarning("Squish of %r not isomorphic to %r!" % (enh_deg_profile, enh_profile))
653
if only_one:
654
return first_isom
655
else:
656
return [first_isom] + list(isoms)
657
658
#####
659
### Common degenerations:
660
## This should eat two graphs, given by their "enhanced profile" i.e. things we can
661
## feed to graph_lookup (a list of BICs and an index) and also return a list of
662
## enhanced profiles.
663
664
### Naive approach:
665
## do a (set-wise) degeneration of the profile and just go through the list
666
## checking which ones are actually degenerations:
667
## INPUT: Profile + index
668
## OUTPUT: List of profiles + indices
669
670
## TODO: There should be a smart way! For that one has to understand
671
## how to correctly encode the irreducible components of the profiles.
672
673
@cached_method
674
def common_degenerations(self,s_enh_profile,o_enh_profile):
675
"""
676
Find common degenerations of two graphs.
677
678
Args:
679
s_enh_profile (tuple): Enhanced profile, i.e. a tuple (p,k) consisting of
680
* a sorted (!) profile p
681
* an index in self.lookup(p)
682
thus giving the information of an EmbeddedLevelGraph in self.
683
o_enh_profile (tuple): Enhanced profile.
684
685
Returns:
686
list: list of enhanced profiles, i.e. entries of type [tuple profile, index]
687
(that undegenerate to the two given graphs).
688
689
EXAMPLES ::
690
691
sage: from admcycles.diffstrata import *
692
sage: X=GeneralisedStratum([Signature((4,))])
693
694
To retrieve the actual EmbeddedLevelGraphs, we must use lookup_graph.
695
(Because of BIC renumbering between different SAGE versions we can't provide any concrete examples :/)
696
697
Note that the number of components can also go down.
698
699
Providing common graphs works:
700
701
sage: X.common_degenerations(((2,),0),((2,),0))
702
[((2,), 0)]
703
704
Empty intersection gives empty list.
705
706
"""
707
s_profile = s_enh_profile[0]
708
o_profile = o_enh_profile[0]
709
try:
710
# merge_profiles returns None if there aren't any...
711
deg_profile = tuple(self.merge_profiles(s_profile,o_profile))
712
except TypeError:
713
return []
714
return_list = []
715
# careful with reducible profiles:
716
for i in range(len(self.lookup(deg_profile))):
717
if self.is_degeneration((deg_profile,i),s_enh_profile) and self.is_degeneration((deg_profile,i),o_enh_profile):
718
return_list.append((deg_profile,i))
719
return return_list
720
721
# Excess intersection of two additive generators in an ambient graph
722
def intersection(self,s_taut_class,o_taut_class,amb_enh_profile=None):
723
"""
724
Excess intersection of two tautological classes in Chow of ambient_enh_profile.
725
726
Args:
727
s_taut_class (ELGTautClass): tautological class
728
o_taut_class (ELGTautClass): tautological class
729
amb_enh_profile (tuple, optional): enhanced profile of ambient graph.
730
Defaults to None.
731
732
Raises:
733
RuntimeError: raised if any summand of any tautological class is not on
734
a degeneration of ambient_enh_profile.
735
736
Returns:
737
ELGTautClass: Tautological class on common degenerations
738
"""
739
# check input:
740
if amb_enh_profile is None:
741
amb_enh_profile = ((),0)
742
if s_taut_class == 0 or s_taut_class == self.ZERO:
743
return self.ZERO
744
if s_taut_class == 1 or s_taut_class == self.ONE:
745
return o_taut_class
746
if o_taut_class == 0 or o_taut_class == self.ZERO:
747
return self.ZERO
748
if o_taut_class == 1 or o_taut_class == self.ONE:
749
return s_taut_class
750
return_list = []
751
# unpack tautological classes:
752
for s_coeff, s_add_gen in s_taut_class.psi_list:
753
for o_coeff, o_add_gen in o_taut_class.psi_list:
754
prod = self.intersection_AG(s_add_gen, o_add_gen, amb_enh_profile)
755
if prod == 0 or prod == self.ZERO:
756
continue
757
return_list.append(s_coeff*o_coeff * prod)
758
return_value = self.ELGsum(return_list)
759
if return_value == 0:
760
return self.ZERO
761
if s_taut_class.is_equidimensional() and o_taut_class.is_equidimensional():
762
assert return_value.is_equidimensional(),\
763
"Product of equidimensional classes is not equidimensional! %s * %s = %s"\
764
% (s_taut_class, o_taut_class, return_value)
765
return return_value
766
767
@cached_method
768
def intersection_AG(self, s_add_gen, o_add_gen, amb_enh_profile=None):
769
"""
770
Excess intersection formula for two AdditiveGenerators in Chow of amb_enh_profile.
771
772
Note that as AdditiveGenerators and enhanced profiles are hashable,
773
this method can (and will) be cached (in contrast with intersection).
774
775
Args:
776
s_add_gen (AdditiveGenerator): first AG
777
o_add_gen (AdditiveGenerator): second AG
778
amb_enh_profile (tuple, optional): enhanced profile of ambient graph.
779
Defaults to None.
780
781
Raises:
782
RuntimeError: raised if any of the AdditiveGenerators is not on
783
a degeneration of ambient_enh_profile.
784
785
Returns:
786
ELGTautClass: Tautological class on common degenerations
787
"""
788
if amb_enh_profile is None:
789
amb_enh_profile = ((),0)
790
s_enh_profile = s_add_gen.enh_profile
791
o_enh_profile = o_add_gen.enh_profile
792
if not self.is_degeneration(s_enh_profile,amb_enh_profile):
793
raise RuntimeError("%r is not a degeneration of %r" % (s_enh_profile,amb_enh_profile))
794
if not self.is_degeneration(o_enh_profile,amb_enh_profile):
795
raise RuntimeError("%r is not a degeneration of %r" % (o_enh_profile,amb_enh_profile))
796
# Degree check:
797
        # * the degree of the product is the sum of the degrees
798
# * the product is supported on a set of codim >= max(codim(s),codim(o))
799
# => if the sum of the degrees is > (dim(self) - max(codim(s),codim(o)))
800
# the product will be 0 in any case
801
# NOTE: degree = codim + psi-degree
802
deg_sum = s_add_gen.psi_degree + o_add_gen.psi_degree
803
if deg_sum > self.dim() - max(len(s_enh_profile[0]),len(o_enh_profile[0])):
804
return self.ZERO
805
degenerations = self.common_degenerations(s_enh_profile,o_enh_profile)
806
if not degenerations:
807
return self.ZERO
808
NB = self.cnb(s_enh_profile,o_enh_profile,amb_enh_profile)
809
if NB == 1:
810
# Intersection is transversal, in this case we are done:
811
# the pullback of an additive generator is a taut class
812
# where all classes live on the same graph:
813
prod = [(_cs * _co, s_pb * o_pb)
814
for L in degenerations
815
for _cs, s_pb in s_add_gen.pull_back(L).psi_list
816
for _co, o_pb in o_add_gen.pull_back(L).psi_list]
817
return ELGTautClass(self,[(c, AG) for c, AG in prod])
818
elif NB == 0 or NB == self.ZERO:
819
# product with 0 is 0 ...
820
return NB
821
else:
822
# intersect the pullback to L with the normal bundle pulled back to L (in L):
823
summands = [self.intersection(
824
self.intersection(
825
s_add_gen.pull_back(L),
826
o_add_gen.pull_back(L),
827
L),
828
self.gen_pullback_taut(
829
NB,
830
L,
831
self.minimal_common_undegeneration(s_enh_profile,o_enh_profile)
832
),
833
L)
834
for L in degenerations]
835
return self.ELGsum([tcls for tcls in summands])
836
837
def normal_bundle(self, enh_profile, ambient=None):
838
"""
839
Normal bundle of enh_profile in ambient.
840
841
Note that this is equivalent to cnb(enh_profile, enh_profile, ambient).
842
843
Args:
844
enh_profile (tuple): enhanced profile
845
ambient (tuple, optional): enhanced profile. Defaults to None.
846
847
Raises:
848
ValueError: Raised if enh_profile is not a codim 1 degeneration of ambient
849
850
Returns:
851
ELGTautClass: Normal bundle N_{enh_profile, ambient}
852
"""
853
if ambient is None:
854
ambient = ((),0)
855
else:
856
ambient = (tuple(ambient[0]),ambient[1])
857
if len(enh_profile[0]) != len(ambient[0]) + 1 or not self.is_degeneration(enh_profile,ambient):
858
raise ValueError("%r is not a codim 1 degeneration of %r" % (enh_profile,ambient))
859
return self.cnb(enh_profile, enh_profile, ambient)
860
861
# This is an element of CH^s(ambient) where s is the cardinality of the intersection of profiles
862
# or equivalently in CH^(c+s)(B) where c is the codimension of ambient.
863
@cached_method
864
def cnb(self,s_enh_profile,o_enh_profile,amb_enh_profile=None):
865
"""
866
Common Normal bundle of two graphs in an ambient graph.
867
868
Note that for a trivial normal bundle (transversal intersection)
869
we return 1 (int) and NOT self.ONE !!
870
871
The reason is that the ``correct'' ONE would be the ambient graph and that
872
is a pain to keep track of in intersection....
873
874
Args:
875
s_enh_profile (tuple): enhanced profile
876
o_enh_profile (tuple): enhanced profile
877
amb_enh_profile (tuple, optional): enhanced profile. Defaults to None.
878
879
Raises:
880
RuntimeError: Raised if s_enh_profile or o_enh_profile do not degenerate
881
from amb_enh_profile.
882
883
Returns:
884
ELGTautClass: Product of normal bundles appearing.
885
1 if the intersection is transversal.
886
"""
887
# check/normalise input:
888
if amb_enh_profile is None:
889
amb_enh_profile = ((),0)
890
else:
891
amb_enh_profile = (tuple(amb_enh_profile[0]),amb_enh_profile[1])
892
if not self.is_degeneration(s_enh_profile,amb_enh_profile):
893
raise RuntimeError("%r is not a degeneration of %r" % (s_enh_profile,amb_enh_profile))
894
if not self.is_degeneration(o_enh_profile,amb_enh_profile):
895
raise RuntimeError("%r is not a degeneration of %r" % (o_enh_profile,amb_enh_profile))
896
min_com = self.minimal_common_undegeneration(s_enh_profile,o_enh_profile)
897
if min_com == amb_enh_profile:
898
return 1 # terminating condition, transversal
899
else:
900
assert self.codim_one_common_undegenerations(s_enh_profile,o_enh_profile,amb_enh_profile),\
901
"minimal common undegeneration is %r, ambient profile is %r, but there aren't codim one common undegenerations!"\
902
% (min_com,amb_enh_profile)
903
return_list = []
904
for ep in self.codim_one_common_undegenerations(s_enh_profile,o_enh_profile,amb_enh_profile):
905
G = self.lookup_graph(*ep)
906
p, i = ep
907
AGG = self.additive_generator(ep,None)
908
# This is the "difference" between ep and amb_enh_profile:
909
# i.e. the inserted level, i in paper notation
910
squished_level = get_squished_level(ep,amb_enh_profile)
911
ll = self.bics[p[squished_level]].ell
912
xi_top = self.xi_at_level(squished_level,ep,quiet=True)
913
xi_bot = self.xi_at_level(squished_level+1,ep,quiet=True)
914
xis = -xi_top + xi_bot
915
summand = 1/QQ(ll) * self.gen_pullback_taut(xis,min_com,ep)
916
# calL pulled back to min_com:
917
summand -= 1/QQ(ll) * self.gen_pullback_taut(self.calL(ep, squished_level),min_com,ep)
918
if summand == 0:
919
# product is zero!
920
return self.ZERO
921
assert summand.is_equidimensional(),\
922
"Not all summands in %s of same degree!" % summand
923
return_list.append(summand)
924
# product over normal bundles:
925
if not return_list:
926
return 1 # empty product => transversal
927
NBprod = return_list[0]
928
for nb in return_list[1:]:
929
NBprod = self.intersection(NBprod,nb,min_com)
930
assert NBprod.is_equidimensional(), "Not all summands in %s of same degree!" % NBprod
931
return NBprod
932
933
@cached_method
934
def gen_pullback(self,add_gen,o_enh_profile,amb_enh_profile=None):
935
"""
936
Generalised pullback of additive generator to o_enh_profile in amb_enh_profile.
937
938
Args:
939
add_gen (AdditiveGenerator): additive generator on a degeneration of amb_enh_profile.
940
o_enh_profile (tuple): enhanced profile (degeneration of amb_enh_profile)
941
amb_enh_profile (tuple, optional): enhanced profile. Defaults to None.
942
943
Raises:
944
RuntimeError: If one of the above is not actually a degeneration of amb_enh_profile.
945
946
Returns:
947
ELGTautClass: Tautological class on common degenerations of AdditiveGenerator profile and o_enh_profile.
948
"""
949
# check input:
950
if amb_enh_profile is None:
951
amb_enh_profile = ((),0)
952
if not self.is_degeneration(o_enh_profile,amb_enh_profile):
953
raise RuntimeError("%r is not a degeneration of %r" % (o_enh_profile,amb_enh_profile))
954
s_enh_profile = add_gen.enh_profile
955
if not self.is_degeneration(s_enh_profile,amb_enh_profile):
956
raise RuntimeError("%r is not a degeneration of %r" % (s_enh_profile,amb_enh_profile))
957
degenerations = self.common_degenerations(s_enh_profile,o_enh_profile)
958
# if there are no common degenerations, pullback is 0
959
if not degenerations:
960
return self.ZERO
961
NB = self.cnb(s_enh_profile,o_enh_profile,amb_enh_profile)
962
# stop condition
963
if NB == 0 or NB == self.ZERO:
964
return 0
965
return_list = []
966
for L in degenerations:
967
if NB == 1:
968
# transversal
969
return_list.append(add_gen.pull_back(L))
970
else:
971
return_list.append(
972
self.intersection(
973
self.gen_pullback_taut(NB, L, self.minimal_common_undegeneration(s_enh_profile,o_enh_profile)),
974
add_gen.pull_back(L),
975
L
976
)
977
)
978
return_value = self.ELGsum(return_list)
979
if return_value != 0:
980
return return_value
981
else:
982
return self.ZERO
983
984
def gen_pullback_taut(self, taut_class, o_enh_profile,amb_enh_profile=None):
985
"""
986
Generalised pullback of tautological class to o_enh_profile in amb_enh_profile.
987
988
This simply returns the ELGSum of gen_pullback of all AdditiveGenerators.
989
990
Args:
991
taut_class (ELGTautClass): tautological class each summand on a degeneration of amb_enh_profile.
992
o_enh_profile (tuple): enhanced profile (degeneration of amb_enh_profile)
993
amb_enh_profile (tuple, optional): enhanced profile. Defaults to None.
994
995
Raises:
996
RuntimeError: If one of the above is not actually a degeneration of amb_enh_profile.
997
998
Returns:
999
ELGTautClass: Tautological class on common degenerations of AdditiveGenerator profile and o_enh_profile.
1000
"""
1001
return_list = []
1002
for c, AG in taut_class.psi_list:
1003
return_list.append(c * self.gen_pullback(AG, o_enh_profile, amb_enh_profile))
1004
return self.ELGsum(return_list)
1005
1006
## TODO: There should be a better way for this, using just BICs and where
1007
## marked points go ... (see discussion above)
1008
@cached_method
1009
def explicit_edge_becomes_long(self,enh_profile,edge):
1010
"""
1011
A list of enhanced profiles where the (explicit) edge 'edge' became long.
1012
1013
We go through the codim one degenerations of enh_profile and check
1014
each graph, if edge became long (under any degeneration).
1015
1016
However, we count each graph only once, even if there are several ways
1017
to undegenerate (see examples).
1018
1019
Args:
1020
enh_profile (tuple): enhanced profile: (profile, index).
1021
edge (tuple): edge of the LevelGraph associated to enh_profile:
1022
(start leg, end leg).
1023
1024
Raises:
1025
RuntimeError: Raised if the leg is not a leg of the graph of enh_profile.
1026
1027
Returns:
1028
list: list of enhanced profiles.
1029
1030
EXAMPLES ::
1031
1032
sage: from admcycles.diffstrata import *
1033
sage: X=Stratum((1,1))
1034
sage: V=[ep for ep in X.enhanced_profiles_of_length(1) if X.lookup_graph(*ep).level(0)._h0 == 2]
1035
sage: epV=V[0]
1036
sage: VLG=X.lookup_graph(*epV).LG
1037
sage: assert len(X.explicit_edge_becomes_long(epV, VLG.edges[1])) == 1
1038
sage: assert X.explicit_edge_becomes_long(epV, VLG.edges[1]) == X.explicit_edge_becomes_long(epV, VLG.edges[1])
1039
1040
"""
1041
ep_list = []
1042
for ep in self.codim_one_degenerations(enh_profile):
1043
g = self.lookup_graph(*ep)
1044
if g.LG.has_long_edge:
1045
for leg_map in self.explicit_leg_maps(enh_profile,ep):
1046
try:
1047
if g.LG.is_long((leg_map[edge[0]],leg_map[edge[1]])):
1048
ep_list.append(ep)
1049
                            break # Not sure if we want to record several occurrences...
1050
except KeyError:
1051
raise RuntimeError("%r does not seem to be an edge of %r"
1052
% (edge, enh_profile))
1053
return ep_list
1054
1055
@cached_method
1056
def explicit_edges_between_levels(self,enh_profile,start_level,stop_level):
1057
"""
1058
Edges going from (relative) level start_level to (relative) level stop_level.
1059
1060
Note that we assume here that edges respect the level structure, i.e.
1061
        start on start_level and end on stop_level!
1062
1063
Args:
1064
enh_profile (tuple): enhanced profile
1065
start_level (int): relative level number (0...codim)
1066
stop_level (int): relative level number (0...codim)
1067
1068
Returns:
1069
list: list of edges, i.e. tuples (start_point,end_point)
1070
1071
EXAMPLES ::
1072
1073
sage: from admcycles.diffstrata import *
1074
sage: X=Stratum((2,))
1075
1076
Compact type:
1077
1078
sage: assert len([ep for ep in X.enhanced_profiles_of_length(1) if len(X.explicit_edges_between_levels(ep,0,1)) == 1]) == 1
1079
1080
Banana:
1081
1082
sage: assert len([ep for ep in X.enhanced_profiles_of_length(1) if len(X.explicit_edges_between_levels(ep,0,1)) == 2]) == 1
1083
1084
"""
1085
G = self.lookup_graph(*enh_profile)
1086
# TODO: There should be a way smarter way for doing this...
1087
edges = [e for e in G.LG.edges
1088
if (G.LG.level_number(G.LG.levelofleg(e[0])) == start_level and
1089
G.LG.level_number(G.LG.levelofleg(e[1])) == stop_level)]
1090
return edges
1091
1092
### Finding codimension one degenerations:
1093
# This is not very fancy yet.
1094
# At the moment, we take a profile and check at which places we can compatibly
1095
# insert a BIC (similarly to creating the lookup_list).
1096
# We then check "by hand", if this is ok with the enhanced structure, i.e.
1097
# on connected components.
1098
# Note that this check is bypassed if the input profile is irreducible.
1099
@cached_method
1100
def codim_one_degenerations(self,enh_profile):
1101
"""
1102
Degenerations of enh_profile with one more level.
1103
1104
Args:
1105
enh_profile (enhanced profile): tuple (profile, index)
1106
1107
Raises:
1108
RuntimeError: Error if we find a degeneration that doesn't squish
1109
back to the graph we started with.
1110
1111
Returns:
1112
list: list of enhanced profiles.
1113
1114
EXAMPLES ::
1115
1116
sage: from admcycles.diffstrata import *
1117
sage: X=GeneralisedStratum([Signature((4,))])
1118
sage: assert all(len(p) == 2 for p, _ in X.codim_one_degenerations(((2,),0)))
1119
1120
Empty profile gives all bics:
1121
1122
sage: assert X.codim_one_degenerations(((),0)) == [((0,), 0), ((1,), 0), ((2,), 0), ((3,), 0), ((4,), 0), ((5,), 0), ((6,), 0), ((7,), 0)]
1123
"""
1124
profile = list(enh_profile[0])
1125
# empty profile gives all bics:
1126
if not profile:
1127
return [((b,),0) for b in range(len(self.bics))]
1128
deg_list = []
1129
# build all length 1 profile extensions:
1130
# The first and last entry don't have any compatibility conditions:
1131
# add all top degenerations of the first guy
1132
for bic in self.DG.top_to_bic(profile[0]).values():
1133
deg_list.append(tuple([bic] + profile[:]))
1134
# and all bottom degenerations of the last guy
1135
for bic in self.DG.bot_to_bic(profile[-1]).values():
1136
deg_list.append(tuple(profile[:] + [bic]))
1137
# For the "middle" entries of the profile, we have to check compatibility
1138
for i in range(len(profile)-1):
1139
for bic in self.DG.bot_to_bic(profile[i]).values(): # candidates
1140
if bic in self.DG.top_to_bic(profile[i+1]).values():
1141
deg_list.append(tuple(profile[:i+1] + [bic] + profile[i+1:]))
1142
deg_list = list(set(deg_list)) # remove duplicates
1143
# We now build the list of enhanced profiles:
1144
enh_list = []
1145
for p in deg_list:
1146
for i in range(len(self.lookup(p))):
1147
if self.is_degeneration((p,i),enh_profile):
1148
enh_list.append((p,i))
1149
return enh_list
1150
1151
@cached_method
1152
def codim_one_common_undegenerations(self,s_enh_profile,o_enh_profile,amb_enh_profile=None):
1153
"""
1154
Profiles that are 1-level degenerations of amb_enh_profile and include
1155
s_enh_profile and o_enh_profile.
1156
1157
Args:
1158
s_enh_profile (tuple): enhanced profile
1159
o_enh_profile (tuple): enhanced profile
1160
amb_enh_profile (tuple): enhanced profile
1161
1162
Returns:
1163
list: list of enhanced profiles
1164
1165
EXAMPLES ::
1166
1167
"""
1168
if amb_enh_profile is None:
1169
amb_enh_profile = ((),0)
1170
profile_list = []
1171
for ep in self.codim_one_degenerations(amb_enh_profile):
1172
if self.is_degeneration(s_enh_profile,ep) and self.is_degeneration(o_enh_profile,ep):
1173
profile_list.append(ep)
1174
return profile_list
1175
1176
@cached_method
1177
def minimal_common_undegeneration(self,s_enh_profile,o_enh_profile):
1178
"""
1179
        The minimal dimension graph that is an undegeneration of both s_enh_profile
1180
and o_enh_profile.
1181
1182
Args:
1183
s_enh_profile (tuple): enhanced profile
1184
o_enh_profile (tuple): enhanced profile
1185
1186
Raises:
1187
            RuntimeError: If there are no common undegenerations in the intersection profile.
1188
1189
Returns:
1190
tuple: enhanced profile
1191
1192
EXAMPLES ::
1193
1194
"""
1195
s_profile = s_enh_profile[0]
1196
o_profile = o_enh_profile[0]
1197
# build "sorted" intersection
1198
intersection = []
1199
for b in s_profile:
1200
if b in o_profile:
1201
intersection.append(b)
1202
# make hashable
1203
intersection = tuple(intersection)
1204
# if the intersection profile is irreducible, we are done:
1205
if len(self.lookup(intersection)) == 1:
1206
return (intersection, 0)
1207
else:
1208
for i in range(len(self.lookup(intersection))):
1209
if (self.is_degeneration(s_enh_profile,(intersection,i)) and
1210
self.is_degeneration(o_enh_profile,(intersection,i))):
1211
return (intersection, i)
1212
else:
1213
raise RuntimeError("No common undegeneration in profile %r" % intersection)
1214
1215
@cached_method
1216
def is_degeneration(self,s_enh_profile,o_enh_profile):
1217
"""
1218
Check if s_enh_profile is a degeneration of o_enh_profile.
1219
1220
Args:
1221
s_enh_profile (tuple): enhanced profile
1222
o_enh_profile (tuple): enhanced profile
1223
1224
Returns:
1225
bool: True if the graph associated to s_enh_profile is a degeneration
1226
of the graph associated to o_enh_profile, False otherwise.
1227
1228
EXAMPLES ::
1229
1230
sage: from admcycles.diffstrata import *
1231
sage: X=GeneralisedStratum([Signature((4,))])
1232
sage: assert X.is_degeneration(((7,),0),((7,),0))
1233
1234
The empty tuple corresponds to the stratum:
1235
1236
sage: assert X.is_degeneration(((2,),0),((),0))
1237
"""
1238
s_profile = s_enh_profile[0]
1239
o_profile = o_enh_profile[0]
1240
# first check: subset:
1241
if not set(o_profile) <= set(s_profile):
1242
return False
1243
# in the irreducible case, we are done:
1244
if len(self.lookup(s_profile)) == len(self.lookup(o_profile)) == 1:
1245
assert self.explicit_leg_maps(o_enh_profile,s_enh_profile),\
1246
"%r and %r contain only one graph, but these are not degenerations!"\
1247
% (o_enh_profile,s_enh_profile)
1248
return True
1249
else:
1250
# otherwise: check if an undegeneration map exists:
1251
try:
1252
if self.explicit_leg_maps(o_enh_profile,s_enh_profile,only_one=True) is None:
1253
return False
1254
else:
1255
return True
1256
except UserWarning:
1257
# This is raised if there is no undegeneration inside the expected profile...
1258
return False
1259
1260
@cached_method
1261
def squish(self, enh_profile, l):
1262
"""
1263
Squish level l of the graph associated to enh_profile. Returns the enhanced profile
1264
associated to the squished graph.
1265
1266
Args:
1267
enh_profile (tuple): enhanced profile
1268
l (int): level of graph associated to enhanced profile
1269
1270
Raises:
1271
RuntimeError: Raised if a BIC is squished at a level other than 0.
1272
RuntimeError: Raised if the squished graph is not found in the squished profile.
1273
1274
Returns:
1275
tuple: enhanced profile.
1276
1277
EXAMPLES ::
1278
1279
sage: from admcycles.diffstrata import *
1280
sage: X=Stratum((2,))
1281
sage: assert all(X.squish(ep,0) == ((),0) for ep in X.enhanced_profiles_of_length(1))
1282
sage: assert all(X.squish((p,i),1-l) == ((p[l],),0) for p, i in X.enhanced_profiles_of_length(2) for l in range(2))
1283
"""
1284
p, i = enh_profile
1285
if len(p) == 1:
1286
if l != 0:
1287
raise RuntimeError("BIC can only be squished at level 0!" % (enh_profile, l))
1288
return ((), 0)
1289
new_p = list(p)
1290
new_p.pop(l)
1291
new_p = tuple(new_p)
1292
enhancements = []
1293
for j in range(len(self.lookup(new_p))):
1294
if self.is_degeneration(enh_profile, (new_p,j)):
1295
enhancements.append(j)
1296
if len(enhancements) != 1:
1297
raise RuntimeError("Cannot squish %r at level %r! No unique graph found in %r!" % (enh_profile, l, new_p))
1298
return (new_p, enhancements[0])
1299
1300
### Partial order
1301
## The lookup graph gives a partial order on the BICs (the 3-level graph (i,j)
1302
## implies i > j).
1303
@cached_method
1304
def lies_over (self,i,j):
1305
"""
1306
Determine if (i,j) is a 3-level graph.
1307
1308
Args:
1309
i (int): Index of BIC.
1310
j (int): Index of BIC.
1311
1312
Returns:
1313
bool: True if (i,j) is a 3-level graph, False otherwise.
1314
1315
EXAMPLES ::
1316
1317
"""
1318
if j in self.DG.bot_to_bic(i).values():
1319
assert i in self.DG.top_to_bic(j).values(),\
1320
"%r is a bottom degeneration of %r, but %r is not a top degeneration of %r!"\
1321
% (j,i,i,j)
1322
return True
1323
else:
1324
assert i not in self.DG.top_to_bic(j).values(),\
1325
"%r is not a bottom degeneration of %r, but %r is a top degeneration of %r!"\
1326
% (j,i,i,j)
1327
return False
1328
1329
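    # Illustrative sketch for lies_over() above (not part of the original
    # file): lies_over is the partial order underlying sorted profiles, so
    # every length-2 profile in the lookup list is ordered accordingly:
    #   sage: from admcycles.diffstrata import *
    #   sage: X = Stratum((2,))
    #   sage: all(X.lies_over(p[0], p[1]) for p in X.lookup_list[2])
    #   True
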
### Merging profiles (with respect to lies_over)
1330
@cached_method
1331
def merge_profiles(self,p,q):
1332
"""
1333
Merge profiles with respect to the ordering "lies_over".
1334
1335
Args:
1336
p (iterable): sorted profile
1337
q (iterable): sorted profile
1338
1339
Returns:
1340
tuple: sorted profile or None if no such sorted profile exists.
1341
1342
EXAMPLES ::
1343
1344
sage: from admcycles.diffstrata import *
1345
sage: X=GeneralisedStratum([Signature((4,))])
1346
sage: X.merge_profiles((5,),(5,))
1347
(5,)
1348
"""
1349
# input profiles should be sorted:
1350
assert all(self.lies_over(p[i],p[i+1]) for i in range(len(p)-1)),\
1351
"Profile %r not sorted!" % (p,)
1352
assert all(self.lies_over(q[i],q[i+1]) for i in range(len(q)-1)),\
1353
"Profile %r not sorted!" % (q,)
1354
new_profile = []
1355
next_p = 0
1356
next_q = 0
1357
while next_p < len(p) and next_q < len(q):
1358
if p[next_p] == q[next_q]:
1359
new_profile.append(p[next_p])
1360
next_p += 1
1361
next_q += 1
1362
else:
1363
if self.lies_over(p[next_p],q[next_q]):
1364
new_profile.append(p[next_p])
1365
next_p += 1
1366
else:
1367
if self.lies_over(q[next_q],p[next_p]):
1368
new_profile.append(q[next_q])
1369
else:
1370
return None
1371
next_q += 1
1372
# pick up rest (one of these is empty!):
1373
new_profile += p[next_p:]
1374
new_profile += q[next_q:]
1375
return tuple(new_profile)
1376
1377
### Better graph lookup:
1378
## Here we should really work with "enhanced dominos", because
1379
## otherwise it's not clear how the list indices of degenerations are related
1380
## to each other.
1381
## Therefore, arguments are:
1382
## * a sorted(!) list of BICs, i.e. an element of the lookup_list
1383
## * a (consistent) choice of components of the involved 3-level graph (i.e.
1384
## enhanced dominos)
1385
## This can consistently produce a graph.
1386
##
1387
## For now, we use the workaround to forcably only work with sorted profiles
1388
## where the indexing is at least consistent.
1389
###
1390
1391
def lookup_graph(self,bic_list,index=0):
1392
"""
1393
Return the graph associated to an enhanced profile.
1394
1395
Note that starting in SAGE 9.0 profile numbering will change between sessions!
1396
1397
Args:
1398
bic_list (iterable): (sorted!) tuple/list of indices of bics.
1399
index (int, optional): Index in lookup list. Defaults to 0.
1400
1401
Returns:
1402
EmbeddedLevelGraph: graph associated to the enhanced (sorted) profile
1403
(None if empty).
1404
1405
EXAMPLES ::
1406
1407
sage: from admcycles.diffstrata import *
1408
sage: X=Stratum((2,))
1409
sage: X.lookup_graph(())
1410
EmbeddedLevelGraph(LG=LevelGraph([2],[[1]],[],{1: 2},[0],True),dmp={1: (0, 0)},dlevels={0: 0})
1411
1412
Note that an enhanced profile needs to be unpacked with *:
1413
1414
sage: X.lookup_graph(*X.enhanced_profiles_of_length(2)[0]) # 'unsafe' (edge ordering may change) # doctest:+SKIP
1415
EmbeddedLevelGraph(LG=LevelGraph([1, 0, 0],[[1], [2, 3, 4], [5, 6, 7]],[(1, 4), (2, 6), (3, 7)],{1: 0, 2: 0, 3: 0, 4: -2, 5: 2, 6: -2, 7: -2},[0, -1, -2],True),dmp={5: (0, 0)},dlevels={0: 0, -1: -1, -2: -2})
1416
1417
"""
1418
# this is a bit stupid, but whatever...
1419
if all(self.lies_over(bic_list[i],bic_list[i+1]) for i in range(len(bic_list)-1)):
1420
return self.lookup(bic_list)[index]
1421
else:
1422
return None
1423
1424
def lookup(self,bic_list, quiet=True):
1425
"""
1426
Take a profile (i.e. a list of indices of BIC) and return the corresponding
1427
EmbeddedLevelGraphs (i.e. the product of these BICs).
1428
1429
Note that this will be a one element list "most of the time", but
1430
it can happen that this is not irreducible:
1431
1432
This implementation is not dependent on the order (!) (we look in top and
1433
bottom degenerations and clutch...)
1434
1435
However, for caching purposes, it makes sense to use only the sorted profiles...
1436
1437
NOTE THAT IN PYTHON3 PROFILES ARE NO LONGER DETERMINISTIC!!!!!
1438
1439
(they typically change with every python session...)
1440
1441
Args:
1442
bic_list (iterable): list of indices of bics
1443
1444
Returns:
1445
list: The list of EmbeddedLevelGraphs corresponding to the profile.
1446
1447
EXAMPLES ::
1448
1449
sage: from admcycles.diffstrata import *
1450
sage: X=GeneralisedStratum([Signature((4,))])
1451
1452
This is independent of the order.
1453
1454
sage: p, _ = X.enhanced_profiles_of_length(2)[0]
1455
sage: assert any(X.lookup(p)[0].is_isomorphic(G) for G in X.lookup((p[1],p[0])))
1456
1457
Note that the profile can be empty or reducible.
1458
1459
"""
1460
if not quiet:
1461
print("Looking up enhanced profiles in %r..." % (bic_list,))
1462
sys.stdout.flush() # MPI computer has congestion issues...
1463
lookup_key = tuple(bic_list)
1464
if not bic_list: # empty
1465
if not quiet:
1466
print("Empty profile, returning smooth_LG. Done.")
1467
sys.stdout.flush()
1468
return [self.smooth_LG]
1469
if len(bic_list) == 1:
1470
if not quiet:
1471
print("BIC, profile irreducible by definition. Done.")
1472
sys.stdout.flush()
1473
return [self.bics[bic_list[0]]]
1474
try:
1475
cached_list = self._lookup[lookup_key]
1476
if not quiet:
1477
print("Using cached lookup. Done.")
1478
sys.stdout.flush()
1479
return cached_list
1480
except KeyError:
1481
bic_list = list(bic_list) # in case we are passed a tuple...
1482
# otherwise, making a copy if we're about to manipulate is also not
1483
# such a bad idea...
1484
i = bic_list.pop() # index in self.bics
1485
B = self.bics[i] # this might build bics (!)
1486
# We split the remainder of bic_list into those coming from
1487
# degenerations of the top component and those from bottom.
1488
# Note that these lists will contain their indices in B.top
1489
# and B.bot, respectively.
1490
# Moreover, they have to be nested in case there are multiple components.
1491
top_lists = [[]]
1492
bot_lists = [[]]
1493
for j in bic_list:
1494
if not quiet:
1495
print("Looking at BIC %r:" % j, end=' ')
1496
sys.stdout.flush()
1497
# a bic is either in the image of top_to_bic
1498
# or bot_to_bic.
1499
# If it isn't in any image, the intersection is empty
1500
# and we return None.
1501
# Note that again this might build the maps.
1502
try:
1503
top_bics = self.DG.top_to_bic_inv(i)[j]
1504
if not quiet:
1505
print("Adding %r BICs from top component to top_lists..." % len(top_bics))
1506
sys.stdout.flush()
1507
# if there are several components, we "branch out":
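# (Illustration: if top_to_bic_inv(i)[j] contains two BICs, say b1 and b2,
# each partial list [a] in top_lists branches into [a, b1] and [a, b2], so
# every consistent choice of components is enumerated.)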
new_top_lists = []
1509
for b in top_bics:
1510
for top_list in top_lists:
1511
new_top_lists.append(top_list + [b])
1512
top_lists = new_top_lists
1513
except KeyError:
1514
try:
1515
bot_bics = self.DG.bot_to_bic_inv(i)[j]
1516
if not quiet:
1517
print("Adding %r BICs from bottom component to bot_lists..." % len(bot_bics))
1518
sys.stdout.flush()
1519
# if there are several components, we "branch out":
1520
new_bot_lists = []
1521
for b in bot_bics:
1522
for bot_list in bot_lists:
1523
new_bot_lists.append(bot_list + [b])
1524
bot_lists = new_bot_lists
1525
except KeyError:
1526
# Intersection empty.
1527
return []
1528
if not quiet:
1529
print("Done building top_lists and bot_lists.")
1530
print("This gives us %r profiles in %s and %r profiles in %s that we will now clutch pairwise and recursively." % \
1531
(len(top_lists), B.top, len(bot_lists), B.bot))
1532
sys.stdout.flush()
1533
graph_list = [admcycles.diffstrata.stratatautring.clutch(
1534
self,
1535
top,
1536
bot,
1537
B.clutch_dict,
1538
B.emb_top,
1539
B.emb_bot
1540
)
1541
for top_list, bot_list in itertools.product(top_lists,bot_lists)
1542
for top in B.top.lookup(top_list, quiet=quiet)
1543
for bot in B.bot.lookup(bot_list, quiet=quiet)
1544
]
1545
# we might have picked up isomorphic guys (e.g. v-graph)
1546
if not quiet:
1547
print("For profile %r in %s, we have thus obtained %r graphs." %\
1548
(bic_list, self, len(graph_list)))
1549
print("Sorting these by isomorphism class...", end=' ')
1550
sys.stdout.flush()
1551
rep_list = admcycles.diffstrata.bic.isom_rep(graph_list)
1552
self._lookup[lookup_key] = rep_list
1553
if not quiet:
1554
print("Done. Found %r isomorphism classes." % len(rep_list))
1555
sys.stdout.flush() # MPI computer has congestion issues...
1556
return rep_list
1557
1558
@cached_method
1559
def sub_graph_from_level(self,enh_profile,l,direction='below',return_split_edges=False):
1560
"""
1561
Extract an EmbeddedLevelGraph from the subgraph of enh_profile that is either
1562
above or below level l.
1563
1564
This is embedded into the top/bottom component of the bic at profile[l-1].
1565
In particular, this is a 'true' sub graph, i.e. the names of the vertices and
1566
legs are the same as in enh_profile.
1567
1568
Note: For l==0 (with direction 'below') or l==number_of_levels (with
direction 'above') the entire graph of enh_profile is returned; for the
opposite direction, None is returned.
1569
1570
Args:
1571
l (int): (relative) level number.
1572
direction (str, optional): 'above' or 'below'. Defaults to 'below'.
1573
return_split_edges (bool, optional. Defaults to False): also return a tuple
1574
of the edges split across level l.
1575
1576
Returns:
1577
EmbeddedLevelGraph: Subgraph of top/bottom component of the bic at profile[l-1].
1578
1579
If return_split_edges=True: Returns tuple (G,e) where
1580
* G is the EmbeddedLevelGraph
1581
* e is a tuple of edges of enh_profile that connect legs above level
1582
l with those below (i.e. those edges needed for clutching!)
1583
1584
EXAMPLES ::
1585
1586
sage: from admcycles.diffstrata import *
1587
sage: X=Stratum((2,))
1588
sage: ep = X.enhanced_profiles_of_length(2)[0]
1589
sage: X.sub_graph_from_level(ep, 1, 'above')
1590
EmbeddedLevelGraph(LG=LevelGraph([1],[[1]],[],{1: 0},[0],True),dmp={1: (0, 0)},dlevels={0: 0})
1591
sage: X.sub_graph_from_level(ep, 1, 'below') # 'unsafe' (edge order might change) # doctest:+SKIP
1592
EmbeddedLevelGraph(LG=LevelGraph([0, 0],[[2, 3, 4], [5, 6, 7]],[(2, 6), (3, 7)],{2: 0, 3: 0, 4: -2, 5: 2, 6: -2, 7: -2},[-1, -2],True),dmp={5: (0, 0), 4: (0, 1)},dlevels={-1: -1, -2: -2})
1593
sage: X.bics[ep[0][0]].top
1594
LevelStratum(sig_list=[Signature((0,))],res_cond=[],leg_dict={1: (0, 0)})
1595
sage: X.bics[ep[0][1]].bot
1596
LevelStratum(sig_list=[Signature((2, -2, -2))],res_cond=[[(0, 1), (0, 2)]],leg_dict={3: (0, 0), 4: (0, 1), 5: (0, 2)})
1597
"""
1598
G = self.lookup_graph(*enh_profile)
1599
if l == 0:
1600
if direction == 'below':
1601
if return_split_edges:
1602
return (G,tuple())
1603
return G
1604
if return_split_edges:
1605
return (None,tuple())
1606
return None
1607
if l == G.number_of_levels:
1608
if direction == 'above':
1609
if return_split_edges:
1610
return (G,tuple())
1611
return G
1612
if return_split_edges:
1613
return (None,tuple())
1614
return None
1615
profile, _i = enh_profile
1616
# The BIC that will give us the level is BIC l-1 in the profile:
1617
bic_number = profile[l-1]
1618
B = self.bics[bic_number]
1619
# We extract the subgraph from the underlying LevelGraph, so we have
1620
# to work with the internal level numbering:
1621
internal_l = G.LG.internal_level_number(l)
1622
# Actually only three things depend on above/below:
1623
# * The choice of vertices in the subgraph.
1624
# * The choice of level to embed into (top/bottom of B).
1625
# * The new level dictionary (as extracting does not change the levels,
1626
# this just consists of the relevant part of G.dlevels)
1627
# Note that in the 'below' case we consider levels <= l, while in 'above'
1628
# we consider > l (we want to cut level passage l!)
1629
if direction == 'below':
1630
new_vertices = [v for v in range(len(G.LG.genera))
1631
if G.LG.levelofvertex(v) <= internal_l]
1632
# in this case, the level we want to embed into is the bottom of B
1633
L = B.bot
1634
# the levels <= internal_l survive into dlevels
1635
new_dlevels = {k:v for k,v in G.dlevels.items() if k <= internal_l}
1636
else:
1637
assert direction == 'above'
1638
new_vertices = [v for v in range(len(G.LG.genera))
1639
if G.LG.levelofvertex(v) > internal_l]
1640
# in this case, the level we want to embed into is the top of B
1641
L = B.top
1642
# the levels >= internal_l survive into dlevels
1643
new_dlevels = {k:v for k,v in G.dlevels.items() if k > internal_l}
1644
vertex_set = set(new_vertices)
1645
new_edges = [e for e in G.LG.edges
1646
if G.LG.vertex(e[0]) in vertex_set and \
1647
G.LG.vertex(e[1]) in vertex_set]
1648
new_LG = G.LG.extract(new_vertices,new_edges)
1649
leg_set = set(flatten(new_LG.legs))
1650
# Next, we take the part of dmp that we still need:
1651
# Note that G.dmp maps legs of G to points of X, but what we want is a map
1652
# to points of L.
1653
# We get this from the following observation:
1654
# We have
1655
# * L.leg_dict: points of B -> points of L
1656
# * B.dmp_inv: points of X -> points of B
1657
# Therefore the composition gives the desired map
1658
# points of G -> points of L
1659
new_dmp = {k : L.leg_dict[B.dmp_inv[v]]
1660
for k, v in G.dmp.items() if k in leg_set}
1661
# The only thing missing is to add the marked points of the edges
1662
# that we have cut:
1663
# We do this in no particular order, as the clutching information will
1664
# have to be retrieved anyways when actually splitting the graph.
1665
# Note that != is boolean xor (!)
1666
split_edges = [e for e in G.LG.edges
1667
if (e[0] in leg_set) != (e[1] in leg_set)]
1668
split_half_edges = [e[0] if e[0] in leg_set else e[1]
1669
for e in split_edges]
1670
# To place these into new_dmp, we pick an undegeneration map G -> B
1671
# Note that the choice of map *should* not matter, as they should differ
1672
# only by an automorphism of B... (except for psi classes, where we have
1673
# to be careful with xi_on_level!!!)
1674
B_to_G = self.explicit_leg_maps(((bic_number,),0),enh_profile,only_one=True)
1675
assert B_to_G # G is actually a degeneration of B!
1676
G_to_B = {v : k for k, v in B_to_G.items()}
1677
# check the points we already placed are consistent:
1678
assert all(L.leg_dict[G_to_B[leg]] == new_dmp[leg] for leg in new_dmp)
1679
while split_half_edges:
1680
leg = split_half_edges.pop()
1681
new_dmp[leg] = L.leg_dict[G_to_B[leg]]
1682
# some more checks:
1683
legs_in_new_edges = set(flatten(new_edges))
1684
marked_points = set(new_dmp.keys())
1685
assert legs_in_new_edges.isdisjoint(marked_points)
1686
assert leg_set == (legs_in_new_edges | marked_points)
1687
sub_graph = EmbeddedLevelGraph(L,new_LG,new_dmp,new_dlevels)
1688
if return_split_edges:
1689
return (sub_graph, tuple(split_edges))
1690
return sub_graph
1691
1692
# @cached_method
1693
def split_graph_at_level(self,enh_profile,l):
1694
"""
1695
Splits the graph given by enh_profile (on self) into top and bottom components at level l.
1696
1697
(Note that the 'cut' occurs right above level l, i.e. to get the top level
1698
and the rest, l should be 1! (not 0))
1699
1700
The top and bottom components are EmbeddedLevelGraphs, embedded into
1701
top and bottom of the corresponding BIC (obtained via sub_graph_from_level).
1702
1703
The result is made so that it can be fed into clutch.
1704
1705
Args:
1706
enh_profile (tuple): enhanced profile.
1707
l (int): (relative) level of enh_profile.
1708
1709
Returns:
1710
dict: dictionary consisting of
1711
* X: GeneralisedStratum self.X
1712
* top: EmbeddedLevelGraph: top component
1713
* bottom: EmbeddedLevelGraph: bottom component
1714
* clutch_dict: clutching dictionary mapping ex-half-edges on
1715
top to their partners on bottom (both as points in the
1716
respective strata via dmp!)
1717
* emb_dict_top: a dictionary embedding top into the stratum of self
1718
* emb_dict_bot: a dictionary embedding bot into the stratum of self
1719
* leg_dict: a dictionary legs of enh_profile -> legs of top/bottom
1720
1721
Note that clutch_dict, emb_top and emb_bot are dictionaries between
1722
points of strata, i.e. after applying dmp to the points!
1723
1724
EXAMPLES ::
1725
1726
In particular, we can use this to "glue" the BICs of 10^top into (10,9,6) and
1727
obtain all components of the profile.
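
A sketch of the intended round trip through clutch (skipped: profile
numbering changes between sessions, and the round trip is only expected
up to isomorphism):

sage: from admcycles.diffstrata import *   # doctest:+SKIP
sage: X = GeneralisedStratum([Signature((4,))])   # doctest:+SKIP
sage: ep = X.enhanced_profiles_of_length(2)[0]   # doctest:+SKIP
sage: assert clutch(**X.split_graph_at_level(ep, 1)).is_isomorphic(X.lookup_graph(*ep))   # doctest:+SKIP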
"""
1730
# Split the graph into top and bottom components at level l:
1731
top_graph, se_top = self.sub_graph_from_level(enh_profile,l,direction='above',return_split_edges=True)
1732
bot_graph, se_bot = self.sub_graph_from_level(enh_profile,l,direction='below',return_split_edges=True)
1733
assert se_top == se_bot
1734
split_edges = se_top
1735
# We construct the clutching info by splitting the BIC that corresponds
1736
# to level l:
1737
p, _i = enh_profile
1738
# TODO: edge cases
1739
B = self.bics[p[l-1]]
1740
clutching_info = B.split()
1741
# we simply replace the top and bottom components of B by our graphs:
1742
assert clutching_info['top'] == top_graph.X == B.top
1743
clutching_info['top'] = top_graph
1744
assert clutching_info['bottom'] == bot_graph.X == B.bot
1745
clutching_info['bottom'] = bot_graph
1746
# the clutch_dict has to be replaced by the split_edges:
1747
# Note that these are currently edges of enh_profile, so they need to be
1748
# translated to points on the corresponding stratum via the embedding
1749
# of top_graph/bot_graph:
1750
# WARNING: We use here (once again) implicitly that e[0] is above e[1]!
1751
clutching_info['clutch_dict'] = {top_graph.dmp[e[0]] : bot_graph.dmp[e[1]]
1752
for e in split_edges}
1753
return clutching_info
1754
1755
# @cached_method
1756
def doublesplit(self,enh_profile):
1757
"""
1758
Splits embedded 3-level graph into top, middle and bottom component, along with
1759
all the information required (by clutch) to reconstruct self.
1760
1761
We return a dictionary so that the result can be fed into clutch (naming of
1762
optional arguments...)
1763
1764
This is mainly a technical backend for doublesplit_graph_before_and_after_level.
1765
1766
Note that in contrast to EmbeddedLevelGraph.split, we want to feed a length-2-profile
1767
so that we can really split into the top and bottom of the associated BICs (the only
1768
strata we can control!)
1769
1770
This method is mainly intended for being fed into clutch.
1771
1772
Args:
1773
enh_profile (tuple): enhanced profile.
1774
1775
Raises:
1776
ValueError: Raised if self is not a 3-level graph.
1777
1778
Returns:
1779
dict: A dictionary consisting of:
1780
X: GeneralisedStratum self,
1781
top: LevelStratum top level of top BIC,
1782
bottom: LevelStratum bottom level of bottom BIC,
1783
middle: LevelStratum level -1 of enh_profile,
1784
emb_dict_top: dict: points of top stratum -> points of X,
1785
emb_dict_bot: dict: points of bottom stratum -> points of X,
1786
emb_dict_mid: dict: points of middle stratum -> points of X,
1787
clutch_dict: dict: points of top stratum -> points of middle stratum,
1788
clutch_dict_lower: dict: points of middle stratum -> points of bottom stratum,
1789
clutch_dict_long: dict: points of top stratum -> points of bottom stratum.
1790
1791
EXAMPLES ::
1792
1793
1794
Long edges work.
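
A sketch of the round trip through clutch for a 3-level graph (skipped:
profile numbering changes between sessions):

sage: from admcycles.diffstrata import *   # doctest:+SKIP
sage: X = GeneralisedStratum([Signature((4,))])   # doctest:+SKIP
sage: ep = X.enhanced_profiles_of_length(2)[0]   # doctest:+SKIP
sage: assert clutch(**X.doublesplit(ep)).is_isomorphic(X.lookup_graph(*ep))   # doctest:+SKIP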
"""
1797
p, i = enh_profile
1798
if not len(p) == 2:
1799
raise ValueError("Error: Not a 3-level graph! %r" % self)
1800
G = self.lookup_graph(p,i)
1801
# Here it is important that we pick top and bot of the corresponding BICs and identify them with
1802
# level(0) and level(2) of G (as these might not be the same (e.g. switched components!)!)
1803
top = self.bics[p[0]].top
1804
middle = G.level(1)
1805
bottom = self.bics[p[1]].bot
1806
# To construct the embedding dictionaries, we have to identify legs of G
1807
# with (stratum) points of top/middle/bottom as keys and points on self as
1808
# values.
1809
#
1810
# The values (points on self) are given by G.dmp.
1811
#
1812
# The keys for middle are given via leg_dict.
1813
#
1814
# For top and bottom, we have to first fix a map from G to
1815
# p[0] and p[1] and then combine self.dmp with the leg_dicts of the LevelStrata.
1816
# It *shouldn't* matter, which undegeneration we take:
1817
top_to_G = self.explicit_leg_maps(((p[0],),0),enh_profile,only_one=True)
1818
G_to_top = {v : k for k, v in top_to_G.items()}
1819
bot_to_G = self.explicit_leg_maps(((p[1],),0),enh_profile,only_one=True)
1820
G_to_bot = {v : k for k, v in bot_to_G.items()}
1821
# More precisely: We now have the following maps (e.g. for top):
1822
#
1823
# G_to_top: points in G -> points in p[0]
1824
# top.leg_dict: points in p[0] -> stratum points of top
1825
#
1826
# and
1827
#
1828
# top.leg_dict_inv: stratum points of top -> points in p[0]
1829
# top_to_G: points in p[0] -> points in G
1830
# G.dmp: points in G -> stratum points on self
1831
#
1832
# i.e. emb_top is the composition of the inverse of the leg_dict
1833
# of top, i.e. top.stratum_number, with top_to_G and G.dmp
1834
# (giving a map from the points of top to the points of self)
1835
# and the same for middle and bottom.
1836
#
1837
# We implement this by iterating over the marked points of G on top level,
1838
# which are exactly the keys of G.dmp that are on top level.
1839
#
1840
# For this, we have to compose with G_to_top and top.leg_dict again.
1841
#
1842
# Note that we make extra sure that we didn't mess up the level numbering by
1843
# using the relative level numbering (where the top level is guaranteed to be 0,
1844
# the middle is 1 and the bottom level is 2 (positive!)).
1845
emb_dict_top = {top.leg_dict[G_to_top[l]] : G.dmp[l]
1846
for l in iter(G.dmp)
1847
if G.LG.level_number(G.LG.levelofleg(l)) == 0}
1848
emb_dict_mid = {middle.leg_dict[l] : G.dmp[l]
1849
for l in iter(G.dmp)
1850
if G.LG.level_number(G.LG.levelofleg(l)) == 1}
1851
emb_dict_bot = {bottom.leg_dict[G_to_bot[l]] : G.dmp[l]
1852
for l in iter(G.dmp)
1853
if G.LG.level_number(G.LG.levelofleg(l)) == 2}
1854
# Because this is a 3-level graph, all edges of self are cut in this process
1855
# and this gives us exactly the dictionary we must remember:
1856
# Note however, that we have to check if the edge connects top - middle, middle - bottom
1857
# or top - bottom.
1858
# Note that all these dictionaries map points of GeneralisedStrata to each
1859
# other so we must take the corresponding stratum_number!
1860
clutch_dict = {}
1861
clutch_dict_lower = {}
1862
clutch_dict_long = {}
1863
# If the edges are not sorted with e[0] above e[1], we complain.
1864
for e in G.LG.edges:
1865
if G.LG.level_number(G.LG.levelofleg(e[0])) == 0:
1866
if G.LG.level_number(G.LG.levelofleg(e[1])) == 1:
1867
clutch_dict[top.stratum_number(G_to_top[e[0]])] = middle.stratum_number(e[1])
1868
else:
1869
assert G.LG.level_number(G.LG.levelofleg(e[1])) == 2
1870
clutch_dict_long[top.stratum_number(G_to_top[e[0]])] = bottom.stratum_number(G_to_bot[e[1]])
1871
else:
1872
assert G.LG.level_number(G.LG.levelofleg(e[0])) == 1
1873
assert G.LG.level_number(G.LG.levelofleg(e[1])) == 2
1874
clutch_dict_lower[middle.stratum_number(e[0])] = bottom.stratum_number(G_to_bot[e[1]])
1875
return {'X': self, 'top': top, 'bottom': bottom, 'middle': middle,
1876
'emb_dict_top': emb_dict_top, 'emb_dict_mid': emb_dict_mid, 'emb_dict_bot': emb_dict_bot,
1877
'clutch_dict': clutch_dict, 'clutch_dict_lower': clutch_dict_lower, 'clutch_dict_long': clutch_dict_long}
1878
1879
@cached_method
1880
def three_level_profile_for_level(self,enh_profile,l):
1881
"""
1882
Find the 3-level graph that has level l of enh_profile as its middle level.
1883
1884
Args:
1885
enh_profile (tuple): enhanced profile
1886
l (int): (relative) level number
1887
1888
Raises:
1889
RuntimeError: raised if no unique (or no) 3-level graph is found.
1890
1891
Returns:
1892
tuple: enhanced profile of the 3-level graph.
1893
1894
EXAMPLES ::
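
A sketch (skipped, as profile numbering changes between sessions): for a
length-2 enhanced profile, the 3-level graph around level 1 has the same
profile:

sage: from admcycles.diffstrata import *   # doctest:+SKIP
sage: X = GeneralisedStratum([Signature((4,))])   # doctest:+SKIP
sage: ep = X.enhanced_profiles_of_length(2)[0]   # doctest:+SKIP
sage: X.three_level_profile_for_level(ep, 1)[0] == ep[0]   # doctest:+SKIP
True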
"""
1897
profile, _ = enh_profile
1898
three_level_profile = (profile[l-1],profile[l])
1899
# in case this is reducible, we have to find the correct enhanced profile:
1900
possible_enhancements = len(self.lookup(three_level_profile))
1901
assert possible_enhancements > 0, "No 3-level graph for subprofile %r of %r found!" % (three_level_profile, profile)
1902
enhancements = []
1903
for i in range(possible_enhancements):
1904
if self.is_degeneration(enh_profile,(three_level_profile,i)):
1905
enhancements.append(i)
1906
if len(enhancements) != 1:
1907
raise RuntimeError("No unique 3-level undegeneration in %r around level %r! %r" % (three_level_profile, l, enhancements))
1908
return (three_level_profile,enhancements[0])
1909
1910
# @cached_method
1911
def doublesplit_graph_before_and_after_level(self,enh_profile,l):
1912
"""
1913
Split the graph enh_profile directly above and below level l.
1914
1915
This can be used for gluing an arbitrary degeneration of level l into enh_profile.
1916
1917
The result is made so that it can be fed into clutch.
1918
1919
To ensure compatibility with top/bot/middle_to_bic when gluing, we have
1920
to make sure that everything is embedded into the "correct" generalised strata.
1921
1922
We denote the 3-level graph around level l by H.
1923
1924
Then the top part will be embedded into the top of the top BIC of H,
1925
the bottom part will be embedded into the bot of the bottom BIC of H
1926
and the middle will be the middle level of H.
1927
1928
For a 3-level graph this is (almost) equivalent to doublesplit(), the only difference
1929
being that here we return the 0-level graph for each level.
1930
1931
Args:
1932
enh_profile (tuple): enhanced profile.
1933
l (int): (relative) level of enh_profile.
1934
1935
Raises:
1936
ValueError: Raised if l is 0 or lowest level.
1937
RuntimeError: Raised if we don't find a unique 3-level graph around l.
1938
1939
Returns:
1940
dict: A dictionary consisting of:
1941
X: GeneralisedStratum self.X,
1942
top: LevelStratum top level of top BIC of H,
1943
bottom: LevelStratum bottom level of bottom BIC of H,
1944
middle: LevelStratum middle level of H,
1945
emb_dict_top: dict: points of top stratum -> points of X,
1946
emb_dict_bot: dict: points of bottom stratum -> points of X,
1947
emb_dict_mid: dict: points of middle stratum -> points of X,
1948
clutch_dict: dict: points of top stratum -> points of middle stratum,
1949
clutch_dict_lower: dict: points of middle stratum -> points of bottom stratum,
1950
clutch_dict_long: dict: points of top stratum -> points of bottom stratum.
1951
1952
EXAMPLES ::
1953
1954
sage: from admcycles.diffstrata import *
1955
sage: X=GeneralisedStratum([Signature((4,))])
1956
sage: assert all(clutch(**X.doublesplit_graph_before_and_after_level(ep,l)).is_isomorphic(X.lookup_graph(*ep)) for levels in range(3,X.dim()+1) for ep in X.enhanced_profiles_of_length(levels-1) for l in range(1,levels-1))
1957
sage: X=GeneralisedStratum([Signature((2,2,-2))])
1958
sage: assert all(clutch(**X.doublesplit_graph_before_and_after_level(ep,l)).is_isomorphic(X.lookup_graph(*ep)) for levels in range(3,X.dim()+2) for ep in X.enhanced_profiles_of_length(levels-1) for l in range(1,levels-1)) # long time
1959
"""
1960
p, i = enh_profile
1961
if l == 0 or l == len(p) + 1:
1962
raise ValueError("Doublesplit must occur at 'inner' level! %r" % l)
1963
G = self.lookup_graph(p,i)
1964
# Split the graph into top and bottom components around level l:
1965
top_graph, se_top = self.sub_graph_from_level(enh_profile,l,direction='above',return_split_edges=True)
1966
bot_graph, se_bot = self.sub_graph_from_level(enh_profile,l+1,direction='below',return_split_edges=True)
1967
# We construct the clutching info by splitting the 3-level graph around l
1968
# Note that the middle level is really the same as that of enh_profile (that's
1969
# why we have to care about components of the profile here), but the leg
1970
# numbering might be different, so we still have to work with an undegeneration map:
1971
t_l_enh_profile = self.three_level_profile_for_level(enh_profile,l)
1972
clutching_info = self.doublesplit(t_l_enh_profile)
1973
assert top_graph.X == clutching_info['top']
1974
assert bot_graph.X == clutching_info['bottom']
1975
L = clutching_info['middle']
1976
assert L == self.lookup_graph(*t_l_enh_profile).level(1)
1977
# we simply replace the top and bottom components of B by our graphs:
1978
clutching_info['top'] = top_graph
1979
clutching_info['bottom'] = bot_graph
1980
# Now we have to match up the edges:
1981
# Note that se_top consists of the edges connecting top_graph to any vertex
1982
# on or below level l
1983
# We therefore start by distinguishing those edges ending on level l from the others
1984
# (long edges):
1985
# WARNING: We use here (once again) implicitly that e[0] is above e[1]!
1986
top_to_l = []
1987
top_to_bot = []
1988
for e in se_top:
1989
if G.LG.level_number(G.LG.levelofleg(e[1])) == l:
1990
top_to_l.append(e)
1991
else:
1992
top_to_bot.append(e)
1993
# the same for se_bot:
1994
bot_to_l = []
1995
bot_to_top = []
1996
for e in se_bot:
1997
if G.LG.level_number(G.LG.levelofleg(e[0])) == l:
1998
bot_to_l.append(e)
1999
else:
2000
bot_to_top.append(e)
2001
assert set(top_to_bot) == set(bot_to_top)
2002
# Translating the edges into points on the strata immediately gives the
2003
# three clutching dictionaries:
2004
# Note that instead of directly using leg_dict for the middle level,
2005
# we first pick an undegeneration map to the 3-level graph and compose
2006
# with (the inverse of) that:
2007
middle_leg_map = self.explicit_leg_maps(t_l_enh_profile,enh_profile,only_one=True)
2008
ep_to_m = {v : k for k, v in middle_leg_map.items()}
2009
# WARNING: We use here (once again) implicitly that e[0] is above e[1]!
2010
clutching_info['clutch_dict'] = {top_graph.dmp[e[0]] : L.leg_dict[ep_to_m[e[1]]]
2011
for e in top_to_l}
2012
clutching_info['clutch_dict_lower'] = {L.leg_dict[ep_to_m[e[0]]] : bot_graph.dmp[e[1]]
2013
for e in bot_to_l}
2014
clutching_info['clutch_dict_long'] = {top_graph.dmp[e[0]] : bot_graph.dmp[e[1]]
2015
for e in top_to_bot}
2016
return clutching_info
2017
2018
# @cached_method
2019
def splitting_info_at_level(self,enh_profile,l):
2020
"""
2021
Retrieve the splitting and embedding dictionaries for splitting at level l,
2022
as well as the level in 'standard form', i.e. as either:
2023
* a top of a BIC
2024
* a bot of a BIC
2025
* a middle of a 3-level graph
2026
2027
This is essentially only a frontend for split_graph_at_level and
2028
doublesplit_graph_before_and_after_level and saves us the annoying
2029
case distinction.
2030
2031
This is important, because when we glue we should *always* use the
2032
dmp's of the splitting dictionary, which can (and will) be different
2033
from leg_dict of the level!
2034
2035
Args:
2036
enh_profile (tuple): enhanced profile
2037
l (int): (relative) level number
2038
2039
Returns:
2040
tuple: (splitting dict, leg_dict, level) where
2041
splitting dict is the splitting dictionary:
2042
* X: GeneralisedStratum self.X
2043
* top: EmbeddedLevelGraph: top component
2044
* bottom: EmbeddedLevelGraph: bottom component
2045
* clutch_dict: clutching dictionary mapping ex-half-edges on
2046
top to their partners on bottom (both as points in the
2047
respective strata via dmp!)
2048
* emb_dict_top: a dictionary embedding top into the stratum of self
2049
* emb_dict_bot: a dictionary embedding bot into the stratum of self
2050
2051
leg_dict is the dmp at the current level (to be used instead
2052
of leg_dict of G.level(l)!!!)
2053
2054
and level is the 'standardised' LevelStratum at l (as described above).
2055
2056
Note that clutch_dict, emb_top and emb_bot are dictionaries between
2057
points of strata, i.e. after applying dmp to the points!
2058
2059
"""
2060
profile, _ = enh_profile
2061
# For this, we have to distinguish again, if we're gluing into the middle
2062
# (two cuts) or at one end of the profile (1 cut):
2063
if l == 0:
2064
d = self.split_graph_at_level(enh_profile,1)
2065
assert d['top'].is_isomorphic(d['top'].X.smooth_LG)
2066
return d, d['top'].dmp, d['top'].X
2067
if l == len(profile):
2068
d = self.split_graph_at_level(enh_profile,l)
2069
assert d['bottom'].is_isomorphic(d['bottom'].X.smooth_LG)
2070
return d, d['bottom'].dmp, d['bottom'].X
2071
d = self.doublesplit_graph_before_and_after_level(enh_profile,l)
2072
three_level_profile = self.three_level_profile_for_level(enh_profile,l)
2073
assert self.lookup_graph(*three_level_profile).level(1) == d['middle']
2074
# for the middle level, we have to use the undegeneration map to
2075
# the 3-level graph:
2076
middle_leg_map = self.explicit_leg_maps(three_level_profile,enh_profile,only_one=True)
2077
L_to_m = {v : d['middle'].leg_dict[k] for k, v in middle_leg_map.items()
2078
if k in d['middle'].leg_dict}
2079
return d, L_to_m, d['middle']
2080
2081
@cached_method
2082
def enhanced_profiles_of_length(self,l,quiet=True):
2083
"""
2084
A little helper for generating all enhanced profiles in self of a given length.
2085
2086
Args:
2087
l (int): length (codim) of profiles to be generated.
2088
2089
Returns:
2090
tuple: tuple of enhanced profiles
2091
2092
EXAMPLES ::
2093
2094
sage: from admcycles.diffstrata import *
2095
sage: X=Stratum((4,))
2096
sage: len(X.lookup_list[2])
2097
17
2098
sage: len(X.enhanced_profiles_of_length(2))
2099
19
2100
2101
"""
2102
if not quiet:
2103
print('Generating enhanced profiles of length %r...' % l)
2104
sys.stdout.flush()
2105
if l >= len(self.lookup_list):
2106
return tuple()
2107
ep_list = []
2108
for c, p in enumerate(self.lookup_list[l]):
2109
if not quiet:
2110
print('Building all graphs in %r (%r/%r)...' % (p, c+1, len(self.lookup_list[l])))
2111
sys.stdout.flush()
2112
for i in range(len(self.lookup(p, quiet=True))): # quiet=False gives A LOT of output here...
2113
ep_list.append((p,i))
2114
return tuple(ep_list)
2115
2116
#########################################################
2117
#### Checks
2118
#########################################################
2119
2120
def check_dims(self,codim=None,quiet=False):
2121
"""
2122
Check if, for each non-horizontal level graph of codimension codim
2123
the dimensions of the levels add up to the dimension of the level graph
2124
(dim of stratum - codim).
2125
2126
If codim is omitted, check the entire stratum.
2127
2128
EXAMPLES ::
2129
2130
sage: from admcycles.diffstrata import *
2131
sage: X=GeneralisedStratum([Signature((1,1))])
2132
sage: X.check_dims()
2133
Codimension 0 Graph 0: Level sums ok!
2134
Codimension 1 Graph 0: Level sums ok!
2135
Codimension 1 Graph 1: Level sums ok!
2136
Codimension 1 Graph 2: Level sums ok!
2137
Codimension 1 Graph 3: Level sums ok!
2138
Codimension 2 Graph 0: Level sums ok!
2139
Codimension 2 Graph 1: Level sums ok!
2140
Codimension 2 Graph 2: Level sums ok!
2141
Codimension 2 Graph 3: Level sums ok!
2142
Codimension 3 Graph 0: Level sums ok!
2143
True
2144
2145
sage: X=GeneralisedStratum([Signature((4,))])
2146
sage: X.check_dims(quiet=True)
2147
True
2148
2149
sage: X=GeneralisedStratum([Signature((10,0,-10))])
2150
sage: X.check_dims()
2151
Codimension 0 Graph 0: Level sums ok!
2152
Codimension 1 Graph 0: Level sums ok!
2153
Codimension 1 Graph 1: Level sums ok!
2154
Codimension 1 Graph 2: Level sums ok!
2155
Codimension 1 Graph 3: Level sums ok!
2156
Codimension 1 Graph 4: Level sums ok!
2157
Codimension 1 Graph 5: Level sums ok!
2158
Codimension 1 Graph 6: Level sums ok!
2159
Codimension 1 Graph 7: Level sums ok!
2160
Codimension 1 Graph 8: Level sums ok!
2161
Codimension 1 Graph 9: Level sums ok!
2162
Codimension 1 Graph 10: Level sums ok!
2163
Codimension 1 Graph 11: Level sums ok!
2164
True
2165
2166
sage: X=GeneralisedStratum([Signature((2,2,-2))])
2167
sage: X.check_dims(quiet=True) # long time (3 seconds)
2168
True
2169
"""
2170
return_value = True
2171
if codim is None:
2172
codims = range(self.dim())
2173
else:
2174
codims = [codim]
2175
for c in codims:
2176
for i,emb_g in enumerate(self.all_graphs[c]):
2177
g = emb_g.LG
2178
dimsum = 0
2179
if not quiet:
2180
print("Codimension", c, "Graph", repr(i) + ":", end=" ")
2181
for l in range(g.numberoflevels()):
2182
L = g.stratum_from_level(l)
2183
if L.dim() == -1:
2184
if quiet:
2185
print("Codimension", c, "Graph", repr(i) + ":", end=" ")
2186
print("Error: Level", l, "is of dimension -1!")
2187
return_value = False
2188
dimsum += L.dim()
2189
if dimsum != self.dim() - c:
2190
if quiet:
2191
print("Codimension", c, "Graph", repr(i) + ":", end=" ")
2192
print("Error: Level dimensions add up to", dimsum, "not", self.dim() - c, "!")
2193
return_value = False
2194
else:
2195
if not quiet:
2196
print("Level sums ok!")
2197
return return_value
2198
2199
###########
2200
### Chern class calculation:
2201
def psi(self,leg):
2202
"""
2203
CURRENTLY ONLY ALLOWED FOR CONNECTED STRATA!!!!
2204
2205
The psi class on the open stratum at leg.
2206
2207
Args:
2208
leg (int): leg number (as index of signature, not point of stratum!!!)
2209
2210
Returns:
2211
ELGTautClass: Tautological class associated to psi.
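
EXAMPLES ::

A hypothetical usage sketch (skipped; purely illustrative): the internal
leg number is obtained from the embedding of the smooth graph:

sage: from admcycles.diffstrata import *   # doctest:+SKIP
sage: X = Stratum((2,))   # doctest:+SKIP
sage: leg = X.smooth_LG.dmp_inv[(0, 0)]   # doctest:+SKIP
sage: P = X.psi(leg)   # doctest:+SKIP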
"""
2213
psi = self.additive_generator([tuple(),0],{leg:1})
2214
return psi.as_taut()
2215
2216
# @cached_method
2217
def taut_from_graph(self,profile,index=0):
2218
"""
2219
Tautological class from the graph with enhanced profile (profile, index).
2220
2221
Args:
2222
profile (iterable): profile
2223
index (int, optional): Index of profile. Defaults to 0.
2224
2225
Returns:
2226
ELGTautClass: Tautological class consisting just of this one graph.
2227
2228
EXAMPLES ::
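
A minimal sketch (skipped; the BIC indices depend on the session):

sage: from admcycles.diffstrata import *   # doctest:+SKIP
sage: X = Stratum((2,))   # doctest:+SKIP
sage: T = X.taut_from_graph((0,))   # doctest:+SKIP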
"""
2231
return self.additive_generator((tuple(profile),index)).as_taut()
2232
2233
def ELGsum(self, L):
2234
"""
2235
Sum of tautological classes.
2236
2237
This is generally faster than += (i.e. sum()), because reduce is only called
2238
once at the end and not at every step.
2239
2240
Args:
2241
L (iterable): Iterable of ELGTautClasses on self.
2242
2243
Returns:
2244
ELGTautClass: Sum over input classes.
2245
"""
2246
new_psi_list = []
2247
for T in L:
2248
if T == 0:
2249
continue
2250
new_psi_list.extend(T.psi_list)
2251
return ELGTautClass(self, new_psi_list)
2252
2253
def pow(self, T, k, amb=None):
2254
"""
2255
Calculate T^k with ambient amb.
2256
2257
Args:
2258
T (ELGTautClass): Tautological class on self.
2259
k (int): positive integer.
2260
amb (tuple, optional): enhanced profile. Defaults to None.
2261
2262
Returns:
2263
ELGTautClass: T^k in CH(amb).
2264
"""
2265
if amb is None:
2266
amb = ((), 0)
2267
ONE = self.ONE
2268
else:
2269
ONE = self.taut_from_graph(*amb)
2270
prod = ONE
2271
for _ in range(k):
2272
prod = self.intersection(prod, T, amb)
2273
return prod
2274
2275
def exp(self,T,amb=None,quiet=True,prod=True,stop=None):
2276
"""
2277
(Formal) exp of a Tautological Class.
2278
2279
This is done (by default) by calculating exp of every AdditiveGenerator
2280
(which is cached) and calculating the product of these.
2281
2282
Alternatively, prod=False computes sums of powers of T.
2283
2284
Args:
2285
T (ELGTautClass): Tautological Class on X.
2286
2287
Returns:
2288
ELGTautClass: Tautological Class on X.
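
EXAMPLES ::

A construction sketch (skipped, as this can be a lengthy computation):

sage: from admcycles.diffstrata import *   # doctest:+SKIP
sage: X = Stratum((2,))   # doctest:+SKIP
sage: E = X.exp(X.xi)   # doctest:+SKIP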
"""
2290
N = self.dim()
2291
if amb is None:
2292
amb = ((), 0)
2293
if not prod:
2294
if not quiet:
2295
print("Calculating exp of %s..." % T)
2296
def _status(i):
2297
# primitive, but whatever
2298
if not quiet:
2299
print("Calculating power %r..." % i)
2300
return 1
2301
return self.ELGsum([_status(i) * QQ(1)/QQ(factorial(i)) * self.pow(T,i,amb) for i in range(N+1)])
2302
# Calculate instead product of exp(AG):
2303
e = self.taut_from_graph(*amb)
2304
if not quiet:
2305
print("Calculating exp as product of %r factors..." % len(T.psi_list), end=' ')
2306
sys.stdout.flush()
2307
for c, AG in T.psi_list:
2308
f = AG.exp(c, amb, stop)
2309
if f == 0 or f == self.ZERO:
2310
return self.ZERO
2311
e = self.intersection(e, f, amb)
2312
if not quiet:
2313
print('Done!')
2314
return e
2315
2316
@cached_method
2317
def exp_bic(self, i):
2318
l = self.bics[i].ell
2319
AG = self.additive_generator(((i,),0))
2320
return AG.exp(l, amb=None) - self.ONE
2321
2322
def td_contrib(self,l,T,amb=None):
2323
"""
2324
(Formal) td^-1 contribution, i.e. (1-exp(-l*T))/T.
2325
2326
Args:
2327
l (int): weight
2328
T (ELGTautClass): Tautological class on self.
2329
2330
Returns:
2331
ELGTautClass: Tautological class on self.
2332
"""
2333
N = self.dim()
2334
if amb is None:
2335
amb = ((), 0)
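# Truncated series: sum_{k=0}^{N} (-l)^k / (k+1)! * T^k, computed in CH(amb);
# terms of degree greater than N = dim(self) vanish.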
return self.ELGsum([QQ(-l)**k/QQ(factorial(k+1)) * self.pow(T,k,amb) for k in range(N+1)])
2337
2338
@property
2339
def xi(self):
2340
"""
2341
xi of self in terms of psi and BICs according to Sauvaget's formula.
2342
2343
Note that we first find an "optimal" leg.
2344
2345
Returns:
2346
ELGTautClass: psi class on smooth stratum + BIC contributions (all
2347
with multiplicities...)
2348
2349
EXAMPLES ::
2350
2351
sage: from admcycles.diffstrata import *
2352
sage: X=Stratum((2,))
2353
sage: print(X.xi) # 'unsafe' (order of summands might change) # doctest:+SKIP
2354
Tautological class on Stratum: (2,)
2355
with residue conditions: []
2356
<BLANKLINE>
2357
3 * Psi class 1 with exponent 1 on level 0 * Graph ((), 0) +
2358
-1 * Graph ((0,), 0) +
2359
-1 * Graph ((1,), 0) +
2360
<BLANKLINE>
2361
"""
2362
try:
2363
return self._xi
2364
except AttributeError:
2365
self._xi = self.xi_with_leg(quiet=True)
2366
return self._xi
2367
2368
@cached_method
2369
def xi_pow(self,n):
2370
"""
2371
Cached method for calculating powers of xi.
2372
2373
Args:
2374
n (int): non-negative integer (exponent)
2375
2376
Returns:
2377
ELGTautClass: xi^n
2378
"""
2379
if n == 0:
2380
return self.ONE
2381
return self.xi * self.xi_pow(n-1)
2382
2383
@cached_method
2384
def xi_with_leg(self,leg=None,quiet=True,with_leg=False):
2385
"""
2386
xi class of self expressed using Sauvaget's relation (with optionally a choice of leg)
2387
2388
Args:
2389
leg (tuple, optional): leg on self, i.e. tuple (i,j) for the j-th element
2390
of the signature of the i-th component. Defaults to None. In this case,
2391
an optimal leg is chosen.
2392
quiet (bool, optional): No output. Defaults to False.
2393
with_leg (bool, optional): Return choice of leg. Defaults to False.
2394
2395
Returns:
2396
ELGTautClass: xi in terms of psi and bics according to Sauvaget.
2397
(ELGTautClass, tuple): if with_leg=True, where tuple is the corresponding
2398
leg on the level i.e. (component, signature index) used.
2399
2400
EXAMPLES ::
2401
2402
In the stratum (2,-2) the pole is chosen by default (there is no 'error term'):
2403
2404
sage: from admcycles.diffstrata import *
2405
sage: X=Stratum((2,-2))
2406
sage: print(X.xi)
2407
Tautological class on Stratum: (2, -2)
2408
with residue conditions: []
2409
<BLANKLINE>
2410
-1 * Psi class 2 with exponent 1 on level 0 * Graph ((), 0) +
2411
<BLANKLINE>
2412
sage: print(X.xi_with_leg(leg=(0,1)))
2413
Tautological class on Stratum: (2, -2)
2414
with residue conditions: []
2415
<BLANKLINE>
2416
-1 * Psi class 2 with exponent 1 on level 0 * Graph ((), 0) +
2417
<BLANKLINE>
2418
2419
We can specify the zero instead and pick up the extra divisor:
2420
2421
sage: print(X.xi_with_leg(leg=(0,0))) # 'unsafe' (order of summands might change) # doctest:+SKIP
2422
Tautological class on Stratum: (2, -2)
2423
with residue conditions: []
2424
<BLANKLINE>
2425
3 * Psi class 1 with exponent 1 on level 0 * Graph ((), 0) +
2426
-1 * Graph ((0,), 0) +
2427
<BLANKLINE>
2428
"""
2429
if not quiet:
2430
print("Applying Sauvaget's relation to express xi for %r..." % self)
2431
if leg is None:
2432
# choose a "good" leg:
2433
l, k, bot_bic_list = self._choose_leg_for_sauvaget_relation(quiet)
2434
else:
2435
l = leg
2436
k = self._sig_list[l[0]].sig[l[1]]
2437
bot_bic_list = self.bics_with_leg_on_bottom(l)
2438
# find internal leg number on smooth graph corresponding to l:
2439
G = self.lookup_graph(tuple())
2440
internal_leg = G.dmp_inv[l] # leg number on graph
2441
xi = (k+1) * self.psi(internal_leg)
2442
add_gens = [self.additive_generator([(b,),0]) for b in bot_bic_list]
2443
self._xi = xi + ELGTautClass(self, [(-self.bics[bot_bic_list[i]].ell, AG) \
2444
for i, AG in enumerate(add_gens)])
2445
# self._xi = xi + sum([QQ(1)/QQ(AG.stack_factor)*AG.as_taut() \
2446
# for i, AG in enumerate(add_gens)])
2447
if with_leg:
2448
return (self._xi,l)
2449
else:
2450
return self._xi
2451
2452
def _choose_leg_for_sauvaget_relation(self,quiet=True):
2453
"""
2454
Choose the best leg for Sauvaget's relation, i.e. the one that appears on bottom
2455
level for the fewest BICs.
2456
2457
Returns:
2458
tuple: tuple (leg, order, bic_list) where:
2459
* leg (tuple), as a tuple (number of conn. comp., index of the signature tuple),
2460
* order (int) the order at leg, and
2461
* bic_list (list of int) is a list of indices of self.bics where leg
2462
is on bottom level.
2463
2464
EXAMPLES ::
2465
2466
sage: from admcycles.diffstrata import *
2467
sage: X=Stratum((2,-2))
2468
sage: X._choose_leg_for_sauvaget_relation()
2469
((0, 1), -2, [])
2470
2471
In the minimal stratum, we always find all BICS:
2472
2473
sage: X=Stratum((2,))
2474
sage: X._choose_leg_for_sauvaget_relation()
2475
((0, 0), 2, [0, 1])
2476
"""
2477
best_case = len(self.bics)
2478
best_leg = -1
2479
# points of the stratum are best accessed through the embedding of the smooth graph:
2480
# (we sort for better testing...)
2481
leg_list = sorted(list(self.smooth_LG.dmp_inv.keys()), key=lambda x:x[1])
2482
for l in leg_list:
2483
bot_list = self.bics_with_leg_on_bottom(l)
2484
# none is best we can do:
2485
if not bot_list:
2486
order = self._sig_list[l[0]].sig[l[1]]
2487
if not quiet:
2488
print("Choosing leg %r (of order %r) because it never appears on bottom level."
2489
% (l, order))
2490
return (l,order,[])
2491
on_bottom = len(bot_list)
2492
if on_bottom <= best_case:
2493
best_case = on_bottom
2494
best_leg = l
2495
best_bot_list = bot_list[:] # copy!
2496
assert best_leg != -1, "No best leg found for %r!" % self
2497
order = self._sig_list[best_leg[0]].sig[best_leg[1]]
2498
if not quiet:
2499
print("Choosing leg %r (of order %r), because it only appears on bottom %r out of %r times."\
2500
% (best_leg, order, best_case, len(self.bics)))
2501
return (best_leg, order, best_bot_list)
2502
2503
def bics_with_leg_on_bottom(self,l):
2504
"""
2505
A list of BICs where l is on bottom level.
2506
2507
Args:
2508
l (tuple): leg on self (i.e. (i,j) for the j-th element of the signature
2509
of the i-th component)
2510
2511
Returns:
2512
list: list of indices self.bics
2513
2514
EXAMPLES ::
2515
2516
sage: from admcycles.diffstrata import *
2517
sage: X=GeneralisedStratum([Signature((2,))])
2518
sage: X.bics_with_leg_on_bottom((0,0))
2519
[0, 1]
2520
"""
2521
bot_list = []
2522
# the corresponding point on each EmbeddedLevelGraph is leg
2523
for i, B in enumerate(self.bics):
2524
# reminder: l is leg on stratum, i.e. (i,j)
2525
# dmp_inv maps this to a leg on a graph (integer)
2526
leg = B.dmp_inv[l]
2527
leg_level = B.dlevels[B.LG.levelofleg(leg)]
2528
assert leg_level in [0,-1], "Leg %r of BIC %r is not on level 0 or -1!"\
2529
% (leg, B)
2530
if leg_level == -1:
2531
bot_list.append(i)
2532
return bot_list
2533
2534
@cached_method
2535
def xi_at_level(self,l,enh_profile,leg=None,quiet=True):
2536
"""
2537
Pullback of xi on level l to enh_profile.
2538
2539
This corresponds to xi_Gamma^[i] in the paper.
2540
2541
Args:
2542
l (int): level number (0,...,codim)
2543
enh_profile (tuple): enhanced profile
2544
leg (int, optional): leg (as a leg of enh_profile!!!), to be used
2545
in Sauvaget's relation. Defaults to None, i.e. optimal choice.
2546
2547
Raises:
2548
RuntimeError: raised if classes produced by xi on the level have
2549
unexpected codimension.
2550
ValueError: if the leg provided is not found on the level.
2551
2552
Returns:
2553
ELGTautClass: tautological class consisting of psi classes on
2554
enh_profile and graphs with one more level.
2555
2556
EXAMPLES ::
2557
2558
Compare multiplication with xi to xi_at_level (for top-degree):
2559
2560
sage: from admcycles.diffstrata import *
2561
sage: X=Stratum((2,-2,0))
2562
sage: assert all(X.xi_at_level(0, ((i,),0)) == X.xi*X.taut_from_graph((i,)) for i in range(len(X.bics)))
2563
2564
"""
2565
if enh_profile == ((),0):
2566
assert l == 0
2567
if leg:
2568
level_leg = self.smooth_LG.dmp[leg]
2569
return self.xi_with_leg(level_leg)
2570
return self.xi
2571
G = self.lookup_graph(*enh_profile)
2572
GAG = self.additive_generator(enh_profile)
2573
# we need to use splitting info instead of direct level extraction,
2574
# because the embeddings might differ by an automorphism!
2575
d, leg_dict, L = self.splitting_info_at_level(enh_profile,l)
2576
inv_leg_dict = {v : k for k, v in leg_dict.items()}
2577
assert set(leg_dict.values()) == set(L.leg_dict.values())
2578
if leg is None:
2579
l_xi, level_leg = L.xi_with_leg(with_leg=True,quiet=quiet)
2580
else:
2581
if not (leg in leg_dict):
2582
raise ValueError('Leg %r is not on level %r of %r!' % (leg, l, enh_profile))
2583
level_leg = leg_dict[leg]
2584
l_xi = L.xi_with_leg(level_leg,quiet=quiet)
2585
taut_list = []
2586
if l_xi == 0:
2587
return self.ZERO
2588
for c, AG in l_xi.psi_list:
2589
if AG.codim == 0:
2590
# psi class on L:
2591
new_leg_dict = {}
2592
for AGleg in AG.leg_dict:
2593
leg_on_G = inv_leg_dict[L.smooth_LG.dmp[AGleg]]
2594
new_leg_dict[leg_on_G] = AG.leg_dict[AGleg]
2595
next_taut = (c, self.additive_generator(enh_profile,leg_dict=new_leg_dict))
2596
elif AG.codim == 1:
2597
coeff, glued_AG = self.glue_AG_at_level(AG,enh_profile,l)
2598
next_taut = (c*coeff,glued_AG)
2599
else:
2600
raise RuntimeError("Classes in xi should all be of codim 0 or 1! %s" % l_xi)
2601
taut_list.append(next_taut)
2602
return ELGTautClass(self,taut_list)
2603
2604
@cached_method
2605
def glue_AG_at_level(self,AG,enh_profile,l):
2606
"""
2607
Glue an AdditiveGenerator into level l of enh_profile.
2608
2609
Note that AG must be an AdditiveGenerator on the level obtained via
2610
self.splitting_info_at_level!
2611
2612
Currently this is only implemented for graphs (and only really tested
2613
for BICs!!!)
2614
2615
TODO: Test for AGs that are not BICs and psi classes.
2616
2617
Args:
2618
AG (AdditiveGenerator): AdditiveGenerator on level
2619
enh_profile (tuple): enhanced profile of self.
2620
l (int): level number of enh_profile.
2621
2622
Raises:
2623
RuntimeError: raised if the new profile is empty.
2624
2625
Returns:
2626
tuple: A tuple consisting of the stackfactor (QQ) and the
2627
AdditiveGenerator of the glued graph.
2628
"""
2629
# TODO: Check if longer profiles work + psis!
2630
#
2631
# First, we figure out the profile of the new graph of self.
2632
# For this, we must translate the profile (inside L) of the AG
2633
# into an extended profile (of self) as a degeneration of enh_profile:
2634
profile, _comp = enh_profile
2635
AGprofile, AGcomp = AG.enh_profile
2636
# We start by deciding where something must be inserted into enh_profile:
2637
#
2638
# We observe that level l is either:
2639
# * B^top of the first BIC in profile (level 0),
2640
# * B^bot of the last BIC in profile (lowest level), or
2641
# * the middle of the 3-level graph (profile[l-1],profile[l]).
2642
#
2643
# There is also the "degenerate case" of an empty profile that
2644
# we should exclude first:
2645
if len(profile) == 0:
2646
assert l == 0
2647
# level stratum == stratum
2648
# stack_factor = QQ(AG.stack_factor)
2649
return (1, self.additive_generator((AGprofile,AGcomp)))
2650
elif l == 0:
2651
new_bics = [self.DG.top_to_bic(profile[l])[bic_index] for bic_index in AGprofile]
2652
elif l == len(profile):
2653
new_bics = [self.DG.bot_to_bic(profile[l-1])[bic_index] for bic_index in AGprofile]
2654
else: # we are in the middle of the corresponding 3-level graph:
2655
three_level_profile, enhancement = self.three_level_profile_for_level(enh_profile,l)
2656
new_bics = [self.DG.middle_to_bic((three_level_profile,enhancement))[bic_index]
2657
for bic_index in AGprofile]
2658
p = list(profile)
2659
p = tuple(p[:l] + new_bics + p[l:])
2660
# Now we know the profile, we have to figure out, which component
2661
# we're on.
2662
# For this, we split the enh_profile apart, replace one part by the BIC and
2663
# and glue it back together again.
2664
comp_list = []
2665
assert len(self.lookup(p)) > 0, "Error: Glued into empty profile %r" % p
2666
# The splitting information and the level in 'standard form' (i.e. one
2667
# of the three above possibilities), is given by splitting_info_at_level:
2668
d, leg_dict, L = self.splitting_info_at_level(enh_profile,l)
2669
if not AG._X is L:
2670
print("Warning! Additive Generator should live on level %r of %r! I hope you know what you're doing...." % (l,enh_profile))
2671
# We first build the "big" graph, i.e. glue in the AG.
2672
# For this, we have to distinguish again, if we're gluing into the middle
2673
# (two cuts) or at one end of the profile (1 cut):
2674
if l == 0:
2675
assert d['top'].X is L
2676
# we glue into top:
2677
d['top'] = d['top'].X.lookup_graph(*AG.enh_profile)
2678
elif l == len(profile):
2679
assert d['bottom'].X is L
2680
# we glue into bottom:
2681
d['bottom'] = d['bottom'].X.lookup_graph(*AG.enh_profile)
2682
else:
2683
assert d['middle'] is L
2684
# we glue into middle:
2685
d['middle'] = d['middle'].lookup_graph(*AG.enh_profile)
2686
glued_graph = admcycles.diffstrata.stratatautring.clutch(**d)
2687
# Now we check the components of p for glued_graph:
2688
for i, H in enumerate(self.lookup(p)):
2689
if glued_graph.is_isomorphic(H):
2690
comp_list.append(i)
2691
if len(comp_list) != 1:
2692
raise RuntimeError("%r is not a unique degeneration of %r! %r" % (p,enh_profile,comp_list))
2693
i = comp_list[0]
2694
glued_AG = self.additive_generator((p,i))
2695
GAG = self.additive_generator(enh_profile)
2696
stack_factor = 1
2697
for i in range(len(AGprofile)):
2698
stack_factor *= QQ(self.bics[new_bics[i]].ell) / QQ(L.bics[AGprofile[i]].ell)
2699
stack_factor *= QQ(len(glued_graph.automorphisms)) / QQ(len(AG._G.automorphisms)*len(GAG._G.automorphisms))
2700
return (stack_factor, glued_AG)
2701
2702
def calL(self, enh_profile=None, l=0):
2703
"""
2704
The error term of the normal bundle on level l of enh_profile * -ll
2705
(pulled back to enh_profile)
2706
2707
Args:
2708
enh_profile (tuple, optional): enhanced profile. Defaults to None.
2709
l (int, optional): level. Defaults to 0.
2710
2711
Returns:
2712
ELGTautClass: Tautological class on self
2713
"""
2714
result = []
2715
if enh_profile is None or enh_profile == ((), 0):
2716
for i, B in enumerate(self.bics):
2717
ll = self.bics[i].ell
2718
result.append(ll*self.taut_from_graph((i,)))
2719
else:
2720
# Morally, L = G.level(squished_level)
2721
# but we have to use splitting_info_at_level to glue in safely!
2722
d, leg_dict, L = self.splitting_info_at_level(enh_profile, l)
2723
for i, B in enumerate(L.bics):
2724
BAG = L.additive_generator(((i,),0))
2725
sf, glued_AG = self.glue_AG_at_level(BAG, enh_profile, l)
2726
coeff = QQ(sf*B.ell)
2727
result.append(coeff * glued_AG.as_taut())
2728
if not result:
2729
return self.ZERO
2730
return self.ELGsum(result)
2731
2732
################################################################
2733
############ SEC 9 FORMULAS ####################################
2734
################################################################
2735
## The following formulas check various identities used in ##
2736
## and around sec 9 of the paper. They also serve as examples ##
2737
## for the methods introduced above. ##
2738
################################################################
2739
2740
@property
2741
def c1_E(self):
2742
"""
2743
The first chern class of Omega^1(log) (Thm 1.1).
2744
2745
Returns:
2746
ELGTautClass: c_1(E) according to Thm 1.1.
2747
2748
EXAMPLES ::
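
A construction sketch (skipped; c1_E is N*xi plus the weighted BIC
contributions of Thm 1.1):

sage: from admcycles.diffstrata import *   # doctest:+SKIP
sage: X = Stratum((2,))   # doctest:+SKIP
sage: c1 = X.c1_E   # doctest:+SKIP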
"""
2751
N = self.dim() + 1
2752
c1E = [N*self.xi]
2753
for i, B in enumerate(self.bics):
2754
Ntop = B.top.dim() + 1
2755
l = B.ell
2756
c1E.append(((N-Ntop)*l)*self.taut_from_graph((i,)))
2757
return self.ELGsum(c1E)
2758
2759
@property
2760
def c2_E(self):
2761
"""
2762
A direct formula for the second Chern class.
2763
2764
Returns:
2765
ELGTautClass: c_2 of the Tangent bundle of self.
2766
"""
2767
N = QQ(self.dim() + 1)
2768
c2E = [N*(N-1)/QQ(2) * (self.xi_pow(2))]
2769
for i, B in enumerate(self.bics):
2770
Ntop = B.top.dim() + 1
2771
Nbot = B.bot.dim() + 1
2772
xitop = self.xi_at_level(0, ((i,),0))
2773
xibot = self.xi_at_level(1, ((i,),0))
2774
l = QQ(B.ell)
2775
c2E.append(l/2 * ((N*(N-1) - Ntop*(Ntop-1))*xitop +
2776
((N-Ntop)**2 + Ntop - N)*xibot))
2777
for ep in self.enhanced_profiles_of_length(2):
2778
p, _ = ep
2779
delta0 = self.bics[p[0]]
2780
delta1 = self.bics[p[1]]
2781
Nd0 = delta0.top.dim() + 1
2782
Nd1 = delta1.top.dim() + 1
2783
ld0 = QQ(delta0.ell)
2784
ld1 = QQ(delta1.ell)
2785
factor = QQ(1)/QQ(2) * ld0 * ld1 * (N*(N-2*Nd0)-Nd1*(Nd1-2*Nd0)-N+Nd1)
2786
c2E.append(factor * self.taut_from_graph(*ep))
2787
return self.ELGsum(c2E)
2788
2789
@cached_method
2790
def ch1_pow(self, n):
2791
"""
2792
A direct formula for powers of ch_1
2793
2794
Args:
2795
n (int): exponent
2796
2797
Returns:
2798
ELGTautClass: ch_1(T)^n
2799
"""
2800
N = QQ(self.dim() + 1)
2801
chpow = [QQ(N**n)/QQ(factorial(n)) * self.xi_pow(n)]
2802
for L in range(1,n+1):
2803
summand = []
2804
for ep in self.enhanced_profiles_of_length(L):
2805
p, _ = ep
2806
delta = [self.bics[b] for b in p]
2807
ld = [B.ell for B in delta]
2808
Nd = [B.top.dim() + 1 for B in delta]
2809
exi = self.exp(N*self.xi_at_level(0,ep), amb=ep)
2810
factor = 1
2811
td_prod = self.taut_from_graph(*ep)
2812
for i in range(L):
2813
factor *= (N - Nd[i])*ld[i]
2814
td_prod = self.intersection(td_prod,
2815
self.td_contrib(-ld[i]*(N-Nd[i]),
2816
self.cnb(ep, ep, self.squish(ep, i)), ep),
2817
ep)
2818
prod = self.intersection(exi, td_prod, ep)
2819
summand.append(factor * prod.degree(n))
2820
chpow.append(self.ELGsum(summand))
2821
return factorial(n) * self.ELGsum(chpow)
2822
2823
@property
2824
def ch2_E(self):
2825
"""
2826
A direct formula for ch_2.
2827
2828
Returns:
2829
ELGTautClass: ch_2
2830
"""
2831
N = QQ(self.dim() + 1)
2832
ch2E = [N/QQ(2) * (self.xi_pow(2))]
2833
for i, B in enumerate(self.bics):
2834
Ntop = B.top.dim() + 1
2835
Nbot = B.bot.dim() + 1
2836
xitop = self.xi_at_level(0, ((i,),0))
2837
xibot = self.xi_at_level(1, ((i,),0))
2838
l = QQ(B.ell)
2839
ch2E.append(l/2 * ((N - Ntop)*(xitop + xibot)))
2840
for ep in self.enhanced_profiles_of_length(2):
2841
p, _ = ep
2842
delta0 = self.bics[p[0]]
2843
delta1 = self.bics[p[1]]
2844
Nd0 = delta0.top.dim() + 1
2845
Nd1 = delta1.top.dim() + 1
2846
ld0 = QQ(delta0.ell)
2847
ld1 = QQ(delta1.ell)
2848
factor = QQ(1)/QQ(2) * ld0 * ld1 * (N - Nd1)
2849
ch2E.append(factor * self.taut_from_graph(*ep))
2850
return self.ELGsum(ch2E)
2851
2852
def ch_E_alt(self, d):
2853
"""
2854
A formula for the Chern character.
2855
2856
Args:
2857
d (int): cut-off degree
2858
2859
Returns:
2860
ELGTautClass: sum of ch_0 to ch_d.
2861
"""
2862
N = QQ(self.dim() + 1)
2863
ch_E = [N/QQ(factorial(d)) * self.xi_pow(d)]
2864
for L in range(1, d+1):
2865
summand = []
2866
for ep in self.enhanced_profiles_of_length(L):
2867
p, _ = ep
2868
ld = [self.bics[b].ell for b in p]
2869
Nd = self.bics[p[-1]].top.dim() + 1
2870
ld_prod = 1
2871
for l in ld:
2872
ld_prod *= l
2873
factor = ld_prod * (N - Nd)
2874
td_prod = self.ONE
2875
for i in range(L):
2876
td_prod = self.intersection(td_prod, self.td_contrib(-ld[i], self.cnb(ep, ep, self.squish(ep, i)), ep), ep)
2877
inner_sum = []
2878
for j in range(d-L+1):
2879
pr = self.intersection(self.pow(self.xi_at_level(0, ep), j, ep), td_prod.degree(d-j), ep)
2880
inner_sum.append(QQ(1)/QQ(factorial(j)) * pr)
2881
summand.append(factor * self.ELGsum(inner_sum))
2882
ch_E.append(self.ELGsum(summand))
2883
return self.ELGsum(ch_E)
2884
2885
@cached_method
2886
def exp_xi(self, quiet=True):
2887
"""
2888
Calculate exp(xi) using that no powers higher than 2g appear for connected
2889
holomorphic strata.
2890
2891
Args:
2892
quiet (bool, optional): No output. Defaults to True.
2893
2894
Returns:
2895
ELGTautClass: exp(xi)
2896
"""
2897
if not self._polelist and len(self._g) == 1:
2898
stop = 2*self._g[0]
2899
else:
2900
stop = None
2901
if not quiet:
2902
if stop:
2903
stop_str = stop - 1
2904
else:
2905
stop_str = stop
2906
print('Stopping exp(xi) at degree %r' % stop_str)
2907
return self.exp(self.xi, quiet=quiet, stop=stop)
2908
2909
def xi_at_level_pow(self, level, enh_profile, exponent):
2910
"""
2911
Calculate powers of xi_at_level (using ambient enh_profile).
2912
2913
Note that when working with xi_at_level on enh_profile, multiplication
2914
should always take place in CH(enh_profile), i.e. using intersection
2915
instead of *. This is simplified for powers by this method.
2916
2917
Moreover, by Sauvaget, xi^n = 0 for n >= 2g for connected holomorphic
2918
strata, so we check this before calculating.
2919
2920
Args:
2921
level (int): level of enh_profile.
2922
enh_profile (tuple): enhanced profile of self.
2923
exponent (int): exponent
2924
2925
Returns:
2926
ELGTautClass: Pushforward of (xi_{enh_profile}^[l])^n to self.
2927
"""
2928
G = self.lookup_graph(*enh_profile)
2929
L = G.level(level)
2930
if not L._polelist and len(L._g) == 1:
2931
if exponent >= 2*L._g[0]:
2932
return self.ZERO
2933
if enh_profile == ((), 0):
2934
assert level == 0
2935
return self.xi_pow(exponent)
2936
# ambient!
2937
power = self.taut_from_graph(*enh_profile)
2938
# maybe consecutive squaring is better? Seems that it isn't :/
2939
# xi = self.xi_at_level(level, enh_profile)
2940
# def _rec(x, n):
2941
# if n == 0:
2942
# return self.taut_from_graph(*enh_profile)
2943
# if n == 1:
2944
# return x
2945
# if n % 2 == 0:
2946
# return _rec(self.intersection(x, x, enh_profile), n // 2)
2947
# return self.intersection(x, _rec(self.intersection(x, x, enh_profile), (n - 1) // 2), enh_profile)
2948
# return _rec(xi, exponent)
2949
xi = self.xi_at_level(level, enh_profile)
2950
for _ in range(exponent):
2951
power = self.intersection(power, xi, enh_profile)
2952
return power
2953
2954
@cached_method
2955
def exp_L(self, quiet=True):
2956
"""
2957
exp(calL)
2958
2959
Args:
2960
quiet (bool, optional): No output. Defaults to True.
2961
2962
Returns:
2963
ELGTautClass: exp(calL)
2964
"""
2965
return self.exp(self.calL(), quiet=quiet)
2966
2967
@property
2968
def P_B(self):
2969
"""
2970
The twisted Chern character of self, see sec 9 of the paper.
2971
2972
Returns:
2973
ELGTautClass: class of P_B
2974
"""
2975
# Prop. 9.2
2976
N = QQ(self.dim() + 1)
2977
PB = [N*self.exp_xi() + (-1)*self.ONE]
2978
for L in range(1,N):
2979
inner = []
2980
for enh_profile in self.enhanced_profiles_of_length(L):
2981
p, _ = enh_profile
2982
B = self.bics[p[0]]
2983
Ntop = B.top.dim() + 1
2984
summand = (-1)**L * (Ntop*self.exp_xi() + (-1)*self.ONE)
2985
prod_list = []
2986
for i in range(L):
2987
ll = self.bics[p[i]].ell
2988
squish = self.squish(enh_profile, i)
2989
td_NB = ll * self.td_contrib(ll, self.cnb(enh_profile, enh_profile, squish), enh_profile)
2990
prod_list.append(td_NB)
2991
if prod_list:
2992
prod = prod_list[0]
2993
for f in prod_list[1:]:
2994
# multiply with ambient Gamma (=enh_profile)!
2995
prod = self.intersection(prod, f, enh_profile)
2996
const = prod.degree(0)
2997
prod += (-1) * const
2998
summand *= (prod + const*self.taut_from_graph(*enh_profile))
2999
inner.append(summand)
3000
PB.append(self.ELGsum(inner))
3001
return self.ELGsum(PB)
3002
3003
def charToPol(self, ch, upto=None, quiet=True):
3004
"""
3005
Newton's identity to recursively translate the Chern character into the
3006
Chern polynomial.
3007
3008
Args:
3009
ch (ELGTautClass): Chern character
3010
upto (int, optional): Calculate polynomial only up to this degree. Defaults to None (full polynomial).
3011
quiet (bool, optional): No output. Defaults to True.
3012
3013
Returns:
3014
list: Chern polynomial as list of ELGTautClasses (indexed by degree)
3015
"""
3016
if not quiet:
3017
print('Starting charToPol...')
3018
C = ch.list_by_degree()
3019
# throw out factorials:
3020
p = [factorial(k)*c for k, c in enumerate(C)]
3021
# calculate recursively using Newton's identity:
3022
E = [self.ONE]
3023
if upto is None:
3024
upto = self.dim()
3025
for k in range(1, upto + 1):
3026
if not quiet:
3027
print('Calculating c_%r...' % k)
3028
ek = []
3029
for i in range(1, k+1):
3030
ek.append((-1)**(i-1) * E[k-i]*p[i])
3031
E.append(QQ(1)/QQ(k) * self.ELGsum(ek))
3032
return E
3033
3034
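# Illustrative sketch (plain Python, not admcycles code; _sketch_newton_e_from_p is
# hypothetical): the Newton identity used in charToPol,
#     e_k = (1/k) * sum_{i=1}^{k} (-1)^(i-1) * e_{k-i} * p_i,
# where the power sums are recovered from the Chern character via p_i = i! * ch_i.
# Checked against the elementary symmetric functions of the "roots" 1, 2, 3.
from fractions import Fraction

def _sketch_newton_e_from_p(p, upto):
    # p[1..upto] are the power sums (p[0] is unused); returns [e_0, ..., e_upto]
    E = [Fraction(1)]
    for k in range(1, upto + 1):
        ek = sum((-1) ** (i - 1) * E[k - i] * p[i] for i in range(1, k + 1))
        E.append(ek / k)
    return E

_p = [None] + [Fraction(sum(r ** i for r in (1, 2, 3))) for i in range(1, 4)]
# power sums 6, 14, 36  ->  elementary symmetric functions 6, 11, 6
assert _sketch_newton_e_from_p(_p, 3) == [1, 6, 11, 6]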
def top_chern_class_alt(self, quiet=True):
3035
"""
3036
Top Chern class computed directly from the Chern character (via the partition formula).
3037
3038
Args:
3039
quiet (bool, optional): No output. Defaults to True.
3040
3041
Returns:
3042
ELGTautClass: c_top of the tangent bundle of self.
3043
"""
3044
ch = self.ch_E_fast(quiet=quiet).list_by_degree()
3045
top_c = []
3046
N = self.dim()
3047
for p in partitions(N):
3048
l = sum(p.values())
3049
factor = (-1)**(N-l)
3050
# for r, n in enumerate(p.values()):
3051
# factor *= QQ(factorial(r)**n)/QQ(factorial(n))
3052
ch_prod = self.ONE
3053
for i, n in p.items():
3054
factor *= QQ(factorial(i-1)**n)/QQ(factorial(n))
3055
if i == 1:
3056
ch_prod *= self.ch1_pow(n)
3057
else:
3058
ch_prod *= ch[i]**n
3059
top_c.append(factor*ch_prod)
3060
return self.ELGsum(top_c)
3061
3062
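# Illustrative sketch (sympy, not admcycles code; _sketch_c_from_ch is hypothetical):
# the partition formula behind top_chern_class_alt,
#     c_n = sum over partitions of n (parts i, multiplicities m_i) of
#           (-1)^(n - sum m_i) * prod_i (i-1)!^(m_i) / m_i! * prod_i ch_i^(m_i),
# which for small n reproduces c_2 = ch_1^2/2 - ch_2 and c_3 = ch_1^3/6 - ch_1*ch_2 + 2*ch_3.
import sympy
from sympy.utilities.iterables import partitions as _partitions

def _sketch_c_from_ch(n, ch):
    total = 0
    for part in _partitions(n):
        length = sum(part.values())
        term = sympy.Rational((-1) ** (n - length))
        for i, mult in part.items():
            term *= sympy.Rational(sympy.factorial(i - 1) ** mult, sympy.factorial(mult))
            term *= ch[i] ** mult
        total += term
    return sympy.expand(total)

_ch_syms = sympy.symbols('ch0:4')
assert sympy.expand(_sketch_c_from_ch(2, _ch_syms) - _ch_syms[1]**2 / 2 + _ch_syms[2]) == 0
assert sympy.expand(_sketch_c_from_ch(3, _ch_syms) - _ch_syms[1]**3 / 6 + _ch_syms[1]*_ch_syms[2] - 2*_ch_syms[3]) == 0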
def top_chern_class_direct(self, quiet=True):
3063
"""
3064
A direct formula for the top Chern class using only xi_at_level.
3065
3066
Args:
3067
quiet (bool, optional): No output. Defaults to True.
3068
3069
Returns:
3070
ELGTautClass: c_top of the tangent bundle of self.
3071
"""
3072
N = self.dim()
3073
top_c = []
3074
for L in range(N+1):
3075
if not quiet:
3076
print('Going through %r profiles of length %r...' % (len(self.enhanced_profiles_of_length(L)), L))
3077
summand = []
3078
for ep in self.enhanced_profiles_of_length(L):
3079
p, _ = ep
3080
ld = [self.bics[b].ell for b in p]
3081
ld_prod = 1
3082
for l in ld:
3083
ld_prod *= l
3084
inner = []
3085
for K in WeightedIntegerVectors(N-L, [1]*(L+1)):
3086
xi_prod = self.taut_from_graph(*ep)
3087
for i, k in enumerate(K):
3088
xi_prod = self.intersection(xi_prod, self.xi_at_level_pow(i, ep, k), ep)
3089
inner.append((K[0] + 1) * xi_prod)
3090
summand.append(ld_prod * self.ELGsum(inner))
3091
top_c.append(self.ELGsum(summand))
3092
return self.ELGsum(top_c)
3093
3094
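# Illustrative sketch (plain Python, not admcycles code; _sketch_compositions is
# hypothetical): with all weights equal to 1, WeightedIntegerVectors(n, [1]*(L+1))
# runs exactly over the (L+1)-tuples of non-negative integers summing to n, i.e.
# over the exponent vectors (k_0, ..., k_L) of the xi powers on the L+1 levels.
# A stdlib stand-in via stars and bars:
import itertools

def _sketch_compositions(n, parts):
    # all tuples of `parts` non-negative integers with sum n
    for cuts in itertools.combinations(range(n + parts - 1), parts - 1):
        ext = (-1,) + cuts + (n + parts - 1,)
        yield tuple(ext[i + 1] - ext[i] - 1 for i in range(parts))

# stars and bars: C(4 + 3 - 1, 3 - 1) = 15 exponent vectors of length 3 summing to 4
assert len(list(_sketch_compositions(4, 3))) == 15
assert all(sum(K) == 4 and len(K) == 3 for K in _sketch_compositions(4, 3))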
def top_xi_at_level_comparison(self, ep, quiet=False):
3095
"""
3096
Comparison of level-wise computation vs xi_at_level.
3097
3098
Args:
3099
ep (tuple): enhanced profile
3100
quiet (bool, optional): no output. Defaults to False.
3101
3102
Returns:
3103
bool: Should always be True.
3104
3105
EXAMPLES ::
3106
3107
sage: from admcycles.diffstrata import *
3108
sage: X=Stratum((2,))
3109
sage: assert all(X.top_xi_at_level_comparison(ep, quiet=True) for l in range(len(X.lookup_list)) for ep in X.enhanced_profiles_of_length(l))
3110
"""
3111
N = self.dim()
3112
p, _ = ep
3113
L = len(p)
3114
ld = [self.bics[b].ell for b in p]
3115
Nvec = [self.bics[b].top.dim() + 1 for b in p]
3116
Nvec.append(N+1)
3117
ld_prod = 1
3118
for l in ld:
3119
ld_prod *= l
3120
inner = []
3121
xi_prod = self.xi_at_level_pow(0, ep, Nvec[0]-1)
3122
for i in range(1,L+1):
3123
xi_prod = self.intersection(xi_prod, self.xi_at_level_pow(i, ep, Nvec[i]-Nvec[i-1]-1), ep)
3124
xi_at_level_prod = (Nvec[0] * xi_prod).evaluate(quiet=True)
3125
if not quiet:
3126
print("Product of xis at levels: %r" % xi_at_level_prod)
3127
G = self.lookup_graph(*ep)
3128
AG = self.additive_generator(ep)
3129
top_xi_at_level = [(G.level(i).xi_at_level_pow(0,((),0),G.level(i).dim())).evaluate(quiet=True) for i in range(L+1)]
3130
if not quiet:
3131
print(top_xi_at_level)
3132
prod = Nvec[0]
3133
for x in top_xi_at_level:
3134
prod *= x
3135
tot_prod = AG.stack_factor*prod
3136
if not quiet:
3137
print("Stack factor: %r" % AG.stack_factor)
3138
print("Product: %r" % prod)
3139
print("Total product: %r" % tot_prod)
3140
return tot_prod == xi_at_level_prod
3141
3142
def top_xi_at_level(self, ep, level, quiet=True):
3143
"""
3144
Evaluate the top xi power on a level.
3145
3146
Note that this is _not_ computed on self but on the GeneralisedStratum
3147
corresponding to level l of ep (the result is a number!).
3148
3149
Moreover, all results are cached and the cache is synchronised with
3150
the ``TOP_XIS`` cache.
3151
3152
The key for the cache is L.dict_key (where L is the LevelStratum).
3153
3154
Args:
3155
ep (tuple): enhanced profile
3156
level (int): level number of ep
3157
quiet (bool, optional): No output. Defaults to True.
3158
3159
Returns:
3160
QQ: integral of the top xi power against level l of ep.
3161
3162
EXAMPLES ::
3163
3164
sage: from admcycles.diffstrata import *
3165
sage: X=Stratum((2,))
3166
sage: X.top_xi_at_level(((),0), 0)
3167
-1/640
3168
"""
3169
G = self.lookup_graph(*ep)
3170
L = G.level(level)
3171
key = L.dict_key()
3172
cache = TOP_XIS
3173
if key not in cache:
3174
N = L.dim()
3175
if not quiet:
3176
print('(calc)', end=' ')
3177
sys.stdout.flush()
3178
top_xi = L.xi_at_level_pow(0, ((),0), N)
3179
cache[key] = top_xi.evaluate(quiet=True)
3180
else:
3181
if not quiet:
3182
print ('(cache)', end=' ')
3183
sys.stdout.flush()
3184
if not quiet:
3185
print(cache[key], end=' ')
3186
sys.stdout.flush()
3187
assert QQ(cache[key]) == cache[key]
3188
return cache[key]
3189
3190
def euler_char_immediate_evaluation(self, quiet=True):
3191
"""
3192
Calculate the (Orbifold) Euler characteristic of self by evaluating top xi
3193
powers on levels.
3194
3195
This is (by far) the fastest way of computing Euler characteristics.
3196
3197
Note that only combinatorial information about the degeneration graph
of self (enhanced_profiles_of_length) and the values of top_xi_at_level
are used; the latter are cached and synchronised with the ``TOP_XIS`` cache.
3200
3201
Args:
3202
quiet (bool, optional): No output. Defaults to True.
3203
3204
Returns:
3205
QQ: (Orbifold) Euler characteristic of self.
3206
3207
EXAMPLES ::
3208
3209
sage: from admcycles.diffstrata import *
3210
sage: X=Stratum((2,))
3211
sage: X.euler_char_immediate_evaluation()
3212
-1/40
3213
"""
3214
N = self.dim()
3215
ec = 0
3216
for L in range(N+1):
3217
if not quiet:
3218
total=len(self.enhanced_profiles_of_length(L, quiet=False))
3219
print('Going through %r profiles of length %r...' % (total, L))
3220
for i, ep in enumerate(self.enhanced_profiles_of_length(L)):
3221
if not quiet:
3222
print('%r / %r, %r:' % (i+1, total, ep), end=' ')
3223
sys.stdout.flush()
3224
p, _ = ep
3225
ld = [self.bics[b].ell for b in p]
3226
if p:
3227
NGammaTop = self.bics[p[0]].top.dim() + 1
3228
else:
3229
NGammaTop = N + 1
3230
ld_prod = 1
3231
for l in ld:
3232
ld_prod *= l
3233
AG = self.additive_generator(ep)
3234
prod = ld_prod * NGammaTop * AG.stack_factor
3235
if not quiet:
3236
print("Calculating xi at", end=' ')
3237
sys.stdout.flush()
3238
for i in range(L+1):
3239
if not quiet:
3240
print('level %r' % i, end=' ')
3241
sys.stdout.flush()
3242
prod *= self.top_xi_at_level(ep, i, quiet=quiet)
3243
if prod == 0:
3244
if not quiet:
3245
print("Product 0.", end=' ')
3246
sys.stdout.flush()
3247
break
3248
if not quiet:
3249
print('Done.')
3250
sys.stdout.flush()
3251
ec += prod
3252
return (-1)**N * ec
3253
3254
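# Illustrative sketch (plain Python, not admcycles code; everything below is a made-up
# toy, not data of an actual stratum): the shape of the sum accumulated above.  Each
# enhanced profile contributes the product of the ell's of its BICs, the number
# N_Gamma^top (top.dim() + 1 of its first BIC, resp. dim + 1 for the empty profile),
# its stack factor and the top xi integrals of all its levels; the Euler
# characteristic is (-1)^dim times the total.
from fractions import Fraction

def _sketch_euler_char(dim, contributions):
    total = Fraction(0)
    for rec in contributions:
        prod = Fraction(rec['N_top']) * rec['stack_factor']
        for ell in rec['ells']:
            prod *= ell
        for xi_val in rec['top_xi_per_level']:
            prod *= xi_val
            if prod == 0:
                break  # as above: a vanishing factor kills the whole contribution
        total += prod
    return (-1) ** dim * total

_toy_contribs = [{'ells': [], 'N_top': 2, 'stack_factor': Fraction(1),
                  'top_xi_per_level': [Fraction(-1, 640)]}]
assert _sketch_euler_char(1, _toy_contribs) == Fraction(1, 320)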
def euler_characteristic(self):
3255
"""
3256
Calculate the (Orbifold) Euler characteristic of self by evaluating top xi
3257
powers on levels. See also euler_char_immediate_evaluation.
3258
3259
Returns:
3260
QQ: (Orbifold) Euler characteristic of self.
3261
3262
EXAMPLES ::
3263
3264
sage: from admcycles.diffstrata import *
3265
sage: X=Stratum((2,))
3266
sage: X.euler_characteristic()
3267
-1/40
3268
"""
3269
return self.euler_char_immediate_evaluation()
3270
3271
def euler_char(self,quiet=True, alg='direct'):
3272
"""
3273
Calculate the (Orbifold) Euler characteristic of self by computing the top
3274
Chern class and evaluating this.
3275
3276
Note that this is significantly slower than using self.euler_characteristic!
3277
3278
The optional keyword argument alg determines how the top Chern class
3279
is computed and can be either:
3280
* direct (default): using top_chern_class_direct
3281
* alt: using top_chern_class_alt
3282
* other: using top_chern_class
3283
3284
Args:
3285
quiet (bool, optional): no output. Defaults to True.
3286
alg (str, optional): algorithm (see above). Defaults to 'direct'.
3287
3288
Returns:
3289
QQ: (Orbifold) Euler characteristic of self.
3290
3291
EXAMPLES ::
3292
3293
sage: from admcycles.diffstrata import *
3294
sage: X=Stratum((2,))
3295
sage: X.euler_char()
3296
-1/40
3297
sage: X.euler_char(alg='alt')
3298
-1/40
3299
sage: X.euler_char(alg='other')
3300
-1/40
3301
"""
3302
if alg == 'direct':
3303
tcc = self.top_chern_class_direct(quiet=quiet)
3304
elif alg == 'alt':
3305
tcc = self.top_chern_class_alt(quiet=quiet)
3306
else:
3307
tcc = self.top_chern_class(quiet=quiet, alg=alg)
3308
if not quiet:
3309
print('Evaluating...')
3310
return (-1)**self.dim() * tcc.evaluate(quiet=True)
3311
3312
def top_chern_class(self, inside=True, prod=True, top=False, quiet=True, alg='fast'):
3313
"""
3314
Compute the top Chern class from the Chern polynomial via the Chern character.
3315
3316
This uses chern_poly.
3317
3318
Args:
3319
inside (bool, optional): passed to chern_poly. Defaults to True.
3320
prod (bool, optional): passed to chern_poly. Defaults to True.
3321
top (bool, optional): passed to chern_poly. Defaults to False.
3322
quiet (bool, optional): passed to chern_poly. Defaults to True.
3323
alg (str, optional): passed to chern_poly. Defaults to 'fast'.
3324
3325
Returns:
3326
ELGTautClass: c_top(T) of self.
3327
"""
3328
return self.chern_poly(inside=inside, prod=prod, top=top, quiet=quiet, alg=alg)[-1]
3329
3330
def chern_poly(self, inside=True, prod=True, top=False, quiet=True, alg='fast', upto=None):
3331
"""
3332
The Chern polynomial calculated from the Chern character.
3333
3334
The optional keyword argument alg determines how the Chern character
3335
is computed and can be either:
3336
* fast (default): use ch_E_fast
3337
* bic_prod: use ch_E_prod
3338
* other: use ch_E
3339
3340
Args:
3341
inside (bool, optional): passed to ch_E. Defaults to True.
3342
prod (bool, optional): passed to ch_E. Defaults to True.
3343
top (bool, optional): passed to ch_E. Defaults to False.
3344
quiet (bool, optional): no output. Defaults to True.
3345
alg (str, optional): algorithm used (see above). Defaults to 'fast'.
3346
upto (int, optional): highest degree of polynomial to calculate. Defaults to None (i.e. dim, so the whole polynomial is computed).
3347
3348
Returns:
3349
list: Chern polynomial as list of ELGTautClasses (indexed by degree)
3350
"""
3351
if alg == 'bic_prod':
3352
ch = self.ch_E_prod(quiet=quiet)
3353
elif alg == 'fast':
3354
ch = self.ch_E_fast(quiet=quiet)
3355
else:
3356
ch = self.ch_E(inside=inside, prod=prod, top=top, quiet=quiet)
3357
return self.charToPol(ch, quiet=quiet, upto=upto)
3358
3359
def chern_class(self, n, quiet=True):
3360
"""
3361
A direct formula for the n-th Chern class of the tangent bundle of self.
3362
3363
Args:
3364
n (int): degree
3365
quiet (bool, optional): No output. Defaults to True.
3366
3367
Returns:
3368
ELGTautClass: c_n(T) of self.
3369
"""
3370
N = self.dim() + 1
3371
c_n = []
3372
for L in range(N):
3373
if not quiet:
3374
print('Going through %r profiles of length %r...' % (len(self.enhanced_profiles_of_length(L)), L))
3375
summand = []
3376
for ep in self.enhanced_profiles_of_length(L):
3377
if not quiet:
3378
print("Profile: %r" % (ep,), end=' ')
3379
p, _ = ep
3380
delta = [self.bics[b] for b in p]
3381
ld = [B.ell for B in delta]
3382
Nd = [B.top.dim() + 1 for B in delta]
3383
ld_prod = 1
3384
for l in ld:
3385
ld_prod *= l
3386
inner = []
3387
for K in WeightedIntegerVectors(n-L, [1]*(L+1)):
3388
if not quiet:
3389
print('xi coefficient: k_0:', K[0], end=' ')
3390
print('N-L-sum:', N-L-sum(K[1:]), end=' ')
3391
print('Binomial:', binomial(N-L-sum(K[1:]), K[0]))
3392
factor = binomial(N-L-sum(K[1:]), K[0])
3393
prod = self.xi_at_level_pow(0, ep, K[0])
3394
for i, k in list(enumerate(K))[1:]:
3395
if not quiet:
3396
print('k_%r: %r' % (i, k), end=' ')
3397
print('r_Gamma,i:', (N-Nd[i-1]), end=' ')
3398
print('L-i: %r, sum: %r' % (L-i, sum(K[i+1:])), end=' ')
3399
print('Binomial:', binomial((N-Nd[i-1]) - (L-i) - sum(K[i+1:]), k+1))
3400
factor *= binomial((N-Nd[i-1]) - (L-i) - sum(K[i+1:]), k+1)
3401
squish = self.squish(ep, i-1)
3402
X_pow = self.pow(ld[i-1] * self.cnb(ep, ep, squish), k, ep)
3403
prod = self.intersection(prod, X_pow, ep)
3404
inner.append(factor * prod)
3405
summand.append(ld_prod * self.ELGsum(inner))
3406
c_n.append(self.ELGsum(summand))
3407
return self.ELGsum(c_n)
3408
3409
def ch_E_prod(self,quiet=True):
3410
"""
3411
The product version of the Chern character formula.
3412
3413
Args:
3414
quiet (bool, optional): No output. Defaults to True.
3415
3416
Returns:
3417
ELGTautClass: Chern character of the tangent bundle.
3418
"""
3419
N = QQ(self.dim() + 1)
3420
ch_E = [N*self.ONE]
3421
for L, profiles in enumerate(self.lookup_list):
3422
if not quiet:
3423
print('Going through %r profiles of length %r...' % (len(profiles), L))
3424
summand = []
3425
for p in profiles:
3426
if not p:
3427
continue
3428
Nd = self.bics[p[-1]].top.dim() + 1
3429
if N == Nd: # factor == 0
3430
continue
3431
factor = (N - Nd)
3432
bic_prod = self.ONE
3433
for Di in p:
3434
bic_prod *= self.exp_bic(Di)
3435
summand.append(factor*bic_prod)
3436
ch_E.append(self.ELGsum(summand))
3437
return self.exp_xi(quiet=quiet) * self.ELGsum(ch_E)
3438
3439
def ch_E_fast(self,quiet=True):
3440
"""
3441
A more direct (and faster) formula for the Chern character (see sec 9 of the paper).
3442
3443
Args:
3444
quiet (bool, optional): No output. Defaults to True.
3445
3446
Returns:
3447
ELGTautClass: Chern character of the tangent bundle.
3448
"""
3449
N = QQ(self.dim() + 1)
3450
ch_E = [N*self.exp_xi(quiet=quiet)]
3451
for L in range(1, N):
3452
if not quiet:
3453
print('Going through %r profiles of length %r...' % (len(self.enhanced_profiles_of_length(L)), L))
3454
summand = []
3455
for ep in self.enhanced_profiles_of_length(L):
3456
p, _ = ep
3457
ld = [self.bics[b].ell for b in p]
3458
Nd = self.bics[p[-1]].top.dim() + 1
3459
if N == Nd: # factor == 0
3460
continue
3461
ld_prod = 1
3462
for l in ld:
3463
ld_prod *= l
3464
factor = ld_prod * (N - Nd)
3465
td_prod = self.ONE
3466
for i in range(L):
3467
td_prod = self.intersection(td_prod, self.td_contrib(-ld[i], self.cnb(ep, ep, self.squish(ep, i)), ep), ep)
3468
exi = self.exp(self.xi_at_level(0, ep), ep)
3469
pr = self.intersection(exi, td_prod, ep)
3470
summand.append(factor*pr)
3471
ch_E.append(self.ELGsum(summand))
3472
return self.ELGsum(ch_E)
3473
3474
def top_chern_alt(self):
3475
"""
3476
The top Chern class of self, computed by calculating the Chern polynomial
from the Chern character P_B * exp(L) and taking the top-degree part.
3478
3479
Returns:
3480
ELGTautClass: top Chern class of the tangent bundle.
3481
"""
3482
return self.charToPol(self.P_B*self.exp_L())[-1]
3483
3484
def first_term(self, top=False, quiet=True):
3485
"""
3486
The calculation of (N*self.exp_xi() - self.ONE)*self.exp_L() split into
3487
pieces with more debugging outputs (calculation can take a LONG time!)
3488
3489
Args:
3490
top (bool, optional): Do calculations on level. Defaults to False.
3491
quiet (bool, optional): No output. Defaults to True.
3492
3493
Returns:
3494
ELGTautClass: First term of ch.
3495
"""
3496
if not quiet:
3497
print('Calculating first term...')
3498
N = QQ(self.dim() + 1)
3499
BICs = []
3500
for i, B in enumerate(self.bics):
3501
BICs.append((B.ell, self.additive_generator(((i,),0))))
3502
L = ELGTautClass(self, BICs, reduce=False)
3503
if top:
3504
if not quiet:
3505
print('Calculating exp_xi_L...')
3506
exp_xi_L = self.ELGsum([N*B.ell*self.exp(self.xi_at_level(0,((i,),0)), ((i,),0),quiet=quiet) for i, B in enumerate(self.bics)] + [(-1)*L])
3507
last = exp_xi_L
3508
if not quiet:
3509
print('Calculating recursive exponential factors: ', end=' ')
3510
for k in range(1, N-1):
3511
if not quiet:
3512
print(k, end=' ')
3513
last = QQ(1)/QQ(k+1) * L * last
3514
if last == self.ZERO:
3515
break
3516
exp_xi_L._psi_list.extend(last.psi_list)
3517
if not quiet:
3518
print('Done!')
3519
print('Adding exp_xi...')
3520
res = self.ELGsum([N*self.exp_xi(quiet=quiet), -self.ONE, exp_xi_L])
3521
else:
3522
if not quiet:
3523
print('Calculating exp(xi+L)...')
3524
res = N * self.exp(self.xi + L, quiet=quiet)
3525
if not quiet:
3526
print('Subtracting exp_L...')
3527
res -= self.exp_L(quiet=quiet)
3528
if not quiet:
3529
print('Done calculating first term!')
3530
return res
3531
3532
def ch_E(self, inside=True, prod=True, top=False, quiet=True):
3533
"""
3534
The Chern character (according to sec. 9 of the paper)
3535
3536
Args:
3537
inside (bool, optional): work with ambient. Defaults to True.
3538
prod (bool, optional): product instead of sum. Defaults to True.
3539
top (bool, optional): work on level. Defaults to False.
3540
quiet (bool, optional): no output. Defaults to True.
3541
3542
Returns:
3543
ELGTautClass: Chern character of the tangent bundle of self.
3544
"""
3545
# Prop. 9.2
3546
N = QQ(self.dim() + 1)
3547
# ch = [(N*self.exp_xi() + (-1)*self.ONE)*self.exp_L()]
3548
ch = [self.first_term(top=top,quiet=quiet)]
3549
for L in range(1,N):
3550
inner = []
3551
if not quiet:
3552
print('Going through profiles of length %r...' % L)
3553
for enh_profile in self.enhanced_profiles_of_length(L):
3554
p, _ = enh_profile
3555
B = self.bics[p[0]]
3556
Ntop = B.top.dim() + 1
3557
if not inside:
3558
summand = (-1)**L * (Ntop*self.exp_xi() - self.ONE)
3559
else:
3560
if not quiet:
3561
print('Calculating inner exp(xi): ', end=' ')
3562
summand = (-1)**L * (Ntop*self.exp(self.xi_at_level(0, enh_profile), enh_profile, quiet=quiet) - self.taut_from_graph(*enh_profile))
3563
prod_list = []
3564
for i in range(L):
3565
ll = self.bics[p[i]].ell
3566
squish = self.squish(enh_profile, i)
3567
td_NB = ll * self.td_contrib(-ll, self.cnb(enh_profile, enh_profile, squish), enh_profile)
3568
prod_list.append(td_NB)
3569
if prod_list:
3570
prod = prod_list[0]
3571
for f in prod_list[1:]:
3572
# multiply with ambient Gamma (=enh_profile)!
3573
prod = self.intersection(prod, f, enh_profile)
3574
if prod:
3575
for l in range(len(p) + 1):
3576
prod = self.intersection(prod,\
3577
self.exp(self.calL(enh_profile, l), enh_profile),\
3578
enh_profile)
3579
else:
3580
prod = self.intersection(prod,\
3581
self.exp(
3582
self.ELGsum(self.calL(enh_profile, l) for l in range(len(p)+1)),\
3583
enh_profile),
3584
enh_profile)
3585
if inside:
3586
prod = self.intersection(prod, summand, enh_profile)
3587
# multiply constant term with Gamma (for i_*)
3588
const = prod.degree(0)
3589
prod += (-1) * const
3590
if inside:
3591
summand = prod
3592
else:
3593
summand *= (prod + const*self.taut_from_graph(*enh_profile))
3594
inner.append(summand)
3595
ch.append(self.ELGsum(inner))
3596
return self.ELGsum(ch)
3597
3598
################################################################
3599
############ END OF SEC 9 FORMULAS #############################
3600
################################################################
3601
3602
def res_stratum_class(self,cond,debug=False):
3603
"""
3604
The class of the stratum cut out by cond inside self.
3605
3606
Args:
3607
cond (list): a residue condition, i.e. a list of poles of self.
3608
3609
Returns:
3610
ELGTautClass: Tautological class of Prop. 9.3
3611
3612
EXAMPLES ::
3613
3614
3615
"""
3616
st_class = -1 * self.xi_with_leg(quiet=True)
3617
bic_list = []
3618
if debug:
3619
print("Calculating the class of the stratum cut out by %r in %r..." % (cond,self))
3620
print("-xi = %s" % st_class)
3621
for i, B in enumerate(self.bics):
3622
if debug:
3623
print("Checking BIC %r:" % i)
3624
top = B.top
3625
# we restrict/translate cond to top:
3626
poles_on_bic = [B.dmp_inv[p] for p in cond]
3627
cond_on_top = [top.leg_dict[leg] for leg in poles_on_bic if leg in top.leg_dict]
3628
# if there are RCs on top, we must check that they don't change the rank
3629
if cond_on_top:
3630
MT = top.matrix_from_res_conditions([cond_on_top])
3631
top_G = top.smooth_LG
3632
RT = top_G.full_residue_matrix
3633
if (MT.stack(RT)).rank() != RT.rank():
3634
assert (MT.stack(RT)).rank() > RT.rank()
3635
if debug:
3636
print("Discarding (because of top).")
3637
continue
3638
l = B.ell
3639
if debug:
3640
print("Appending with coefficient -%r" % l)
3641
bic_list.append((l,i))
3642
st_class += self.ELGsum([-l*self.taut_from_graph((i,),0) for l, i in bic_list])
3643
return st_class
3644
3645
def adm_evaluate(self,stgraph,psis,sig,g,quiet=False,admcycles_output=False):
3646
"""
3647
Evaluate the psi monomial on a (connected) stratum without residue conditions
3648
using admcycles.
3649
3650
stgraph should be the one-vertex graph associated to the stratum sig.
3651
3652
We use admcycles Strataclass to calculate the class of the stratum inside
3653
Mbar_{g,n} and multiply this with psis (in admcycles) and evaluate the product.
3654
3655
The result is cached and synchronised with the ``ADM_EVALS`` cache.
3656
3657
Args:
3658
stgraph (stgraph): admcycles stgraph
3659
psis (dict): psi polynomial on stgraph
3660
sig (tuple): signature tuple
3661
g (int): genus of sig
3662
quiet (bool, optional): No output. Defaults to False.
3663
admcycles_output (bool, optional): Print the admcycles classes. Defaults to False.
3664
3665
Returns:
3666
QQ: integral of psis on stgraph.
3667
"""
3668
# key = (tuple(sorted(psis.items())), tuple(sig))
3669
key = adm_key(sig, psis)
3670
cache = ADM_EVALS
3671
if key not in cache:
3672
DS = admcycles.admcycles.decstratum(stgraph,psi=psis)
3673
Stratum_class = admcycles.stratarecursion.Strataclass(g,1,sig)
3674
if not quiet or admcycles_output:
3675
print("DS: %r\n Stratum_class: %r" % (DS,Stratum_class))
3676
product = Stratum_class*DS # in admcycles!
3677
if not quiet or admcycles_output:
3678
print("Product: %r" % product.evaluate())
3679
cache[key] = product.evaluate() # in admcycles!
3680
return cache[key]
3681
3682
def remove_res_cond(self, psis=None):
3683
"""
3684
Remove residue conditions until the rank drops (or there are none left).
3685
3686
We return a tautological class on the stratum with fewer residue
conditions; if the rank dropped, it is multiplied by the class of the
stratum cut out by the removed condition.
3688
3689
Note that this does *not* ensure that all residue conditions are removed!
3690
3691
Args:
3692
psis (dict, optional): Psi dictionary on self. Defaults to None.
3693
3694
Returns:
3695
ELGTautClass: ELGTautClass on the stratum with fewer residue conditions
3696
(or self if there were none!)
3697
3698
EXAMPLES ::
3699
3700
sage: from admcycles.diffstrata import *
3701
sage: X=GeneralisedStratum([Signature((1,1,-2,-2))], res_cond=[[(0,2)], [(0,3)]])
3702
sage: print(X.remove_res_cond())
3703
Tautological class on Stratum: Signature((1, 1, -2, -2))
3704
with residue conditions:
3705
dimension: 1
3706
leg dictionary: {}
3707
<BLANKLINE>
3708
1 * Psi class 3 with exponent 1 on level 0 * Graph ((), 0) +
3709
<BLANKLINE>
3710
sage: X.evaluate(quiet=True) == X.remove_res_cond().evaluate()
3711
True
3712
"""
3713
if psis is None:
3714
psis = {}
3715
3716
if not self.res_cond:
3717
return self.additive_generator(((),0), psis).as_taut()
3718
3719
try:
3720
new_leg_dict = deepcopy(self._leg_dict)
3721
except AttributeError:
3722
new_leg_dict = {}
3723
3724
# Create new stratum with one residue condition less:
3725
new_rc = deepcopy(self._res_cond)
3726
# conditions from RT:
3727
RT_M = self.smooth_LG.residue_matrix_from_RT
3728
# we remove conditions until the rank drops:
3729
while new_rc:
3730
lost_cond = new_rc.pop()
3731
new_M = self.matrix_from_res_conditions(new_rc)
3732
if new_M:
3733
full_M = new_M.stack(RT_M)
3734
else:
3735
full_M = RT_M
3736
if full_M.rank() == self.smooth_LG.full_residue_matrix.rank() - 1:
3737
# rank dropped
3738
break
3739
new_stratum = LevelStratum(self._sig_list,new_rc,new_leg_dict)
3740
# Because only the RCs changed, X.smooth_LG still lives inside this stratum
3741
# so we can use it to build our new AdditiveGenerator:
3742
new_AG = new_stratum.additive_generator(((),0), psis)
3743
if new_stratum.dim() == self.dim() + 1:
3744
new_class = new_AG.as_taut()*new_stratum.res_stratum_class(lost_cond)
3745
else:
3746
# rank did not drop so all residue conditions are gone:
3747
assert not new_rc
3748
new_class = new_AG.as_taut()
3749
3750
return new_class
3751
3752
3753
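# Illustrative sketch (sympy, not admcycles code; _sketch_drop_conditions is
# hypothetical): the "remove conditions until the rank drops" loop above, phrased for
# plain matrices.  Conditions are rows stacked onto a base matrix; redundant rows are
# discarded until removing one actually lowers the rank (that row is then the
# condition whose stratum class has to be multiplied in).
from sympy import Matrix as _SymMatrix

def _sketch_drop_conditions(base_rows, cond_rows):
    full_rank = _SymMatrix(base_rows + cond_rows).rank()
    remaining = list(cond_rows)
    while remaining:
        lost = remaining.pop()
        if _SymMatrix(base_rows + remaining).rank() == full_rank - 1:
            return remaining, lost  # rank dropped: `lost` was essential
    return remaining, None  # every condition was implied by the base rows

_base = [[1, 0, 0]]                   # stand-in for the residue theorem rows
_conds = [[1, 1, 0], [2, 0, 0]]       # the second condition is implied by the base
_remaining, _lost = _sketch_drop_conditions(_base, _conds)
assert _lost == [1, 1, 0] and _remaining == []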
def zeroStratumClass(self):
3754
"""
3755
Check if self splits, i.e. if a subset of vertices can be scaled
3756
independently (then the stratum class is ZERO).
3757
3758
We do this by checking if BICs B, B' exist with:
3759
* no edges
3760
* the top vertices of B are the bottom vertices of B'
* the bottom vertices of B are the top vertices of B'.
3762
3763
Explicitly, we loop through all BICs with no edges, constructing for
3764
each one the BIC with the levels interchanged (as an EmbeddedLevelGraph)
3765
and check its legality.
3766
3767
Returns:
3768
boolean: True if splitting exists, False otherwise.
3769
3770
EXAMPLES ::
3771
3772
sage: from admcycles.diffstrata import *
3773
sage: GeneralisedStratum([Signature((0,)),Signature((0,))]).zeroStratumClass()
3774
True
3775
sage: GeneralisedStratum([Signature((2,))]).zeroStratumClass()
3776
False
3777
sage: GeneralisedStratum([Signature((4,-2,-2,-2)),Signature((4,-2,-2,-2))], res_cond=[[(0,2),(1,2)]]).zeroStratumClass()
3778
True
3779
sage: GeneralisedStratum([Signature((2, -2, -2)), Signature((1, 1, -2, -2))],[[(0, 2), (1, 2)], [(0, 1), (1, 3)]]).zeroStratumClass()
3780
False
3781
"""
3782
bics_no_edges = [b for b in self.bics if not b.LG.edges]
3783
if not bics_no_edges:
3784
return False
3785
for b in bics_no_edges:
3786
internal_top = b.LG.internal_level_number(0)
3787
internal_bot = b.LG.internal_level_number(1)
3788
top_vertices = b.LG.verticesonlevel(internal_top)
3789
bot_vertices = b.LG.verticesonlevel(internal_bot)
3790
assert len(top_vertices) + len(bot_vertices) == len(b.LG.genera)
3791
# build graph levels exchanged:
3792
new_levels = [internal_bot if v in top_vertices else internal_top
3793
for v in range(len(b.LG.genera))]
3794
new_vertices = deepcopy(b.LG.genera)
3795
new_legs = deepcopy(b.LG.legs)
3796
new_edges = []
3797
new_poleorders = deepcopy(b.LG.poleorders)
3798
new_LG = admcycles.diffstrata.levelgraph.LevelGraph(new_vertices,new_legs,new_edges,new_poleorders,new_levels)
3799
new_ELG = EmbeddedLevelGraph(self, new_LG, deepcopy(b.dmp), deepcopy(b.dlevels))
3800
# check if new graph is legal:
3801
if new_ELG.is_legal():
3802
return True
3803
# no splitting found
3804
return False
3805
3806
def evaluate(self,psis={},quiet=False,warnings_only=False,admcycles_output=False):
3807
"""
3808
Evaluate the psi monomial psis on self.
3809
3810
Psis is a dictionary (legs of self.smooth_LG -> exponents) encoding a psi monomial.
3811
3812
We translate residue conditions of self into intersections of simpler classes
3813
and feed the final pieces into admcycles for actual evaluation.
3814
3815
Args:
3816
psis (dict, optional): Psi monomial (as legs of smooth_LG -> exponent). Defaults to {}.
3817
quiet (bool, optional): No output. Defaults to False.
3818
warnings_only (bool, optional): Only warnings. Defaults to False.
3819
admcycles_output (bool, optional): adm_eval output. Defaults to False.
3820
3821
Raises:
3822
RuntimeError: raised if a required residue condition is not found.
3823
3824
Returns:
3825
QQ: integral of psis against self.
3826
"""
3827
G = self.smooth_LG
3828
LG = G.LG
3829
# Check if the rGRC doesn't cut down the dimension:
3830
# Recall:
3831
# * residue_matrix_from_RT has the RT on each component of G as rows
3832
# * full_residue_matrix is this + the res_cond of self
3833
if G.full_residue_matrix.rank() == G.residue_matrix_from_RT.rank():
3834
if self._h0 > 1:
3835
if not quiet or warnings_only:
3836
print("----------------------------------------------------")
3837
print("Level %r disconnected." % self)
3838
print("----------------------------------------------------")
3839
print("No residue conditions: contribution is 0.")
3840
return 0
3841
# stratum is connected!
3842
# 0 dimensional strata contribute 1
3843
if self.dim() == 0:
3844
return 1
3845
# We can just use admcycles to evaluate:
3846
return self.adm_evaluate(LG.stgraph,psis,self._sig_list[0].sig,LG.g(),quiet=quiet,admcycles_output=admcycles_output)
3847
# There *are* non-trivial residue conditions!
3848
if self._h0 > 1:
3849
if not quiet or warnings_only:
3850
print("----------------------------------------------------")
3851
print("Level %r disconnected." % self)
3852
print("----------------------------------------------------")
3853
# Check if graph of residue conditions is disconnected:
3854
if not LG.underlying_graph.is_connected():
3855
if not quiet or warnings_only:
3856
print("Level is product: contribution is 0.")
3857
return 0
3858
# Create new stratum with one residue condition less:
3859
new_rc = deepcopy(self._res_cond)
3860
# conditions from RT:
3861
RT_M = G.residue_matrix_from_RT
3862
# we remove conditions until the rank drops:
3863
while new_rc:
3864
lost_cond = new_rc.pop()
3865
new_M = self.matrix_from_res_conditions(new_rc)
3866
if new_M:
3867
full_M = new_M.stack(RT_M)
3868
else:
3869
full_M = RT_M
3870
if full_M.rank() == G.full_residue_matrix.rank() - 1:
3871
# rank dropped
3872
break
3873
else:
3874
raise RuntimeError("No Conditions cause dimension to drop in %r!" % self._res_cond)
3875
try:
3876
new_leg_dict = deepcopy(self._leg_dict)
3877
except AttributeError:
3878
new_leg_dict = {}
3879
new_stratum = LevelStratum(self._sig_list,new_rc,new_leg_dict)
3880
assert new_stratum.dim() == self.dim() + 1
3881
# Because only the RCs changed, G still lives inside this stratum
3882
# so we can use it to build our new AdditiveGenerator:
3883
new_AG = new_stratum.additive_generator(((),0),psis)
3884
new_class = new_AG.as_taut()*new_stratum.res_stratum_class(lost_cond)
3885
result = new_class.evaluate(quiet=quiet)
3886
return result
3887
3888
#################################################################
3889
#################################################################
3890
#################################################################
3891
#################################################################
3892
3893
class Stratum(GeneralisedStratum):
3894
"""
3895
A simpler frontend for a GeneralisedStratum with one component and
3896
no residue conditions.
3897
"""
3898
def __init__(self,sig):
3899
super(Stratum, self).__init__([admcycles.diffstrata.sig.Signature(sig)])
3900
3901
#################################################################
3902
#################################################################
3903
#################################################################
3904
#################################################################
3905
3906
class LevelStratum(GeneralisedStratum):
3907
"""
3908
A stratum that appears as a level of a levelgraph.
3909
3910
This is a GeneralisedStratum together with a dictionary mapping the
3911
leg numbers of the (big) graph to the legs of the GeneralisedStratum.
3912
3913
Note that if this is initialised from an EmbeddedLevelGraph, we also
3914
have the attribute leg_orbits, a nested list giving the orbits of
3915
the points under the automorphism group of the graph.
3916
3917
* leg_dict : a (bijective!) dictionary mapping the leg numbers of a graph
3918
to the corresponding tuple (i,j), i.e. the point j on the component i.
3919
3920
* res_cond : a (nested) list of residue conditions given by the r-GRC when
3921
extracting a level.
3922
3923
"""
3924
def __init__(self,sig_list,res_cond=None,leg_dict=None):
3925
super(LevelStratum,self).__init__(sig_list,res_cond)
3926
if leg_dict is None:
3927
# assume the points were numbered 1...n
3928
self._leg_dict = {}
3929
for i in range(len(sig_list)):
3930
for j in range(sig_list[i].n):
3931
self._leg_dict[i+j+1] = (i,j)
3932
else:
3933
self._leg_dict = leg_dict
3934
# build inverse dictionary
3935
self._inv_leg_dict = dict([(v,k) for k,v in self._leg_dict.items()])
3936
3937
def __repr__(self):
3938
return "LevelStratum(sig_list=%r,res_cond=%r,leg_dict=%r)" % (self._sig_list,self._res_cond,self.leg_dict)
3939
def __str__(self):
3940
rep = ''
3941
if self._h0 > 1:
3942
rep += 'Product of Strata:\n'
3943
else:
3944
rep += 'Stratum: '
3945
for sig in self._sig_list:
3946
rep += repr(sig) + '\n'
3947
rep += 'with residue conditions: '
3948
for res in self._res_cond:
3949
rep += repr(res) + ' '
3950
rep += '\n'
3951
rep += 'dimension: ' + repr(self.dim()) + '\n'
3952
rep += 'leg dictionary: ' + repr(self._leg_dict) + '\n'
3953
try:
3954
rep += 'leg orbits: ' + repr(self.leg_orbits) + '\n'
3955
except AttributeError:
3956
pass
3957
return rep
3958
3959
@cached_method
3960
def dict_key(self):
3961
"""
3962
The hash-key for the cache of top-xi-powers.
3963
3964
More precisely, we sort each signature, sort this list and renumber
3965
the residue conditions accordingly. Finally, everything is made into a tuple.
3966
3967
Returns:
3968
tuple: nested tuple.
3969
"""
3970
rc_dict = {}
3971
sig = []
3972
for new_i, new_sign in enumerate(sorted(enumerate(self._sig_list), key=lambda k: k[1].sig)):
3973
i, sign = new_sign
3974
curr_sig = []
3975
for new_j, s in enumerate(sorted(enumerate(sign.sig), key=lambda k: k[1])):
3976
j, a = s
3977
curr_sig.append(a)
3978
rc_dict[(i,j)] = (new_i, new_j)
3979
sig.append(tuple(curr_sig))
3980
sig = tuple(sig)
3981
rc = sorted([sorted([rc_dict[cond] for cond in conds]) for conds in self._res_cond])
3982
rc = tuple(tuple(c) for c in rc)
3983
return (sig, rc)
3984
3985
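# Illustrative sketch (plain Python, not admcycles code; _sketch_dict_key is
# hypothetical): the canonicalisation performed by dict_key, written for plain
# tuples instead of Signature objects.  Signatures are sorted internally and among
# each other and the residue conditions are renumbered along the way, so that e.g.
# relabelling the components of a stratum produces the same cache key.
def _sketch_dict_key(sig_list, res_cond):
    rc_dict = {}
    sig = []
    for new_i, (i, sign) in enumerate(sorted(enumerate(sig_list), key=lambda k: k[1])):
        curr = []
        for new_j, (j, a) in enumerate(sorted(enumerate(sign), key=lambda k: k[1])):
            curr.append(a)
            rc_dict[(i, j)] = (new_i, new_j)
        sig.append(tuple(curr))
    rc = sorted(sorted(rc_dict[c] for c in cond) for cond in res_cond)
    return tuple(sig), tuple(tuple(c) for c in rc)

# the same two-component stratum, once with the components swapped:
_key1 = _sketch_dict_key([(1, -3), (0,)], [[(0, 1)]])
_key2 = _sketch_dict_key([(0,), (1, -3)], [[(1, 1)]])
assert _key1 == _key2 == (((0,), (-3, 1)), (((1, 0),),))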
@property
3986
def leg_dict(self):
3987
return self._leg_dict
3988
3989
@property
3990
def inv_leg_dict(self):
3991
return self._inv_leg_dict
3992
3993
## Psi classes are numbered according to the points of the stratum, but we want
## to use them for the points of the graph. The leg_dicts translate between these;
## we make this a little more user-friendly here.
3996
def stratum_number(self,n):
3997
"""
3998
Returns a tuple (i,j) for the point j on the component i that corresponds
3999
to the leg n of the graph.
4000
"""
4001
return self._leg_dict[n]
4002
4003
def leg_number(self,n):
4004
"""
4005
Returns the leg number (of the graph G) that corresponds to the psi class
4006
number n.
4007
"""
4008
return self._inv_leg_dict[n]
4009
4010
#################################################################
4011
#################################################################
4012
#################################################################
4013
#################################################################
4014
4015
class EmbeddedLevelGraph(object):
4016
"""
4017
LevelGraph inside a generalised stratum.
4018
4019
Note that the points of the enveloping GeneralisedStratum are of the form
4020
(i,j) where i is the component and j the index of sig of that component,
4021
while the points of the level graph are numbers 1,...,n.
4022
4023
Thus, dmp is a dictionary mapping integers to tuples of integers.
4024
4025
Attributes:
4026
LG (LevelGraph): underlying LevelGraph
4027
X (GeneralisedStratum): enveloping stratum
4028
dmp (dict): (bijective!) dictionary marked points of LG -> points of stratum
4029
dmp_inv (dict): inverse of dmp
4030
dlevels (dict): (bijective!) dictionary levels of LG -> new level numbering
4031
dlevels_inv (dict): inverse of dlevels
4032
top (GeneralisedStratum): (if self is a BIC) top component
4033
bot (GeneralisedStratum): (if self is a BIC) bottom component
4034
clutch_dict (dict): (if self is a BIC) dictionary mapping points of top
4035
stratum to points of bottom stratum where there is an edge in self.
4036
emb_top (dict): (if self is a BIC) dictionary mapping points of stratum top
4037
to the corresponding points of the enveloping stratum.
4038
emb_bot (dict): (if self is a BIC) dictionary mapping points of stratum bot
4039
to the corresponding points of the enveloping stratum.
4040
automorphisms (list of list of dicts): automorphisms
4041
codim (int): codimension of LevelGraph in stratum
4042
number_of_levels (int): Number of levels of self.
4043
4044
Note that attempting to access any of the attributes top, bot, clutch_dict,
4045
emb_top or emb_bot will raise a ValueError if self is not a BIC.
4046
"""
4047
def __init__(self,X,LG,dmp,dlevels):
4048
"""
4049
Initialises EmbeddedLevelGraph.
4050
4051
Args:
4052
LG (LevelGraph): underlying LevelGraph
4053
X (GeneralisedStratum): enveloping stratum
4054
dmp (dictionary): (bijective!) dictionary marked points of LG -> points of stratum
4055
dlevels (dictionary): (bijective!) dictionary levels of LG -> new level numbering
4056
"""
4057
self.LG = LG
4058
self.X = X
4059
self.dmp = dmp
4060
self.dmp_inv = {value: key for key, value in dmp.items()}
4061
self.add_vertices_at_infinity()
4062
self.dlevels = dlevels
4063
self.dlevels_inv = {value: key for key, value in dlevels.items()}
4064
self._top = None
4065
self._bot = None
4066
self._clutch_dict = None
4067
self._emb_top = None
4068
self._emb_bot = None
4069
self._automorphisms = None
4070
self._level = {}
4071
self._ell = None
4072
self.codim = self.LG.codim()
4073
self.number_of_levels = len(set(self.dlevels.keys()))
4074
4075
def __repr__(self):
4076
return "EmbeddedLevelGraph(LG=%r,dmp=%r,dlevels=%r)" % (self.LG, self.dmp, self.dlevels)
4077
def __str__(self):
4078
return ("Embedded Level Graph consisting of %s with point dictionary %s and level dictionary %s"
4079
% (self.LG, self.dmp, self.dlevels))
4080
4081
def explain(self):
4082
"""
4083
A more user-friendly display of __str__ :-)
4084
4085
EXAMPLES ::
4086
4087
"""
4088
def _list_print(L):
4089
if len(L) > 1:
4090
s = ['s ']
4091
for x in L[:-2]:
4092
s.append('%r, ' % x)
4093
s.append('%r ' % L[-2])
4094
s.append('and %r.' % L[-1])
4095
return ''.join(s)
4096
else:
4097
return ' %r.' % L[0]
4098
def _num(i):
4099
if i == 1:
4100
return 'one edge'
4101
else:
4102
return '%r edges' % i
4103
print("LevelGraph embedded into stratum %s with:" % self.X)
4104
LG = self.LG
4105
for l in range(LG.numberoflevels()):
4106
internal_l = LG.internal_level_number(l)
4107
print("On level %r:" % l)
4108
for v in LG.verticesonlevel(internal_l):
4109
print("* A vertex (number %r) of genus %r" % (v, LG.genus(v)))
4110
levels_of_mps = list(set([LG.level_number(LG.levelofleg(leg)) for leg in self.dmp]))
4111
print("The marked points are on level%s" % _list_print(sorted(levels_of_mps)))
4112
print("More precisely, we have:")
4113
for leg in self.dmp:
4114
print("* Marked point %r of order %r on vertex %r on level %r" % \
4115
(self.dmp[leg], LG.orderatleg(leg), LG.vertex(leg), LG.level_number(LG.levelofleg(leg))))
4116
print("Finally, we have %s. More precisely:" % _num(len(LG.edges)))
4117
edge_dict = {e : (LG.vertex(e[0]), LG.vertex(e[1])) for e in LG.edges}
4118
edge_dict_inv = {}
4119
for k, v in edge_dict.items():
4120
if v in edge_dict_inv:
4121
edge_dict_inv[v].append(k)
4122
else:
4123
edge_dict_inv[v] = [k]
4124
for e in edge_dict_inv:
4125
print("* %s between vertex %r (on level %r) and vertex %r (on level %r) with prong%s" %
4126
(_num(len(edge_dict_inv[e])),
4127
e[0], LG.level_number(LG.levelofvertex(e[0])),
4128
e[1], LG.level_number(LG.levelofvertex(e[1])),
4129
# _write_prongs()
4130
_list_print([LG.prong(ee) for ee in edge_dict_inv[e]])))
4131
4132
def __eq__(self,other):
4133
if not isinstance(other, EmbeddedLevelGraph):
4134
return False
4135
return self.LG == other.LG and self.dmp == other.dmp and self.dlevels == other.dlevels
4136
4137
@cached_method
4138
def is_bic(self):
4139
return self.LG.is_bic()
4140
4141
@property
4142
def ell(self):
4143
"""
4144
If self is a BIC: the lcm of the prongs.
4145
4146
Raises:
4147
RuntimeError: raised if self is not a BIC.
4148
4149
Returns:
4150
int: lcm of the prongs.
4151
"""
4152
if self._ell is None:
4153
if not self.is_bic():
4154
raise RuntimeError("ell only defined for BICs!")
4155
self._ell = lcm(self.LG.prongs.values())
4156
return self._ell
4157
4158
@property
4159
def top(self):
4160
if self._top is None:
4161
self.split()
4162
return self._top
4163
4164
@property
4165
def bot(self):
4166
if self._bot is None:
4167
self.split()
4168
return self._bot
4169
4170
@property
4171
def clutch_dict(self):
4172
if self._clutch_dict is None:
4173
self.split()
4174
return self._clutch_dict
4175
4176
@property
4177
def emb_bot(self):
4178
if self._emb_bot is None:
4179
self.split()
4180
return self._emb_bot
4181
4182
@property
4183
def emb_top(self):
4184
if self._emb_top is None:
4185
self.split()
4186
return self._emb_top
4187
4188
def add_vertices_at_infinity(self):
4189
"""
4190
We add the vertices at infinity to the underlying_graph of self.LG.
4191
4192
These are given by the residue conditions.
4193
4194
More precisely: Recall that the underlying_graph of self.LG has vertices
4195
and edges of self.LG stored in the form UG_vertex = (vertex number, genus, 'LG')
4196
and edges of the underlying graph are of the form:
4197
(UG_vertex, UG_vertex, edge name)
4198
We now add vertices 'at level infinity' by adding, for each res_cond of self.X
4199
* a UG_vertex called (i, 0, 'res') (where i is the index of the condition in res_cond
4200
we are currently considering)
4201
and edges so that each residue condition corresponds to an edge from the corresponding
4202
pole to some residue at 'level infinity'. We store these in the form:
4203
* (res_vertex, UG_vertex, res<i>edge<j>)
4204
Here UG_vertex is the vertex of self.LG, in the form (vertex number, genus, 'LG'),
4205
that res_vertex is attached to and j is the leg of that vertex (as a leg of self.LG!)
4206
corresponding to the pole that resi should be attached to.
4207
"""
4208
# remove any that might already be there:
4209
existing_residues = [v for v in self.LG.underlying_graph.vertices()
4210
if v[2] == 'res']
4211
for v in existing_residues:
4212
self.LG.underlying_graph.delete_vertex(v)
4213
# add a vertex for every residue condition:
4214
# TODO: remove duplicates?
4215
edges = []
4216
for i, rc in enumerate(self.X.res_cond):
4217
v_name = (i, 0, 'res')
4218
for p in rc:
4219
e_name = 'res%redge%r' % (i,self.dmp_inv[p])
4220
v_on_graph = self.LG.vertex(self.dmp_inv[p])
4221
edges.append((self.LG.UG_vertex(v_on_graph),v_name,e_name))
4222
self.LG.underlying_graph.add_edges(edges)
4223
4224
@property
4225
@cached_method
4226
def residue_matrix_from_RT(self):
4227
"""
4228
The matrix associated to the residue conditions imposed by the residue theorem
4229
on each vertex of self.
4230
4231
Returns:
4232
SAGE Matrix: matrix of residue conditions given by RT
4233
"""
4234
poles_by_vertex = {}
4235
for p in self.X._polelist:
4236
vertex = self.LG.vertex(self.dmp_inv[p])
4237
try:
4238
poles_by_vertex[vertex].append(p)
4239
except KeyError:
4240
poles_by_vertex[vertex] = [p]
4241
rows = []
4242
for v in poles_by_vertex:
4243
rows.append([int(p in poles_by_vertex[v]) for p in self.X._polelist])
4244
return matrix(QQ,rows)
4245
4246
@property
4247
@cached_method
4248
def full_residue_matrix(self):
4249
"""
4250
Residue matrix with GRC conditions and RT conditions (for each vertex).
4251
4252
Returns:
4253
matrix: Matrix with # of poles columns and a row for each condition.
4254
"""
4255
M = self.X.residue_matrix()
4256
if M:
4257
M = M.stack(self.residue_matrix_from_RT)
4258
else:
4259
M = self.residue_matrix_from_RT
4260
return M
4261
4262
def residue_zero(self,pole):
4263
"""
4264
Check if the residue at pole is forced zero by residue conditions.
4265
4266
NOTE: We DO include the RT on the vertices in this check!
4267
4268
Args:
4269
pole (tuple): pole (as a point (i,j) of self.X)
4270
4271
Returns:
4272
bool: True if forced zero, False otherwise.
4273
"""
4274
# add the equation corresponding to the residue at pole to the residue matrix
4275
# and see if the rank changes:
4276
i = self.X._polelist.index(pole)
4277
res_vec = [[int(i==j) for j in range(len(self.X._polelist))]]
4278
RM = self.full_residue_matrix
4279
# RM = self.X.residue_matrix()
4280
if RM:
4281
stacked = RM.stack(matrix(res_vec))
4282
return stacked.rank() == self.full_residue_matrix.rank()
4283
# return stacked.rank() == self.X.residue_matrix().rank()
4284
else:
4285
return False
4286
4287
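# Illustrative sketch (sympy, not admcycles code; _sketch_forced_zero is
# hypothetical): the rank test used above.  The residue at pole i is forced to
# vanish exactly when the i-th unit vector already lies in the row space of the
# matrix of residue conditions, i.e. when stacking it does not increase the rank.
from sympy import Matrix as _SymMatrix

def _sketch_forced_zero(res_matrix, i):
    unit = _SymMatrix([[1 if j == i else 0 for j in range(res_matrix.cols)]])
    return res_matrix.col_join(unit).rank() == res_matrix.rank()

_M = _SymMatrix([[1, 1, 0], [0, 1, 1]])        # r0 + r1 = 0 and r1 + r2 = 0
assert not _sketch_forced_zero(_M, 0)          # r0 = -r1 need not vanish
assert _sketch_forced_zero(_SymMatrix([[1, 0, 0], [0, 1, 1]]), 0)  # here r0 = 0 is forced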
def level(self,l):
4288
"""
4289
The generalised stratum on level l.
4290
4291
Note that this is cached, i.e. on first call, it is stored in the dictionary
4292
_level.
4293
4294
Args:
4295
l (int): relative level number (0,...,codim)
4296
4297
Returns:
4298
LevelStratum: the LevelStratum, i.e.
4299
* a list of Signatures (one for each vertex on the level)
4300
* a list of residue conditions, i.e. a list [res_1,...,res_r]
4301
where each res_k is a list of tuples [(i_1,j_1),...,(i_n,j_n)]
4302
where each tuple (i,j) refers to the point j (i.e. index) on the
4303
component i and such that the residues at these points add up
4304
to 0.
4305
* a dictionary of legs, i.e. n -> (i,j) where n is the original
4306
number of the point (on the LevelGraph self) and i is the
4307
number of the component, j the index of the point in the signature tuple.
4308
Note that LevelStratum is a GeneralisedStratum together with
4309
a leg dictionary.
4310
Here, we provide an additional attribute:
4311
* leg_orbits, a nested list giving the orbits of the points on the level
4312
under the automorphism group of self.
4313
4314
EXAMPLES ::
4315
4316
4317
For the banana graph, the automorphisms fix the marked points but permute
4318
the edges. (ALL CONCRETE EXAMPLES REMOVED DUE TO INCONSISTENT BIC NUMBERING BETWEEN SAGE VERSIONS!!!)
4319
4320
4321
For the V-graph, the automorphisms permute edges on different components.
4322
4323
4324
In the stratum (4), there are more complicated examples.
4325
4326
"""
4327
try:
4328
LS = self._level[l]
4329
except KeyError:
4330
# for the residue conditions: We add the residue conditions from
4331
# the enveloping stratum:
4332
# We do this by passing those poles with residue forced zero
4333
# as those to be ignored in the residue calculations performed by the
4334
# LevelGraph:
4335
# We have to translate them to points on self:
4336
# Note that self.LG knows the "level at infinity"
4337
excluded_poles = tuple(self.dmp_inv[p] for p in flatten(self.X.res_cond,max_level=1))
4338
LS = self.LG.stratum_from_level(l,excluded_poles=excluded_poles)
4339
# add automorphism info
4340
LS.leg_orbits = []
4341
seen = set()
4342
for leg in LS._leg_dict:
4343
if leg in seen:
4344
continue
4345
curr_orbit = [LS._leg_dict[leg]]
4346
for _v_map, l_map in self.automorphisms:
4347
curr_orbit.append(LS._leg_dict[l_map[leg]])
4348
seen.update([l_map[leg]])
4349
LS.leg_orbits.append(list(set(curr_orbit))) # remove duplicates
4350
self._level[l] = LS
4351
return LS
4352
4353
def legs_are_isomorphic(self,leg,other_leg):
4354
"""
4355
Check if leg and other_leg are in the same Aut-orbit of self.
4356
4357
Args:
4358
leg (int): leg on self.LG
4359
other_leg (int): leg on self.LG
4360
4361
Raises:
4362
RuntimeError: If leg is not in any aut-orbit of the level it should be on.
4363
4364
Returns:
4365
bool: True if they are in the same orbit of self.level(levelofleg),
4366
False, otherwise.
4367
4368
EXAMPLES ::
4369
4370
4371
Note the asymmetric banana graph.
4372
4373
4374
The symmetric one has isomorphisms.
4375
4376
4377
Legs are isomorphic to themselves.
4378
4379
It's symmetric.
4380
4381
"""
4382
level = self.LG.level_number(self.LG.levelofleg(leg))
4383
other_level = self.LG.level_number(self.LG.levelofleg(other_leg))
4384
if level != other_level:
4385
return False
4386
assert(level == other_level)
4387
emb_leg = self.level(level)._leg_dict[leg]
4388
emb_other_leg = self.level(level)._leg_dict[other_leg]
4389
for orbit in self.level(level).leg_orbits:
4390
if emb_leg in orbit:
4391
if emb_other_leg in orbit:
4392
return True
4393
else:
4394
return False
4395
else:
4396
raise RuntimeError ("leg %r not in any orbit %r of %r" %
4397
(leg,self.level(level).leg_orbits,self.level(level)))
4398
4399
@cached_method
4400
def edge_orbit(self,edge):
4401
"""
4402
The edge orbit of edge in self.
4403
4404
Args:
4405
edge (tuple): edge of self.LG, i.e. tuple (start leg, end leg), where
4406
start leg should not be on a lower level than end leg.
4407
4408
Raises:
4409
ValueError: if edge is not an edge of self.LG
4410
4411
Returns:
4412
set: set of edges in aut-orbit of edge.
4413
4414
EXAMPLES ::
4415
4416
"""
4417
if not edge in self.LG.edges:
4418
raise ValueError("%r is not an edge of %r!" % (edge, self))
4419
s = set([edge])
4420
for v_map, l_map in self.automorphisms:
4421
new_edge = (l_map[edge[0]], l_map[edge[1]])
4422
s.add(new_edge)
4423
return s
4424
4425
def len_edge_orbit(self,edge):
4426
"""
4427
Length of the edge orbit of edge in self.
4428
4429
Args:
4430
edge (tuple): edge of self.LG, i.e. tuple (start leg, end leg), where
4431
start leg should not be on a lower level than end leg.
4432
4433
Raises:
4434
ValueError: if edge is not an edge of self.LG
4435
4436
Returns:
4437
int: length of the aut-orbit of edge.
4438
4439
EXAMPLES ::
4440
4441
4442
Prongs influence the orbit length.
4443
4444
"""
4445
return len(self.edge_orbit(edge))
4446
4447
def automorphisms_stabilising_legs(self,leg_tuple):
4448
stabs = []
4449
for v_map, l_map in self.automorphisms:
4450
for l in leg_tuple:
4451
if l_map[l] != l:
4452
break
4453
else: # no break
4454
stabs.append(l_map)
4455
return stabs
4456
4457
def delta(self,i):
4458
"""
4459
Squish all levels except for i.
4460
4461
Note that delta(1) contracts everything except top-level and that the
4462
argument is interpreted via internal_level_number (i.e. a relative level number).
4463
4464
Moreover, dlevels is set to map to 0 and -1(!).
4465
4466
Args:
4467
i (int): Level not to be squished.
4468
4469
Returns:
4470
EmbeddedLevelGraph: Embedded BIC (result of applying delta to the
4471
underlying LevelGraph)
4472
"""
4473
newLG = self.LG.delta(i,quiet=True)
4474
newdmp = self.dmp.copy()
4475
# level_number is (positive!) relative level number.
4476
newdlevels = {l:-newLG.level_number(l) for l in newLG.levels}
4477
return EmbeddedLevelGraph(self.X,newLG,newdmp,newdlevels)
4478
4479
def squish_vertical(self,level):
4480
"""
4481
Squish level crossing below level 'level'.
4482
4483
Note that in contrast to the levelgraph method, we work with relative
4484
level numbers here!
4485
4486
Args:
4487
level (int): relative (!) level number.
4488
4489
Returns:
4490
EmbeddedLevelGraph: Result of squishing.
4491
4492
EXAMPLES ::
4493
4494
sage: from admcycles.diffstrata import *
4495
sage: X=GeneralisedStratum([Signature((4,))])
4496
sage: p = X.enhanced_profiles_of_length(4)[0][0]
4497
sage: g = X.lookup_graph(p)
4498
4499
lookup_graph uses the sorted profile (note that these do not have to be reduced!):
4500
4501
sage: assert any(g.squish_vertical(0).is_isomorphic(G) for G in X.lookup(p[1:]))
4502
sage: assert any(g.squish_vertical(1).is_isomorphic(G) for G in X.lookup(p[:1]+p[2:]))
4503
sage: assert any(g.squish_vertical(2).is_isomorphic(G) for G in X.lookup(p[:2]+p[3:]))
4504
sage: assert any(g.squish_vertical(3).is_isomorphic(G) for G in X.lookup(p[:3]))
4505
4506
Squishing outside the range of levels does nothing:
4507
4508
sage: assert g.squish_vertical(4) == g
4509
4510
Recursive squishing removes larger parts of the profile:
4511
4512
sage: assert any(g.squish_vertical(3).squish_vertical(2).is_isomorphic(G) for G in X.lookup(p[:2]))
4513
"""
4514
newLG = self.LG.squish_vertical(self.LG.internal_level_number(level),quiet=True)
4515
newdmp = self.dmp.copy()
4516
# level_number is (positive!) relative level number.
4517
newdlevels = {l:-newLG.level_number(l) for l in newLG.levels}
4518
return EmbeddedLevelGraph(self.X,newLG,newdmp,newdlevels)
4519
4520
def split(self):
4521
"""
4522
Splits embedded BIC self into top and bottom component.
4523
4524
Raises:
4525
ValueError: Raised if self is not a BIC.
4526
4527
Returns:
4528
dict: dictionary consisting of
4529
* X: GeneralisedStratum self.X
4530
* top: LevelStratum: top component
4531
* bottom: LevelStratum: bottom component
4532
* clutch_dict: clutching dictionary mapping ex-half-edges on
4533
top to their partners on bottom (both as points in the
4534
respective strata!)
4535
* emb_dict_top: a dictionary embedding top into the stratum of self
4536
* emb_dict_bot: a dictionary embedding bot into the stratum of self
4537
4538
Note that clutch_dict, emb_top and emb_bot are dictionaries between
4539
points of strata, i.e. after applying dmp to the points!
4540
4541
EXAMPLES ::
4542
4543
"""
4544
if not self.is_bic():
4545
raise ValueError(
4546
"Error: %s is not a BIC! Cannot be split into Top and Bottom component!"
4547
% self)
4548
self._top = self.level(0)
4549
self._bot = self.level(1)
4550
# To construct emb_top and emb_bot, we have to combine self.dmp with the
4551
# the leg_dicts of top and bot.
4552
# More precisely: emb_top is the composition of the inverse of the leg_dict
4553
# of top, i.e. top.stratum_number, and self.dmp
4554
# (giving a map from the points of top to the points of the enveloping
4555
# stratum of self) and the same for bot.
4556
# We implement this by iterating over the marked points of self on top level,
4557
# which are exactly the keys of self.dmp that are on top level.
4558
# Note that we make extra sure that we didn't mess up the level numbering by
4559
# using the relative level numbering (where the top level is guaranteed to be 0
4560
# and the bottom level is 1 (positive!)).
4561
self._emb_top = {self._top.stratum_number(l) : self.dmp[l]
4562
for l in iter(self.dmp)
4563
if self.LG.level_number(self.LG.levelofleg(l)) == 0}
4564
self._emb_bot = {self._bot.stratum_number(l) : self.dmp[l]
4565
for l in iter(self.dmp)
4566
if self.LG.level_number(self.LG.levelofleg(l)) == 1}
4567
# Because this is a BIC, all edges of self are cut in this process
4568
# and this is exactly the dictionary we must remember
4569
# WARNING: Here we assume that e[0] is on top level and e[1] is on bottom
4570
# This is assured by tidy_up, e.g. after initialisation!
4571
# Note that all these dictionaries map points of GeneralisedStrata to each
4572
# other so we must take the corresponding stratum_number!
4573
self._clutch_dict = {self._top.stratum_number(e[0]) : self._bot.stratum_number(e[1])
4574
for e in self.LG.edges}
4575
return {'X': self.X, 'top': self._top, 'bottom': self._bot,
4576
'clutch_dict': self._clutch_dict,
4577
'emb_dict_top': self._emb_top, 'emb_dict_bot': self._emb_bot,}

    def is_legal(self):
        """
        Check the R-GRC for self.

        Returns:
            bool: result of R-GRC.
        """
        # Check if any levels are empty:
        # Note that this can only happen if self.X has simple poles (as we never
        # have horizontal edges)
        if self.X.simple_poles():
            if any(self.level(l).is_empty() for l in range(self.number_of_levels)):
                return False
        # poles are excluded if they are contained in _any_ residue condition of the stratum.
        # In particular, they are _not_ excluded if they are only restrained by the RT on some component!
        poles_in_rc_stratum = flatten(self.X.res_cond, max_level=1)
        poles_in_rc_graph = tuple(self.dmp_inv[p] for p in poles_in_rc_stratum)
        return self.LG.is_legal(excluded_poles=poles_in_rc_graph, quiet=True)

    def is_isomorphic(self,other):
        """
        Check if self and other are isomorphic (as EmbeddedLevelGraphs).

        Args:
            other (EmbeddedLevelGraph): Graph to check isomorphism.

        Returns:
            bool: True if there exists at least one isomorphism.
        """
        # TODO: Maybe include a way to check against unembedded LGs
        # TODO: Check embedding!
        if not isinstance(other, EmbeddedLevelGraph):
            return False
        try:
            next(self.isomorphisms(other))
            return True
        except StopIteration:
            return False

    @property
    def automorphisms(self):
        """
        The automorphisms of self (as automorphisms of the underlying LevelGraph,
        respecting the embedding, see doc of isomorphisms).

        Returns:
            list: list of tuples:
                dict: map of vertices
                dict: map of legs

        EXAMPLES ::

        """
        if not self._automorphisms:
            self._automorphisms = list(self.isomorphisms(self))
        return self._automorphisms
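
    # Illustrative sketch (not part of the original file): the length of this
    # list is the order of the automorphism group that enters the stack factor
    # of an AdditiveGenerator below, e.g. (with `G` an EmbeddedLevelGraph):
    #
    #   num_auts = len(G.automorphisms)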

    def isomorphisms(self,other):
        """
        Generator yielding the "next" isomorphism of self and other.

        Note that while this gives an "isomorphism" from self.LG to other.LG, this
        is not necessarily an isomorphism of the LevelGraphs (the numbered points may
        be permuted if this is "fixed" by the embedding).

        Args:
            other (EmbeddedLevelGraph): The (potentially) isomorphic EmbeddedLevelGraph.

        Yields:
            tuple: The next compatible isomorphism:
                dict: vertices of self.LG -> vertices of other.LG
                dict: legs of self.LG -> legs of other.LG
        """
        #### Isomorphisms of EmbeddedLevelGraphs:
        ## An isomorphism of EmbeddedLevelGraphs is a set of compatible level isomorphisms.
        ## We iterate through the isomorphisms on each level and yield whenever we find
        ## compatible level isomorphisms for all levels.
        ## Note that we use dlevels for this, as these should be compatible.
        # There are (at least) two ways in which this can be optimised:
        # * don't go through the entire product before checking edge compatibility!
        # * choose a smart ordering of levels (e.g. number of vertices)
        isom_vertices = {}
        isom_legs = {}
        for level_isos in itertools.product(*[self._level_iso(other,l) for l in self.dlevels.values()]):
            for level_iso_v, level_iso_l in level_isos:
                isom_vertices.update(level_iso_v)
                isom_legs.update(level_iso_l)
            # check edge compatibility
            for e in self.LG.edges:
                if (isom_legs[e[0]],isom_legs[e[1]]) not in other.LG.edges:
                    break
            else:  # no break
                yield isom_vertices.copy(), isom_legs.copy()

    def _level_iso(self,other,l):
        """
        Generator yielding the "next" isomorphism of level l of self and other.

        Here, l is a value of dlevels (this should be compatible).

        Note that we require the graph to have no horizontal edges, i.e. the level
        contains no edges!

        TODO: * Maybe add future horizontal support?
              * Maybe use relative level number instead? (this seems to give weird behaviour
                right now...)

        Args:
            other (EmbeddedLevelGraph): The embedded graph we are checking for isomorphism.
            l (int): Level number (embedded into the stratum, i.e. value of dlevels).

        Yields:
            tuple: the next isomorphism of levels:
                dict: vertices of self.LG -> vertices of other.LG
                dict: legs of self.LG -> legs of other.LG
        """
        #### Isomorphisms of levels:
        ## An isomorphism of levels consists of
        ## * a map: vertices to vertices
        ## * a map: legs to legs
        ## respecting:
        ## * the genus,
        ## * the number of legs on every vertex,
        ## * the order at every leg,
        ## * the marked points of the stratum (via dmp).
        ####
        # First, we extract the data for level l from self and other.
        # Note that we do not use stratum_from_level to avoid all the overhead.
        # TODO: All this should be cached!!
        l_self = self.LG.internal_level_number(l)
        l_other = other.LG.internal_level_number(l)
        # we need to be careful to distinguish the indices in the list of genera
        # of the LevelGraph from the actual genera.
        vv_self_idx = self.LG.verticesonlevel(l_self)  # list of indices
        vv_other_idx = other.LG.verticesonlevel(l_other)  # list of indices
        if len(vv_self_idx) != len(vv_other_idx):
            return
        vv_self = [self.LG.genus(i) for i in vv_self_idx]  # list of genera
        vv_other = [other.LG.genus(i) for i in vv_other_idx]  # list of genera
        # extract the legs: (nested lists)
        legs_self = [self.LG.legsatvertex(v) for v in vv_self_idx]
        legs_other = [other.LG.legsatvertex(v) for v in vv_other_idx]
        # build dictionary: leg -> index in vv
        leg_dict_self = {l:i for i,legs in enumerate(legs_self) for l in legs}
        leg_dict_other = {l:i for i, legs in enumerate(legs_other) for l in legs}
        if len(leg_dict_self) != len(leg_dict_other):
            return
        # for quick checks, we create sorted lists of the orders at each vertex
        order_sorted_self = [sorted([self.LG.orderatleg(l) for l in legs])
                             for legs in legs_self]
        order_sorted_other = [sorted([other.LG.orderatleg(l) for l in legs])
                              for legs in legs_other]
        # We create the two maps as dictionaries:
        # index of vv_self -> index of vv_other
        isom_vert = {}
        # leg number (on self.LG) -> leg number (on other.LG)
        isom_legs = {}
        # We also want to keep track of whom we've already mapped:
        # source is a dictionary: genus -> list of indices of vv_self
        source = {}
        for i, g in enumerate(vv_self):
            try:
                source[g].append(i)
            except KeyError:
                source[g] = [i]
        # target is a dictionary: genus -> list of indices of vv_other
        target = {}
        for i, g in enumerate(vv_other):
            try:
                target[g].append(i)
            except KeyError:
                target[g] = [i]
        # for the legs we build copies of the nested lists to manipulate
        legs_source = [legs[:] for legs in legs_self]
        legs_target = [legs[:] for legs in legs_other]
        # Next, we exclude some deal-breakers:
        # * The same genera must appear.
        if sorted(vv_self) != sorted(vv_other):
            return
        # * The same embedded points have to be on this level (they have to be
        #   mapped to each other!)
        #   In particular, this gives a part of the leg map (and thus also of the
        #   vertex map).
        for p_self, p in self.dmp.items():  # p is the "shared" point of the stratum
            p_other = other.dmp_inv[p]
            # If neither point is on this level, we continue:
            if not (p_self in leg_dict_self or p_other in leg_dict_other):
                continue
            # The vertex of p_self must map to that of p_other.
            # Three things can fail here:
            # * only one of the two points is on this level.
            if ((p_self in leg_dict_self and p_other not in leg_dict_other) or
                (p_self not in leg_dict_self and p_other in leg_dict_other)):
                return
            v_self = leg_dict_self[p_self]
            v_other = leg_dict_other[p_other]
            # * the points are on incompatible vertices (genus or numbers/orders of legs!)
            if (vv_self[v_self] != vv_other[v_other] or
                len(legs_self[v_self]) != len(legs_other[v_other]) or
                order_sorted_self[v_self] != order_sorted_other[v_other]):
                return
            # * two points are on the same vertex in one case, but on different vertices
            #   in the other. I.e. v_self is already being mapped somewhere other than v_other
            #   or v_other is already being mapped to (by someone else)
            try:
                if isom_vert[v_self] != v_other:
                    return
            except KeyError:  # v_self not being mapped yet, i.e. still in source
                g = vv_other[v_other]
                if v_other in target[g]:  # make sure v_other is still a possible target
                    isom_vert[v_self] = v_other
                    source[g].remove(v_self)
                    target[g].remove(v_other)
                else:
                    return
            # now we can safely map the legs:
            isom_legs[p_self] = p_other
            # and remove them from source and target (so they won't be reassigned later)
            legs_source[v_self].remove(p_self)
            legs_target[v_other].remove(p_other)
        # Next, we construct maps of the remaining vertices.
        # For this, we use a small recursive function:
        curr_v_map = {}
        legal_v_maps = []
        def vertex_maps(sl,tl):
            if not sl:
                # all entries of tl should be None at this point:
                assert(all(tv is None for tv in tl))
                legal_v_maps.append(curr_v_map.copy())
                return
            curr_v = sl.pop()
            curr_legs = len(legs_self[curr_v])
            # try to map curr_v to tl:
            for i,tv in enumerate(tl):
                # we temporarily set "hit" targets to None so we don't have to worry
                # about indexing...
                if tv is None:
                    continue
                # check if legs _can_ be compatible:
                if (curr_legs != len(legs_other[tv]) or
                    order_sorted_self[curr_v] != order_sorted_other[tv]):
                    continue
                tl[i] = None
                curr_v_map[curr_v] = tv
                vertex_maps(sl,tl)
                # undo
                del curr_v_map[curr_v]
                tl[i] = tv
            # undo
            sl.append(curr_v)
        # the function for the legs is almost the same, just the condition is different:
        curr_l_map = {}
        legal_l_maps = []
        def leg_maps(sl,tl):
            if not sl:
                # all entries of tl should be None at this point:
                assert(all(tleg is None for tleg in tl))
                legal_l_maps.append(curr_l_map.copy())
                return
            curr_l = sl.pop()
            # try to map curr_l to tl:
            for i, tleg in enumerate(tl):
                # we temporarily set "hit" targets to None so we don't have to worry
                # about indexing...
                if tleg is None:
                    continue
                # check if orders are compatible:
                if self.LG.orderatleg(curr_l) == other.LG.orderatleg(tleg):
                    tl[i] = None
                    curr_l_map[curr_l] = tleg
                    leg_maps(sl,tl)
                    # undo
                    del curr_l_map[curr_l]
                    tl[i] = tleg
            # undo
            sl.append(curr_l)
        # Now we build the list of all vertex isomorphisms going through the vertices by genus
        v_isom_list = []
        for g in source:
            legal_v_maps = []  # will get filled by vertex_maps
            vertex_maps(source[g],target[g])
            v_isom_list.append(legal_v_maps[:])  # copy!
        # v_isom_list is now a nested list of maps for each genus.
        # the product consists of tuples, one map for every genus.
        for v_maps in itertools.product(*v_isom_list):
            for v_map in v_maps:
                # this overwrites exactly the vertices in source.
                isom_vert.update(v_map)
            # Finally, the returned vertex map should use the indexing of the
            # LevelGraph, not of the level:
            return_isom_vert = {vv_self_idx[k] : vv_other_idx[v]
                                for k,v in isom_vert.items()}
            # Now we build all leg maps, again as a product of all maps at vertices.
            # Note: This also includes the previously assigned vertices (with marked points...)
            l_isom_list = []
            for v in isom_vert:
                # Construct leg maps:
                # We work with legs_source and legs_target, i.e. the list
                # of legs with the marked points removed.
                legal_l_maps = []
                leg_maps(legs_source[v],legs_target[isom_vert[v]])
                l_isom_list.append(legal_l_maps[:])  # copy!
            for l_maps in itertools.product(*l_isom_list):
                for l_map in l_maps:
                    isom_legs.update(l_map)
                yield return_isom_vert.copy(), isom_legs.copy()

#################################################################
#################################################################
#################################################################
#################################################################

class AdditiveGenerator (SageObject):
    """
    Product of Psi classes on an EmbeddedLevelGraph (of a stratum X).

    The information of a product of psi classes on an EmbeddedLevelGraph, i.e. a
    leg_dict and an enhanced_profile, where leg_dict is a dictionary on the legs
    leg -> exponent of the LevelGraph associated to the enhanced profile, i.e.
    (profile,index), or None if we refer to the class of the graph.

    We (implicitly) work inside some stratum X, where the enhanced profile
    makes sense.

    This class should be considered constant (hashable)!
    """
4905
def __init__ (self,X,enh_profile,leg_dict=None):
4906
"""
4907
AdditiveGenerator for psi polynomial given by leg_dict on graph
4908
corresponding to enh_profile in X.
4909
4910
Args:
4911
X (GeneralisedStrataum): enveloping stratum
4912
enh_profile (tuple): enhanced profile (in X)
4913
leg_dict (dict, optional): dictionary leg of enh_profile -> exponent
4914
encoding a psi monomial. Defaults to None.
4915
"""
4916
self._X = X
4917
self._hash = hash_AG(leg_dict, enh_profile)
4918
self._enh_profile = (tuple(enh_profile[0]),enh_profile[1])
4919
self._leg_dict = leg_dict
4920
self._G = self._X.lookup_graph(*enh_profile)
4921
# dictionary leg -> level
4922
# Careful! These are leg numbers on the whole graph, not on
4923
# the graphs inside the LevelStrata!!
4924
self._level_dict = {}
4925
if not leg_dict is None:
4926
for l in leg_dict:
4927
self._level_dict[l] = self._G.LG.level_number(self._G.LG.levelofleg(l))
4928
self._inv_level_dict = {}
4929
for leg in self._level_dict:
4930
try:
4931
self._inv_level_dict[self._level_dict[leg]].append(leg)
4932
except KeyError:
4933
self._inv_level_dict[self._level_dict[leg]] = [leg]

    @classmethod
    def from_hash(cls,X,hash):
        """
        AdditiveGenerator from a hash generated with hash_AG.

        Args:
            X (GeneralisedStratum): Enveloping stratum.
            hash (tuple): hash from hash_AG

        Returns:
            AdditiveGenerator: AG from hash.
        """
        if hash[0] is None:
            leg_dict = None
        else:
            leg_dict = dict(hash[0])
        return cls(X,(hash[1],hash[2]),leg_dict)

    def __hash__(self):
        return hash(self._hash)
    def __eq__(self,other):
        try:
            return self._hash == other._hash
        except AttributeError:
            return NotImplemented

    def __repr__(self):
        return "AdditiveGenerator(X=%r,enh_profile=%r,leg_dict=%r)"\
            % (self._X, self._enh_profile, self._leg_dict)
        # Better, but destroys tests:
        # return "AdditiveGenerator(enh_profile=%r,leg_dict=%r)"\
        #     % (self._enh_profile, self._leg_dict)
    def __str__(self):
        str = ""
        if self._leg_dict is not None:
            for l in self._leg_dict:
                str += "Psi class %r with exponent %r on level %r * "\
                    % (l, self._leg_dict[l], self._level_dict[l])
        str += "Graph %r" % (self._enh_profile,)
        return str

    def __mul__(self,other):
        """
        Multiply two psi products on the same graph (add dictionaries).

        Args:
            other (AdditiveGenerator): Product of psi classes on same graph.

        Returns:
            AdditiveGenerator: Product of psi classes on same graph.

        EXAMPLES ::


        Also works without legs.

        """
        # Check that other is an AdditiveGenerator for the same graph:
        try:
            if self._X != other._X or self._enh_profile != other._enh_profile:
                return NotImplemented
            other_leg_dict = other._leg_dict
        except AttributeError:
            return NotImplemented
        # "unite" the leg_dicts:
        if self._leg_dict is None:
            self_leg_dict = {}
        else:
            self_leg_dict = self._leg_dict
        if other_leg_dict is None:
            other_leg_dict = {}
        new_leg_dict = {l:self_leg_dict.get(l,0) + other_leg_dict.get(l,0)
                        for l in set(self_leg_dict) | set(other_leg_dict)}
        return self._X.additive_generator(self._enh_profile,new_leg_dict)
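
    # Illustrative sketch (not part of the original file): multiplying two
    # AdditiveGenerators supported on the same graph simply adds the psi
    # exponents leg by leg. With a hypothetical enhanced profile `ep` and
    # hypothetical legs 1 and 2:
    #
    #   A = X.additive_generator(ep, {1: 1})        # psi_1 on the graph ep
    #   B = X.additive_generator(ep, {1: 1, 2: 2})  # psi_1 * psi_2^2
    #   A * B   # has leg_dict {1: 2, 2: 2}, i.e. psi_1^2 * psi_2^2
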
    def __rmul__(self,other):
        return self.__mul__(other)
    def __pow__(self, n):
        return self.pow(n)

    @property
    def enh_profile(self):
        return self._enh_profile

    @property
    def psi_degree(self):
        """
        Sum of powers of psi classes of self.
        """
        if self._leg_dict is None:
            return 0
        else:
            return sum(self._leg_dict.values())

    @cached_method
    def dim_check(self):
        """
        Check if, on any level, the psi degree is higher than the dimension.

        Returns:
            bool: False if the class is 0 for dim reasons, True otherwise.
        """
        # remove if degree > dim(X)
        if self.degree > self._X.dim():
            return False
        if self.codim == 0:
            # Avoid crazy infinite recursion for smooth graph :-)
            return True
        # for each level, check if psi product on level exceeds level dimension
        for level_number in range(self.codim + 1):
            assert self.level_dim(level_number) >= 0
            if self.degree_on_level(level_number) > self.level_dim(level_number):
                return False
        return True

    @property
    def codim(self):
        """
        The codimension of the graph, i.e. the length of the profile
        (the number of levels minus one).

        Returns:
            int: length of the profile
        """
        return len(self._enh_profile[0])

    @property
    def degree(self):
        """
        Degree of class, i.e. codimension of graph + psi-degree.

        Returns:
            int: codim + psi_degree
        """
        # degree = codim of graph + powers of psi classes
        return self.codim + self.psi_degree

    @property
    def leg_dict(self):
        return self._leg_dict

    @property
    def level_dict(self):
        """
        The dictionary mapping leg -> level
        """
        return self._level_dict

    @property
    def inv_level_dict(self):
        """
        The dictionary mapping level -> list of legs on level.

        Returns:
            dict: level -> list of legs.
        """
        return self._inv_level_dict

    @cached_method
    def degree_on_level(self,level):
        """
        Total degree of psi classes on level.

        Args:
            level (int): (relative) level number (i.e. 0...codim)

        Raises:
            RuntimeError: Raised for level number out of range.

        Returns:
            int: sum of exponents of psis appearing on this level.
        """
        if level not in range(self.codim + 1):
            raise RuntimeError("Illegal level number: %r on %r" % (level, self))
        try:
            return sum(self._leg_dict[leg] for leg in self._inv_level_dict[level])
        except KeyError:
            # no psis on this level
            return 0

    def level(self,level_number):
        """
        Level of underlying graph.

        Args:
            level_number (int): (relative) level number (0...codim)

        Returns:
            LevelStratum: Stratum at level level_number of self._G.
        """
        return self._G.level(level_number)

    @cached_method
    def level_dim(self,level_number):
        """
        Dimension of level level_number.

        Args:
            level_number (int): (relative) level number (i.e. 0...codim)

        Returns:
            int: dimension of GeneralisedLevelStratum
        """
        level = self._G.level(level_number)
        return level.dim()

    @property
    def stack_factor(self):
        """
        The stack factor, that is the product of the prongs of the underlying graph
        divided by the product of the ells of the BICs and the number of automorphisms.

        Returns:
            QQ: stack factor
        """
        try:
            return self._stack_factor
        except AttributeError:
            # to get g_Gamma, we have to take the product of prongs/lcm for each bic:
            prod = 1
            for k in self._G.LG.prongs.values():
                prod *= k

            p, _ = self.enh_profile

            bic_contr = 1
            for i in p:
                bic_contr *= self._X.bics[i].ell

            stack_factor = QQ(prod) / QQ(bic_contr * len(self._G.automorphisms))

            self._stack_factor = stack_factor
            return self._stack_factor
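
    # Illustrative sketch (not part of the original file): in formulas, for a
    # graph G with profile p the code above computes
    #
    #   stack_factor = prod(prongs of G) / (prod(X.bics[i].ell for i in p) * #Aut(G)),
    #
    # i.e. the product of all prongs divided by the product of the ells of the
    # BICs appearing in the profile and the number of automorphisms of the graph.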

    @cached_method
    def as_taut(self):
        """
        Helper method, returns [(1,self)] as default input to ELGTautClass.
        """
        return ELGTautClass(self._X,[(1,self)])

    @cached_method
    def is_in_ambient(self,ambient_enh_profile):
        """
        Check if ambient_enh_profile is an ambient graph, i.e. self is a degeneration
        of ambient_enh_profile.

        Args:
            ambient_enh_profile (tuple): enhanced profile.

        Returns:
            bool: True if there exists a leg map, False otherwise.

        EXAMPLES ::

        """
        return self._X.is_degeneration(self._enh_profile,ambient_enh_profile)

    @cached_method
    def pow(self, n, amb=None):
        """
        Recursively calculate the n-th power of self (in amb), caching all results.

        Args:
            n (int): exponent
            amb (tuple, optional): enhanced profile. Defaults to None.

        Returns:
            ELGTautClass: self^n in CH(amb)
        """
        if amb is None:
            ONE = self._X.ONE
            amb = ((),0)
        else:
            ONE = self._X.taut_from_graph(*amb)
        if n == 0:
            return ONE
        return self._X.intersection(self.as_taut(), self.pow(n-1, amb), amb)

    @cached_method
    def exp(self, c, amb=None, stop=None):
        """
        exp(c * self) in CH(amb), calculated via exp_list.

        Args:
            c (QQ): coefficient
            amb (tuple, optional): enhanced profile. Defaults to None.
            stop (int, optional): cut-off. Defaults to None.

        Returns:
            ELGTautClass: the tautological class associated to the
                graded list exp_list.
        """
        # graded pieces are already reduced:
        new_taut_list = []
        for T in self.exp_list(c, amb, stop):
            new_taut_list.extend(T.psi_list)
        return ELGTautClass(self._X, new_taut_list, reduce=False)

    @cached_method
    def exp_list(self, c, amb=None, stop=None):
        """
        Calculate exp(c * self) in CH(amb).

        We calculate exp as a sum of powers (using self.pow, i.e. cached)
        and check at each step if the power vanishes (if yes, we obviously stop).

        The result is returned as a list consisting of the graded pieces.

        Optionally, one may specify the cut-off degree using stop (by
        default this is dim + 1).

        Args:
            c (QQ): coefficient
            amb (tuple, optional): enhanced profile. Defaults to None.
            stop (int, optional): cut-off. Defaults to None.

        Returns:
            list: list of ELGTautClasses
        """
        c = QQ(c)
        if amb is None:
            ONE = self._X.ONE
            amb = ((),0)
        else:
            ONE = self._X.taut_from_graph(*amb)
        e = [ONE]
        f = ONE
        coeff = QQ(1)
        k = QQ(0)
        if stop is None:
            stop = self._X.dim() + 1
        while k < stop and f != self._X.ZERO:
            k += 1
            coeff *= c/k
            f = self.pow(k, amb)
            e.append(coeff * f)
        return e
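
    # Illustrative sketch (not part of the original file): exp_list computes
    # the truncated exponential series
    #
    #   exp(c * self) = ONE + c*self + c^2/2! * self^2 + ... ,
    #
    # stopping as soon as a power vanishes (or the cut-off `stop` is reached);
    # entry k of the returned list is c^k/k! * self^k.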

    def pull_back(self,deg_enh_profile):
        """
        Pull back self to the graph associated to deg_enh_profile.

        Note that this returns an ELGTautClass as there could be several maps.

        More precisely, we return the sum over the pulled back classes divided
        by the number of undegeneration maps.

        Args:
            deg_enh_profile (tuple): enhanced profile of graph to pull back to.

        Raises:
            RuntimeError: raised if deg_enh_profile is not a degeneration of the
                underlying graph of self.

        Returns:
            ELGTautClass: sum of pullbacks of self to deg_enh_profile for each
                undegeneration map divided by the number of such maps.

        """
        if self._leg_dict is None:
            # trivial pullback
            return ELGTautClass(self._X,[(1,self._X.additive_generator(deg_enh_profile))])
        else:
            leg_maps = self._X.explicit_leg_maps(self._enh_profile,deg_enh_profile)
            if leg_maps is None:
                raise RuntimeError("Pullback failed: %r is not a degeneration of %r"
                                   % (deg_enh_profile,self._enh_profile))
            psi_list = []
            aut_factor = QQ(1) / QQ(len(leg_maps))
            for leg_map in leg_maps:
                new_leg_dict = {leg_map[l]:e for l, e in self._leg_dict.items()}
                psi_list.append((aut_factor,self._X.additive_generator(deg_enh_profile,new_leg_dict)))
            return ELGTautClass(self._X,psi_list)

    def psis_on_level(self, l):
        """
        The psi classes on level l of self.

        Args:
            l (int): level, i.e. 0,...,codim

        Returns:
            dict: psi dictionary on self.level(l).smooth_LG
        """
        L = self.level(l)
        # The psi classes on this level should be expressed in terms of the legs
        # of the smooth_LG of L:
        EG = L.smooth_LG
        try:
            # Careful: the legs of the smooth_LG are numbered 1,...,n.
            # The psi classes are still numbered inside the whole graph.
            # The conversion runs through the embedding of the LevelStratum
            # and back through the embedding of smooth_LG (dmp_inv).
            psis = {EG.dmp_inv[L.leg_dict[leg]] : self.leg_dict[leg]
                    for leg in self.inv_level_dict[l]}
        except KeyError:
            # no psis on this level
            psis = {}
        return psis

    def evaluate(self,quiet=False,warnings_only=False,admcycles_output=False):
        """
        Evaluate self (cap with the fundamental class of self._X).

        Note that this gives 0 if self is not a top-degree class.

        Evaluation works by taking the product of the evaluation of each level
        (i.e. evaluating, for each level, the psi monomial on this level) and
        multiplying this with the stack factor.

        The psi monomials on the levels are evaluated using admcycles (after
        removing residue conditions).

        Args:
            quiet (bool, optional): No output. Defaults to False.
            warnings_only (bool, optional): Output warnings. Defaults to False.
            admcycles_output (bool, optional): Output admcycles debugging info
                (used when evaluating levels). Defaults to False.

        Raises:
            RuntimeError: raised if there are inconsistencies with the psi
                degrees on the levels.

        Returns:
            QQ: integral of self on X.
        """
        if self.degree < self._X.dim():
            if not quiet or warnings_only:
                print("Warning: %r is not of top degree: %r (instead of %r)" % (self,self.degree,self._X.dim()))
            return 0
        level_list = []
        for l in range(self.codim + 1):
            if self.degree_on_level(l) < self.level_dim(l):
                raise RuntimeError("%r is of top degree, but not on level %r" % (self,l))
            L = self.level(l)
            value = L.evaluate(psis=self.psis_on_level(l),quiet=quiet,warnings_only=warnings_only,admcycles_output=admcycles_output)
            if value == 0:
                return 0
            level_list.append(value)
        # product over levels:
        prod = 1
        for p in level_list:
            prod *= p
        if not quiet:
            print("----------------------------------------------------")
            print("Contribution of Additive generator:")
            print(self)
            print("Product of level-wise integrals: %r" % prod)
            print("Stack factor: %r" % self.stack_factor)
            print("Total: %r" % (prod*self.stack_factor))
            print("----------------------------------------------------")
        return self.stack_factor * prod
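
    # Illustrative sketch (not part of the original file): for a top-degree
    # AdditiveGenerator the integral computed above factors as
    #
    #   evaluate() = stack_factor * prod(level integrals),
    #
    # where each level integral is the psi monomial on that level evaluated by
    # admcycles (via the evaluate method of the corresponding LevelStratum).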

    def to_prodtautclass(self):
        """
        Transform self into an admcycles prodtautclass on the underlying stgraph of self.

        Note that this gives the pushforward to M_g,n in the sense that we multiply with
        Strataclass and remove all residue conditions.

        Returns:
            prodtautclass: the prodtautclass of self, multiplied with the Strataclasses of
                the levels and all residue conditions removed.

        EXAMPLES ::

            sage: from admcycles.diffstrata import *
            sage: X=Stratum((2,))
            sage: X.additive_generator(((),0)).to_prodtautclass()
            Outer graph : [2] [[1]] []
            Vertex 0 :
            Graph : [2] [[1]] []
            Polynomial : (-7/24)*(kappa_1^1 )_0
            <BLANKLINE>
            <BLANKLINE>
            Vertex 0 :
            Graph : [2] [[1]] []
            Polynomial : 79/24*psi_1^1
            <BLANKLINE>
            <BLANKLINE>
            Vertex 0 :
            Graph : [1, 1] [[8], [1, 9]] [(8, 9)]
            Polynomial : (-19/24)*
            <BLANKLINE>
            <BLANKLINE>
            Vertex 0 :
            Graph : [1] [[1, 8, 9]] [(8, 9)]
            Polynomial : (-1/48)*
            sage: from admcycles.stratarecursion import Strataclass
            sage: X=GeneralisedStratum([Signature((4,-2,-2))], res_cond=[[(0,1)], [(0,2)]])
            sage: (X.additive_generator(((),0)).to_prodtautclass().pushforward() - Strataclass(1, 1, [4,-2,-2], res_cond=[2])).is_zero()
            True
        """
        LG = self._G.LG
        stgraph = LG.stgraph
        if any(self.level(l).zeroStratumClass() for l in range(self.codim + 1)):
            return admcycles.admcycles.prodtautclass(stgraph, terms=[])  # ZERO
        alpha = []  # prodtautclasses on levels
        vertices = []  # embedding of level into stgraph
        for l in range(self.codim + 1):
            # we pass the psis on this level and hope that in-between terms vanish
            # for dimension reasons:
            psis = self.psis_on_level(l)
            T = self.level(l).remove_res_cond(psis)  # ELGTautClass on self.level(l) with possibly fewer RCs
            if T._X.res_cond or len(T.psi_list) > 1:
                alpha.append(T.to_prodtautclass())
            else:
                # Now T is at most a psi product on T._X:
                psis = T.psi_list[0][1].leg_dict
                # self.level(l) has no residue conditions: take prodtautclass of Strataclass
                tautlist = [admcycles.stratarecursion.Strataclass(sig.g, 1, sig.sig) for sig in self.level(l)._sig_list]
                # as it is not ZERO it must be connected!
                assert len(tautlist) == 1
                # we have to include the psi contribution:
                stgraph_level = T._X.smooth_LG.LG.stgraph
                adm_psis = admcycles.admcycles.decstratum(stgraph_level, psi=psis)
                adm_psis_taut = admcycles.admcycles.tautclass([adm_psis])
                tautlist = [tautlist[0] * adm_psis_taut]
                ptc = admcycles.admcycles.prodtautclass(stgraph_level, protaut=tautlist)
                alpha.append(ptc)
            # Finally, we save the vertices of this level (as vertices of stgraph)
            vertices.append(LG.verticesonlevel(LG.internal_level_number(l)))
        # now we pull back all the alphas to a prodtautclass on stgraph:
        prod = self.stack_factor * admcycles.admcycles.prodtautclass(stgraph)
        for l, ptc in enumerate(alpha):
            prod = prod.factor_pullback(vertices[l], ptc)  # returns product (!)
        return prod

#################################################################
#################################################################
#################################################################
#################################################################

class ELGTautClass (SageObject):
    """
    A tautological class of a stratum X, i.e. a formal sum of psi classes on
    EmbeddedLevelGraphs.

    This is encoded by a list of summands.

    Each summand corresponds to an AdditiveGenerator with coefficient.

    Thus an ELGTautClass is a list with entries tuples (coefficient, AdditiveGenerator).

    These can be added, multiplied and reduced (simplified).

    INPUT :

    * X : GeneralisedStratum that we are on
    * psi_list : list of tuples (coefficient, AdditiveGenerator) as
      described above.
    * reduce=True : call self.reduce() on initialisation
    """
    def __init__(self,X,psi_list,reduce=True):
        self._psi_list = psi_list
        self._X = X
        if reduce:
            self.reduce()

    @classmethod
    def from_hash_list(cls,X,hash_list):
        # This does not reduce!
        return cls(X,[(c, X.additive_generator_from_hash(h)) for c,h in hash_list], reduce=False)

    @property
    def psi_list(self):
        return self._psi_list

    def __repr__(self):
        return "ELGTautClass(X=%r,psi_list=%r)"\
            % (self._X,self._psi_list)
    def __str__(self):
        str = "Tautological class on %s\n" % self._X
        for coeff, psi in self._psi_list:
            str += "%s * %s + \n" % (coeff, psi)
        return str

    def __eq__(self,other):
        if isinstance(other, AdditiveGenerator):
            return self == other.as_taut()
        try:
            return self._psi_list == other._psi_list
        except AttributeError:
            return False

    def __add__(self,other):
        # for sum, we need to know how to add '0':
        if other == 0:
            return self
        try:
            if not self._X == other._X:
                return NotImplemented
            new_psis = self._psi_list + other._psi_list
            return ELGTautClass(self._X,new_psis)
        except AttributeError:
            return NotImplemented
    def __iadd__(self,other):
        return self.__add__(other)
    def __radd__(self,other):
        return self.__add__(other)

    def __neg__(self):
        return (-1)*self

    def __sub__(self,other):
        return self + (-1)*other

    def __mul__ (self, other):
        if 0 == other:
            return 0
        elif self._X.ONE == other:
            return self
        # convert AdditiveGenerators to Tautclasses:
        if isinstance(other, AdditiveGenerator):
            return self * other.as_taut()
        try:
            # check if other is a tautological class
            _other_psi_list = other._psi_list
        except AttributeError:
            # attempt scalar multiplication:
            new_psis = [(coeff * other, psi) for coeff, psi in self._psi_list]
            return ELGTautClass(self._X,new_psis,reduce=False)
        if not self._X == other._X:
            return NotImplemented
        else:
            return self._X.intersection(self,other)
    def __rmul__(self,other):
        return self.__mul__(other)
    def __pow__(self,exponent):
        if exponent == 0:
            return self._X.ONE
        # TODO: quick check for going over top degree?
        prod = self
        for _ in range(1,exponent):
            prod = self * prod
        return prod

    @cached_method
    def is_equidimensional(self):
        """
        Determine if all summands of self have the same degree.

        Note that the empty tautological class (ZERO) gives True.

        Returns:
            bool: True if all AdditiveGenerators in self.psi_list are of same degree,
                False otherwise.
        """
        if self.psi_list:
            first_deg = self.psi_list[0][1].degree
            return all(AG.degree == first_deg for _c, AG in self.psi_list)
        return True

    def reduce(self):
        """
        Reduce self.psi_list by combining summands with the same AdditiveGenerator
        and removing those with coefficient 0 or that die for dimension reasons.
        """
        # we use the hash of the AdditiveGenerators to group:
        hash_dict = Counter()
        for c, AG in self._psi_list:
            hash_dict[AG] += c
        self._psi_list = [(c, AG) for AG, c in hash_dict.items() if c != 0 and AG.dim_check()]

    # To evaluate, we go through the AdditiveGenerators and
    # take the (weighted) sum of the AdditiveGenerators.
    def evaluate(self,quiet=True,warnings_only=False,admcycles_output=False):
        """
        Evaluation of self, i.e. cap with fundamental class of X.

        This is the sum of the evaluation of the AdditiveGenerators in psi_list
        (weighted with their coefficients).

        Each AdditiveGenerator is (essentially) the product of its levels,
        each level is (essentially) evaluated by admcycles.

        Args:
            quiet (bool, optional): No output. Defaults to True.
            warnings_only (bool, optional): Only warnings output. Defaults to False.
            admcycles_output (bool, optional): admcycles debugging output. Defaults to False.

        Returns:
            QQ: integral of self on X

        EXAMPLES ::

            sage: from admcycles.diffstrata import *
            sage: X=GeneralisedStratum([Signature((0,0))])
            sage: assert (X.xi^2).evaluate() == 0

            sage: X=GeneralisedStratum([Signature((1,1,1,1,-6))])
            sage: assert set([(X.cnb(((i,),0),((i,),0))).evaluate() for i in range(len(X.bics))]) == {-2, -1}
        """
        if warnings_only:
            quiet = True
        DS_list = []
        for c, AG in self.psi_list:
            value = AG.evaluate(quiet=quiet,warnings_only=warnings_only,admcycles_output=admcycles_output)
            DS_list.append(c * value)
        if not quiet:
            print("----------------------------------------------------")
            print("In summary: We sum")
            for i, summand in enumerate(DS_list):
                print("Contribution %r from AdditiveGenerator" % summand)
                print(self.psi_list[i][1])
                print("(With coefficient %r)" % self.psi_list[i][0])
            print("To obtain a total of %r" % sum(DS_list))
            print("----------------------------------------------------")
        return sum(DS_list)

    def extract(self,i):
        """
        Return the i-th component of self.

        Args:
            i (int): index of self._psi_list

        Returns:
            ELGTautClass: coefficient * AdditiveGenerator at position i of self.
        """
        return ELGTautClass(self._X,[self._psi_list[i]],reduce=False)

    @cached_method
    def degree(self, d):
        """
        The degree d part of self.

        Args:
            d (int): degree

        Returns:
            ELGTautClass: degree d part of self
        """
        new_psis = []
        for c, AG in self.psi_list:
            if AG.degree == d:
                new_psis.append((c, AG))
        return ELGTautClass(self._X, new_psis, reduce=False)

    @cached_method
    def list_by_degree(self):
        """
        A list of length X.dim() + 1 with the degree d part as item d.

        Returns:
            list: list of ELGTautClasses with entry i of degree i.
        """
        deg_psi_list = [[] for _ in range(self._X.dim() + 1)]
        for c, AG in self.psi_list:
            deg_psi_list[AG.degree].append((c, AG))
        return [ELGTautClass(self._X, piece, reduce=False) for piece in deg_psi_list]

    def is_pure_psi(self):
        """
        Check if self is ZERO or a psi-product on the stratum.

        Returns:
            boolean: True if self has at most one summand and that is of the form
                AdditiveGenerator(((), 0), psis).

        EXAMPLES ::

            sage: from admcycles.diffstrata import *
            sage: X=Stratum((2,))
            sage: X.ZERO.is_pure_psi()
            True
            sage: X.ONE.is_pure_psi()
            True
            sage: X.psi(1).is_pure_psi()
            True
            sage: X.xi.is_pure_psi()
            False
        """
        if not self.psi_list:
            return True
        return len(self.psi_list) == 1 and self.psi_list[0][1].enh_profile == ((), 0)

    def to_prodtautclass(self):
        """
        Transforms self into an admcycles prodtautclass on the stable graph of the smooth
        graph of self._X.

        Note that this is essentially the pushforward to M_g,n, i.e. we resolve residues
        and multiply with the correct Strataclasses along the way.

        Returns:
            prodtautclass: admcycles prodtautclass corresponding to self pushed forward
                to the stable graph with one vertex.

        EXAMPLES ::

            sage: from admcycles.diffstrata import *
            sage: X=Stratum((2,))
            sage: X.ONE.to_prodtautclass()
            Outer graph : [2] [[1]] []
            Vertex 0 :
            Graph : [2] [[1]] []
            Polynomial : (-7/24)*(kappa_1^1 )_0
            <BLANKLINE>
            <BLANKLINE>
            Vertex 0 :
            Graph : [2] [[1]] []
            Polynomial : 79/24*psi_1^1
            <BLANKLINE>
            <BLANKLINE>
            Vertex 0 :
            Graph : [1, 1] [[6], [1, 7]] [(6, 7)]
            Polynomial : (-19/24)*
            <BLANKLINE>
            <BLANKLINE>
            Vertex 0 :
            Graph : [1] [[1, 6, 7]] [(6, 7)]
            Polynomial : (-1/48)*
            sage: (X.xi^X.dim()).evaluate() == (X.xi^X.dim()).to_prodtautclass().pushforward().evaluate()
            True
        """
        G = self._X.smooth_LG
        stgraph = G.LG.stgraph
        total = 0
        for c, AG in self.psi_list:
            ptc = AG.to_prodtautclass()
            # sort vertices by connected component:
            vertex_map = {}
            # note that every vertex of G has at least one leg (that is a marked point of _X):
            for v, _ in enumerate(G.LG.genera):
                mp_on_stratum = G.dmp[G.LG.legs[v][0]]
                # find this marked point on AG:
                leg_on_ag = AG._G.dmp_inv[mp_on_stratum]
                LG = AG._G.LG
                # we use the underlying graph:
                UG_v = LG.UG_vertex(v)
                for w, g, kind in LG.underlying_graph.connected_component_containing_vertex(UG_v):
                    if kind != 'LG':
                        continue
                    vertex_map[w] = v
            # map legs of AG._G to smooth_LG
            # CAREFUL: This goes in the OTHER direction!
            leg_map = {G.dmp_inv[mp] : ldeg for ldeg, mp in AG._G.dmp.items()}
            pf = ptc.partial_pushforward(stgraph, vertex_map, leg_map)
            total += c * pf
        return total

#################################################################
#################################################################
#################################################################
#################################################################

#################################################################
#################################################################
### Auxiliary functions:
#################################################################
#################################################################

def unite_embedded_graphs(gen_LGs):
    """
    Create a (disconnected) EmbeddedLevelGraph from a tuple of tuples that generate EmbeddedLevelGraphs.

    (The name is slightly misleading, but of course it does not make sense to actually unite two complete
    EmbeddedLevelGraphs, as the checks would (and do!) throw errors otherwise! Therefore, this essentially
    takes the data of a LevelGraph embedded into each connected component of a GeneralisedStratum and
    returns an EmbeddedLevelGraph on the product.)

    This should be used on (products of) BICs in generalised strata.

    Args:
        gen_LGs (tuple): tuple of tuples that generate EmbeddedLevelGraphs.
            More precisely, each tuple is of the form:
            * X (GeneralisedStratum): Enveloping stratum (should be the same for all tuples!)
            * LG (LevelGraph): Underlying LevelGraph
            * dmp (dict): (partial) dictionary of marked points
            * dlevels (dict): (partial) dictionary of levels

    Returns:
        EmbeddedLevelGraph: The (disconnected) LevelGraph obtained from the input with
            the legs renumbered (continuously, starting with 1), and the levels numbered
            according to the embedding.
    """
    newgenera = []
    newlevels = []
    newlegs = []
    newpoleorders = {}
    newedges = []
    newdmp = {}
    newdlevels = {}
    max_leg_number = 0
    oldX = gen_LGs[0][0]  # for check that all belong to the same stratum:
    for emb_g in gen_LGs:
        # Unpack tuple:
        X, LG, dmp, dlevels = emb_g
        if X != oldX:
            raise RuntimeError("Can't unite graphs on different Strata! %r" % gen_LGs)
        # the genera are just appended
        newgenera += LG.genera
        # same for the levels, but while we're at it, we might just as well
        # replace them by their embedding (then newdlevels will be trivial)
        # and these should be consistent for all graphs in the tuple.
        # Thus, newdlevels will be the identity.
        newlevels += [dlevels[l] for l in LG.levels]
        # the legs will have to be renumbered
        leg_dict = {}  # old number -> new number
        legs = 0
        for i, l in enumerate(flatten(LG.legs)):
            newlegnumber = max_leg_number + i + 1
            leg_dict[l] = newlegnumber
            # while we're at it, we add the pole orders:
            newpoleorders[newlegnumber] = LG.poleorders[l]
            # For the dictionary of marked points (for the embedding), we
            # must distinguish if this is a marked point or a half-edge.
            # Marked points are simply the ones for which we have a key
            # in dmp :-)
            try:
                newdmp[newlegnumber] = dmp[l]
            except KeyError:
                pass
            legs += 1
        max_leg_number += legs
        # append (nested) list of legs:
        newlegs += [[leg_dict[l] for l in comp] for comp in LG.legs]
        # finally, the edges are renumbered accordingly:
        newedges += [(leg_dict[e[0]], leg_dict[e[1]]) for e in LG.edges]
    # the levels are already numbered according to the embedding dict
    newdlevels = {l:l for l in newlevels}
    newLG = admcycles.diffstrata.levelgraph.LevelGraph(
        newgenera, newlegs, newedges, newpoleorders, newlevels
    )
    return EmbeddedLevelGraph(X, newLG, newdmp, newdlevels)

def sort_with_dict(l):
    """
    Sort a list and provide a dictionary relating old and new indices.

    If x had index i in l, then x has index sorted_dict[i] in the sorted l.

    Args:
        l (list): List to be sorted.

    Returns:
        tuple: A tuple consisting of:
            list: The sorted list l.
            dict: A dictionary old index -> new index.
    """
    sorted_list = []
    sorted_dict = {}
    for i,(j,v) in enumerate(sorted(enumerate(l),key=lambda w:w[1])):
        sorted_list.append(v)
        sorted_dict[j] = i
    return sorted_list, sorted_dict
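
# Illustrative sketch (not part of the original file): a small worked example
# of sort_with_dict:
#
#   sort_with_dict([3, 1, 2])  ==  ([1, 2, 3], {1: 0, 2: 1, 0: 2})
#
# i.e. the element 3, which sat at index 0, now sits at index 2 of the sorted list.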

def get_squished_level(deg_ep,ep):
    """
    Get the (relative) level number of the level squished in ep.

    This is the index of the corresponding BIC in the profile.

    Args:
        deg_ep (tuple): enhanced profile
        ep (tuple): enhanced profile

    Raises:
        RuntimeError: raised if deg_ep is not a degeneration of ep

    Returns:
        int: relative level number
    """
    deg_p = deg_ep[0]
    p = set(ep[0])
    for i, b in enumerate(deg_p):
        if b not in p:
            break
    else:
        raise RuntimeError("%r is not a degeneration of %r!" % (deg_ep, p))
    return i
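
# Illustrative sketch (not part of the original file): with the hypothetical
# enhanced profiles deg_ep = ((3, 1, 2), 0) and ep = ((3, 2), 0), the BIC 1 is
# the one missing from ep, so
#
#   get_squished_level(((3, 1, 2), 0), ((3, 2), 0))  ==  1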

def _graph_word(n):
    if n == 1:
        return "graph"
    else:
        return "graphs"

#################################################################
#################################################################
### Auxiliary functions for caching:
#################################################################
#################################################################

def hash_AG(leg_dict, enh_profile):
    """
    The hash of an AdditiveGenerator, built from the psis and the enhanced profile.

    The hash-tuple is (leg-tuple,profile,index), where profile is
    changed to a tuple and leg-tuple is a nested tuple consisting of
    tuples (leg,exponent) (or None).

    Args:
        leg_dict (dict): dictionary for psi powers (leg -> exponent)
        enh_profile (tuple): enhanced profile

    Returns:
        tuple: nested tuple
    """
    if leg_dict is None:
        leg_hash = ()
    else:
        leg_hash = tuple(sorted(leg_dict.items()))
    return (leg_hash,tuple(enh_profile[0]),enh_profile[1])
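
# Illustrative sketch (not part of the original file): the hash is a nested
# tuple built from the sorted psi dictionary and the enhanced profile, e.g.
#
#   hash_AG({3: 2, 1: 1}, ((4, 7), 0))  ==  (((1, 1), (3, 2)), (4, 7), 0)
#   hash_AG(None, ((4, 7), 0))          ==  ((), (4, 7), 0)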

def adm_key(sig, psis):
    """
    The hash of a psi monomial on a connected stratum without residue conditions.

    This is used for caching the values computed using admcycles (using
    GeneralisedStratum.adm_evaluate).

    The signature is sorted, the psis are renumbered accordingly and also
    sorted (with the aim of computing as few duplicates as possible).

    Args:
        sig (tuple): signature tuple
        psis (dict): psi dictionary

    Returns:
        tuple: nested tuple
    """
    sorted_psis = {}
    sorted_sig = []
    psi_by_order = defaultdict(list)
    # sort signature and relabel psis accordingly:
    # NOTE: Psis are labelled 'mathematically', i.e. 1,...,len(sig)
    for new_i, (old_i, order) in enumerate(sorted(enumerate(sig), key=lambda k: k[1])):
        psi_new_i = new_i + 1
        psi_old_i = old_i + 1
        sorted_sig.append(order)
        if psi_old_i in psis:
            assert not (psi_new_i in sorted_psis)
            psi_exp = psis[psi_old_i]
            sorted_psis[psi_new_i] = psi_exp
            psi_by_order[order].append(psi_exp)
    # sort psis for points of same order:
    ordered_sorted_psis = {}
    i = 0
    assert len(sig) == len(sorted_sig)
    while i < len(sig):
        order = sorted_sig[i]
        for j, psi_exp in enumerate(sorted(psi_by_order[order])):
            assert sorted_sig[i+j] == order
            ordered_sorted_psis[i+j+1] = psi_exp
        while i < len(sig) and sorted_sig[i] == order:
            i += 1
    return (tuple(sorted_sig), tuple(sorted(ordered_sorted_psis.items())))
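
# Illustrative sketch (not part of the original file): adm_key sorts the
# signature and relabels the psis accordingly, e.g.
#
#   adm_key((2, 0, 0), {1: 1, 3: 2})  ==  ((0, 0, 2), ((1, 2), (3, 1)))
#
# The zeroes move to the front (the psi on the old point 3 becomes the psi on
# the new point 1) and psis on points of equal order are sorted by exponent.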