Sage cell
Use this to produce your statistic values and test your code!
Rules
Apply the following rules while writing your submission.
- Emphasis
- ''italics''; '''bold'''; '''''bold italics'''''
- Bulleted lists
- * Item 1
Separate line
* Subitem 1
* Item 2
- Links
- [[target|linktext]]; target must start with http://, https:// or with www.
- Links to statistics in the database
- [[St000001]]
- Links to arXiv papers
- new: [[arxiv:1401.3690]], old: [[arXiv:math/0011099]]
- Links to MathSciNet reviews
- [[mathscinet:3113227]] or [[MR3113227]]
- Links to zbMATH (formerly: Zentralblatt) reviews
- [[zbmath:0845.11001]]
- Links to OEIS sequences
- [[oeis:A000108]]
- Links to MathOverflow questions and answers
- [[mathoverflow:132338]] or [[MO132338]]
- Links to Wikipedia articles
- [[wikipedia:On-Line_Encyclopedia_of_Integer_Sequences]]
- Links to Mathworld articles
- [[mathworld:AdjacencyMatrix]]
- Digital Object Identifiers
- [[doi:10.1090/noti1029]]
- CiteSeerX Identifiers
- [[citeseer:10.1.1.150.1445]]
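The statistic cells below all follow the same two-part layout: a part (marked "# edit this") defining the statistic, and a harness (marked "# DON'T edit this") printing one "object => value" line per element. A minimal sketch of that template, with a placeholder statistic and Permutations chosen only as an example parent:
def statistic(elt):
    return 0  # replace with your statistic
parent_initializer = Permutations
element_repr = repr
levels = [2, 3]
for level in levels:
    for elt in parent_initializer(level):
        print('%s => %s' % (element_repr(elt), statistic(elt)))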
def statistic(x):
return len(x.crossings())
parent_initializer = PerfectMatchings
element_repr = lambda m: str(sorted(tuple(sorted(b)) for b in m))
levels = [2, 4, 6, 8]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
# edit this
# WARNING: takes a long time even for permutations of size 5.
def defect_set(sigma, w):
"""
INPUT:
- w is a reduced word for a permutation
- sigma is the mask, given as a binary word of length l(w)
OUTPUT:
- the defect set, that is the set of positions j such that
l(w^{\sigma[j-1]} w_j) < l(w^{\sigma[j-1]})
"""
res = []
S = Permutations(max(w)+1)
w_S = [S.simple_reflection(e) for e in w]
w_up_to_j = S([])
for j in range(len(w)-1):
if sigma[j] == 1:
w_up_to_j = w_up_to_j.left_action_product(w_S[j])
if (w_up_to_j.left_action_product(w_S[j+1])).length() < w_up_to_j.length():
res += [j+2]
return res
def statistic(pi):
"""
sage: statistic(Permutation([4,3,5,2,1]))
3
sage: statistic(Permutation([5,4,2,1,3]))
3
"""
k = pi.length()
if not k:
return 0
masks = cartesian_product([[0,1]]*k)
return max([len(defect_set(m, w))
for m in masks
for w in pi.reduced_words()])
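# Optional helper, not part of the submission: tally the distribution of the
# statistic over S_n, for instance to look the resulting numbers up by hand.
# Given the warning above, this is only practical for very small n.
def distribution(n):
    from collections import Counter
    return sorted(Counter(statistic(pi) for pi in Permutations(n)).items())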
# DON'T edit this
parent_initializer = Permutations
element_repr = repr
levels = [2, 3, 4, 5]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[] => 0
[1] => 0
[1,2] => 0
[2,1] => 0
[1,2,3] => 0
# edit this
@cached_function
def P(k):
return posets.IntegerPartitionsDominanceOrder(k)
def statistic(pi):
Q = P(pi.size())
return len(Q.lower_covers(Q(pi)))
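# Optional cross-check, not part of the submission: compute the lower covers in
# dominance order directly from Partition.dominates, without building the poset.
def statistic_naive(pi):
    below = [mu for mu in Partitions(pi.size()) if mu != pi and pi.dominates(mu)]
    # a lower cover of pi is a maximal element among the partitions strictly below pi
    return len([mu for mu in below
                if not any(nu != mu and nu.dominates(mu) for nu in below)])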
# DON'T edit this
parent_initializer = Partitions
element_repr = repr
levels = [2, 3, 4, 5, 6, 7, 8, 9, 10]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[] => 0
[1] => 0
[2] => 1
[1,1] => 0
[3] => 1
# edit this
# depends on https://trac.sagemath.org/ticket/16110
def statistic(D):
return ParallelogramPolyomino.from_dyck_word(D).area()
# DON'T edit this
parent_initializer = DyckWords
element_repr = repr
levels = [2, 3, 4, 5, 6]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[] => 0
[1,0] => 1
[1,0,1,0] => 2
[1,1,0,0] => 2
[1,0,1,0,1,0] => 3
# edit this
def avoids(w, pattern):
l = [len(p) for p in pattern]
l_p = len(pattern)
n = len(w)
A = sorted(list(set([a for p in pattern for a in p])))
pattern = [[A.index(a) for a in p] for p in pattern]
k = 1+max(max(p) for p in pattern)
def is_match(s):
assignment = [None]*k
for i in range(l_p):
for j in range(len(pattern[i])):
l = w[s[i]+j]
p = pattern[i][j]
m = assignment[p]
if m is None:
assignment[p] = l
elif m != l:
return False
return all(assignment[i] < assignment[i+1] for i in range(k-1))
for s in Subsets(list(range(len(w))), l_p)._fast_iterator():
if all(s[i]+l[i] <= s[i+1] for i in range(l_p-1)) and s[-1]+l[-1] <= n:
if is_match(s):
return False
return True
def statistic(la):
mset = [i for i, p in enumerate(la) for _ in range(p)]
    return len([w for w in Permutations(mset) if avoids(w, [[1,3,2]])])
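# Added illustration: each block of the pattern must occur in adjacent positions,
# so [[1,3,2]] is the consecutive pattern 132.  For instance,
#   avoids([1,3,2], [[1,3,2]])    # False: 132 itself contains it
#   avoids([1,3,4,2], [[1,3,2]])  # True: 1342 contains 1-3-2 only non-consecutively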
# DON'T edit this
parent_initializer = Compositions
element_repr = repr
levels = [2, 3, 4, 5, 6, 7]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[] => 1
[1] => 1
[1,1] => 2
[2] => 1
[1,1,1] => 5
# edit this
def diagonal_index_of_partition(la):
return sum((j - i) for (i, j) in la.cells())
def binomial_shifted_diagonal_index_of_partition(partition):
return binomial(partition.size() + 1, 2) + diagonal_index_of_partition(partition)
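# Worked example (added): for la = [2,1] the cells are (0,0), (0,1), (1,0), so the
# diagonal index is 0 + 1 + (-1) = 0 and the shifted value is binomial(4, 2) + 0 = 6.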
def is_desarrangement_tableau(t):
if t.size() == 0:
return True
descents = t.standard_descents()
ascents = [i for i in t.entries() if i not in descents]
return min(ascents) % 2 == 0
def r2r_statistic_on_standard_tableaux(t):
# remove 1 and rectify until we get a desarrangement tableau
s = copy(t)
while not is_desarrangement_tableau(s):
s = SkewTableau([[(i-1 if i > 1 else None) for i in row] for row in s])
s = StandardTableau(s.rectify())
b_t = binomial_shifted_diagonal_index_of_partition(t.shape())
b_s = binomial_shifted_diagonal_index_of_partition(s.shape())
return b_t - b_s
def statistic(t):
return r2r_statistic_on_standard_tableaux(t)
# DON'T edit this
parent_initializer = StandardTableaux
element_repr = repr
levels = [2, 3, 4, 5, 6]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[[1,2,3,4,6,7,8],[5,9]] => 39
[[1,2,4,5,6,8,9],[3],[7]] => 15
[[1,2,5,6,8,9],[3,4,7]] => 10
[[1,3,4,6,8,9],[2,5,7]] => 0
[[1,2,3,6,8,9],[4,5,7]] => 23
# edit this
def Klazar_occurrences(pi, pattern):
"""
Return the number of occurrences of pattern in pi.
EXAMPLES:
From Mansour 2013, page 86, Example 3.24
The set partition π = 135/26/4 contains four occurrences of
the pattern 12/3, namely, 13/6, 13/4, 15/6, and 35/6. On the other hand, the
set partition π avoids the pattern 12/34.
sage: Klazar_occurrences([[1,3,5],[2,6],[4]], [[1,2],[3]])
4
sage: Klazar_occurrences([[1,3,5],[2,6],[4]], [[1,2],[3,4]])
0
"""
pi = SetPartition(pi).standardization()
pattern = SetPartition(pattern).standardization()
count = 0
for s in Subsets(range(1,1+pi.size()), pattern.size()):
if pi.restriction(s).standardization() == pattern:
count += 1
return count
def statistic(pi):
return Klazar_occurrences(pi, [[1,2,3]])
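# Remark (added): an occurrence of the pattern [[1,2,3]] is just a 3-element subset
# contained in a single block, so this statistic equals
# sum(binomial(len(B), 3) for B in pi).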
# DON'T edit this
parent_initializer = SetPartitions
element_repr = lambda X: str(sorted([sorted(s) for s in X])).replace("[","{").replace("]","}")
levels = [2, 3, 4, 5]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
{{1,2}} => 0
{{1},{2}} => 0
{{1,2,3}} => 1
{{1},{2,3}} => 0
{{1,3},{2}} => 0
# edit this
# BinaryTree -> Bool
def is_leaf(b):
return b == BinaryTree('.')
# BinaryTree -> Int
def get_external_node(b, flag):
    count = 0
if is_leaf(b):
return 0
else:
if (flag): # True counts the left side
if not is_leaf(b[0]):
return 1 + get_external_node(b[0], True)
else: # False counts the right side
if not is_leaf(b[1]):
return 1 + get_external_node(b[1], False)
return count
# BinaryTree -> Int
def external_node_count(b):
return 1 + get_external_node(b, True) + get_external_node(b, False)
def size(b):
if not b:
return 1
else:
return size(b[0])+size(b[1])
def statistic(b):
return size(b)-1-external_node_count(b)
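# Remark (added): external_node_count counts the internal nodes on the leftmost and
# rightmost spines (including the root), and size(b) - 1 is the number of internal
# nodes, so the statistic is the number of internal nodes lying on neither outer spine.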
# DON'T edit this
parent_initializer = BinaryTrees
element_repr = repr
levels = [2, 3, 4, 5, 6]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[.,.] => 0
[.,[.,.]] => 0
[[.,.],.] => 0
[.,[.,[.,.]]] => 0
[.,[[.,.],.]] => 1
# edit this
@cached_function
def statistic(w):
def children(m):
for (a,b),(c,d) in m.crossings():
m_new = list(m)
m_new.remove((a,b))
m_new.remove((c,d))
A, B = min(a,b), max(a,b)
C, D = min(c,d), max(c,d)
if C < A:
(A,B),(C,D) = (C,D),(A,B)
yield PerfectMatching(m_new + [(A,C), (B,D)])
yield PerfectMatching(m_new + [(A,D), (B,C)])
w = PerfectMatching(sorted([sorted(a) for a in w]))
l = [statistic(v) for v in children(w)]
if len(l) == 0:
return 0
else:
return 1+min(l)
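# Remark (added): children(m) replaces one crossing pair of arcs by either of its
# two uncrossed resolutions, so the statistic is the least number of such resolution
# steps needed to reach a noncrossing matching (0 if w is already noncrossing).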
# DON'T edit this
parent_initializer = PerfectMatchings
element_repr = lambda m: str(sorted(tuple(sorted(b)) for b in m))
levels = [2, 4, 6, 8]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[(1,2)] => 0
[(1,2),(3,4)] => 0
[(1,3),(2,4)] => 1
[(1,4),(2,3)] => 0
[(1,2),(3,4),(5,6)] => 0
# edit this
def statistic(C):
return C.length()
# DON'T edit this
parent_initializer = lambda x: Cores(x[1],x[0])
element_repr = lambda X: "( "+X._repr_()+", "+str(X.k())+" )"
levels = [(2, 3), (3, 3), (4, 3), (5, 3), (6, 3), (2, 4), (3, 4), (4, 4), (5, 4), (6, 4), (2, 5), (3, 5), (4, 5), (5, 5), (6, 5), (2, 6), (3, 6), (4, 6), (5, 6), (6, 6), (7, 6)]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
([2,1],2) => 2
([2],3) => 2
([1,1],3) => 2
([2],4) => 2
([1,1],4) => 2
# edit this
# code by Jang Soo Kim
def delete_elements(P,A):
"Return the poset obtained from P by deleting the elements in A"
elms = [x for x in P.list() if x not in A]
rels = [R for R in P.relations() if R[0] not in A and R[1] not in A]
return Poset([elms,rels], cover_relations=False)
def add_bottom(P):
"Return the poset obtained from P by adding the minimum element 0."
elms = ["zero_hat"] + P.list()
rels = [["zero_hat",x] for x in P.list()] + P.relations()
return Poset([elms,rels], cover_relations=False)
def Poin(P,t,omega=0):
"""
Input:
P: a poset or a list of numbers [a,b,c,...].
If P = [a,b,c,...], then P is set to be the cell poset of the partition [a,b,c,...].
t: a variable
    omega: a labeling of P; by default the first linear extension of P is used.
Output: The Poincare polynomial of the poset P.
The bijection from linear extensions to P-transverse permutations is used.
"""
if type(P) == list:
P = Partition(P).cell_poset().dual()
if omega == 0:
omega = P.linear_extensions()[0]
poly = 0
for L in P.linear_extensions():
poly += t^(len(P)-len(get_P_transverse_permutation(P,omega,L)))
return poly
def get_P_transverse_permutation(P,omega,L):
"""
Input:
P: a poset
omega: a labeling of P
L: a linear extension of P
Output: The P-transverse permutation corresponding to L with respect to omega.
"""
Q = add_bottom(P)
pi = []
while len(L)>0:
N = next_level(Q,P,omega,L)
pi = pi + N[-1]
[Q, P, omega, L] = N[:-1]
return pi
def next_level(Q,P,omega,L):
"""
INPUT: Q,P,omega,L
Q: poset containing P
omega: labeling
L: a linear extension of P
OUTPUT: [Q1, P1, omega, L1, pi]
Q1 = P
P1 = P - minimal blocks
omega = omega
L1 = L restricted to P1
pi = P-transverse permutation of the minimal blocks in P.
"""
L1 = L
pi = []
C = [] # the list of current level elements in P
D = [] # the list of current level elements in P that are not minimal in Q
B = []
for x in L:
if x not in P.minimal_elements(): # If x is not a minimal element in P this level is done.
break
C.append(x)
if x not in Q.minimal_elements():
D.append(x)
if omega.index(x) == min([omega.index(d) for d in D]):
if len(B)>0:
pi.append(B)
B = [x]
else:
B.append(x)
pi.append(B)
Q1 = P
P1 = delete_elements(P,C)
L1 = L[len(C):]
return [Q1, P1, omega, L1, pi]
statistic = lambda P: Poin(P, -1)
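# Usage sketch (added): Poin also accepts a partition given as a plain list, in
# which case it works with the dual of its cell poset; for example,
# Poin([2, 1], polygen(QQ, 't')) would return the full Poincare polynomial, of which
# statistic takes the value at t = -1.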
# DON'T edit this
def parent_initializer(n):
class MyPosets():
def __iter__(self):
for P in Posets(n):
yield P
def cardinality(self):
l = [1, 1, 2, 5, 16, 63, 318, 2045, 16999, 183231, 2567284, 46749427, 1104891746, 33823827452, 1338193159771, 68275077901156, 4483130665195087]
if n < len(l):
return l[n]
else:
return Infinity
return MyPosets()
element_repr = lambda X: str( ( sorted(X._hasse_diagram.canonical_label().cover_relations()), len(X._hasse_diagram.vertices(sort=False)) ) )
levels = [2, 3, 4, 5]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
([],1) => 1
([],2) => 0
([(0,1)],2) => 1
([],3) => 0
([(1,2)],3) => -1
# edit this
def ASM_X_ray(A):
P = A.to_matrix()
n = P.nrows()
return tuple(sum(P[k-1-j][j] for j in range(max(0, k-n), min(k,n)))
for k in range(1,2*n))
@cached_function
def ASM_X_rays(n):
return sorted(ASM_X_ray(A) for A in AlternatingSignMatrices(n))
def statistic(A):
return ASM_X_rays(A.to_matrix().nrows()).count(ASM_X_ray(A))
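# Remark (added): ASM_X_ray(A) is the tuple of antidiagonal sums of the matrix of A,
# read from the top-left corner to the bottom-right one, so statistic(A) counts the
# alternating sign matrices of the same size (A included) sharing this X-ray.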
# DON'T edit this
parent_initializer = AlternatingSignMatrices
element_repr = lambda X: str([list(row) for row in X.to_matrix()])
levels = [2, 3, 4]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[[1]] => 1
[[1,0],[0,1]] => 1
[[0,1],[1,0]] => 1
[[1,0,0],[0,1,0],[0,0,1]] => 1
[[0,1,0],[1,0,0],[0,0,1]] => 1
# edit this
def statistic(G):
return G[0][0]-G[0][-1]
# DON'T edit this
def parent_initializer(x):
class MyGTP():
def __iter__(self):
for la in Partitions(x[1], max_length=x[0]):
top_row = la + [0]*(x[0]-len(la))
for P in GelfandTsetlinPatterns(n=x[0], top_row=top_row):
yield P
def cardinality(self):
return SemistandardTableaux(size=x[1], max_entry=x[0]).cardinality()
return MyGTP()
element_repr = repr
levels = [(2, 1), (2, 2), (3, 1), (2, 3), (3, 2), (4, 1), (2, 4), (3, 3), (4, 2), (5, 1), (2, 5), (3, 4), (4, 3), (5, 2), (6, 1)]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[[1,0],[0]] => 1
[[1,0],[1]] => 1
[[2,0],[0]] => 2
[[2,0],[1]] => 2
[[2,0],[2]] => 2
# edit this
def statistic(T):
S = [[e for e in row] for row in T]
count = 0
for i, row in enumerate(S):
for j, e in enumerate(row):
S[i][j] = e+1
try:
_ = SemistandardTableau(S)
            except ValueError:  # the bumped tableau is not semistandard (StandardError existed only in Python 2)
count += 1
S[i][j] = e
return count
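# Alternative sketch (added), avoiding the exception: a cell can refuse the entry
# e+1 only because of its right neighbour (rows must stay weakly increasing) or the
# cell below it (columns must stay strictly increasing).
def statistic_alt(T):
    S = [list(row) for row in T]
    count = 0
    for i, row in enumerate(S):
        for j, e in enumerate(row):
            blocked_right = j + 1 < len(row) and e + 1 > row[j + 1]
            blocked_below = i + 1 < len(S) and j < len(S[i + 1]) and e + 1 >= S[i + 1][j]
            if blocked_right or blocked_below:
                count += 1
    return count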
# DON'T edit this
def parent_initializer(x):
class MySemistandardTableaux():
def __iter__(self):
for T in SemistandardTableaux(size=x[0], max_entry=x[1]):
if max(T.entries()) == x[1]:
yield T
def cardinality(self):
c1 = SemistandardTableaux(size=x[0], max_entry=x[1]).cardinality()
if x[1] <= 1:
return c1
c2 = SemistandardTableaux(size=x[0], max_entry=x[1]-1).cardinality()
return c1 - c2
return MySemistandardTableaux()
element_repr = repr
levels = [(2, 2), (2, 3), (3, 2), (2, 4), (3, 3), (4, 2), (2, 5), (3, 4), (4, 3), (5, 2), (2, 6), (3, 5), (4, 4), (5, 3), (6, 2)]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[[1,2]] => 0
[[2,2]] => 1
[[1],[2]] => 1
[[1,3]] => 0
[[2,3]] => 0
# edit this
"""
On the dimension of a graph, Paul Erdős, Frank Harary and William
T. Tutte
"""
dimensions = dict()
N = 7
for n in range(1,N+1):
# C_n
if n > 3:
G = graphs.CycleGraph(n)
dimensions[G.canonical_label().copy(immutable=True)] = 2
# P_n
if n > 1:
G = graphs.PathGraph(n)
dimensions[G.canonical_label().copy(immutable=True)] = 1
# K_n
G = graphs.CompleteGraph(n)
dimensions[G.canonical_label().copy(immutable=True)] = n-1
# K_n-e
if n > 2:
G.delete_edge(G.edges()[0])
dimensions[G.canonical_label().copy(immutable=True)] = n-2
# K_{2,n}
G = graphs.CompleteBipartiteGraph(2, n)
if n >= 3:
dimensions[G.canonical_label().copy(immutable=True)] = 3
elif n == 2:
dimensions[G.canonical_label().copy(immutable=True)] = 2
# K_{3,n}
G = graphs.CompleteBipartiteGraph(3, n)
if n >= 3:
dimensions[G.canonical_label().copy(immutable=True)] = 4
# Wheel
G = graphs.WheelGraph(n) # n vertices
if n in [5, 6]:
dimensions[G.canonical_label().copy(immutable=True)] = 3
elif n == 7:
dimensions[G.canonical_label().copy(immutable=True)] = 2
elif n > 7:
dimensions[G.canonical_label().copy(immutable=True)] = 3
G = graphs.CubeGraph(n) # 2^n vertices
if n > 1:
dimensions[G.canonical_label().copy(immutable=True)] = 2
def statistic(G):
global dimensions
# bridges do not change the dimension, unless we have a forest
G = G.copy(immutable=False)
bridges = list(G.bridges())
if G.num_edges() == len(bridges):
if G.num_edges() == 0:
return 0
if max(G.degree()) <= 2:
return 1
return 2
G.delete_edges(bridges)
l = G.connected_components_subgraphs()
if len(l) > 1:
statistics = [statistic(H) for H in l]
if not any(v is None for v in statistics):
return max(statistics)
l = G.blocks_and_cut_vertices()[0]
if len(l) > 1:
statistics = [statistic(G.subgraph(B)) for B in l]
if not any(v is None for v in statistics):
return max(statistics)
G = G.canonical_label().copy(immutable=True)
if G in dimensions:
return dimensions[G]
n = G.num_verts()
if G.is_clique():
return n-1
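# Note (added): for graphs whose dimension is determined neither by the table above
# nor by the bridge/block reductions, the function falls through and returns None,
# which the harness below then prints as the value.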
# DON'T edit this
def parent_initializer(n):
class MyGraphs():
def __iter__(self):
for G in graphs(n):
G._immutable = True
yield G
def cardinality(self):
l = [1, 1, 2, 4, 11, 34, 156, 1044, 12346, 274668]
if n < len(l):
return l[n]
else:
return Infinity
return MyGraphs()
element_repr = lambda X: str((sorted(X.canonical_label().edges(sort=False, labels=False)), X.num_verts()))
levels = [1, 2, 3, 4, 5, 6]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
([],0) => 0
([],1) => 0
([],2) => 0
([(0,1)],2) => 1
([],3) => 0
# edit this
@cached_function
def Ordered_to_Rooted(n):
return [RootedTree(S) for S in OrderedTrees(n)]
def statistic(tree):
return Ordered_to_Rooted(tree.node_number()).count(RootedTree(tree))
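# Remark (added): statistic(tree) is the number of ordered trees with the same
# number of nodes whose underlying unordered rooted tree coincides with that of tree.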
# DON'T edit this
parent_initializer = OrderedTrees
element_repr = repr
levels = [2, 3, 4, 5, 6, 7]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[[]] => 1
[[],[]] => 1
[[[]]] => 1
[[],[],[]] => 1
[[],[[]]] => 2
# edit this
@cached_function
def statistic(ct):
return sum(1 for w in WeylGroup(ct) if is_proper(ct, w))
def is_proper(ct, w):
ct = CartanType(ct)
n = ct.rank()
l = w.length()
d = len(w.descents(side="left"))
if ct.type() == "A":
return l <= n + binomial(d + 1, 2)
if ct.type() in ["B", "C"]:
return l <= n + d^2
if ct.type() == "D":
if d > 3:
return l <= n + d^2 - d
return l <= binomial(d + 1, 2)
if ct.type() == "E":
m = [0, 1, 3, 6, 12, 20, 36, 63, 120]
return l <= n + m[d]
if ct.type() == "F":
m = [0, 1, 4, 9, 24]
return l <= n + m[d]
if ct.type() == "G": # = I_2(6)
if d == 2:
return l <= 6 + n
if d < 2:
return l <= 2 + d
# DON'T edit this
def parent_initializer(n):
cartan_types = [ ['A',n] ]
if n >= 2:
cartan_types += [ ['B',n] ]
if n >= 3:
cartan_types += [ ['C',n] ]
if n >= 4:
cartan_types += [ ['D',n] ]
if n in [6,7,8]:
cartan_types += [ ['E',n] ]
if n == 4:
cartan_types += [ ['F',n] ]
if n == 2:
cartan_types += [ ['G',n] ]
return [ CartanType(cartan_type) for cartan_type in cartan_types ]
element_repr = repr
levels = [1, 2, 3, 4, 5, 6, 7, 8]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
['A',1] => 2
['A',2] => 6
['B',2] => 8
['G',2] => 8
['A',3] => 24
# edit this
def statistic(P):
"""
    sage: R.<x,y> = PolynomialRing(ZZ)
sage: n=3; sum(x^statistic(P)*y^(binomial(len(P)+1,2)-sum(P)) for P in ParkingFunctions(n))
x^3 + y^3 + 3*x^2 + 4*x*y + 3*y^2 + 2*x + 2*y
sage: G = graphs.CompleteGraph(n+1)
sage: G.tutte_polynomial()
x^3 + y^3 + 3*x^2 + 4*x*y + 3*y^2 + 2*x + 2*y
"""
return len([1 for i, j in enumerate(P)
if ((i == 0 or max(P[:i]) < j)
and len([1 for k in P if k < j]) == j-1
and len([1 for k in P if k > j]) == len(P)-j)])
# DON'T edit this
parent_initializer = ParkingFunctions
element_repr = repr
levels = [1, 2, 3, 4]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[1] => 1
[1,1] => 0
[1,2] => 2
[2,1] => 1
[1,1,1] => 0
# edit this
# code from https://codereview.stackexchange.com/questions/164326
def zigzags(input):
input = iter(input)
stack = None
try:
stack = [next(input)]
while True:
if len(stack) < 2:
stack.append(next(input))
else:
stack = stack[-2:]
a, b = stack
if a == b:
yield (a,)
stack = [b]
continue
zig = a > b
while True:
prev = stack[-1]
this = next(input)
if prev == this or zig == (prev > this):
break
stack.append(this)
zig = not zig
yield tuple(stack)
stack.append(this)
except StopIteration:
pass
if stack:
yield tuple(stack)
def statistic(input):
return max(len(z) for z in zigzags(input))
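# Remark (added): statistic(w) is the length of the longest strictly alternating
# (zigzag) factor of w; over the alphabet {0, 1} this is the longest factor of the
# form 0101... or 1010...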
# DON'T edit this
parent_initializer = lambda x: Words([0,1], x)
element_repr = lambda X: str(X)
levels = [1, 2, 3, 4, 5, 6]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
0 => 1
1 => 1
00 => 1
01 => 2
10 => 2
# edit this
def statistic(P):
P = [ list(row) for row in P ]
m = min(min(row) for row in P)
return sum(row.count(m) for row in P)
# DON'T edit this
def parent_initializer(n):
class MyPlanePartitions():
def __iter__(self):
for P in PlanePartitions(n):
yield PlanePartition(P[:-1])
def cardinality(self):
card = [1, 1, 3, 6, 13, 24, 48, 86, 160, 282, 500, 859, 1479, 2485, 4167, 6879]
if n < len(card):
return card[n]
else:
return Infinity
return MyPlanePartitions()
def PlanePartitions(n, outer=None):
emp = [[]]
if n == 0:
yield [emp]
return
if outer == emp:
yield []
return
if outer is None:
outer = [n]*n
for k in range(1,n+1):
for L in Partitions(k, outer=outer):
for Pp in PlanePartitions(n-k, outer=L):
Pp = [L] + Pp
yield Pp
element_repr = lambda X: str([Partition(la) for la in X]).replace(" ","")
levels = [1, 2, 3, 4, 5, 6, 7]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[[1]] => 1
[[1],[1]] => 2
[[2]] => 1
[[1,1]] => 2
[[1],[1],[1]] => 3
# edit this
# please simplify
def cyclic_interval(a, b, n):
if a <= b:
return list(range(a, b+1))
return list(range(a, n+1)) + list(range(1, b+1))
def alignments(x):
pi = x.to_signed_permutation().permutation()
n = len(pi)
al = []
for i, e in enumerate(pi, 1):
for j, f in enumerate(pi, 1):
if i == j:
continue
a, b = sorted([i, e])
c, d = sorted([j, f])
if a <= c <= b <= d or c <= a <= d <= b:
continue
if (e not in cyclic_interval(i, f-1, n)
or f not in cyclic_interval(e+1, j, n)):
continue
if j == f and x[j-1] < 0:
continue
if i == e and x[i-1] > 0:
continue
al.append((i, e, j, f))
return al
def statistic(x):
return len(alignments(x))
# DON'T edit this
def parent_initializer(n):
class MyDecoratedPermutations():
def __iter__(self):
for tau in DecoratedPermutations(n):
yield tau
def cardinality(self):
return sum( factorial(n)/factorial(k) for k in range(n+1) )
return MyDecoratedPermutations()
def DecoratedPermutations(n):
for sigma in Permutations(n):
F = sigma.fixed_points()
for X in Subsets(F):
tau = list(sigma)
for i in X:
tau[i-1] = -tau[i-1]
yield SignedPermutations(n)(tau)
def element_repr(pi):
pi = list(pi)
for i,a in enumerate(pi):
if a == i+1:
pi[i] = 0
elif -a == i+1:
pi[i] = -1
return str(pi).replace(" ","").replace("0","+").replace("-1","-")
levels = [2, 3, 4, 5]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[+] => 0
[-] => 0
[+,+] => 0
[-,+] => 1
[+,-] => 1
# edit this
def atomic_length(pi):
"""
EXAMPLES::
sage: l = [atomic_length(SignedPermutations(n).long_element()) for n in range(1,8)]
sage: l
sage: fricas.guess(l)[0].sage().factor()
1/6*(4*n + 3)*(n + 2)*(n + 1)
"""
W = WeylGroup(pi.parent().coxeter_type())
w = W.from_reduced_word(pi.reduced_word())
return sum(a.height() for a in w.inversions(inversion_type="roots"))
# DON'T edit this
def parent_initializer(n):
class MySignedPermutations():
def __iter__(self):
for tau in SignedPermutations(n):
yield tau
def cardinality(self):
return 2**n*factorial(n)
return MySignedPermutations()
element_repr = repr
levels = [2, 3, 4]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[1] => 0
[-1] => 1
[1,2] => 0
[1,-2] => 1
[-1,2] => 6
# edit this
def fdeg(p):
    R.<q> = QQ[]
    f = prod([ 1-q*q^j for j in range(p.size()) ])
    b = sum([ j*k for j, k in enumerate(p) ])
    return (q^b)*(f/p.hook_polynomial(q,q))
def fake_degree(a):
    R.<q> = QQ[]
    if a == 0:
        return R(0)
    else:
        z = SymmetricFunctions(QQ).schur()(a)
        return R(sum([z.coefficient(p)*fdeg(p) for p in z.support()]))
def standard_CSP(p, r):
    P = parent(p)
    q = P.gen()
    PP.<q> = PolynomialRing(QQ)
    R.<qbar> = PP.quo(q^r-1)
    return R(p).lift()
def orbit_lengths(p, r):
    p = standard_CSP(p, r)
    p = {k: p[k] for k in range(r)}
    divs = sorted(divisors(r))
    O = dict()
    for d in divs:
        count = p[r-d]
        if count:
            O[r//d] = count
        if count:
            for k in range(0, r, d):
                p[k] -= count
    assert all(c == 0 for c in p.values()), "%s" % p
    return O
def has_cyclic_permutation_representation(p, r):
    if p == 0:
        return True
    try:
        lo = orbit_lengths(p, r)
    except AssertionError:
        return False
    return all(v in ZZ and ZZ(v) >= 0 for v in lo.values())
def statistic(la):
    s = SymmetricFunctions(QQ).schur()
    f = fake_degree(s(la))
    for r in reversed(divisors(la.size())):
        if has_cyclic_permutation_representation(f, r):
            return r
# alternative implementation using Theorem 2.7
def has_cyclic_permutation_representation_slow(p, r):
    for k in divisors(r):
        if sum(moebius(k//j) * p.subs(q=QQbar.zeta(r)^j) for j in divisors(k)) < 0:
            return False
    return True
def statistic_slow(la):
    s = SymmetricFunctions(QQ).schur()
    f = fake_degree(s(la))
    for r in reversed(divisors(la.size())):
        if has_cyclic_permutation_representation_slow(f, r):
            return r
# DON'T edit this
parent_initializer = SkewPartitions
element_repr = lambda P: str(list(P)).replace(" ","")
levels = [2, 3, 4, 5]
for level in levels:
    for elt in parent_initializer(level):
        print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[[1],[]] => 1
[[2],[]] => 2
[[1,1],[]] => 1
[[2,1],[1]] => 2
[[3],[]] => 3
# edit this
@cached_function
def aux(L):
if L.cardinality() == 1:
return 1, 1
b = L.bottom()
U = L.upper_covers(b)
J = set()
for k in range(1, len(U)+1):
for S in Subsets(U, k):
J.add(L.join(S))
t = L.top()
C = {}
for j in J:
M = L.sublattice(L.interval(j, t))
M = M.canonical_label()
l, c = aux(M)
C[l+1] = C.get(l+1, 0) + c
l = min(C.keys())
return l, C[l]
def statistic(L):
return aux(L.canonical_label())[1]
# DON'T edit this
def parent_initializer(n):
class MyLattices():
def __iter__(self):
if n <= 2:
for P in Posets(n):
if P.is_lattice():
yield LatticePoset(P)
else:
for P in Posets(n-2):
Q = P.with_bounds()
if Q.is_lattice():
yield LatticePoset(Q)
def cardinality(self):
l = [1, 1, 1, 1, 2, 5, 15, 53, 222, 1078, 5994, 37622, 262776, 2018305, 16873364, 152233518, 1471613387, 15150569446, 165269824761, 1901910625578, 23003059864006]
if n < len(l):
return l[n]
else:
return Infinity
return MyLattices()
element_repr = lambda X: str( ( sorted(X._hasse_diagram.canonical_label().cover_relations()), len(X._hasse_diagram.vertices(sort=False)) ) )
levels = [3, 4, 5, 6, 7]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
([],1) => 1
([(0,1)],2) => 1
([(0,2),(2,1)],3) => 1
([(0,1),(0,2),(1,3),(2,3)],4) => 1
([(0,3),(2,1),(3,2)],4) => 1
# edit this
from sage.combinat.permutation import to_standard
def occurrences(p, pattern):
"""
Return elements in sequences of distinct blocks order isomorphic
to `pattern`.
"""
o = []
for i, b in enumerate(p):
q = p[i+1:]
for e in b:
o.extend(occurrences_aux(q, [e], pattern))
return o
def occurrences_aux(p, prefix, pattern):
"""
Return all occurrences of the form `(prefix, suffix)` order
isomorphic to `pattern`, where `suffix` are elements in sequences
of distinct blocks of `p`.
"""
# TODO: we could do better, probably
if len(pattern) == len(prefix):
yield prefix
else:
pattern_prefix = to_standard(pattern[:len(prefix)+1])
for i, b in enumerate(p):
q = p[i+1:]
for e in b:
new_prefix = prefix + [e]
if to_standard(new_prefix) == pattern_prefix:
yield from occurrences_aux(q, new_prefix, pattern)
def statistic(p):
return len(occurrences(p, [1,3,2]))
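# Example (added): the letters of an occurrence must come from later and later
# blocks, so the ordered set partition [{1},{3},{2}] has exactly one occurrence of
# 1-3-2, namely 1, 3, 2 taken from its three blocks in this order.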
# DON'T edit this
parent_initializer = OrderedSetPartitions
element_repr = repr
levels = [2, 3, 4]
for level in levels:
for elt in parent_initializer(level):
print('%s => %s' % (element_repr(elt), statistic(elt)))
object_name => value
E.g.
[{1}] => 0
[{1},{2}] => 0
[{2},{1}] => 0
[{1,2}] => 0
[{1},{2},{3}] => 0