C# and SAT: 1, 2, 4, 6, 9, 12, 17, 20, 26, 31, 37, 43
If we bound the bounding box, there is a fairly obvious expression of the problem in terms of SAT: every translation of every orientation of every free polyomino is a large conjunction; for each polyomino we form a disjunction over its conjunctions; and then we require that every disjunction be true and that the total number of cells be bounded.
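To make the shape of that encoding concrete, here is a hand-worked sketch (my illustration, not part of the program below) of the clauses for the domino in a 2×2 box, using the same solver API as the full code; the symmetry-breaking refinement applied in Generate is omitted.

using MiniSAT;
using System.Collections.Generic;
static class EncodingSketch
{
    // Illustration only: cells are numbered 0..3 row-major; the domino has four placements.
    static void DominoIn2x2()
    {
        var solver = new Solver();
        for (int i = 0; i < 4; i++) solver.NewVar();               // cell variables x0..x3
        var placements = new[] { new[] { 0, 1 }, new[] { 2, 3 },   // horizontal placements
                                 new[] { 0, 2 }, new[] { 1, 3 } }; // vertical placements
        var auxs = new List<Solver.Lit>();
        foreach (var cells in placements)
        {
            var aux = solver.NewVar();                             // aux => every covered cell is filled
            foreach (var c in cells) solver.AddClause(~aux, new Solver.Lit(c));
            auxs.Add(aux);
        }
        solver.AddClause(auxs.ToArray());                          // the domino must be placed somewhere
    }
}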
To bound the number of cells, my initial version built a full adder; then I used bitonic sorting for unary counting (similar to an earlier answer of mine, but generalised); in the end I settled on the approach described by Bailleux and Boufkhad in "Efficient CNF encoding of Boolean cardinality constraints".
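The useful property of that encoding is that it produces a unary counter: _BBSum in the code below returns an array unaryWeights in which unaryWeights[i] being true means that at least i + 1 cells are filled. Capping the number of cells at some bound k (a parameter of this sketch, not a name from the post) is then just a matter of forbidding the higher outputs:

using MiniSAT;
static class CardinalityHelper
{
    // Illustration only: unaryWeights[i] encodes "at least i + 1 cells are filled",
    // so allowing at most k cells means forbidding every output from index k upwards.
    internal static void CapCellCount(Solver solver, Solver.Lit[] unaryWeights, int k)
    {
        for (int i = k; i < unaryWeights.Length; i++) solver.AddClause(~unaryWeights[i]);
    }
}
This is exactly the trick the main loop uses to tighten the bound after each solution, and which the distinct-solutions code uses to pin the weight to the known optimum.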
I wanted to make the post self-contained, so I extracted a BSD-licensed C# implementation of a SAT solver which was state of the art about 15 years ago, replaced the NIH list implementation with System.Collections.Generic.List<T> (gaining a factor of 2 in speed), trimmed it from 50 KB to 31 KB to fit within the 64 KB limit, and then worked aggressively on reducing its memory usage. This code can obviously be adapted to generate a DIMACS file which can then be passed to more modern solvers.
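As a rough sketch of that adaptation (my illustration, not part of the post): the clause-building side of Generate only needs NewVar and AddClause, so a collector with the same shape could record the clauses and write them out in DIMACS CNF, which any modern solver accepts. (Prealloc, Solve and Model belong to the search side and would stay with whatever solver reads the file.)

using MiniSAT;
using System.Collections.Generic;
using System.IO;
using System.Linq;
class DimacsCollector
{
    // Illustration only: same NewVar/AddClause shape as the solver, but the clauses are
    // collected and written in DIMACS format. Variable i becomes DIMACS variable i + 1.
    private int numVars;
    private readonly List<int[]> clauses = new List<int[]>();
    public Solver.Lit NewVar() => new Solver.Lit(numVars++);
    public void AddClause(params Solver.Lit[] ps) =>
        clauses.Add(ps.Select(p => p.Sign ? -(p.Var + 1) : p.Var + 1).ToArray());
    public void Write(string path)
    {
        using (var w = new StreamWriter(path))
        {
            w.WriteLine($"p cnf {numVars} {clauses.Count}");
            foreach (var clause in clauses) w.WriteLine(string.Join(" ", clause) + " 0");
        }
    }
}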
Solutions found
#
##
###
..#
####
.##.
..#..
#####
..###
.####.
######
.##...
....#..
#######
#####..
.####..
########
..######
.....###
.....###
#########
#######..
..#####..
....##...
....###..
##########
########..
..######..
....####..
.....###..
..#######..
..#########
###########
..####.....
..####.....
..##.......
...#######..
...#########
############
..#####....#
..#####.....
...####.....
Finding 43 for n = 12 took a little over 7.5 hours.
Polyomino code
using MiniSAT;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace PPCG167484
{
internal class SatGenerator
{
public static void Main()
{
for (int n = 1; n < 13; n++)
{
int width = n;
int height = (n + 1) >> 1;
var polys = FreePolyomino.All(n);
(var solver, var unaryWeights) = Generate(polys, width, height);
int previous = width * height + 1;
while (true)
{
Stopwatch sw = new Stopwatch(); sw.Start();
if (solver.Solve())
{
// The weight of the solution might be smaller than the target
int weight = Enumerable.Range(0, width * height).Count(x => solver.Model[x] == Solver.l_True);
Console.Write($"{n}\t<={weight}\t{sw.Elapsed.TotalSeconds:F3}s\t");
int cell = 0;
for (int y = 0; y < height; y++)
{
if (y > 0) Console.Write('_');
for (int x = 0; x < width; x++) Console.Write(solver.Model[cell++] == Solver.l_True ? '#' : '.');
}
Console.WriteLine();
// Now knock out that weight
for (int i = weight - 1; i < previous - 1; i++) solver.AddClause(~unaryWeights[i]);
previous = weight;
}
else
{
Console.WriteLine("--------");
break;
}
}
}
}
public static Tuple<Solver, Solver.Lit[]> Generate(IEnumerable<FreePolyomino> polys, int width, int height)
{
var solver = new Solver();
if (width == 12) solver.Prealloc(6037071 + 448, 72507588 + 6008); // HACK!
// Variables: 0 to width * height - 1 are the cells available to fill.
for (int i = 0; i < width * height; i++) solver.NewVar();
foreach (var poly in polys)
{
// We naturally get a DNF: each position of each orientation is a conjunction of poly.Weight variables,
// and we require any one. Therefore we add an auxiliary variable per.
var polyAuxs = new List<Solver.Lit>();
foreach (var orientation in poly.OrientedPolyominos)
{
int maxh = height;
// Optimisation: break symmetry
if (orientation.BBHeight == 1) maxh = ((height + 1) >> 1);
for (int dy = 0; dy + orientation.BBHeight <= maxh; dy++)
{
for (int dx = 0; dx + orientation.BBWidth <= width; dx++)
{
var currentAux = solver.NewVar();
for (int y = 0; y < orientation.BBHeight; y++)
{
uint tmp = orientation.Rows[y];
for (int x = 0; tmp > 0; x++, tmp >>= 1)
{
if ((tmp & 1) == 1) solver.AddClause(~currentAux, new Solver.Lit((y + dy) * width + x + dx));
}
}
polyAuxs.Add(currentAux);
}
}
}
solver.AddClause(polyAuxs.ToArray());
}
// Efficient CNF encoding of Boolean cardinality constraints, Bailleux and Boufkhad, http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.458.7676&rep=rep1&type=pdf
var unaryWeights = _BBSum(0, width * height, solver);
return Tuple.Create(solver, unaryWeights);
}
private static Solver.Lit[] _BBSum(int from, int num, Solver solver)
{
var sum = new Solver.Lit[num];
if (num == 1) sum[0] = new Solver.Lit(from);
else
{
var left = _BBSum(from, num >> 1, solver);
var right = _BBSum(from + left.Length, num - left.Length, solver);
for (int i = 0; i < num; i++) sum[i] = solver.NewVar();
for (int alpha = 0; alpha <= left.Length; alpha++)
{
for (int beta = 0; beta <= right.Length; beta++)
{
var sigma = alpha + beta;
// C_1 = ~left[alpha-1] + ~right[beta-1] + sum[sigma-1]
if (alpha > 0 && beta > 0) solver.AddClause(~left[alpha - 1], ~right[beta - 1], sum[sigma - 1]);
else if (alpha > 0) solver.AddClause(~left[alpha - 1], sum[sigma - 1]);
else if (beta > 0) solver.AddClause(~right[beta - 1], sum[sigma - 1]);
// C_2 = left[alpha] + right[beta] + ~sum[sigma]
if (alpha < left.Length && beta < right.Length) solver.AddClause(left[alpha], right[beta], ~sum[sigma]);
else if (alpha < left.Length) solver.AddClause(left[alpha], ~sum[sigma]);
else if (beta < right.Length) solver.AddClause(right[beta], ~sum[sigma]);
}
}
}
return sum;
}
}
class FreePolyomino : IEquatable<FreePolyomino>
{
internal FreePolyomino(OrientedPolyomino orientation)
{
var orientations = new HashSet<OrientedPolyomino>();
orientations.Add(orientation);
var tmp = orientation.Rot90(); orientations.Add(tmp);
tmp = tmp.Rot90(); orientations.Add(tmp);
tmp = tmp.Rot90(); orientations.Add(tmp);
tmp = tmp.FlipV(); orientations.Add(tmp);
tmp = tmp.Rot90(); orientations.Add(tmp);
tmp = tmp.Rot90(); orientations.Add(tmp);
tmp = tmp.Rot90(); orientations.Add(tmp);
OrientedPolyominos = orientations.OrderBy(x => x).ToArray();
}
public IReadOnlyList<OrientedPolyomino> OrientedPolyominos { get; private set; }
public OrientedPolyomino CanonicalOrientation => OrientedPolyominos[0];
public static IEnumerable<FreePolyomino> All(int numCells)
{
if (numCells < 1) throw new ArgumentOutOfRangeException(nameof(numCells));
if (numCells == 1) return new FreePolyomino[] { new FreePolyomino(OrientedPolyomino.Unit) };
// We do this in two phases because identifying two equal oriented polyominos is faster than first building
// free polyominos and then identifying that they're equal.
var oriented = new HashSet<OrientedPolyomino>();
foreach (var smaller in All(numCells - 1))
{
// We can add a cell to a side. The easiest way to do this is to add to the bottom of one of the rotations.
// TODO Optimise by distinguishing the symmetries.
foreach (var orientation in smaller.OrientedPolyominos)
{
int h = orientation.BBHeight;
var bottomRow = orientation.Rows[h - 1];
for (int deltax = 0; deltax < orientation.BBWidth; deltax++)
{
if (((bottomRow >> deltax) & 1) == 1)
{
var rows = orientation.Rows.Concat(Enumerable.Repeat(1U << deltax, 1)).ToArray();
oriented.Add(new OrientedPolyomino(rows));
}
}
}
// We can add a cell in the middle, provided it connects up.
var canon = smaller.CanonicalOrientation;
uint prev = 0, curr = 0, next = canon.Rows[0];
for (int y = 0; y < canon.BBHeight; y++)
{
(prev, curr, next ) = (curr, next, y + 1 < canon.BBHeight ? canon.Rows[y + 1] : 0);
uint valid = (prev | next | (curr << 1) | (curr >> 1)) & ~curr;
for (int x = 0; x < canon.BBWidth; x++)
{
if (((valid >> x) & 1) == 1)
{
var rows = canon.Rows.ToArray(); // Copy
rows[y] |= 1U << x;
oriented.Add(new OrientedPolyomino(rows));
}
}
}
}
// Now cluster the oriented polyominos into equivalence classes under dihedral symmetry.
return new HashSet<FreePolyomino>(oriented.Select(orientation => new FreePolyomino(orientation)));
}
public bool Equals(FreePolyomino other) => other != null && CanonicalOrientation.Equals(other.CanonicalOrientation);
public override bool Equals(object obj) => Equals(obj as FreePolyomino);
public override int GetHashCode() => CanonicalOrientation.GetHashCode();
}
[DebuggerDisplay("{ToString()}")]
struct OrientedPolyomino : IComparable<OrientedPolyomino>, IEquatable<OrientedPolyomino>
{
public static readonly OrientedPolyomino Unit = new OrientedPolyomino(1);
public OrientedPolyomino(params uint[] rows)
{
if (rows.Length == 0) throw new ArgumentException("We don't support the empty polyomino", nameof(rows));
if (rows.Any(row => row == 0) || rows.All(row => (row & 1) == 0)) throw new ArgumentException("Polyomino is not packed into the corner", nameof(rows));
var colsUsed = rows.Aggregate(0U, (accum, row) => accum | row);
BBWidth = Helper.Width(colsUsed);
if (colsUsed != ((1U << BBWidth) - 1)) throw new ArgumentException("Polyomino has empty columns", nameof(rows));
Rows = rows;
}
public IReadOnlyList<uint> Rows { get; private set; }
public int BBWidth { get; private set; }
public int BBHeight => Rows.Count;
#region Dihedral symmetries
public OrientedPolyomino FlipV() => new OrientedPolyomino(Rows.Reverse().ToArray());
public OrientedPolyomino Rot90()
{
uint[] rot = new uint[BBWidth];
for (int y = 0; y < BBHeight; y++)
{
for (int x = 0; x < BBWidth; x++)
{
rot[x] |= ((Rows[y] >> x) & 1) << (BBHeight - 1 - y);
}
}
return new OrientedPolyomino(rot);
}
#endregion
#region Identity
public int CompareTo(OrientedPolyomino other)
{
// Favour wide-and-short orientations for the canonical one.
if (BBHeight != other.BBHeight) return BBHeight.CompareTo(other.BBHeight);
for (int i = 0; i < BBHeight; i++)
{
if (Rows[i] != other.Rows[i]) return Rows[i].CompareTo(other.Rows[i]);
}
return 0;
}
public bool Equals(OrientedPolyomino other) => CompareTo(other) == 0;
public override int GetHashCode() => Rows.Aggregate(0, (h, row) => h * 37 + (int)row);
public override bool Equals(object obj) => (obj is OrientedPolyomino other) && Equals(other);
public override string ToString()
{
var width = BBWidth;
return string.Join("_", Rows.Select(row => Helper.ToString(row, width)));
}
#endregion
}
static class Helper
{
public static int Width(uint x)
{
int w = 0;
if ((x >> 16) != 0) { w += 16; x >>= 16; }
if ((x >> 8) != 0) { w += 8; x >>= 8; }
if ((x >> 4) != 0) { w += 4; x >>= 4; }
if ((x >> 2) != 0) { w += 2; x >>= 2; }
switch (x)
{
case 0: break;
case 1: w++; break;
case 2:
case 3: w += 2; break;
default: throw new Exception("Unreachable code");
}
return w;
}
internal static string ToString(uint x, int width)
{
char[] chs = new char[width];
for (int i = 0; i < width; i++)
{
chs[i] = (char)('0' + (x & 1));
x >>= 1;
}
return new string(chs);
}
internal static uint Weight(uint v)
{
// https://graphics.stanford.edu/~seander/bithacks.html
v = v - ((v >> 1) & 0x55555555);
v = (v & 0x33333333) + ((v >> 2) & 0x33333333);
return ((v + (v >> 4) & 0xF0F0F0F) * 0x1010101) >> 24;
}
}
}
SAT solver code
/******************************************************************************************
MiniSat -- Copyright (c) 2003-2005, Niklas Een, Niklas Sorensson
MiniSatCS -- Copyright (c) 2006-2007 Michal Moskal
GolfMiniSat -- Copyright (c) 2018 Peter Taylor
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**************************************************************************************************/
using System;
using System.Diagnostics;
using System.Collections.Generic;
// NOTE! Variables are just integers. No abstraction here. They should be chosen from 0..N, so that they can be used as array indices.
using Var = System.Int32;
using System.Linq;
namespace MiniSAT
{
public static class Ext
{
private static int TargetCapacity(int size) =>
size < 65536 ? (size << 1) :
size < 1048576 ? (size + (size >> 1)) :
size + (size >> 2);
public static void Push<T>(this List<T> list, T elem)
{
// Similar to List<T>.Add but with a slower growth rate for large lists
if (list.Count == list.Capacity) list.Capacity = TargetCapacity(list.Count + 1);
list.Add(elem);
}
public static void Pop<T>(this List<T> list) => list.RemoveAt(list.Count - 1);
public static T Peek<T>(this List<T> list) => list[list.Count - 1];
public static void GrowTo<T>(this List<T> list, int size, T pad)
{
if (size > list.Count)
{
// Minimise resizing
if (size > list.Capacity) list.Capacity = size;
while (list.Count < size) list.Add(pad);
}
}
public static void ShrinkTo<T>(this List<T> list, int size)
{
list.RemoveRange(size, list.Count - size);
int targetCap = TargetCapacity(size);
if (list.Capacity > targetCap) list.Capacity = targetCap;
}
}
public delegate bool IntLess(int i1, int i2);
public class Heap
{
IntLess Cmp;
List<int> Heap_ = new List<int>(); // heap of ints
List<int> Indices = new List<int>(); // index in Heap_
static int Left(int i) => i << 1;
static int Right(int i) => (i << 1) | 1;
static int Parent(int i) => i >> 1;
void UpHead(int i)
{
int x = Heap_[i];
while (Parent(i) != 0 && Cmp(x, Heap_[Parent(i)]))
{
Heap_[i] = Heap_[Parent(i)];
Indices[Heap_[i]] = i;
i = Parent(i);
}
Heap_[i] = x;
Indices[x] = i;
}
void DownHeap(int i)
{
int x = Heap_[i];
while (Left(i) < Heap_.Count)
{
int child = Right(i) < Heap_.Count && Cmp(Heap_[Right(i)], Heap_[Left(i)]) ? Right(i) : Left(i);
if (!Cmp(Heap_[child], x)) break;
Heap_[i] = Heap_[child];
Indices[Heap_[i]] = i;
i = child;
}
Heap_[i] = x;
Indices[x] = i;
}
bool Ok(int n) => n >= 0 && n < Indices.Count;
public Heap(IntLess c) { Cmp = c; Heap_.Add(-1); }
public void SetBounds(int size) { Solver.Assert(size >= 0); Indices.GrowTo(size, 0); if (size > Heap_.Capacity) Heap_.Capacity = size; }
public bool InHeap(int n) { Solver.Assert(Ok(n)); return Indices[n] != 0; }
public void Increase(int n) { Solver.Assert(Ok(n)); Solver.Assert(InHeap(n)); UpHead(Indices[n]); }
public bool IsEmpty => Heap_.Count == 1;
public void Push(int n)
{
Solver.Assert(Ok(n));
Indices[n] = Heap_.Count;
Heap_.Add(n);
UpHead(Indices[n]);
}
public int Pop()
{
int r = Heap_[1];
Heap_[1] = Heap_.Peek();
Indices[Heap_[1]] = 1;
Indices[r] = 0;
Heap_.Pop();
if (Heap_.Count > 1) DownHeap(1);
return r;
}
}
public class Solver
{
#region lbool ~= Nullable<bool>
public struct LBool
{
public static readonly LBool True = new LBool { Content = 1 };
public static readonly LBool False = new LBool { Content = -1 };
public static readonly LBool Undef = new LBool { Content = 0 };
private sbyte Content;
public static bool operator ==(LBool a, LBool b) => a.Content == b.Content;
public static bool operator !=(LBool a, LBool b) => a.Content != b.Content;
public static LBool operator ~(LBool a) => new LBool { Content = (sbyte)-a.Content };
public static implicit operator LBool(bool b) => b ? True : False;
}
public static readonly LBool l_True = LBool.True;
public static readonly LBool l_False = LBool.False;
public static readonly LBool l_Undef = LBool.Undef;
#endregion
#region Literals
const int var_Undef = -1;
public struct Lit
{
public Lit(Var var) { Index = var << 1; }
public bool Sign => (Index & 1) != 0;
public int Index { get; private set; }
public int Var => Index >> 1;
public bool SatisfiedBy(List<LBool> assignment) => assignment[Var] == (Sign ? l_False : l_True);
public static Lit operator ~(Lit p) => new Lit { Index = p.Index ^ 1 };
public static bool operator ==(Lit p, Lit q) => p.Index == q.Index;
public static bool operator !=(Lit p, Lit q) => !(p == q);
public override int GetHashCode() => Index;
public override bool Equals(object other) => other is Lit that && this == that;
public override string ToString() => (Sign ? "-" : "") + "x" + Var;
}
static public readonly Lit lit_Undef = ~new Lit(var_Undef);
#endregion
#region Clauses
public abstract class Clause
{
protected Clause(bool learnt)
{
IsLearnt = learnt;
}
public bool IsLearnt { get; private set; }
public float Activity;
public abstract int Size { get; }
public abstract Lit this[int i] { get;set; }
public abstract bool SatisfiedBy(List<LBool> assigns);
public static Clause Create(bool learnt, List<Lit> ps)
{
if (ps.Count < 2) throw new ArgumentOutOfRangeException(nameof(ps));
if (ps.Count == 2) return new BinaryClause(learnt, ps[0], ps[1]);
return new LargeClause(learnt, ps);
}
}
public class BinaryClause : Clause
{
public BinaryClause(bool learnt, Lit p0, Lit p1) : base(learnt)
{
l0 = p0; l1 = p1;
}
private Lit l0;
private Lit l1;
public override Lit this[int i]
{
get { return i == 0 ? l0 : l1; }
set { if (i == 0) l0 = value; else l1 = value; }
}
public override int Size => 2;
public override bool SatisfiedBy(List<LBool> assigns) => l0.SatisfiedBy(assigns) || l1.SatisfiedBy(assigns);
}
public class LargeClause : Clause
{
public static int[] SizeDistrib = new int[10];
internal LargeClause(bool learnt, List<Lit> ps) : base(learnt)
{
Data = ps.ToArray();
SizeDistrib[Size >= SizeDistrib.Length ? SizeDistrib.Length - 1 : Size]++;
}
public Lit[] Data { get; private set; }
public override int Size => Data.Length;
public override Lit this[int i]
{
get { return Data[i]; }
set { Data[i] = value; }
}
public override bool SatisfiedBy(List<LBool> assigns) => Data.Any(lit => lit.SatisfiedBy(assigns));
public override string ToString() => "[" + string.Join(", ", Data) + "]";
}
#endregion
#region Utilities
// Returns a random float 0 <= x < 1. Seed must never be 0.
static double Rnd(ref double seed)
{
seed *= 1389796;
int k = 2147483647;
int q = (int)(seed / k);
seed -= (double)q * k;
return seed / k;
}
[Conditional("DEBUG")]
static public void Assert(bool expr) => Check(expr);
// Just like 'assert()' but expression will be evaluated in the release version as well.
static void Check(bool expr) { if (!expr) throw new Exception("assertion violated"); }
#endregion
#region VarOrder
public class VarOrder
{
readonly List<LBool> Assigns; // Pointer to external assignment table.
readonly List<float> Activity; // Pointer to external activity table.
internal Heap Heap_;
double RandomSeed;
public VarOrder(List<LBool> ass, List<float> act)
{
Assigns = ass;
Activity = act;
Heap_ = new Heap(Lt);
RandomSeed = 91648253;
}
bool Lt(Var x, Var y) => Activity[x] > Activity[y];
public virtual void NewVar()
{
Heap_.SetBounds(Assigns.Count);
Heap_.Push(Assigns.Count - 1);
}
// Called when variable increased in activity.
public virtual void Update(Var x) { if (Heap_.InHeap(x)) Heap_.Increase(x); }
// Called when variable is unassigned and may be selected again.
public virtual void Undo(Var x) { if (!Heap_.InHeap(x)) Heap_.Push(x); }
// Selects a new, unassigned variable (or 'var_Undef' if none exists).
public virtual Lit Select(double random_var_freq)
{
// Random decision:
if (Rnd(ref RandomSeed) < random_var_freq && !Heap_.IsEmpty)
{
Var next = (Var)(Rnd(ref RandomSeed) * Assigns.Count);
if (Assigns[next] == l_Undef) return ~new Lit(next);
}
// Activity based decision:
while (!Heap_.IsEmpty)
{
Var next = Heap_.Pop();
if (Assigns[next] == l_Undef) return ~new Lit(next);
}
return lit_Undef;
}
}
#endregion
#region Solver state
public bool Ok { get; private set; } // If false, the constraints are already unsatisfiable. No part of the solver state may be used!
List<Clause> Clauses = new List<Clause>(); // List of problem clauses.
List<Clause> Learnts = new List<Clause>(); // List of learnt clauses.
double ClaInc = 1; // Amount to bump next clause with.
const double ClaDecay = 1 / 0.999; // INVERSE decay factor for clause activity: stores 1/decay.
public List<float> Activity = new List<float>(); // A heuristic measurement of the activity of a variable.
float VarInc = 1; // Amount to bump next variable with.
const float VarDecay = 1 / 0.95f; // INVERSE decay factor for variable activity: stores 1/decay. Use negative value for static variable order.
VarOrder Order; // Keeps track of the decision variable order.
const double RandomVarFreq = 0.02;
List<List<Clause>> Watches = new List<List<Clause>>(); // 'watches[lit]' is a list of constraints watching 'lit' (will go there if literal becomes true).
public List<LBool> Assigns = new List<LBool>(); // The current assignments.
public List<Lit> Trail = new List<Lit>(); // Assignment stack; stores all assigments made in the order they were made.
List<int> TrailLim = new List<int>(); // Separator indices for different decision levels in 'trail'.
List<Clause> Reason = new List<Clause>(); // 'reason[var]' is the clause that implied the variables current value, or 'null' if none.
List<int> Level = new List<int>(); // 'level[var]' is the decision level at which assignment was made.
List<int> TrailPos = new List<int>(); // 'trail_pos[var]' is the variable's position in 'trail[]'. This supersedes 'level[]' in some sense, and 'level[]' will probably be removed in future releases.
int QHead = 0; // Head of queue (as index into the trail -- no more explicit propagation queue in MiniSat).
int SimpDBAssigns = 0; // Number of top-level assignments since last execution of 'simplifyDB()'.
long SimpDBProps = 0; // Remaining number of propagations that must be made before next execution of 'simplifyDB()'.
// Temporaries (to reduce allocation overhead)
List<LBool> AnalyzeSeen = new List<LBool>();
List<Lit> AnalyzeStack = new List<Lit>();
List<Lit> AnalyzeToClear = new List<Lit>();
#endregion
#region Main internal methods:
// Activity
void VarBumpActivity(Lit p)
{
if (VarDecay < 0) return; // (negative decay means static variable order -- don't bump)
if ((Activity[p.Var] += VarInc) > 1e100) VarRescaleActivity();
Order.Update(p.Var);
}
void VarDecayActivity() { if (VarDecay >= 0) VarInc *= VarDecay; }
void ClaDecayActivity() { ClaInc *= ClaDecay; }
// Operations on clauses
void ClaBumpActivity(Clause c) { if ((c.Activity += (float)ClaInc) > 1e20) ClaRescaleActivity(); }
// Disposes of clause and removes it from watcher lists. NOTE! Low-level; does NOT change the 'clauses' and 'learnts' vector.
void Remove(Clause c)
{
RemoveWatch(Watches[(~c[0]).Index], c);
RemoveWatch(Watches[(~c[1]).Index], c);
if (c.IsLearnt) LearntsLiterals -= c.Size;
else ClausesLiterals -= c.Size;
}
bool IsLocked(Clause c) => c == Reason[c[0].Var];
int DecisionLevel => TrailLim.Count;
#endregion
#region Public interface
public Solver()
{
Ok = true;
Order = new VarOrder(Assigns, Activity);
}
public void Prealloc(int numVars, int numClauses)
{
Activity.Capacity = numVars;
AnalyzeSeen.Capacity = numVars;
Assigns.Capacity = numVars;
Level.Capacity = numVars;
Reason.Capacity = numVars;
Watches.Capacity = numVars << 1;
Order.Heap_.SetBounds(numVars + 1);
Trail.Capacity = numVars;
TrailPos.Capacity = numVars;
Clauses.Capacity = numClauses;
}
// Helpers (semi-internal)
public LBool Value(Lit p) => p.Sign ? ~Assigns[p.Var] : Assigns[p.Var];
public int nAssigns => Trail.Count;
public int nClauses => Clauses.Count;
public int nLearnts => Learnts.Count;
// Statistics
public long ClausesLiterals, LearntsLiterals;
// Problem specification
public int nVars => Assigns.Count;
public void AddClause(params Lit[] ps) => NewClause(new List<Lit>(ps), false);
// Solving
public List<LBool> Model = new List<LBool>(); // If problem is satisfiable, this vector contains the model (if any).
#endregion
#region Operations on clauses:
List<Lit> BasicClauseSimplification(List<Lit> ps)
{
List<Lit> qs = new List<Lit>(ps);
var dict = new Dictionary<Var, Lit>(qs.Count);
int ptr = 0;
for (int i = 0; i < qs.Count; i++)
{
Lit l = qs[i];
Var v = l.Var;
if (dict.TryGetValue(v, out var other))
{
if (other != l) return null; // other = ~l, so always satisfied
}
else
{
dict[v] = l;
qs[ptr++] = l;
}
}
qs.ShrinkTo(ptr);
return qs;
}
void NewClause(List<Lit> ps, bool learnt)
{
if (!Ok) return;
Assert(ps != null);
if (!learnt)
{
Assert(DecisionLevel == 0);
ps = BasicClauseSimplification(ps);
if (ps == null) return;
int j = 0;
for (int i = 0; i < ps.Count; i++)
{
var lit = ps[i];
if (Level[lit.Var] == 0)
{
if (Value(lit) == l_True) return; // Clause already sat
if (Value(lit) == l_False) continue; // Literal already eliminated
}
ps[j++] = lit;
}
ps.ShrinkTo(j);
}
// 'ps' is now the (possibly) reduced vector of literals.
if (ps.Count == 0) Ok = false;
else if (ps.Count == 1)
{
if (!Enqueue(ps[0], null)) Ok = false;
}
else
{
var c = Clause.Create(learnt, ps);
if (!learnt)
{
Clauses.Add(c);
ClausesLiterals += c.Size;
}
else
{
// Put the second watch on the literal with highest decision level:
int max_i = 1;
int max = Level[ps[1].Var];
for (int i = 2; i < ps.Count; i++)
if (Level[ps[i].Var] > max)
{
max = Level[ps[i].Var];
max_i = i;
}
c[1] = ps[max_i];
c[max_i] = ps[1];
Check(Enqueue(c[0], c));
// Bumping:
ClaBumpActivity(c); // (newly learnt clauses should be considered active)
Learnts.Push(c);
LearntsLiterals += c.Size;
}
// Watch clause:
Watches[(~c[0]).Index].Push(c);
Watches[(~c[1]).Index].Push(c);
}
}
// Can assume everything has been propagated! (esp. the first two literals are != l_False, unless
// the clause is binary and satisfied, in which case the first literal is true)
bool IsSatisfied(Clause c)
{
Assert(DecisionLevel == 0);
return c.SatisfiedBy(Assigns);
}
#endregion
#region Minor methods
static bool RemoveWatch(List<Clause> ws, Clause elem) // Pre-condition: 'elem' must exists in 'ws' OR 'ws' must be empty.
{
if (ws.Count == 0) return false; // (skip lists that are already cleared)
int j = 0;
for (; ws[j] != elem; j++) Assert(j < ws.Count - 1);
for (; j < ws.Count - 1; j++) ws[j] = ws[j + 1];
ws.Pop();
return true;
}
public Lit NewVar()
{
int index = nVars;
Watches.Add(new List<Clause>()); // (list for positive literal)
Watches.Add(new List<Clause>()); // (list for negative literal)
Reason.Add(null);
Assigns.Add(l_Undef);
Level.Add(-1);
TrailPos.Add(-1);
Activity.Add(0);
Order.NewVar();
AnalyzeSeen.Add(l_Undef);
return new Lit(index);
}
// Returns FALSE if immediate conflict.
bool Assume(Lit p)
{
TrailLim.Add(Trail.Count);
return Enqueue(p, null);
}
// Revert to the state at given level.
void CancelUntil(int level)
{
if (DecisionLevel > level)
{
for (int c = Trail.Count - 1; c >= TrailLim[level]; c--)
{
Var x = Trail[c].Var;
Assigns[x] = l_Undef;
Reason[x] = null;
Order.Undo(x);
}
Trail.RemoveRange(TrailLim[level], Trail.Count - TrailLim[level]);
TrailLim.ShrinkTo(level);
QHead = Trail.Count;
}
}
#endregion
#region Major methods:
int Analyze(Clause confl, List<Lit> out_learnt)
{
List<LBool> seen = AnalyzeSeen;
int pathC = 0;
Lit p = lit_Undef;
// Generate conflict clause
out_learnt.Push(lit_Undef); // (placeholder for the asserting literal)
var out_btlevel = 0;
int index = Trail.Count - 1;
do
{
Assert(confl != null); // (otherwise should be UIP)
if (confl.IsLearnt) ClaBumpActivity(confl);
for (int j = (p == lit_Undef) ? 0 : 1; j < confl.Size; j++)
{
Lit q = confl[j];
var v = q.Var;
if (seen[v] == l_Undef && Level[v] > 0)
{
VarBumpActivity(q);
seen[v] = l_True;
if (Level[v] == DecisionLevel) pathC++;
else
{
out_learnt.Push(q);
out_btlevel = Math.Max(out_btlevel, Level[v]);
}
}
}
// Select next clause to look at
while (seen[Trail[index--].Var] == l_Undef) ;
p = Trail[index + 1];
confl = Reason[p.Var];
seen[p.Var] = l_Undef;
pathC--;
} while (pathC > 0);
out_learnt[0] = ~p;
// Conflict clause minimization
{
uint min_level = 0;
for (int i = 1; i < out_learnt.Count; i++) min_level |= (uint)(1 << (Level[out_learnt[i].Var] & 31)); // (maintain an abstraction of levels involved in conflict)
AnalyzeToClear.Clear();
int j = 1;
for (int i = 1; i < out_learnt.Count; i++)
if (Reason[out_learnt[i].Var] == null || !AnalyzeRemovable(out_learnt[i], min_level)) out_learnt[j++] = out_learnt[i];
// Clean up
for (int jj = 0; jj < out_learnt.Count; jj++) seen[out_learnt[jj].Var] = l_Undef;
for (int jj = 0; jj < AnalyzeToClear.Count; jj++) seen[AnalyzeToClear[jj].Var] = l_Undef; // ('seen[]' is now cleared)
out_learnt.ShrinkTo(j);
}
return out_btlevel;
}
// Check if 'p' can be removed. 'min_level' is used to abort early if visiting literals at a level that cannot be removed.
bool AnalyzeRemovable(Lit p_, uint min_level)
{
Assert(Reason[p_.Var] != null);
AnalyzeStack.Clear(); AnalyzeStack.Add(p_);
int top = AnalyzeToClear.Count;
while (AnalyzeStack.Count > 0)
{
Clause c = Reason[AnalyzeStack.Peek().Var];
Assert(c != null);
AnalyzeStack.Pop();
for (int i = 1; i < c.Size; i++)
{
Lit p = c[i];
if (AnalyzeSeen[p.Var] == l_Undef && Level[p.Var] != 0)
{
if (Reason[p.Var] != null && ((1 << (Level[p.Var] & 31)) & min_level) != 0)
{
AnalyzeSeen[p.Var] = l_True;
AnalyzeStack.Push(p);
AnalyzeToClear.Push(p);
}
else
{
for (int j = top; j < AnalyzeToClear.Count; j++) AnalyzeSeen[AnalyzeToClear[j].Var] = l_Undef;
AnalyzeToClear.ShrinkTo(top);
return false;
}
}
}
}
AnalyzeToClear.Push(p_);
return true;
}
bool Enqueue(Lit p, Clause from)
{
if (Value(p) != l_Undef) return Value(p) == l_True;
Var x = p.Var;
Assigns[x] = !p.Sign;
Level[x] = DecisionLevel;
TrailPos[x] = Trail.Count;
Reason[x] = from;
Trail.Add(p);
return true;
}
Clause Propagate()
{
Clause confl = null;
while (QHead < Trail.Count)
{
SimpDBProps--;
Lit p = Trail[QHead++]; // 'p' is enqueued fact to propagate.
List<Clause> ws = Watches[p.Index];
int i, j, end;
for (i = j = 0, end = ws.Count; i != end;)
{
Clause c = ws[i++];
// Make sure the false literal is data[1]
Lit false_lit = ~p;
if (c[0] == false_lit) { c[0] = c[1]; c[1] = false_lit; }
Assert(c[1] == false_lit);
// If 0th watch is true, then clause is already satisfied.
Lit first = c[0];
LBool val = Value(first);
if (val == l_True) ws[j++] = c;
else
{
// Look for new watch
for (int k = 2; k < c.Size; k++)
if (Value(c[k]) != l_False)
{
c[1] = c[k]; c[k] = false_lit;
Watches[(~c[1]).Index].Push(c);
goto FoundWatch;
}
// Did not find watch -- clause is unit under assignment
ws[j++] = c;
if (!Enqueue(first, c))
{
if (DecisionLevel == 0) Ok = false;
confl = c;
QHead = Trail.Count;
while (i < end) ws[j++] = ws[i++]; // Copy the remaining watches
}
FoundWatch:;
}
}
ws.ShrinkTo(j);
}
return confl;
}
void ReduceDB()
{
double extra_lim = ClaInc / Learnts.Count; // Remove any clause below this activity
Learnts.Sort((x, y) => x.Size > 2 && (y.Size == 2 || x.Activity < y.Activity) ? -1 : 1);
int i, j;
for (i = j = 0; i < Learnts.Count / 2; i++)
{
if (Learnts[i].Size > 2 && !IsLocked(Learnts[i])) Remove(Learnts[i]);
else Learnts[j++] = Learnts[i];
}
for (; i < Learnts.Count; i++)
{
if (Learnts[i].Size > 2 && !IsLocked(Learnts[i]) && Learnts[i].Activity < extra_lim) Remove(Learnts[i]);
else Learnts[j++] = Learnts[i];
}
Learnts.ShrinkTo(j);
}
void SimplifyDB()
{
if (!Ok) return;
Assert(DecisionLevel == 0);
if (Propagate() != null) { Ok = false; return; }
if (nAssigns == SimpDBAssigns || SimpDBProps > 0) return; // (nothing has changed or performed a simplification too recently)
// Clear watcher lists:
for (int i = SimpDBAssigns; i < nAssigns; i++)
{
Lit p = Trail[i];
Watches[p.Index].Clear();
Watches[(~p).Index].Clear();
}
// Remove satisfied clauses:
for (int type = 0; type < 2; type++)
{
List<Clause> cs = type != 0 ? Learnts : Clauses;
int j = 0;
for (int i = 0; i < cs.Count; i++)
{
if (!IsLocked(cs[i]) && IsSatisfied(cs[i])) Remove(cs[i]);
else cs[j++] = cs[i];
}
cs.ShrinkTo(j);
}
SimpDBAssigns = nAssigns;
SimpDBProps = ClausesLiterals + LearntsLiterals;
}
LBool Search(int nof_conflicts, int nof_learnts)
{
if (!Ok) return l_False;
Assert(0 == DecisionLevel);
int conflictC = 0;
Model.Clear();
while (true)
{
Clause confl = Propagate();
if (confl != null)
{
// CONFLICT
conflictC++;
var learnt_clause = new List<Lit>();
if (DecisionLevel == 0) return l_False; // Contradiction found
CancelUntil(Analyze(confl, learnt_clause));
NewClause(learnt_clause, true);
if (learnt_clause.Count == 1) Level[learnt_clause[0].Var] = 0;
VarDecayActivity();
ClaDecayActivity();
}
else
{
// NO CONFLICT
if (nof_conflicts >= 0 && conflictC >= nof_conflicts)
{
// Reached bound on number of conflicts
CancelUntil(0);
return l_Undef;
}
// Simplify the set of problem clauses
if (DecisionLevel == 0) { SimplifyDB(); if (!Ok) return l_False; }
// Reduce the set of learnt clauses
if (nof_learnts >= 0 && Learnts.Count - nAssigns >= nof_learnts) ReduceDB();
// New variable decision
Lit next = Order.Select(RandomVarFreq);
if (next == lit_Undef)
{
// Model found
Model.Clear();
Model.Capacity = nVars;
Model.AddRange(Assigns);
CancelUntil(0);
return l_True;
}
Check(Assume(next));
}
}
}
void VarRescaleActivity()
{
for (int i = 0; i < nVars; i++) Activity[i] *= 1e-100f;
VarInc *= 1e-100f;
}
void ClaRescaleActivity()
{
for (int i = 0; i < Learnts.Count; i++) Learnts[i].Activity *= 1e-20f;
ClaInc *= 1e-20;
}
public bool Solve()
{
SimplifyDB();
Assert(DecisionLevel == 0);
double nof_conflicts = 100;
double nof_learnts = nClauses / 3;
while (true)
{
if (Search((int)nof_conflicts, (int)nof_learnts) != l_Undef)
{
CancelUntil(0);
return Ok;
}
nof_conflicts *= 1.5;
nof_learnts *= 1.1;
}
}
#endregion
}
}
Optimality
--- MiniSAT.cs.old
+++ MiniSAT.cs
@@ -346,6 +346,7 @@ namespace MiniSAT
const double RandomVarFreq = 0.02;
List<List<Clause>> Watches = new List<List<Clause>>(); // 'watches[lit]' is a list of constraints watching 'lit' (will go there if literal becomes true).
+ List<List<Lit>> BinaryWatches = new List<List<Lit>>();
public List<LBool> Assigns = new List<LBool>(); // The current assignments.
public List<Lit> Trail = new List<Lit>(); // Assignment stack; stores all assigments made in the order they were made.
List<int> TrailLim = new List<int>(); // Separator indices for different decision levels in 'trail'.
@@ -381,7 +382,9 @@ namespace MiniSAT
void Remove(Clause c)
{
RemoveWatch(Watches[(~c[0]).Index], c);
+ if (Watches[(~c[0]).Index] != null && Watches[(~c[0]).Index].Count == 0) Watches[(~c[0]).Index] = null;
RemoveWatch(Watches[(~c[1]).Index], c);
+ if (Watches[(~c[1]).Index] != null && Watches[(~c[1]).Index].Count == 0) Watches[(~c[1]).Index] = null;
if (c.IsLearnt) LearntsLiterals -= c.Size;
else ClausesLiterals -= c.Size;
@@ -408,6 +411,7 @@ namespace MiniSAT
Level.Capacity = numVars;
Reason.Capacity = numVars;
Watches.Capacity = numVars << 1;
+ BinaryWatches.Capacity = numVars << 1;
Order.Heap_.SetBounds(numVars + 1);
Trail.Capacity = numVars;
TrailPos.Capacity = numVars;
@@ -500,7 +504,7 @@ namespace MiniSAT
if (!learnt)
{
- Clauses.Add(c);
+ if (c.Size > 2) Clauses.Add(c);
ClausesLiterals += c.Size;
}
else
@@ -526,8 +530,20 @@ namespace MiniSAT
}
// Watch clause:
- Watches[(~c[0]).Index].Push(c);
- Watches[(~c[1]).Index].Push(c);
+ if (c.Size == 2 && !learnt)
+ {
+ if (BinaryWatches[(~c[0]).Index] == null) BinaryWatches[(~c[0]).Index] = new List<Lit>();
+ BinaryWatches[(~c[0]).Index].Push(c[1]);
+ if (BinaryWatches[(~c[1]).Index] == null) BinaryWatches[(~c[1]).Index] = new List<Lit>();
+ BinaryWatches[(~c[1]).Index].Push(c[0]);
+ }
+ else
+ {
+ if (Watches[(~c[0]).Index] == null) Watches[(~c[0]).Index] = new List<Clause>();
+ Watches[(~c[0]).Index].Push(c);
+ if (Watches[(~c[1]).Index] == null) Watches[(~c[1]).Index] = new List<Clause>();
+ Watches[(~c[1]).Index].Push(c);
+ }
}
}
@@ -545,7 +561,7 @@ namespace MiniSAT
static bool RemoveWatch(List<Clause> ws, Clause elem) // Pre-condition: 'elem' must exists in 'ws' OR 'ws' must be empty.
{
- if (ws.Count == 0) return false; // (skip lists that are already cleared)
+ if (ws == null || ws.Count == 0) return false; // (skip lists that are already cleared)
int j = 0;
for (; ws[j] != elem; j++) Assert(j < ws.Count - 1);
for (; j < ws.Count - 1; j++) ws[j] = ws[j + 1];
@@ -556,8 +572,10 @@ namespace MiniSAT
public Lit NewVar()
{
int index = nVars;
- Watches.Add(new List<Clause>()); // (list for positive literal)
- Watches.Add(new List<Clause>()); // (list for negative literal)
+ Watches.Add(null); // (list for positive literal)
+ Watches.Add(null); // (list for negative literal)
+ BinaryWatches.Add(null);
+ BinaryWatches.Add(null);
Reason.Add(null);
Assigns.Add(l_Undef);
Level.Add(-1);
@@ -716,45 +734,85 @@ namespace MiniSAT
SimpDBProps--;
Lit p = Trail[QHead++]; // 'p' is enqueued fact to propagate.
- List<Clause> ws = Watches[p.Index];
- int i, j, end;
- for (i = j = 0, end = ws.Count; i != end;)
{
- Clause c = ws[i++];
- // Make sure the false literal is data[1]
- Lit false_lit = ~p;
- if (c[0] == false_lit) { c[0] = c[1]; c[1] = false_lit; }
+ List<Clause> ws = Watches[p.Index];
+ if (ws != null)
+ {
+ int i, j, end;
+ for (i = j = 0, end = ws.Count; i != end;)
+ {
+ Clause c = ws[i++];
+ // Make sure the false literal is data[1]
+ Lit false_lit = ~p;
+ if (c[0] == false_lit) { c[0] = c[1]; c[1] = false_lit; }
- Assert(c[1] == false_lit);
+ Assert(c[1] == false_lit);
- // If 0th watch is true, then clause is already satisfied.
- Lit first = c[0];
- LBool val = Value(first);
- if (val == l_True) ws[j++] = c;
- else
- {
- // Look for new watch
- for (int k = 2; k < c.Size; k++)
- if (Value(c[k]) != l_False)
+ // If 0th watch is true, then clause is already satisfied.
+ Lit first = c[0];
+ LBool val = Value(first);
+ if (val == l_True) ws[j++] = c;
+ else
{
- c[1] = c[k]; c[k] = false_lit;
- Watches[(~c[1]).Index].Push(c);
- goto FoundWatch;
+ // Look for new watch
+ for (int k = 2; k < c.Size; k++)
+ if (Value(c[k]) != l_False)
+ {
+ c[1] = c[k]; c[k] = false_lit;
+ if (Watches[(~c[1]).Index] == null) Watches[(~c[1]).Index] = new List<Clause>();
+ Watches[(~c[1]).Index].Push(c);
+ goto FoundWatch;
+ }
+
+ // Did not find watch -- clause is unit under assignment
+ ws[j++] = c;
+ if (!Enqueue(first, c))
+ {
+ if (DecisionLevel == 0) Ok = false;
+ confl = c;
+ QHead = Trail.Count;
+ while (i < end) ws[j++] = ws[i++]; // Copy the remaining watches
+ }
+ FoundWatch:;
}
+ }
- // Did not find watch -- clause is unit under assignment
- ws[j++] = c;
- if (!Enqueue(first, c))
+ if (j == 0) Watches[p.Index] = null;
+ else ws.ShrinkTo(j);
+ }
+ }
+ // TODO BinaryWatches
+ {
+ List<Lit> ws = BinaryWatches[p.Index];
+ if (ws != null)
+ {
+ int i, j, end;
+ for (i = j = 0, end = ws.Count; i != end;)
{
- if (DecisionLevel == 0) Ok = false;
- confl = c;
- QHead = Trail.Count;
- while (i < end) ws[j++] = ws[i++]; // Copy the remaining watches
+ var first = ws[i++];
+
+ // If 0th watch is true, then clause is already satisfied.
+ LBool val = Value(first);
+ if (val == l_True) ws[j++] = first;
+ else
+ {
+ // Did not find watch -- clause is unit under assignment
+ ws[j++] = first;
+ var c = new BinaryClause(false, first, ~p); // Needed for consistency of interface
+ if (!Enqueue(first, c))
+ {
+ if (DecisionLevel == 0) Ok = false;
+ confl = c;
+ QHead = Trail.Count;
+ while (i < end) ws[j++] = ws[i++]; // Copy the remaining watches
+ }
+ }
}
- FoundWatch:;
+
+ if (j == 0) Watches[p.Index] = null;
+ else ws.ShrinkTo(j);
}
}
- ws.ShrinkTo(j);
}
return confl;
@@ -792,8 +850,10 @@ namespace MiniSAT
for (int i = SimpDBAssigns; i < nAssigns; i++)
{
Lit p = Trail[i];
- Watches[p.Index].Clear();
- Watches[(~p).Index].Clear();
+ Watches[p.Index] = null;
+ Watches[(~p).Index] = null;
+ BinaryWatches[p.Index] = null;
+ BinaryWatches[(~p).Index] = null;
}
// Remove satisfied clauses:
Distinct solutions
Counting the solutions to a SAT problem is straightforward, if sometimes slow: find a solution, add a new clause which excludes precisely that solution, and run again. Here it is easy to generate the equivalence class of solutions under the symmetries of the rectangle, so the following code suffices to generate all of the distinct solutions.
// Force it to the known optimal weight
for (int i = optimal[n]; i < unaryWeights.Length; i++) solver.AddClause(~unaryWeights[i]);
while (solver.Solve())
{
var rows = new uint[height];
int cell = 0;
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
if (solver.Model[cell++] == Solver.l_True) rows[y] |= 1U << x;
}
}
var poly = new FreePolyomino(new OrientedPolyomino(rows));
Console.WriteLine(poly.CanonicalOrientation);
foreach (var orientation in poly.OrientedPolyominos)
{
if (orientation.BBWidth != width || orientation.BBHeight != height) continue;
// Exclude it
List<Solver.Lit> soln = new List<Solver.Lit>(previous);
cell = 0;
for (int y = 0; y < height; y++)
{
uint row = orientation.Rows[y];
for (int x = 0; x < width; x++, cell++)
{
if ((row & 1) == 1) soln.Add(~new Solver.Lit(cell));
row >>= 1;
}
}
solver.AddClause(soln.ToArray());
}
}
Distinct solutions for n = 3 to 10, one per line, printed as the canonical orientation (each row given as a bit string, rows joined by _):
100_111
010_111
0110_1111
1100_1111
01000_11111_01110
00100_11111_11100
011000_111111_011110
110000_111111_111100
011000_011110_111111
001100_111100_111111
110000_111100_111111
001100_111111_111100
0010000_0111100_0111110_1111111
0001000_1111000_1111111_1111100
0001000_0111000_1111111_1111110
0100000_1111000_1111111_1111100
0100000_1111000_1111100_1111111
0001000_0111000_1111110_1111111
0001000_0111110_1111111_1111000
0100000_1111000_0111110_1111111
0001000_1111100_1111111_1111000
1100000_1110000_1111100_1111111
0100000_1111111_1111100_0111100
0011000_0111000_0111110_1111111
1010000_1110000_1111100_1111111
0011000_1110000_1111100_1111111
0010100_0011100_1111100_1111111
0011000_1011100_1111111_1111000
0100000_1111111_0111110_0111100
1100000_1110000_1111111_1111100
0100000_1111100_1111111_0111100
1010000_1110000_1111111_1111100
1110000_0110000_1111111_0111110
0110000_1110000_1111100_1111111
0110000_1110000_1111111_1011110
0111000_0011000_1111111_0011111
0011100_0011000_1111111_0011111
1000100_1111111_0011110_0111100
0010000_1111111_0011111_0011110
0011000_0111000_1111111_0101111
0011000_0011101_1111111_0001111
0101000_0111000_0111110_1111111
0001000_0111100_1111100_1111111
1000100_1111111_0111100_0111100
0110000_1110000_1111111_1111100
1010000_1110000_1111111_0111110
0101000_0111000_1111111_0011111
0001000_1111111_1111100_1111000
0110000_0111010_1111111_0011110
0011000_0001110_0111110_1111111
0010000_1111111_0111110_0011110
0101000_0111000_1111111_0111110
0010000_1111100_1111111_0011110
0010000_0011100_1111111_1111110
0110000_0111000_1111111_0111110
0010000_0011100_1111111_0111111
0011000_0111010_1111111_0011110
0110000_1110100_1111111_0111100
0110000_0011100_1111100_1111111
0001000_0111100_1111111_1111100
0010000_0111100_1111111_1011110
0011000_0011100_1111111_1111100
0110000_0111000_0111110_1111111
0011000_0011100_1111100_1111111
0011000_0011100_0011111_1111111
0010000_0111100_0011111_1111111
0010000_0011100_1111110_1111111
0011000_0111000_1111111_0111110
0010000_0111100_1111111_0111110
0010000_0011110_1111111_0111110
0010000_0111100_1111111_0011111
0011000_0011100_1111111_0011111
0010100_0011100_1111111_1111100
0010000_0111101_1111111_0011110
0010000_0111110_1111111_0011110
01110000_01110000_11111111_01111110
11100000_11100000_11111100_11111111
00111000_00111000_00111111_11111111
11100000_11100000_11111111_11111100
00111000_00111000_11111111_00111111
01110000_01110000_01111110_11111111
011000000_111000000_111111111_111111100_011111000
001110000_000110000_001111100_001111111_111111111
000110000_001110000_111111111_001111111_000111110
001100000_011100000_111111111_011111110_001111100
001100000_011100000_111111100_111111111_001111100
011100000_001100000_011111000_011111110_111111111
000110000_000111000_001111111_111111111_000011111
000110000_000111000_111111100_111111111_111110000
000110000_001110000_011111110_111111111_000111110
001100000_001110000_011111110_111111111_000111110
1110000000_1111000000_1111110000_1111111100_1111111111
1110000000_1111000000_1111110000_1111111111_1111111100
1111000000_0111000000_0111111000_1111111111_0111111110
0011100000_0011110000_0011111100_0011111111_1111111111
0111000000_0111100000_0111111000_0111111110_1111111111
0111000000_0111100000_0111111000_1111111111_0111111110
0111000000_1111000000_0111111000_1111111111_1111111001
0111000000_1111000000_0111111000_1111111111_1111111010
0011100000_0011110000_0011111100_1111111111_0011111111
0111100000_0011100000_0011111100_1111111111_0011111111
0011100000_0111100000_0011111100_1111111111_0111111101