cadical_reduce.cpp
#include "global.h"

#include "internal.hpp"

ABC_NAMESPACE_IMPL_START

namespace CaDiCaL {

/*------------------------------------------------------------------------*/

// Once in a while we reduce, e.g., we remove learned clauses which are
// supposed to be less useful in the future. This is done in increasing
// intervals, which has the effect of allowing more and more learned
// clauses to be kept for a longer period. The number of learned clauses
// kept in memory corresponds to an upper bound on the 'space' of a
// resolution proof needed to refute a formula in the proof complexity
// sense.

bool Internal::reducing () {
  if (!opts.reduce)
    return false;
  if (!stats.current.redundant)
    return false;
  return stats.conflicts >= lim.reduce;
}

/*------------------------------------------------------------------------*/

// Even less regularly we are flushing all redundant clauses.

bool Internal::flushing () {
  if (!opts.flush)
    return false;
  return stats.conflicts >= lim.flush;
}

/*------------------------------------------------------------------------*/

void Internal::mark_clauses_to_be_flushed () {
  const int tier1limit = tier1[false];
  const int tier2limit = max (tier1limit, tier2[false]);
  for (const auto &c : clauses) {
    if (!c->redundant)
      continue; // keep irredundant
    if (c->garbage)
      continue; // already marked as garbage
    if (c->reason)
      continue; // need to keep reasons
    const unsigned used = c->used;
    if (used)
      c->used--;
    if (c->glue < tier1limit && used)
      continue;
    if (c->glue < tier2limit && used >= max_used - 1)
      continue;
    mark_garbage (c); // flush unused clauses
    if (c->hyper)
      stats.flush.hyper++;
    else
      stats.flush.learned++;
  }
  // No change to 'lim.kept{size,glue}'.
}

/*------------------------------------------------------------------------*/

// Clauses of larger glue or larger size are considered less useful.
//
// We also follow the observations made by the Glucose team in their
// IJCAI'09 paper and keep all low glue clauses limited by
// 'options.keepglue' (typically '2').
//
// In earlier versions we pre-computed a 64-bit sort key per clause and
// wrapped a pointer to the clause and the 64-bit sort key into a separate
// data structure for sorting. This was probably faster but awkward and
// so we moved back to a simpler scheme which also uses 'stable_sort'
// instead of 'rsort' below. Sorting here is not a hot-spot anyhow.
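// A rough sketch of such a packed key (illustrative only, the exact layout
// used back then may have differed): with the glue in the high word and
// the size in the low word, a single radix sort on the key orders clauses
// by glue first and by size second,
//
//   uint64_t key = ((uint64_t) c->glue << 32) | (uint64_t) c->size;
//
// at the price of an extra temporary array of (key, clause) pairs.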

struct reduce_less_useful {
  bool operator() (const Clause *c, const Clause *d) const {
    if (c->glue > d->glue)
      return true;
    if (c->glue < d->glue)
      return false;
    return c->size > d->size;
  }
};

// This function implements the important reduction policy. It determines
// which redundant clauses are considered not useful and thus will be
// collected in a subsequent garbage collection phase.

void Internal::mark_useless_redundant_clauses_as_garbage () {

  // We use a separate stack for sorting candidates for removal. This uses
  // (slightly) more memory but has the advantage of keeping the relative
  // order in 'clauses' intact, which, due to the stable sorting, actually
  // goes into the candidate selection (more recently learned clauses are
  // kept if they otherwise have the same glue and size).

  vector<Clause *> stack;
  const int tier1limit = tier1[false];
  const int tier2limit = max (tier1limit, tier2[false]);

  stack.reserve (stats.current.redundant);

  for (const auto &c : clauses) {
    if (!c->redundant)
      continue; // Keep irredundant.
    if (c->garbage)
      continue; // Skip already marked.
    if (c->reason)
      continue; // Need to keep reasons.
    const unsigned used = c->used;
    if (used)
      c->used--;
    if (c->glue <= tier1limit && used)
      continue;
    if (c->glue <= tier2limit && used >= max_used - 1)
      continue;
    if (c->hyper) {                  // Hyper binary and ternary resolvents
      CADICAL_assert (c->size <= 3); // are only kept for one reduce round
      if (!used)
        mark_garbage (c); // unless
      continue;           // used recently.
    }
    stack.push_back (c);
  }

  stable_sort (stack.begin (), stack.end (), reduce_less_useful ());
  size_t target = 1e-2 * opts.reducetarget * stack.size ();

  // This is defensive code, which I usually consider a bug, but here I am
  // just not sure that using floating points in the line above is precise
  // in all situations and instead of figuring that out, I just use this.
  //
  if (target > stack.size ())
    target = stack.size ();

  PHASE ("reduce", stats.reductions, "reducing %zd clauses %.0f%%", target,
         percent (target, stats.current.redundant));

  auto i = stack.begin ();
  const auto t = i + target;
  while (i != t) {
    Clause *c = *i++;
    LOG (c, "marking useless to be collected");
    mark_garbage (c);
    stats.reduced++;
  }

  lim.keptsize = lim.keptglue = 0;

  const auto end = stack.end ();
  for (i = t; i != end; i++) {
    Clause *c = *i;
    LOG (c, "keeping");
    if (c->size > lim.keptsize)
      lim.keptsize = c->size;
    if (c->glue > lim.keptglue)
      lim.keptglue = c->glue;
  }

  erase_vector (stack);

  PHASE ("reduce", stats.reductions, "maximum kept size %d glue %d",
         lim.keptsize, lim.keptglue);
}

/*------------------------------------------------------------------------*/

// If chronological backtracking produces out-of-order assigned units, then
// it is necessary to completely propagate them at the root level in order
// to derive all implied units. Otherwise the blocking literals in
// 'flush_watches' are messed up and the assertion 'FW1' fails.

bool Internal::propagate_out_of_order_units () {
  if (!level)
    return true;
  int oou = 0;
  for (size_t i = control[1].trail; !oou && i < trail.size (); i++) {
    const int lit = trail[i];
    CADICAL_assert (val (lit) > 0);
    if (var (lit).level)
      continue;
    LOG ("found out-of-order assigned unit %d", lit);
    oou = lit;
  }
  if (!oou)
    return true;
  backtrack (0);
  if (propagate ())
    return true;
  learn_empty_clause ();
  return false;
}

/*------------------------------------------------------------------------*/

// Reduction is scheduled with 'reduceint', 'reducetarget' and 'reduceopt'.
// With reduceopt=1 the number of learned clauses scales with the square
// root of the number of conflicts times 'reduceint'. The scaling is the
// same as with reduceopt=0 (the classical default), however, the constants
// are different. To avoid this (and get roughly the same behaviour with
// reduceopt=0 and reduceopt=1) we need to scale the interval, namely by
// (reduceint^2/2). Lastly, reduceopt=2 just replaces the square root of
// the number of conflicts with its logarithm.
// The number of learned clauses should not grow beyond
//   1/reducetarget * reduceint * f (conflicts)
// where 'f' is 'log' if reduceopt=2 and 'sqrt' otherwise. This is however
// only the theoretical target: the second chance for tier2 clauses and the
// very long lifespan of tier1 clauses (through the 'used' flag) make this
// behave differently.
// Finally, 'reduceinit' shifts the curve to the right, increasing the
// number of clauses in the solver. This impact will decrease over time.
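// As a rough illustration with made-up numbers (the actual option defaults
// are not shown here): with reduceopt=1 and reduceint=20 the interval to
// the next reduction after 10^4 conflicts is about 20 * sqrt(10^4) = 2000
// conflicts and after 10^6 conflicts about 20 * sqrt(10^6) = 20000
// conflicts, so the pool of learned clauses may grow roughly with the
// square root of the number of conflicts; with reduceopt=2 the same two
// points give only about 20 * ln(10^4) = 184 and 20 * ln(10^6) = 276.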

void Internal::reduce () {
  START (reduce);

  stats.reductions++;
  report ('.', 1);

  bool flush = flushing ();
  if (flush)
    stats.flush.count++;

  if (!propagate_out_of_order_units ())
    goto DONE;

  mark_satisfied_clauses_as_garbage ();
  if (flush)
    mark_clauses_to_be_flushed ();
  else
    mark_useless_redundant_clauses_as_garbage ();
  garbage_collection ();

  {
    int64_t delta = opts.reduceint;
    double factor = stats.reductions + 1;
    if (opts.reduceopt == 0)
      // adjust delta such that this is the same as reduceopt=1
      delta = delta * delta / 2;
    else if (opts.reduceopt == 1) {
      // this is the same as reduceopt=0 if reduceint = sqrt (reduceint) = 17
      factor = sqrt ((double) stats.conflicts);
    } else if (opts.reduceopt == 2)
      // log scaling instead
      factor = log ((double) stats.conflicts);
    if (factor < 1)
      factor = 1;
    delta = delta * factor;
    if (irredundant () > 1e5) {
      delta *= log (irredundant () / 1e4) / log (10);
    }
    if (delta < 1)
      delta = 1;
    lim.reduce = stats.conflicts + delta;
    PHASE ("reduce", stats.reductions,
           "new reduce limit %" PRId64 " after %" PRId64 " conflicts",
           lim.reduce, delta);
  }

  if (flush) {
    PHASE ("flush", stats.flush.count, "new flush increment %" PRId64 "",
           inc.flush);
    inc.flush *= opts.flushfactor;
    lim.flush = stats.conflicts + inc.flush;
    PHASE ("flush", stats.flush.count, "new flush limit %" PRId64 "",
           lim.flush);
  }

  last.reduce.conflicts = stats.conflicts;

DONE:

  report (flush ? 'f' : '-');
  STOP (reduce);
}

} // namespace CaDiCaL

ABC_NAMESPACE_IMPL_END
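To make the selection policy in 'mark_useless_redundant_clauses_as_garbage' concrete, here is a small standalone sketch. It is not taken from the CaDiCaL sources: 'ToyClause', the clause data and the 'reducetarget' percentage below are made up for illustration. It applies the same 'reduce_less_useful' ordering and the same target-fraction computation to a handful of dummy clauses.

#include <algorithm>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for CaDiCaL's Clause, reduced to the two fields
// the 'reduce_less_useful' comparator looks at.
struct ToyClause {
  int glue, size;
};

// Same ordering as 'reduce_less_useful': larger glue first, then larger
// size, so the least useful clauses end up at the front of the range.
static bool less_useful (const ToyClause &c, const ToyClause &d) {
  if (c.glue > d.glue)
    return true;
  if (c.glue < d.glue)
    return false;
  return c.size > d.size;
}

int main () {
  std::vector<ToyClause> stack = {{2, 5}, {7, 12}, {4, 9}, {7, 3}, {3, 20}};
  std::stable_sort (stack.begin (), stack.end (), less_useful);

  const int reducetarget = 60; // percentage of candidates to collect (made up)
  std::size_t target = 1e-2 * reducetarget * stack.size ();
  if (target > stack.size ()) // same defensive clamp as in the solver
    target = stack.size ();

  for (std::size_t i = 0; i < stack.size (); i++)
    std::printf ("glue %d size %2d -> %s\n", stack[i].glue, stack[i].size,
                 i < target ? "collect" : "keep");
  return 0;
}

Running it marks the three highest-glue clauses for collection and keeps the low-glue ones, which is exactly the Glucose-style preference implemented by the comparator above.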