// complement of subtrahend
// copy, since BBSub writes into it.
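// (Presumably two's-complement subtraction, left + ~right + 1, hence
// "complement of subtrahend"; BBSub writes its result into its first
// argument, so tmp_res must be a fresh copy.)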
- //FIXME: Unnecessary array copies?
ASTVec tmp_res = BBTerm(term[0]).GetChildren();
-
const ASTVec& bbkid1 = BBTerm(term[1]).GetChildren();
BBSub(tmp_res, bbkid1);
result = _bm->CreateNode(BOOLVEC, tmp_res);
{
// For some unexplained reason, XORs are faster than converting
// them to clauses at this point.
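// (This is the sum bit of a full adder: sum = xi XOR yi XOR cin.
// The equivalent CNF encoding takes four ternary clauses, one per
// falsifying assignment.)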
- return _bm->CreateSimpForm(XOR,
- _bm->CreateSimpForm(XOR, xi, yi),
- cin);
-
- if((ASTTrue == xi && ASTTrue == yi && ASTFalse == cin)
- || (ASTTrue == xi && ASTFalse == yi && ASTTrue == cin)
- || (ASTFalse == xi && ASTTrue == yi && ASTTrue == cin)
- || (ASTFalse == xi && ASTFalse== yi && ASTFalse == cin))
- {
- return ASTFalse;
- }
- ASTNode S1 = _bm->CreateSimpForm(OR,xi,yi,cin);
- ASTNode S2 = _bm->CreateSimpForm(OR,
- _bm->CreateSimpForm(NOT,xi),
- _bm->CreateSimpForm(NOT,yi),
- cin);
- ASTNode S3 = _bm->CreateSimpForm(OR,
- _bm->CreateSimpForm(NOT,xi),
- yi,
- _bm->CreateSimpForm(NOT,cin));
- ASTNode S4 = _bm->CreateSimpForm(OR,
- xi,
- _bm->CreateSimpForm(NOT,yi),
- _bm->CreateSimpForm(NOT,cin));
- ASTVec S;
- S.push_back(S1);
- S.push_back(S2);
- S.push_back(S3);
- S.push_back(S4);
- return _bm->CreateSimpForm(AND,S);
+      return _bm->CreateSimpForm(XOR,
+                                 _bm->CreateSimpForm(XOR, xi, yi),
+                                 cin);
}
// Bitwise complement
// Workhorse for comparison routines. This does a signed BVLE if
// is_signed is true, else it's unsigned. All other comparison
// operators can be reduced to this by swapping args or
- // complementing the result bit. FIXME: If this were done MSB
- // first, it would enable a fast exit sometimes when the MSB is
- // constant, deciding the result without looking at the rest of the
- // bits.
+ // complementing the result bit.
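+ // For illustration (an assumed sketch of the reductions, not code
+ // from this file), the other comparisons would reduce as
+ //   BVGE(l, r)  ==  BBBVLE(r, l, is_signed)      // swap arguments
+ //   BVGT(l, r)  ==  NOT BBBVLE(l, r, is_signed)  // complement result
+ //   BVLT(l, r)  ==  NOT BBBVLE(r, l, is_signed)  // both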
ASTNode BitBlaster::BBBVLE(const ASTVec& left,
- const ASTVec& right, bool is_signed)
+ const ASTVec& right,
+ bool is_signed)
{
- // "thisbit" represents BVLE of the suffixes of the BVs
- // from that position . if R < L, return TRUE, else if L < R
- // return FALSE, else return BVLE of lower-order bits. MSB is
- // treated separately, because signed comparison is done by
- // complementing the MSB of each BV, then doing an unsigned
- // comparison.
- ASTVec::const_iterator lit = left.begin();
- ASTVec::const_iterator litend = left.end();
- ASTVec::const_iterator rit = right.begin();
- ASTNode prevbit = ASTTrue;
- for (; lit < litend - 1; lit++, rit++)
+ ASTVec::const_reverse_iterator lit = left.rbegin();
+ ASTVec::const_reverse_iterator litend = left.rend();
+ ASTVec::const_reverse_iterator rit = right.rbegin();
+
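+  // Most significant bit first. Under unsigned order, left < right at
+  // this bit iff NOT left_msb AND right_msb; under signed
+  // (two's-complement) order the sign bit reverses, so left < right
+  // iff left_msb AND NOT right_msb.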
+ ASTNode this_compare_bit =
+ is_signed ?
+ _bm->CreateSimpForm(AND, *lit, _bm->CreateSimpNot(*rit)):
+ _bm->CreateSimpForm(AND, _bm->CreateSimpNot(*lit), *rit);
+
+ ASTVec bit_comparisons;
+ bit_comparisons.push_back(this_compare_bit);
+
+ ASTNode prev_eq_bit = _bm->CreateSimpForm(IFF, *lit, *rit);
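+  // prev_eq_bit is true iff every bit examined so far is equal; a
+  // lower-order bit can decide the comparison only when that holds.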
+ for(lit++, rit++; lit < litend; lit++, rit++)
{
- ASTNode neglit = _bm->CreateSimpNot(*lit);
- ASTNode thisbit =
- _bm->CreateSimpForm(OR,
- _bm->CreateSimpForm(AND, neglit, *rit),
- _bm->CreateSimpForm(AND,
- _bm->CreateSimpForm(OR,
- neglit,
- *rit),
- prevbit));
- prevbit = thisbit;
+ this_compare_bit =
+ _bm->CreateSimpForm(AND, _bm->CreateSimpNot(*lit), *rit);
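+      // (Below the MSB signedness no longer matters:
+      //  *lit < *rit iff NOT *lit AND *rit.)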
+
+ ASTNode thisbit_output =
+ _bm->CreateSimpForm(AND, this_compare_bit, prev_eq_bit);
+ bit_comparisons.push_back(thisbit_output);
+
+      // IFF(*lit, *rit) == (NOT *lit OR *rit) AND (*lit OR NOT *rit)
+ ASTNode this_eq_bit =
+ _bm->CreateSimpForm(AND,
+ _bm->CreateSimpForm(IFF,*lit,*rit),
+ prev_eq_bit);
+ prev_eq_bit = this_eq_bit;
}
-
- // Handle MSB -- negate MSBs if signed comparison
- // FIXME: make into refs after it's debugged.
- ASTNode lmsb = *lit;
- ASTNode rmsb = *rit;
- if (is_signed)
- {
- lmsb = _bm->CreateSimpNot(*lit);
- rmsb = _bm->CreateSimpNot(*rit);
- }
-
- ASTNode neglmsb = _bm->CreateSimpNot(lmsb);
- ASTNode msb =
- // TRUE if l < r
- _bm->CreateSimpForm(OR, _bm->CreateSimpForm(AND, neglmsb, rmsb),
- _bm->CreateSimpForm(AND,
- _bm->CreateSimpForm(OR,
- neglmsb,
- rmsb),
- prevbit)); // else prevbit
- return msb;
+
+ bit_comparisons.push_back(prev_eq_bit);
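+  // left <= right iff some bit position strictly decides left < right,
+  // or every bit pair is equal (the final prev_eq_bit term).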
+ ASTNode output =
+ _bm->CreateSimpForm(OR, bit_comparisons);
+
+ return output;
+// // "thisbit" represents BVLE of the suffixes of the BVs
+// // from that position . if R < L, return TRUE, else if L < R
+// // return FALSE, else return BVLE of lower-order bits. MSB is
+// // treated separately, because signed comparison is done by
+// // complementing the MSB of each BV, then doing an unsigned
+// // comparison.
+// ASTVec::const_iterator lit = left.rbegin();
+// ASTVec::const_iterator litend = left.rend();
+// ASTVec::const_iterator rit = right.rbegin();
+// ASTNode prevbit = ASTTrue;
+// for (; lit < litend - 1; lit++, rit++)
+// {
+// ASTNode neglit = _bm->CreateSimpNot(*lit);
+// ASTNode thisbit =
+// _bm->CreateSimpForm(OR,
+// _bm->CreateSimpForm(AND, neglit, *rit),
+// _bm->CreateSimpForm(AND,
+// _bm->CreateSimpForm(OR,
+// neglit,
+// *rit),
+// prevbit));
+// prevbit = thisbit;
+// }
+
+// // Handle MSB -- negate MSBs if signed comparison
+// // FIXME: make into refs after it's debugged.
+// ASTNode lmsb = *lit;
+// ASTNode rmsb = *rit;
+// if (is_signed)
+// {
+// lmsb = _bm->CreateSimpNot(*lit);
+// rmsb = _bm->CreateSimpNot(*rit);
+// }
+
+// ASTNode neglmsb = _bm->CreateSimpNot(lmsb);
+// ASTNode msb =
+// // TRUE if l < r
+// _bm->CreateSimpForm(OR, _bm->CreateSimpForm(AND, neglmsb, rmsb),
+// _bm->CreateSimpForm(AND,
+// _bm->CreateSimpForm(OR,
+// neglmsb,
+// rmsb),
+// prevbit)); // else prevbit
+// return msb;
}
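
// For reference (an illustrative sketch, not code from this file): a
// plain-bool model of the MSB-first comparison that BBBVLE bit-blasts
// above. The helper name bvle_model is made up; bits are stored
// LSB-first, as in the ASTVecs.
//
//   #include <vector>
//   static bool bvle_model(const std::vector<bool>& l,
//                          const std::vector<bool>& r, bool is_signed)
//   {
//     bool eq = true;  // all higher-order bits equal so far
//     bool lt = false; // some higher-order bit already decided l < r
//     for (int i = (int)l.size() - 1; i >= 0; i--)
//     {
//       bool msb = (i == (int)l.size() - 1);
//       // the sign bit orders in the opposite direction when signed
//       bool bit_lt = (is_signed && msb) ? (l[i] && !r[i])
//                                        : (!l[i] && r[i]);
//       lt = lt || (eq && bit_lt); // decided at this position
//       eq = eq && (l[i] == r[i]);
//     }
//     return lt || eq; // l <= r
//   }
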
// Left shift within fixed field inserting zeros at LSB.