From 27150e9f2dbfc53f1833829511d0fe0f06caa92d Mon Sep 17 00:00:00 2001
From: vijay_ganesh
Date: Wed, 12 Aug 2009 16:54:33 +0000
Subject: [PATCH] bio-tests: replace fewerbits 4-helix test with a macro-based
 encoding

Delete tests/bio-tests/4-alpha-helices-3-rungs-fewerbits.cvc and add
4-alpha-helices-3-rungs-macros.cvc. The new test re-encodes the same
4-helix, 3-rung problem, but names every magic constant (energies, face
lengths, coil limits, array indices) as an asserted "macro" bit-vector
variable, and lowers the final query bound from 0hex00F0 to 0hex0010.

git-svn-id: https://stp-fast-prover.svn.sourceforge.net/svnroot/stp-fast-prover/trunk/stp@116 e59a4935-1847-0410-ae03-e826735625c1
---
 .../4-alpha-helices-3-rungs-fewerbits.cvc     | 473 -----------------
 .../4-alpha-helices-3-rungs-macros.cvc        | 500 ++++++++++++++++++
 2 files changed, 500 insertions(+), 473 deletions(-)
 delete mode 100644 tests/bio-tests/4-alpha-helices-3-rungs-fewerbits.cvc
 create mode 100644 tests/bio-tests/4-alpha-helices-3-rungs-macros.cvc

diff --git a/tests/bio-tests/4-alpha-helices-3-rungs-fewerbits.cvc b/tests/bio-tests/4-alpha-helices-3-rungs-fewerbits.cvc
deleted file mode 100644
index 5af3867..0000000
--- a/tests/bio-tests/4-alpha-helices-3-rungs-fewerbits.cvc
+++ /dev/null
@@ -1,473 +0,0 @@
-% 4 alpha helices. Each helix with 6 amino acid (residues), and 4
-% faces (two inner and two outer). Each face has 1 or 2 residues
-% The problem is encoded with two bit bit-vector variable per face.
-
-
-%%% ALPHA1 Helix variables
-alpha1_inner1_length, alpha1_outer1_length, alpha1_inner2_length, alpha1_outer2_length, alpha1_inner3_length, alpha1_outer3_length : BITVECTOR(4);
-alpha1_inner1_index, alpha1_outer1_index, alpha1_inner2_index, alpha1_outer2_index, alpha1_inner3_index, alpha1_outer3_index : BITVECTOR(12);
-alpha1_inner1_energy, alpha1_outer1_energy, alpha1_inner2_energy, alpha1_outer2_energy, alpha1_inner3_energy, alpha1_outer3_energy : BITVECTOR(16);
-
-
-%%% ALPHA2 Helix variables
-alpha2_inner1_length, alpha2_outer1_length, alpha2_inner2_length, alpha2_outer2_length, alpha2_inner3_length, alpha2_outer3_length : BITVECTOR(4);
-alpha2_inner1_index, alpha2_outer1_index, alpha2_inner2_index, alpha2_outer2_index, alpha2_inner3_index, alpha2_outer3_index : BITVECTOR(12);
-alpha2_inner1_energy, alpha2_outer1_energy, alpha2_inner2_energy, alpha2_outer2_energy, alpha2_inner3_energy, alpha2_outer3_energy : BITVECTOR(16);
-
-
-%%% ALPHA3 Helix variables
-alpha3_inner1_length, alpha3_outer1_length, alpha3_inner2_length, alpha3_outer2_length, alpha3_inner3_length, alpha3_outer3_length : BITVECTOR(4);
-alpha3_inner1_index, alpha3_outer1_index, alpha3_inner2_index, alpha3_outer2_index, alpha3_inner3_index, alpha3_outer3_index : BITVECTOR(12);
-alpha3_inner1_energy, alpha3_outer1_energy, alpha3_inner2_energy, alpha3_outer2_energy, alpha3_inner3_energy, alpha3_outer3_energy : BITVECTOR(16);
-
-
-%%% ALPHA4 Helix variables
-alpha4_inner1_length, alpha4_outer1_length, alpha4_inner2_length, alpha4_outer2_length, alpha4_inner3_length, alpha4_outer3_length : BITVECTOR(4);
-alpha4_inner1_index, alpha4_outer1_index, alpha4_inner2_index, alpha4_outer2_index, alpha4_inner3_index, alpha4_outer3_index : BITVECTOR(12);
-alpha4_inner1_energy, alpha4_outer1_energy, alpha4_inner2_energy, alpha4_outer2_energy, alpha4_inner3_energy, alpha4_outer3_energy : BITVECTOR(16);
-
-start_coil_length, coil_12_length, coil_23_length, coil_34_length, end_coil_length : BITVECTOR(8);
-start_coil_index, coil_12_index, coil_23_index, coil_34_index, end_coil_index : BITVECTOR(12);
-start_coil_energy, coil_12_energy, coil_23_energy, coil_34_energy, end_coil_energy : BITVECTOR(16);
-
-contact_energy12_zero, contact_energy12_one, contact_energy12_two : BITVECTOR(16);
-contact_energy23_zero, contact_energy23_one, contact_energy23_two : BITVECTOR(16);
-contact_energy34_zero, contact_energy34_one, contact_energy34_two : BITVECTOR(16);
-contact_energy41_zero, 
contact_energy41_one, contact_energy41_two : BITVECTOR(16); - -outer_energy : BITVECTOR(16); -inner_energy : BITVECTOR(16); -bits16_one : BITVECTOR(16); -bits16_two : BITVECTOR(16); -high_energy : BITVECTOR(16); -low_energy : BITVECTOR(16); - -rung_length_limit : BITVECTOR(4); -COIL_LEN_LOW_LIMIT : BITVECTOR(8); -COIL_LEN_HIGH_LIMIT : BITVECTOR(8); - - -%%%%%%%%%%%%%%%%%%%%%%%% START OF MACROS %%%%%%%%%%%%%%%%%%%%%%% -ASSERT (bits16_one = 0hex0001); -ASSERT (bits16_two = 0hex0002); -ASSERT (high_energy = 0hex0001); -ASSERT (low_energy = 0hex0000); -ASSERT (COIL_LEN_LOW_LIMIT = 0hex00); -ASSERT (COIL_LEN_HIGH_LIMIT = 0hex20); -%%%%%%%%%%%%%%%%%%%%%%%% END OF MACROS %%%%%%%%%%%%%%%%%%%%%%%% - - -%%%%%%%%%%%%%% START OF AMINO ACID ENERGY ARRAY %%%%%%%%%%%%% -aminoacid_energies : ARRAY BITVECTOR(12) OF BITVECTOR(16); -ASSERT (aminoacid_energies[0hex000] = high_energy); -ASSERT (aminoacid_energies[0hex001] = high_energy); -ASSERT (aminoacid_energies[0hex002] = low_energy); -ASSERT (aminoacid_energies[0hex003] = high_energy); -ASSERT (aminoacid_energies[0hex004] = low_energy); -ASSERT (aminoacid_energies[0hex005] = high_energy); -ASSERT (aminoacid_energies[0hex006] = low_energy); -ASSERT (aminoacid_energies[0hex007] = high_energy); -ASSERT (aminoacid_energies[0hex008] = low_energy); -ASSERT (aminoacid_energies[0hex009] = high_energy); -ASSERT (aminoacid_energies[0hex00A] = low_energy); -ASSERT (aminoacid_energies[0hex00B] = high_energy); -ASSERT (aminoacid_energies[0hex00C] = low_energy); -ASSERT (aminoacid_energies[0hex00D] = high_energy); -ASSERT (aminoacid_energies[0hex00E] = high_energy); -ASSERT (aminoacid_energies[0hex00F] = high_energy); -ASSERT (aminoacid_energies[0hex010] = high_energy); -ASSERT (aminoacid_energies[0hex011] = low_energy); -ASSERT (aminoacid_energies[0hex012] = high_energy); -ASSERT (aminoacid_energies[0hex013] = high_energy); -ASSERT (aminoacid_energies[0hex014] = low_energy); -ASSERT (aminoacid_energies[0hex015] = high_energy); -ASSERT (aminoacid_energies[0hex016] = low_energy); -ASSERT (aminoacid_energies[0hex017] = high_energy); -ASSERT (aminoacid_energies[0hex018] = low_energy); -ASSERT (aminoacid_energies[0hex019] = high_energy); -ASSERT (aminoacid_energies[0hex01A] = low_energy); -ASSERT (aminoacid_energies[0hex01B] = high_energy); -ASSERT (aminoacid_energies[0hex01C] = high_energy); -ASSERT (aminoacid_energies[0hex01D] = high_energy); -ASSERT (aminoacid_energies[0hex01E] = high_energy); -ASSERT (aminoacid_energies[0hex01F] = high_energy); -ASSERT (aminoacid_energies[0hex020] = high_energy); -ASSERT (aminoacid_energies[0hex021] = low_energy); -ASSERT (aminoacid_energies[0hex022] = high_energy); -ASSERT (aminoacid_energies[0hex023] = low_energy); -ASSERT (aminoacid_energies[0hex024] = high_energy); -ASSERT (aminoacid_energies[0hex025] = low_energy); -ASSERT (aminoacid_energies[0hex026] = high_energy); -ASSERT (aminoacid_energies[0hex027] = low_energy); -ASSERT (aminoacid_energies[0hex028] = high_energy); -ASSERT (aminoacid_energies[0hex029] = high_energy); -ASSERT (aminoacid_energies[0hex02A] = low_energy); -ASSERT (aminoacid_energies[0hex02B] = high_energy); -ASSERT (aminoacid_energies[0hex02C] = low_energy); -ASSERT (aminoacid_energies[0hex02D] = low_energy); -ASSERT (aminoacid_energies[0hex02E] = high_energy); -ASSERT (aminoacid_energies[0hex02F] = high_energy); -ASSERT (aminoacid_energies[0hex030] = low_energy); -ASSERT (aminoacid_energies[0hex031] = high_energy); -ASSERT (aminoacid_energies[0hex032] = low_energy); -ASSERT (aminoacid_energies[0hex033] = 
high_energy); -ASSERT (aminoacid_energies[0hex034] = low_energy); -ASSERT (aminoacid_energies[0hex035] = high_energy); -ASSERT (aminoacid_energies[0hex036] = low_energy); -ASSERT (aminoacid_energies[0hex037] = high_energy); -ASSERT (aminoacid_energies[0hex038] = low_energy); -ASSERT (aminoacid_energies[0hex039] = high_energy); -ASSERT (aminoacid_energies[0hex03A] = high_energy); -ASSERT (aminoacid_energies[0hex03B] = low_energy); -ASSERT (aminoacid_energies[0hex03C] = high_energy); -ASSERT (aminoacid_energies[0hex03D] = low_energy); -ASSERT (aminoacid_energies[0hex03E] = high_energy); -ASSERT (aminoacid_energies[0hex03F] = low_energy); -ASSERT (aminoacid_energies[0hex040] = high_energy); -ASSERT (aminoacid_energies[0hex041] = low_energy); -ASSERT (aminoacid_energies[0hex042] = high_energy); -ASSERT (aminoacid_energies[0hex043] = low_energy); -ASSERT (aminoacid_energies[0hex044] = high_energy); -ASSERT (aminoacid_energies[0hex045] = low_energy); -ASSERT (aminoacid_energies[0hex046] = high_energy); -ASSERT (aminoacid_energies[0hex047] = low_energy); -ASSERT (aminoacid_energies[0hex048] = high_energy); -ASSERT (aminoacid_energies[0hex049] = low_energy); -ASSERT (aminoacid_energies[0hex04A] = high_energy); -ASSERT (aminoacid_energies[0hex04B] = high_energy); -ASSERT (aminoacid_energies[0hex04C] = high_energy); -ASSERT (aminoacid_energies[0hex04D] = high_energy); -ASSERT (aminoacid_energies[0hex04E] = high_energy); -ASSERT (aminoacid_energies[0hex04F] = high_energy); -ASSERT (aminoacid_energies[0hex050] = high_energy); -ASSERT (aminoacid_energies[0hex051] = low_energy); -ASSERT (aminoacid_energies[0hex052] = high_energy); -ASSERT (aminoacid_energies[0hex053] = low_energy); -ASSERT (aminoacid_energies[0hex054] = high_energy); -ASSERT (aminoacid_energies[0hex055] = low_energy); -ASSERT (aminoacid_energies[0hex056] = high_energy); -ASSERT (aminoacid_energies[0hex057] = low_energy); -ASSERT (aminoacid_energies[0hex058] = high_energy); -ASSERT (aminoacid_energies[0hex059] = high_energy); -ASSERT (aminoacid_energies[0hex05A] = high_energy); -ASSERT (aminoacid_energies[0hex05B] = low_energy); -ASSERT (aminoacid_energies[0hex05C] = high_energy); -ASSERT (aminoacid_energies[0hex05D] = low_energy); -ASSERT (aminoacid_energies[0hex05E] = high_energy); -ASSERT (aminoacid_energies[0hex05F] = high_energy); -ASSERT (aminoacid_energies[0hex060] = low_energy); -ASSERT (aminoacid_energies[0hex061] = high_energy); -ASSERT (aminoacid_energies[0hex062] = high_energy); -ASSERT (aminoacid_energies[0hex063] = low_energy); - -%%%%%%%%%%%%%% END OF AMINO ACID ENERGY ARRAY %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - -%%%%%%%%%%%%%%%%%%%%%%%%%% START OF RUNG LENGTH CONSTRAINTS %%%%%%%%%%%%%%%%%%%%%%%% - -ASSERT (alpha1_inner1_length = 0hex1 OR alpha1_inner1_length = 0hex2); -ASSERT (alpha1_outer1_length = 0hex1 OR alpha1_outer1_length = 0hex2); -ASSERT (alpha1_inner2_length = 0hex1 OR alpha1_inner2_length = 0hex2); -ASSERT (alpha1_outer2_length = 0hex1 OR alpha1_outer2_length = 0hex2); -ASSERT (alpha1_inner3_length = 0hex1 OR alpha1_inner3_length = 0hex2); -ASSERT (alpha1_outer3_length = 0hex1 OR alpha1_outer3_length = 0hex2); - -ASSERT (alpha2_inner1_length = 0hex1 OR alpha2_inner1_length = 0hex2); -ASSERT (alpha2_outer1_length = 0hex1 OR alpha2_outer1_length = 0hex2); -ASSERT (alpha2_inner2_length = 0hex1 OR alpha2_inner2_length = 0hex2); -ASSERT (alpha2_outer2_length = 0hex1 OR alpha2_outer2_length = 0hex2); -ASSERT (alpha2_inner3_length = 0hex1 OR alpha2_inner3_length = 0hex2); -ASSERT (alpha2_outer3_length = 0hex1 OR 
alpha2_outer3_length = 0hex2); - -ASSERT (alpha3_inner1_length = 0hex1 OR alpha3_inner1_length = 0hex2); -ASSERT (alpha3_outer1_length = 0hex1 OR alpha3_outer1_length = 0hex2); -ASSERT (alpha3_inner2_length = 0hex1 OR alpha3_inner2_length = 0hex2); -ASSERT (alpha3_outer2_length = 0hex1 OR alpha3_outer2_length = 0hex2); -ASSERT (alpha3_inner3_length = 0hex1 OR alpha3_inner3_length = 0hex2); -ASSERT (alpha3_outer3_length = 0hex1 OR alpha3_outer3_length = 0hex2); - -ASSERT (alpha4_inner1_length = 0hex1 OR alpha4_inner1_length = 0hex2); -ASSERT (alpha4_outer1_length = 0hex1 OR alpha4_outer1_length = 0hex2); -ASSERT (alpha4_inner2_length = 0hex1 OR alpha4_inner2_length = 0hex2); -ASSERT (alpha4_outer2_length = 0hex1 OR alpha4_outer2_length = 0hex2); -ASSERT (alpha4_inner3_length = 0hex1 OR alpha4_inner3_length = 0hex2); -ASSERT (alpha4_outer3_length = 0hex1 OR alpha4_outer3_length = 0hex2); - - -ASSERT (BVPLUS(4, alpha1_inner1_length, alpha1_outer1_length, alpha1_inner2_length, - alpha1_outer2_length, alpha1_inner3_length, alpha1_outer3_length) = 0hex9); -ASSERT (BVPLUS(4, alpha2_inner1_length, alpha2_outer1_length, alpha2_inner2_length, - alpha2_outer2_length, alpha2_inner3_length, alpha2_outer3_length) = 0hex9); -ASSERT (BVPLUS(4, alpha3_inner1_length, alpha3_outer1_length, alpha3_inner2_length, - alpha3_outer2_length, alpha3_inner3_length, alpha3_outer3_length) = 0hex9); -ASSERT (BVPLUS(4, alpha4_inner1_length, alpha4_outer1_length, alpha4_inner2_length, - alpha4_outer2_length, alpha4_inner3_length, alpha4_outer3_length) = 0hex9); - - -ASSERT (BVPLUS(4, alpha1_inner1_length, alpha1_outer1_length) = 0hex3); -ASSERT (BVPLUS(4, alpha1_inner2_length, alpha1_outer2_length) = 0hex3); -ASSERT (BVPLUS(4, alpha1_inner3_length, alpha1_outer3_length) = 0hex3); - -ASSERT (BVPLUS(4, alpha2_inner1_length, alpha2_outer1_length) = 0hex3); -ASSERT (BVPLUS(4, alpha2_inner2_length, alpha2_outer2_length) = 0hex3); -ASSERT (BVPLUS(4, alpha2_inner3_length, alpha2_outer3_length) = 0hex3); - -ASSERT (BVPLUS(4, alpha3_inner1_length, alpha3_outer1_length) = 0hex3); -ASSERT (BVPLUS(4, alpha3_inner2_length, alpha3_outer2_length) = 0hex3); -ASSERT (BVPLUS(4, alpha3_inner3_length, alpha3_outer3_length) = 0hex3); - -ASSERT (BVPLUS(4, alpha4_inner1_length, alpha4_outer1_length) = 0hex3); -ASSERT (BVPLUS(4, alpha4_inner2_length, alpha4_outer2_length) = 0hex3); -ASSERT (BVPLUS(4, alpha4_inner3_length, alpha4_outer3_length) = 0hex3); - -%%%%%%%%%%%%%%%%%%%%%%%%%% END OF RUNG LENGTH CONSTRAINTS %%%%%%%%%%%%%%%%%%%%%%%%%%%% - -%%%%%%%%%%%%%%%%%%%%%%%%%% END OF COIL LENGTH CONSTRAINTS %%%%%%%%%%%%%%%%%%%%%%%%%%%% - -ASSERT (BVLT(COIL_LEN_LOW_LIMIT,start_coil_length) AND BVLT(start_coil_length, COIL_LEN_HIGH_LIMIT)); -ASSERT (BVLT(COIL_LEN_LOW_LIMIT,coil_12_length) AND BVLT(coil_12_length, COIL_LEN_HIGH_LIMIT)); -ASSERT (BVLT(COIL_LEN_LOW_LIMIT,coil_23_length) AND BVLT(coil_23_length, COIL_LEN_HIGH_LIMIT)); -ASSERT (BVLT(COIL_LEN_LOW_LIMIT,coil_34_length) AND BVLT(coil_34_length, COIL_LEN_HIGH_LIMIT)); -ASSERT (BVLT(COIL_LEN_LOW_LIMIT,end_coil_length) AND BVLT(end_coil_length, COIL_LEN_HIGH_LIMIT)); - -ASSERT (0hex64 = BVPLUS(8,start_coil_length,coil_12_length,coil_23_length,coil_34_length,end_coil_length, - 0hex0@alpha1_inner1_length, 0hex0@alpha1_outer1_length, 0hex0@alpha1_inner2_length, - 0hex0@alpha1_outer2_length, 0hex0@alpha1_inner3_length, 0hex0@alpha1_outer3_length, - 0hex0@alpha2_inner1_length, 0hex0@alpha2_outer1_length, 0hex0@alpha2_inner2_length, - 0hex0@alpha2_outer2_length, 0hex0@alpha2_inner3_length, 
0hex0@alpha2_outer3_length, - 0hex0@alpha3_inner1_length, 0hex0@alpha3_outer1_length, 0hex0@alpha3_inner2_length, - 0hex0@alpha3_outer2_length, 0hex0@alpha3_inner3_length, 0hex0@alpha3_outer3_length, - 0hex0@alpha4_inner1_length, 0hex0@alpha4_outer1_length, 0hex0@alpha4_inner2_length, - 0hex0@alpha4_outer2_length, 0hex0@alpha4_inner3_length, 0hex0@alpha4_outer3_length)); - -ASSERT (0hex0@start_coil_length = BVPLUS(12,start_coil_index,0hex001)); -ASSERT (0hex0@coil_12_length = BVSUB(12,coil_12_index,alpha1_outer3_index)); -ASSERT (0hex0@coil_23_length = BVSUB(12,coil_23_index,alpha2_inner1_index)); -ASSERT (0hex0@coil_34_length = BVSUB(12,coil_34_index,alpha3_outer3_index)); -ASSERT (0hex0@end_coil_length = BVSUB(12,0hex063,alpha4_inner1_index)); - -%%%%%%%%%%%%%%%%%%%%%%%%%% END OF COIL LENGTH CONSTRAINTS %%%%%%%%%%%%%%%%%%%%%%%%%%%% - - -%%%%%%%%%%%%%%%%%%%%%%%%%% START OF CHAINING %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -ASSERT (BVLE(0hex000,start_coil_index) AND BVLE(start_coil_index,0hex010)); -ASSERT (alpha1_inner1_length = 0hex1 => alpha1_inner1_index = BVPLUS(12,start_coil_index,0hex001)); -ASSERT (alpha1_inner1_length = 0hex2 => alpha1_inner1_index = BVPLUS(12,start_coil_index,0hex002)); -ASSERT (alpha1_outer1_length = 0hex1 => alpha1_outer1_index = BVPLUS(12,alpha1_inner1_index,0hex001)); -ASSERT (alpha1_outer1_length = 0hex2 => alpha1_outer1_index = BVPLUS(12,alpha1_inner1_index,0hex002)); -ASSERT (alpha1_inner2_length = 0hex1 => alpha1_inner2_index = BVPLUS(12,alpha1_outer1_index,0hex001)); -ASSERT (alpha1_inner2_length = 0hex2 => alpha1_inner2_index = BVPLUS(12,alpha1_outer1_index,0hex002)); -ASSERT (alpha1_outer2_length = 0hex1 => alpha1_outer2_index = BVPLUS(12,alpha1_inner2_index,0hex001)); -ASSERT (alpha1_outer2_length = 0hex2 => alpha1_outer2_index = BVPLUS(12,alpha1_inner2_index,0hex002)); -ASSERT (alpha1_inner3_length = 0hex1 => alpha1_inner3_index = BVPLUS(12,alpha1_outer2_index,0hex001)); -ASSERT (alpha1_inner3_length = 0hex2 => alpha1_inner3_index = BVPLUS(12,alpha1_outer2_index,0hex002)); -ASSERT (alpha1_outer3_length = 0hex1 => alpha1_outer3_index = BVPLUS(12,alpha1_inner3_index,0hex001)); -ASSERT (alpha1_outer3_length = 0hex2 => alpha1_outer3_index = BVPLUS(12,alpha1_inner3_index,0hex002)); - -ASSERT (BVLE(BVPLUS(12,0hex001,alpha1_outer3_index),coil_12_index) AND BVLE(coil_12_index, BVPLUS(12,alpha1_outer3_index,0hex00A))); -ASSERT (alpha2_outer3_length = 0hex1 => alpha2_outer3_index = BVPLUS(12,coil_12_index,0hex001)); -ASSERT (alpha2_outer3_length = 0hex2 => alpha2_outer3_index = BVPLUS(12,coil_12_index,0hex002)); -ASSERT (alpha2_inner3_length = 0hex1 => alpha2_inner3_index = BVPLUS(12,alpha2_outer3_index,0hex001)); -ASSERT (alpha2_inner3_length = 0hex2 => alpha2_inner3_index = BVPLUS(12,alpha2_outer3_index,0hex002)); -ASSERT (alpha2_outer2_length = 0hex1 => alpha2_outer2_index = BVPLUS(12,alpha2_inner3_index,0hex001)); -ASSERT (alpha2_outer2_length = 0hex2 => alpha2_outer2_index = BVPLUS(12,alpha2_inner3_index,0hex002)); -ASSERT (alpha2_inner2_length = 0hex1 => alpha2_inner2_index = BVPLUS(12,alpha2_outer2_index,0hex001)); -ASSERT (alpha2_inner2_length = 0hex2 => alpha2_inner2_index = BVPLUS(12,alpha2_outer2_index,0hex002)); -ASSERT (alpha2_outer1_length = 0hex1 => alpha2_outer1_index = BVPLUS(12,alpha2_inner2_index,0hex001)); -ASSERT (alpha2_outer1_length = 0hex2 => alpha2_outer1_index = BVPLUS(12,alpha2_inner2_index,0hex002)); -ASSERT (alpha2_inner1_length = 0hex1 => alpha2_inner1_index = BVPLUS(12,alpha2_outer1_index,0hex001)); -ASSERT 
(alpha2_inner1_length = 0hex2 => alpha2_inner1_index = BVPLUS(12,alpha2_outer1_index,0hex002)); - -ASSERT (BVLE(BVPLUS(12,0hex001,alpha2_inner1_index),coil_23_index) AND BVLE(coil_23_index, BVPLUS(12,alpha2_inner1_index,0hex00A))); -ASSERT (alpha3_inner1_length = 0hex1 => alpha3_inner1_index = BVPLUS(12,coil_23_index,0hex001)); -ASSERT (alpha3_inner1_length = 0hex2 => alpha3_inner1_index = BVPLUS(12,coil_23_index,0hex002)); -ASSERT (alpha3_outer1_length = 0hex1 => alpha3_outer1_index = BVPLUS(12,alpha3_inner1_index,0hex001)); -ASSERT (alpha3_outer1_length = 0hex2 => alpha3_outer1_index = BVPLUS(12,alpha3_inner1_index,0hex002)); -ASSERT (alpha3_inner2_length = 0hex1 => alpha3_inner2_index = BVPLUS(12,alpha3_outer1_index,0hex001)); -ASSERT (alpha3_inner2_length = 0hex2 => alpha3_inner2_index = BVPLUS(12,alpha3_outer1_index,0hex002)); -ASSERT (alpha3_outer2_length = 0hex1 => alpha3_outer2_index = BVPLUS(12,alpha3_inner2_index,0hex001)); -ASSERT (alpha3_outer2_length = 0hex2 => alpha3_outer2_index = BVPLUS(12,alpha3_inner2_index,0hex002)); -ASSERT (alpha3_inner3_length = 0hex1 => alpha3_inner3_index = BVPLUS(12,alpha3_outer2_index,0hex001)); -ASSERT (alpha3_inner3_length = 0hex2 => alpha3_inner3_index = BVPLUS(12,alpha3_outer2_index,0hex002)); -ASSERT (alpha3_outer3_length = 0hex1 => alpha3_outer3_index = BVPLUS(12,alpha3_inner3_index,0hex001)); -ASSERT (alpha3_outer3_length = 0hex2 => alpha3_outer3_index = BVPLUS(12,alpha3_inner3_index,0hex002)); - -ASSERT (BVLE(BVPLUS(12,0hex001,alpha3_outer3_index),coil_34_index) AND BVLE(coil_34_index, BVPLUS(12,alpha3_outer3_index,0hex00A))); -ASSERT (alpha4_outer3_length = 0hex1 => alpha4_outer3_index = BVPLUS(12,coil_34_index,0hex001)); -ASSERT (alpha4_outer3_length = 0hex2 => alpha4_outer3_index = BVPLUS(12,coil_34_index,0hex002)); -ASSERT (alpha4_inner3_length = 0hex1 => alpha4_inner3_index = BVPLUS(12,alpha4_outer3_index,0hex001)); -ASSERT (alpha4_inner3_length = 0hex2 => alpha4_inner3_index = BVPLUS(12,alpha4_outer3_index,0hex002)); -ASSERT (alpha4_outer2_length = 0hex1 => alpha4_outer2_index = BVPLUS(12,alpha4_inner3_index,0hex001)); -ASSERT (alpha4_outer2_length = 0hex2 => alpha4_outer2_index = BVPLUS(12,alpha4_inner3_index,0hex002)); -ASSERT (alpha4_inner2_length = 0hex1 => alpha4_inner2_index = BVPLUS(12,alpha4_outer2_index,0hex001)); -ASSERT (alpha4_inner2_length = 0hex2 => alpha4_inner2_index = BVPLUS(12,alpha4_outer2_index,0hex002)); -ASSERT (alpha4_outer1_length = 0hex1 => alpha4_outer1_index = BVPLUS(12,alpha4_inner2_index,0hex001)); -ASSERT (alpha4_outer1_length = 0hex2 => alpha4_outer1_index = BVPLUS(12,alpha4_inner2_index,0hex002)); -ASSERT (alpha4_inner1_length = 0hex1 => alpha4_inner1_index = BVPLUS(12,alpha4_outer1_index,0hex001)); -ASSERT (alpha4_inner1_length = 0hex2 => alpha4_inner1_index = BVPLUS(12,alpha4_outer1_index,0hex002)); - -ASSERT (BVLE(BVPLUS(12,0hex001,alpha4_inner1_index),end_coil_index) AND BVLE(end_coil_index, BVPLUS(12,alpha4_inner1_index,0hex020))); -%%%%%%%%%%%%%%%%%%%%%%%%%% END OF CHAINING %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - -%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA1 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -ASSERT (alpha1_inner1_length = 0hex1 => alpha1_inner1_energy = aminoacid_energies[alpha1_inner1_index]); -ASSERT (alpha1_inner1_length = 0hex2 => alpha1_inner1_energy = BVPLUS(16,aminoacid_energies[alpha1_inner1_index], - aminoacid_energies[BVSUB(12,alpha1_inner1_index,0hex001)])); - -ASSERT (alpha1_outer1_length = 0hex1 => alpha1_outer1_energy = 
BVSUB(16,bits16_one,aminoacid_energies[alpha1_outer1_index])); -ASSERT (alpha1_outer1_length = 0hex2 => alpha1_outer1_energy = BVSUB(16,bits16_two,BVPLUS(16,aminoacid_energies[alpha1_outer1_index], - aminoacid_energies[BVSUB(12,alpha1_outer1_index,0hex001)]))); - - -ASSERT (alpha1_inner2_length = 0hex1 => alpha1_inner2_energy = aminoacid_energies[alpha1_inner2_index]); -ASSERT (alpha1_inner2_length = 0hex2 => alpha1_inner2_energy = BVPLUS(16,aminoacid_energies[alpha1_inner2_index], - aminoacid_energies[BVSUB(12,alpha1_inner2_index,0hex001)])); - - -ASSERT (alpha1_outer2_length = 0hex1 => alpha1_outer2_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha1_outer2_index])); -ASSERT (alpha1_outer2_length = 0hex2 => alpha1_outer2_energy = BVSUB(16,bits16_two,BVPLUS(16,aminoacid_energies[alpha1_outer2_index], - aminoacid_energies[BVSUB(12,alpha1_outer2_index,0hex001)]))); - -ASSERT (alpha1_inner3_length = 0hex1 => alpha1_inner3_energy = aminoacid_energies[alpha1_inner3_index]); -ASSERT (alpha1_inner3_length = 0hex2 => alpha1_inner3_energy = BVPLUS(16,aminoacid_energies[alpha1_inner3_index], - aminoacid_energies[BVSUB(12,alpha1_inner3_index,0hex001)])); - - -ASSERT (alpha1_outer3_length = 0hex1 => alpha1_outer3_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha1_outer3_index])); -ASSERT (alpha1_outer3_length = 0hex2 => alpha1_outer3_energy = BVSUB(16,bits16_two, BVPLUS(16,aminoacid_energies[alpha1_outer3_index], - aminoacid_energies[BVSUB(12,alpha1_outer3_index,0hex001)]))); - -%%%%%%%%%%%%%%%%%%%%%%%%%% END OF ENERGY COMPUTATION ALPHA1 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - -%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA2 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -ASSERT (alpha2_inner1_length = 0hex1 => alpha2_inner1_energy = aminoacid_energies[alpha2_inner1_index]); -ASSERT (alpha2_inner1_length = 0hex2 => alpha2_inner1_energy = BVPLUS(16,aminoacid_energies[alpha2_inner1_index], - aminoacid_energies[BVSUB(12,alpha2_inner1_index,0hex001)])); - -ASSERT (alpha2_outer1_length = 0hex1 => alpha2_outer1_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha2_outer1_index])); -ASSERT (alpha2_outer1_length = 0hex2 => alpha2_outer1_energy = BVSUB(16,bits16_two,BVPLUS(16,aminoacid_energies[alpha2_outer1_index], - aminoacid_energies[BVSUB(12,alpha2_outer1_index,0hex001)]))); - - -ASSERT (alpha2_inner2_length = 0hex1 => alpha2_inner2_energy = aminoacid_energies[alpha2_inner2_index]); -ASSERT (alpha2_inner2_length = 0hex2 => alpha2_inner2_energy = BVPLUS(16,aminoacid_energies[alpha2_inner2_index], - aminoacid_energies[BVSUB(12,alpha2_inner2_index,0hex001)])); - - -ASSERT (alpha2_outer2_length = 0hex1 => alpha2_outer2_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha2_outer2_index])); -ASSERT (alpha2_outer2_length = 0hex2 => alpha2_outer2_energy = BVSUB(16,bits16_two,BVPLUS(16,aminoacid_energies[alpha2_outer2_index], - aminoacid_energies[BVSUB(12,alpha2_outer2_index,0hex001)]))); - -ASSERT (alpha2_inner3_length = 0hex1 => alpha2_inner3_energy = aminoacid_energies[alpha2_inner3_index]); -ASSERT (alpha2_inner3_length = 0hex2 => alpha2_inner3_energy = BVPLUS(16,aminoacid_energies[alpha2_inner3_index], - aminoacid_energies[BVSUB(12,alpha2_inner3_index,0hex001)])); - - -ASSERT (alpha2_outer3_length = 0hex1 => alpha2_outer3_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha2_outer3_index])); -ASSERT (alpha2_outer3_length = 0hex2 => alpha2_outer3_energy = BVSUB(16,bits16_two, BVPLUS(16,aminoacid_energies[alpha2_outer3_index], - aminoacid_energies[BVSUB(12,alpha2_outer3_index,0hex001)]))); - 
-%%%%%%%%%%%%%%%%%%%%%%%%%% END OF ENERGY COMPUTATION ALPHA2 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - -%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA3 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -ASSERT (alpha3_inner1_length = 0hex1 => alpha3_inner1_energy = aminoacid_energies[alpha3_inner1_index]); -ASSERT (alpha3_inner1_length = 0hex2 => alpha3_inner1_energy = BVPLUS(16,aminoacid_energies[alpha3_inner1_index], - aminoacid_energies[BVSUB(12,alpha3_inner1_index,0hex001)])); - -ASSERT (alpha3_outer1_length = 0hex1 => alpha3_outer1_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha3_outer1_index])); -ASSERT (alpha3_outer1_length = 0hex2 => alpha3_outer1_energy = BVSUB(16,bits16_two,BVPLUS(16,aminoacid_energies[alpha3_outer1_index], - aminoacid_energies[BVSUB(12,alpha3_outer1_index,0hex001)]))); - - -ASSERT (alpha3_inner2_length = 0hex1 => alpha3_inner2_energy = aminoacid_energies[alpha3_inner2_index]); -ASSERT (alpha3_inner2_length = 0hex2 => alpha3_inner2_energy = BVPLUS(16,aminoacid_energies[alpha3_inner2_index], - aminoacid_energies[BVSUB(12,alpha3_inner2_index,0hex001)])); - - -ASSERT (alpha3_outer2_length = 0hex1 => alpha3_outer2_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha3_outer2_index])); -ASSERT (alpha3_outer2_length = 0hex2 => alpha3_outer2_energy = BVSUB(16,bits16_two,BVPLUS(16,aminoacid_energies[alpha3_outer2_index], - aminoacid_energies[BVSUB(12,alpha3_outer2_index,0hex001)]))); - -ASSERT (alpha3_inner3_length = 0hex1 => alpha3_inner3_energy = aminoacid_energies[alpha3_inner3_index]); -ASSERT (alpha3_inner3_length = 0hex2 => alpha3_inner3_energy = BVPLUS(16,aminoacid_energies[alpha3_inner3_index], - aminoacid_energies[BVSUB(12,alpha3_inner3_index,0hex001)])); - - -ASSERT (alpha3_outer3_length = 0hex1 => alpha3_outer3_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha3_outer3_index])); -ASSERT (alpha3_outer3_length = 0hex2 => alpha3_outer3_energy = BVSUB(16,bits16_two, BVPLUS(16,aminoacid_energies[alpha3_outer3_index], - aminoacid_energies[BVSUB(12,alpha3_outer3_index,0hex001)]))); - -%%%%%%%%%%%%%%%%%%%%%%%%%% END OF ENERGY COMPUTATION ALPHA3 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - -%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA4 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - -ASSERT (alpha4_inner1_length = 0hex1 => alpha4_inner1_energy = aminoacid_energies[alpha4_inner1_index]); -ASSERT (alpha4_inner1_length = 0hex2 => alpha4_inner1_energy = BVPLUS(16,aminoacid_energies[alpha4_inner1_index], - aminoacid_energies[BVSUB(12,alpha4_inner1_index,0hex001)])); - -ASSERT (alpha4_outer1_length = 0hex1 => alpha4_outer1_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha4_outer1_index])); -ASSERT (alpha4_outer1_length = 0hex2 => alpha4_outer1_energy = BVSUB(16,bits16_two,BVPLUS(16,aminoacid_energies[alpha4_outer1_index], - aminoacid_energies[BVSUB(12,alpha4_outer1_index,0hex001)]))); - - -ASSERT (alpha4_inner2_length = 0hex1 => alpha4_inner2_energy = aminoacid_energies[alpha4_inner2_index]); -ASSERT (alpha4_inner2_length = 0hex2 => alpha4_inner2_energy = BVPLUS(16,aminoacid_energies[alpha4_inner2_index], - aminoacid_energies[BVSUB(12,alpha4_inner2_index,0hex001)])); - - -ASSERT (alpha4_outer2_length = 0hex1 => alpha4_outer2_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha4_outer2_index])); -ASSERT (alpha4_outer2_length = 0hex2 => alpha4_outer2_energy = BVSUB(16,bits16_two,BVPLUS(16,aminoacid_energies[alpha4_outer2_index], - aminoacid_energies[BVSUB(12,alpha4_outer2_index,0hex001)]))); - -ASSERT (alpha4_inner3_length = 0hex1 => alpha4_inner3_energy = 
aminoacid_energies[alpha4_inner3_index]);
-ASSERT (alpha4_inner3_length = 0hex2 => alpha4_inner3_energy = BVPLUS(16,aminoacid_energies[alpha4_inner3_index],
-                                                                aminoacid_energies[BVSUB(12,alpha4_inner3_index,0hex001)]));
-
-
-ASSERT (alpha4_outer3_length = 0hex1 => alpha4_outer3_energy = BVSUB(16,bits16_one,aminoacid_energies[alpha4_outer3_index]));
-ASSERT (alpha4_outer3_length = 0hex2 => alpha4_outer3_energy = BVSUB(16,bits16_two, BVPLUS(16,aminoacid_energies[alpha4_outer3_index],
-                                                                aminoacid_energies[BVSUB(12,alpha4_outer3_index,0hex001)])));
-
-%%%%%%%%%%%%%%%%%%%%%%%%%% END OF ENERGY COMPUTATION ALPHA4 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-ASSERT (contact_energy12_zero = BVPLUS(16, alpha1_inner1_energy, alpha2_inner1_energy));
-ASSERT (contact_energy12_one = BVPLUS(16, alpha1_inner2_energy, alpha2_inner2_energy));
-ASSERT (contact_energy12_two = BVPLUS(16, alpha1_inner3_energy, alpha2_inner3_energy));
-
-
-ASSERT (contact_energy23_zero = BVPLUS(16, alpha2_inner1_energy, alpha3_inner1_energy));
-ASSERT (contact_energy23_one = BVPLUS(16, alpha2_inner2_energy, alpha3_inner2_energy));
-ASSERT (contact_energy23_two = BVPLUS(16, alpha2_inner3_energy, alpha3_inner3_energy));
-
-
-ASSERT (contact_energy34_zero = BVPLUS(16, alpha3_inner1_energy, alpha4_inner1_energy));
-ASSERT (contact_energy34_one = BVPLUS(16, alpha3_inner2_energy, alpha4_inner2_energy));
-ASSERT (contact_energy34_two = BVPLUS(16, alpha3_inner3_energy, alpha4_inner3_energy));
-
-ASSERT (contact_energy41_zero = BVPLUS(16, alpha4_inner1_energy, alpha1_inner1_energy));
-ASSERT (contact_energy41_one = BVPLUS(16, alpha4_inner2_energy, alpha1_inner2_energy));
-ASSERT (contact_energy41_two = BVPLUS(16, alpha4_inner3_energy, alpha1_inner3_energy));
-
-ASSERT (outer_energy = BVPLUS(16, alpha1_outer1_energy, alpha1_outer2_energy, alpha1_outer3_energy,
-                                  alpha2_outer1_energy, alpha2_outer2_energy, alpha2_outer3_energy,
-                                  alpha3_outer1_energy, alpha3_outer2_energy, alpha3_outer3_energy,
-                                  alpha4_outer1_energy, alpha4_outer2_energy, alpha4_outer3_energy));
-
-
-ASSERT (inner_energy = BVPLUS(16, contact_energy12_zero, contact_energy12_one, contact_energy12_two,
-                                  contact_energy23_zero, contact_energy23_one, contact_energy23_two,
-                                  contact_energy34_zero, contact_energy34_one, contact_energy34_two,
-                                  contact_energy41_zero, contact_energy41_one, contact_energy41_two));
-
-
-% final query
-ASSERT (BVGE(inner_energy, 0hex00F0));
-QUERY FALSE;
\ No newline at end of file
diff --git a/tests/bio-tests/4-alpha-helices-3-rungs-macros.cvc b/tests/bio-tests/4-alpha-helices-3-rungs-macros.cvc
new file mode 100644
index 0000000..94337a9
--- /dev/null
+++ b/tests/bio-tests/4-alpha-helices-3-rungs-macros.cvc
@@ -0,0 +1,500 @@
+% 4 alpha helices, each with 9 residues spread over 6 faces (3 inner
+% and 3 outer, one inner/outer pair per rung). Each face has 1 or 2 residues.
+% Each face is encoded with bit-vector variables for length, index and energy.
+ + +%%% ALPHA1 Helix variables +alpha1_inner1_length, alpha1_outer1_length, alpha1_inner2_length, alpha1_outer2_length, alpha1_inner3_length, alpha1_outer3_length : BITVECTOR(4); +alpha1_inner1_index, alpha1_outer1_index, alpha1_inner2_index, alpha1_outer2_index, alpha1_inner3_index, alpha1_outer3_index : BITVECTOR(12); +alpha1_inner1_energy, alpha1_outer1_energy, alpha1_inner2_energy, alpha1_outer2_energy, alpha1_inner3_energy, alpha1_outer3_energy : BITVECTOR(16); + + +%%% ALPHA2 Helix variables +alpha2_inner1_length, alpha2_outer1_length, alpha2_inner2_length, alpha2_outer2_length, alpha2_inner3_length, alpha2_outer3_length : BITVECTOR(4); +alpha2_inner1_index, alpha2_outer1_index, alpha2_inner2_index, alpha2_outer2_index, alpha2_inner3_index, alpha2_outer3_index : BITVECTOR(12); +alpha2_inner1_energy, alpha2_outer1_energy, alpha2_inner2_energy, alpha2_outer2_energy, alpha2_inner3_energy, alpha2_outer3_energy : BITVECTOR(16); + + +%%% ALPHA3 Helix variables +alpha3_inner1_length, alpha3_outer1_length, alpha3_inner2_length, alpha3_outer2_length, alpha3_inner3_length, alpha3_outer3_length : BITVECTOR(4); +alpha3_inner1_index, alpha3_outer1_index, alpha3_inner2_index, alpha3_outer2_index, alpha3_inner3_index, alpha3_outer3_index : BITVECTOR(12); +alpha3_inner1_energy, alpha3_outer1_energy, alpha3_inner2_energy, alpha3_outer2_energy, alpha3_inner3_energy, alpha3_outer3_energy : BITVECTOR(16); + + +%%% ALPHA4 Helix variables +alpha4_inner1_length, alpha4_outer1_length, alpha4_inner2_length, alpha4_outer2_length, alpha4_inner3_length, alpha4_outer3_length : BITVECTOR(4); +alpha4_inner1_index, alpha4_outer1_index, alpha4_inner2_index, alpha4_outer2_index, alpha4_inner3_index, alpha4_outer3_index : BITVECTOR(12); +alpha4_inner1_energy, alpha4_outer1_energy, alpha4_inner2_energy, alpha4_outer2_energy, alpha4_inner3_energy, alpha4_outer3_energy : BITVECTOR(16); + +start_coil_length, coil_12_length, coil_23_length, coil_34_length, end_coil_length : BITVECTOR(8); +start_coil_index, coil_12_index, coil_23_index, coil_34_index, end_coil_index : BITVECTOR(12); +start_coil_energy, coil_12_energy, coil_23_energy, coil_34_energy, end_coil_energy : BITVECTOR(16); + +contact_energy12_zero, contact_energy12_one, contact_energy12_two : BITVECTOR(16); +contact_energy23_zero, contact_energy23_one, contact_energy23_two : BITVECTOR(16); +contact_energy34_zero, contact_energy34_one, contact_energy34_two : BITVECTOR(16); +contact_energy41_zero, contact_energy41_one, contact_energy41_two : BITVECTOR(16); +outer_energy : BITVECTOR(16); +inner_energy : BITVECTOR(16); + + +%%%%%%%%%%%%%%%%%%%%%%%% START OF MACROS %%%%%%%%%%%%%%%%%%%%%%% +BITS12_ZERO_MACRO : BITVECTOR(12); +BITS12_ONE_MACRO : BITVECTOR(12); +BITS16_ONE_MACRO : BITVECTOR(16); +BITS16_TWO_MACRO : BITVECTOR(16); + +HIGH_ENERGY_MACRO : BITVECTOR(16); +LOW_ENERGY_MACRO : BITVECTOR(16); + +FACE_LEN1_MACRO : BITVECTOR(4); +FACE_LEN2_MACRO : BITVECTOR(4); +RUNG_LEN_MACRO : BITVECTOR(4); +HELIX_LEN_MACRO : BITVECTOR(4); +COIL_LEN_LOW_MACRO : BITVECTOR(8); +COIL_LEN_HIGH_MACRO : BITVECTOR(8); + +LAST_ARRAYINDEX_MACRO : BITVECTOR(12); +STARTCOIL_HIGHINDEX_MACRO : BITVECTOR(12); +COIL12_HIGHINDEX_MACRO : BITVECTOR(12); +COIL23_HIGHINDEX_MACRO : BITVECTOR(12); +COIL34_HIGHINDEX_MACRO : BITVECTOR(12); +ENDCOIL_HIGHINDEX_MACRO : BITVECTOR(12); + +ASSERT (BITS12_ZERO_MACRO = 0hex000); +ASSERT (BITS12_ONE_MACRO = 0hex001); +ASSERT (BITS16_ONE_MACRO = 0hex0001); +ASSERT (BITS16_TWO_MACRO = 0hex0002); + +ASSERT (HIGH_ENERGY_MACRO = 0hex0001); +ASSERT (LOW_ENERGY_MACRO = 
0hex0000); + +ASSERT (FACE_LEN1_MACRO = 0hex1); +ASSERT (FACE_LEN2_MACRO = 0hex2); +ASSERT (RUNG_LEN_MACRO = 0hex3); +ASSERT (HELIX_LEN_MACRO = 0hex9); +ASSERT (COIL_LEN_LOW_MACRO = 0hex00); +ASSERT (COIL_LEN_HIGH_MACRO = 0hex20); + +ASSERT (LAST_ARRAYINDEX_MACRO = 0hex063); +ASSERT (STARTCOIL_HIGHINDEX_MACRO = 0hex010); +ASSERT (COIL12_HIGHINDEX_MACRO = 0hex00A); +ASSERT (COIL23_HIGHINDEX_MACRO = 0hex00A); +ASSERT (COIL34_HIGHINDEX_MACRO = 0hex00A); +ASSERT (ENDCOIL_HIGHINDEX_MACRO = 0hex020); + +%%%%%%%%%%%%%%%%%%%%%%%% END OF MACROS %%%%%%%%%%%%%%%%%%%%%% + + +%%%%%%%%%%%%%% START OF AMINO ACID ENERGY ARRAY %%%%%%%%%%%%% +aminoacid_energies : ARRAY BITVECTOR(12) OF BITVECTOR(16); +ASSERT (aminoacid_energies[0hex000] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex001] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex002] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex003] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex004] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex005] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex006] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex007] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex008] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex009] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex00A] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex00B] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex00C] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex00D] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex00E] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex00F] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex010] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex011] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex012] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex013] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex014] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex015] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex016] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex017] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex018] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex019] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex01A] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex01B] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex01C] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex01D] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex01E] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex01F] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex020] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex021] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex022] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex023] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex024] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex025] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex026] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex027] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex028] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex029] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex02A] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex02B] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex02C] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex02D] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex02E] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex02F] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex030] = 
LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex031] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex032] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex033] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex034] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex035] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex036] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex037] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex038] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex039] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex03A] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex03B] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex03C] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex03D] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex03E] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex03F] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex040] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex041] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex042] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex043] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex044] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex045] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex046] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex047] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex048] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex049] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex04A] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex04B] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex04C] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex04D] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex04E] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex04F] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex050] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex051] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex052] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex053] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex054] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex055] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex056] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex057] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex058] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex059] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex05A] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex05B] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex05C] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex05D] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex05E] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex05F] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex060] = LOW_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex061] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex062] = HIGH_ENERGY_MACRO); +ASSERT (aminoacid_energies[0hex063] = LOW_ENERGY_MACRO); +%%%%%%%%%%%%%% END OF AMINO ACID ENERGY ARRAY %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + +%%%%%%%%%%%%%%%%%%%%%%%%%% START OF RUNG LENGTH CONSTRAINTS %%%%%%%%%%%%%%%%%%%%%%%% + +ASSERT (alpha1_inner1_length = FACE_LEN1_MACRO OR alpha1_inner1_length = FACE_LEN2_MACRO); +ASSERT (alpha1_outer1_length = FACE_LEN1_MACRO OR alpha1_outer1_length = FACE_LEN2_MACRO); +ASSERT (alpha1_inner2_length = FACE_LEN1_MACRO OR alpha1_inner2_length = FACE_LEN2_MACRO); +ASSERT (alpha1_outer2_length = FACE_LEN1_MACRO OR alpha1_outer2_length = FACE_LEN2_MACRO); 
+ASSERT (alpha1_inner3_length = FACE_LEN1_MACRO OR alpha1_inner3_length = FACE_LEN2_MACRO); +ASSERT (alpha1_outer3_length = FACE_LEN1_MACRO OR alpha1_outer3_length = FACE_LEN2_MACRO); + +ASSERT (alpha2_inner1_length = FACE_LEN1_MACRO OR alpha2_inner1_length = FACE_LEN2_MACRO); +ASSERT (alpha2_outer1_length = FACE_LEN1_MACRO OR alpha2_outer1_length = FACE_LEN2_MACRO); +ASSERT (alpha2_inner2_length = FACE_LEN1_MACRO OR alpha2_inner2_length = FACE_LEN2_MACRO); +ASSERT (alpha2_outer2_length = FACE_LEN1_MACRO OR alpha2_outer2_length = FACE_LEN2_MACRO); +ASSERT (alpha2_inner3_length = FACE_LEN1_MACRO OR alpha2_inner3_length = FACE_LEN2_MACRO); +ASSERT (alpha2_outer3_length = FACE_LEN1_MACRO OR alpha2_outer3_length = FACE_LEN2_MACRO); + +ASSERT (alpha3_inner1_length = FACE_LEN1_MACRO OR alpha3_inner1_length = FACE_LEN2_MACRO); +ASSERT (alpha3_outer1_length = FACE_LEN1_MACRO OR alpha3_outer1_length = FACE_LEN2_MACRO); +ASSERT (alpha3_inner2_length = FACE_LEN1_MACRO OR alpha3_inner2_length = FACE_LEN2_MACRO); +ASSERT (alpha3_outer2_length = FACE_LEN1_MACRO OR alpha3_outer2_length = FACE_LEN2_MACRO); +ASSERT (alpha3_inner3_length = FACE_LEN1_MACRO OR alpha3_inner3_length = FACE_LEN2_MACRO); +ASSERT (alpha3_outer3_length = FACE_LEN1_MACRO OR alpha3_outer3_length = FACE_LEN2_MACRO); + +ASSERT (alpha4_inner1_length = FACE_LEN1_MACRO OR alpha4_inner1_length = FACE_LEN2_MACRO); +ASSERT (alpha4_outer1_length = FACE_LEN1_MACRO OR alpha4_outer1_length = FACE_LEN2_MACRO); +ASSERT (alpha4_inner2_length = FACE_LEN1_MACRO OR alpha4_inner2_length = FACE_LEN2_MACRO); +ASSERT (alpha4_outer2_length = FACE_LEN1_MACRO OR alpha4_outer2_length = FACE_LEN2_MACRO); +ASSERT (alpha4_inner3_length = FACE_LEN1_MACRO OR alpha4_inner3_length = FACE_LEN2_MACRO); +ASSERT (alpha4_outer3_length = FACE_LEN1_MACRO OR alpha4_outer3_length = FACE_LEN2_MACRO); + + +ASSERT (BVPLUS(4, alpha1_inner1_length, alpha1_outer1_length, alpha1_inner2_length, + alpha1_outer2_length, alpha1_inner3_length, alpha1_outer3_length) = HELIX_LEN_MACRO); +ASSERT (BVPLUS(4, alpha2_inner1_length, alpha2_outer1_length, alpha2_inner2_length, + alpha2_outer2_length, alpha2_inner3_length, alpha2_outer3_length) = HELIX_LEN_MACRO); +ASSERT (BVPLUS(4, alpha3_inner1_length, alpha3_outer1_length, alpha3_inner2_length, + alpha3_outer2_length, alpha3_inner3_length, alpha3_outer3_length) = HELIX_LEN_MACRO); +ASSERT (BVPLUS(4, alpha4_inner1_length, alpha4_outer1_length, alpha4_inner2_length, + alpha4_outer2_length, alpha4_inner3_length, alpha4_outer3_length) = HELIX_LEN_MACRO); + + +ASSERT (BVPLUS(4, alpha1_inner1_length, alpha1_outer1_length) = RUNG_LEN_MACRO); +ASSERT (BVPLUS(4, alpha1_inner2_length, alpha1_outer2_length) = RUNG_LEN_MACRO); +ASSERT (BVPLUS(4, alpha1_inner3_length, alpha1_outer3_length) = RUNG_LEN_MACRO); + +ASSERT (BVPLUS(4, alpha2_inner1_length, alpha2_outer1_length) = RUNG_LEN_MACRO); +ASSERT (BVPLUS(4, alpha2_inner2_length, alpha2_outer2_length) = RUNG_LEN_MACRO); +ASSERT (BVPLUS(4, alpha2_inner3_length, alpha2_outer3_length) = RUNG_LEN_MACRO); + +ASSERT (BVPLUS(4, alpha3_inner1_length, alpha3_outer1_length) = RUNG_LEN_MACRO); +ASSERT (BVPLUS(4, alpha3_inner2_length, alpha3_outer2_length) = RUNG_LEN_MACRO); +ASSERT (BVPLUS(4, alpha3_inner3_length, alpha3_outer3_length) = RUNG_LEN_MACRO); + +ASSERT (BVPLUS(4, alpha4_inner1_length, alpha4_outer1_length) = RUNG_LEN_MACRO); +ASSERT (BVPLUS(4, alpha4_inner2_length, alpha4_outer2_length) = RUNG_LEN_MACRO); +ASSERT (BVPLUS(4, alpha4_inner3_length, alpha4_outer3_length) = RUNG_LEN_MACRO); + 
+%%%%%%%%%%%%%%%%%%%%%%%%%% END OF RUNG LENGTH CONSTRAINTS %%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%% START OF COIL LENGTH CONSTRAINTS %%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ASSERT (BVLT(COIL_LEN_LOW_MACRO,start_coil_length) AND BVLT(start_coil_length, COIL_LEN_HIGH_MACRO));
+ASSERT (BVLT(COIL_LEN_LOW_MACRO,coil_12_length) AND BVLT(coil_12_length, COIL_LEN_HIGH_MACRO));
+ASSERT (BVLT(COIL_LEN_LOW_MACRO,coil_23_length) AND BVLT(coil_23_length, COIL_LEN_HIGH_MACRO));
+ASSERT (BVLT(COIL_LEN_LOW_MACRO,coil_34_length) AND BVLT(coil_34_length, COIL_LEN_HIGH_MACRO));
+ASSERT (BVLT(COIL_LEN_LOW_MACRO,end_coil_length) AND BVLT(end_coil_length, COIL_LEN_HIGH_MACRO));
+
+ASSERT (0hex64 = BVPLUS(8,start_coil_length,coil_12_length,coil_23_length,coil_34_length,end_coil_length,
+                        0hex0@alpha1_inner1_length, 0hex0@alpha1_outer1_length, 0hex0@alpha1_inner2_length,
+                        0hex0@alpha1_outer2_length, 0hex0@alpha1_inner3_length, 0hex0@alpha1_outer3_length,
+                        0hex0@alpha2_inner1_length, 0hex0@alpha2_outer1_length, 0hex0@alpha2_inner2_length,
+                        0hex0@alpha2_outer2_length, 0hex0@alpha2_inner3_length, 0hex0@alpha2_outer3_length,
+                        0hex0@alpha3_inner1_length, 0hex0@alpha3_outer1_length, 0hex0@alpha3_inner2_length,
+                        0hex0@alpha3_outer2_length, 0hex0@alpha3_inner3_length, 0hex0@alpha3_outer3_length,
+                        0hex0@alpha4_inner1_length, 0hex0@alpha4_outer1_length, 0hex0@alpha4_inner2_length,
+                        0hex0@alpha4_outer2_length, 0hex0@alpha4_inner3_length, 0hex0@alpha4_outer3_length));
+
+ASSERT (0hex0@start_coil_length = BVPLUS(12,start_coil_index,BITS12_ONE_MACRO));
+ASSERT (0hex0@coil_12_length = BVSUB(12,coil_12_index,alpha1_outer3_index));
+ASSERT (0hex0@coil_23_length = BVSUB(12,coil_23_index,alpha2_inner1_index));
+ASSERT (0hex0@coil_34_length = BVSUB(12,coil_34_index,alpha3_outer3_index));
+ASSERT (0hex0@end_coil_length = BVSUB(12,LAST_ARRAYINDEX_MACRO,alpha4_inner1_index));
+
+%%%%%%%%%%%%%%%%%%%%%%%%%% END OF COIL LENGTH CONSTRAINTS %%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%% START OF CHAINING %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+ASSERT (BVLE(BITS12_ZERO_MACRO,start_coil_index) AND BVLE(start_coil_index,STARTCOIL_HIGHINDEX_MACRO));
+ASSERT (alpha1_inner1_length = FACE_LEN1_MACRO => alpha1_inner1_index = BVPLUS(12,start_coil_index,0hex00@FACE_LEN1_MACRO));
+ASSERT (alpha1_inner1_length = FACE_LEN2_MACRO => alpha1_inner1_index = BVPLUS(12,start_coil_index,0hex00@FACE_LEN2_MACRO));
+ASSERT (alpha1_outer1_length = FACE_LEN1_MACRO => alpha1_outer1_index = BVPLUS(12,alpha1_inner1_index,0hex00@FACE_LEN1_MACRO));
+ASSERT (alpha1_outer1_length = FACE_LEN2_MACRO => alpha1_outer1_index = BVPLUS(12,alpha1_inner1_index,0hex00@FACE_LEN2_MACRO));
+ASSERT (alpha1_inner2_length = FACE_LEN1_MACRO => alpha1_inner2_index = BVPLUS(12,alpha1_outer1_index,0hex00@FACE_LEN1_MACRO));
+ASSERT (alpha1_inner2_length = FACE_LEN2_MACRO => alpha1_inner2_index = BVPLUS(12,alpha1_outer1_index,0hex00@FACE_LEN2_MACRO));
+ASSERT (alpha1_outer2_length = FACE_LEN1_MACRO => alpha1_outer2_index = BVPLUS(12,alpha1_inner2_index,0hex00@FACE_LEN1_MACRO));
+ASSERT (alpha1_outer2_length = FACE_LEN2_MACRO => alpha1_outer2_index = BVPLUS(12,alpha1_inner2_index,0hex00@FACE_LEN2_MACRO));
+ASSERT (alpha1_inner3_length = FACE_LEN1_MACRO => alpha1_inner3_index = BVPLUS(12,alpha1_outer2_index,0hex00@FACE_LEN1_MACRO));
+ASSERT (alpha1_inner3_length = FACE_LEN2_MACRO => alpha1_inner3_index = BVPLUS(12,alpha1_outer2_index,0hex00@FACE_LEN2_MACRO));
+ASSERT (alpha1_outer3_length = FACE_LEN1_MACRO => alpha1_outer3_index = 
BVPLUS(12,alpha1_inner3_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha1_outer3_length = FACE_LEN2_MACRO => alpha1_outer3_index = BVPLUS(12,alpha1_inner3_index,0hex00@FACE_LEN2_MACRO)); + +ASSERT (BVLE(BVPLUS(12,BITS12_ONE_MACRO,alpha1_outer3_index),coil_12_index) AND BVLE(coil_12_index, BVPLUS(12,alpha1_outer3_index,COIL12_HIGHINDEX_MACRO))); +ASSERT (alpha2_outer3_length = FACE_LEN1_MACRO => alpha2_outer3_index = BVPLUS(12,coil_12_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha2_outer3_length = FACE_LEN2_MACRO => alpha2_outer3_index = BVPLUS(12,coil_12_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha2_inner3_length = FACE_LEN1_MACRO => alpha2_inner3_index = BVPLUS(12,alpha2_outer3_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha2_inner3_length = FACE_LEN2_MACRO => alpha2_inner3_index = BVPLUS(12,alpha2_outer3_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha2_outer2_length = FACE_LEN1_MACRO => alpha2_outer2_index = BVPLUS(12,alpha2_inner3_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha2_outer2_length = FACE_LEN2_MACRO => alpha2_outer2_index = BVPLUS(12,alpha2_inner3_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha2_inner2_length = FACE_LEN1_MACRO => alpha2_inner2_index = BVPLUS(12,alpha2_outer2_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha2_inner2_length = FACE_LEN2_MACRO => alpha2_inner2_index = BVPLUS(12,alpha2_outer2_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha2_outer1_length = FACE_LEN1_MACRO => alpha2_outer1_index = BVPLUS(12,alpha2_inner2_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha2_outer1_length = FACE_LEN2_MACRO => alpha2_outer1_index = BVPLUS(12,alpha2_inner2_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha2_inner1_length = FACE_LEN1_MACRO => alpha2_inner1_index = BVPLUS(12,alpha2_outer1_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha2_inner1_length = FACE_LEN2_MACRO => alpha2_inner1_index = BVPLUS(12,alpha2_outer1_index,0hex00@FACE_LEN2_MACRO)); + +ASSERT (BVLE(BVPLUS(12,BITS12_ONE_MACRO,alpha2_inner1_index),coil_23_index) AND BVLE(coil_23_index, BVPLUS(12,alpha2_inner1_index,COIL23_HIGHINDEX_MACRO))); +ASSERT (alpha3_inner1_length = FACE_LEN1_MACRO => alpha3_inner1_index = BVPLUS(12,coil_23_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha3_inner1_length = FACE_LEN2_MACRO => alpha3_inner1_index = BVPLUS(12,coil_23_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha3_outer1_length = FACE_LEN1_MACRO => alpha3_outer1_index = BVPLUS(12,alpha3_inner1_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha3_outer1_length = FACE_LEN2_MACRO => alpha3_outer1_index = BVPLUS(12,alpha3_inner1_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha3_inner2_length = FACE_LEN1_MACRO => alpha3_inner2_index = BVPLUS(12,alpha3_outer1_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha3_inner2_length = FACE_LEN2_MACRO => alpha3_inner2_index = BVPLUS(12,alpha3_outer1_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha3_outer2_length = FACE_LEN1_MACRO => alpha3_outer2_index = BVPLUS(12,alpha3_inner2_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha3_outer2_length = FACE_LEN2_MACRO => alpha3_outer2_index = BVPLUS(12,alpha3_inner2_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha3_inner3_length = FACE_LEN1_MACRO => alpha3_inner3_index = BVPLUS(12,alpha3_outer2_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha3_inner3_length = FACE_LEN2_MACRO => alpha3_inner3_index = BVPLUS(12,alpha3_outer2_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha3_outer3_length = FACE_LEN1_MACRO => alpha3_outer3_index = BVPLUS(12,alpha3_inner3_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha3_outer3_length = FACE_LEN2_MACRO => alpha3_outer3_index = 
BVPLUS(12,alpha3_inner3_index,0hex00@FACE_LEN2_MACRO)); + +ASSERT (BVLE(BVPLUS(12,BITS12_ONE_MACRO,alpha3_outer3_index),coil_34_index) AND BVLE(coil_34_index, BVPLUS(12,alpha3_outer3_index,COIL34_HIGHINDEX_MACRO))); +ASSERT (alpha4_outer3_length = FACE_LEN1_MACRO => alpha4_outer3_index = BVPLUS(12,coil_34_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha4_outer3_length = FACE_LEN2_MACRO => alpha4_outer3_index = BVPLUS(12,coil_34_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha4_inner3_length = FACE_LEN1_MACRO => alpha4_inner3_index = BVPLUS(12,alpha4_outer3_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha4_inner3_length = FACE_LEN2_MACRO => alpha4_inner3_index = BVPLUS(12,alpha4_outer3_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha4_outer2_length = FACE_LEN1_MACRO => alpha4_outer2_index = BVPLUS(12,alpha4_inner3_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha4_outer2_length = FACE_LEN2_MACRO => alpha4_outer2_index = BVPLUS(12,alpha4_inner3_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha4_inner2_length = FACE_LEN1_MACRO => alpha4_inner2_index = BVPLUS(12,alpha4_outer2_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha4_inner2_length = FACE_LEN2_MACRO => alpha4_inner2_index = BVPLUS(12,alpha4_outer2_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha4_outer1_length = FACE_LEN1_MACRO => alpha4_outer1_index = BVPLUS(12,alpha4_inner2_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha4_outer1_length = FACE_LEN2_MACRO => alpha4_outer1_index = BVPLUS(12,alpha4_inner2_index,0hex00@FACE_LEN2_MACRO)); +ASSERT (alpha4_inner1_length = FACE_LEN1_MACRO => alpha4_inner1_index = BVPLUS(12,alpha4_outer1_index,0hex00@FACE_LEN1_MACRO)); +ASSERT (alpha4_inner1_length = FACE_LEN2_MACRO => alpha4_inner1_index = BVPLUS(12,alpha4_outer1_index,0hex00@FACE_LEN2_MACRO)); + +ASSERT (BVLE(BVPLUS(12,BITS12_ONE_MACRO,alpha4_inner1_index),end_coil_index) AND BVLE(end_coil_index, BVPLUS(12,alpha4_inner1_index,ENDCOIL_HIGHINDEX_MACRO))); +%%%%%%%%%%%%%%%%%%%%%%%%%% END OF CHAINING %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + +%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA1 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ASSERT (alpha1_inner1_length = FACE_LEN1_MACRO => alpha1_inner1_energy = aminoacid_energies[alpha1_inner1_index]); +ASSERT (alpha1_inner1_length = FACE_LEN2_MACRO => alpha1_inner1_energy = BVPLUS(16,aminoacid_energies[alpha1_inner1_index], + aminoacid_energies[BVSUB(12,alpha1_inner1_index,BITS12_ONE_MACRO)])); + +ASSERT (alpha1_outer1_length = FACE_LEN1_MACRO => alpha1_outer1_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha1_outer1_index])); +ASSERT (alpha1_outer1_length = FACE_LEN2_MACRO => alpha1_outer1_energy = BVSUB(16,BITS16_TWO_MACRO,BVPLUS(16,aminoacid_energies[alpha1_outer1_index], + aminoacid_energies[BVSUB(12,alpha1_outer1_index,BITS12_ONE_MACRO)]))); + + +ASSERT (alpha1_inner2_length = FACE_LEN1_MACRO => alpha1_inner2_energy = aminoacid_energies[alpha1_inner2_index]); +ASSERT (alpha1_inner2_length = FACE_LEN2_MACRO => alpha1_inner2_energy = BVPLUS(16,aminoacid_energies[alpha1_inner2_index], + aminoacid_energies[BVSUB(12,alpha1_inner2_index,BITS12_ONE_MACRO)])); + + +ASSERT (alpha1_outer2_length = FACE_LEN1_MACRO => alpha1_outer2_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha1_outer2_index])); +ASSERT (alpha1_outer2_length = FACE_LEN2_MACRO => alpha1_outer2_energy = BVSUB(16,BITS16_TWO_MACRO,BVPLUS(16,aminoacid_energies[alpha1_outer2_index], + aminoacid_energies[BVSUB(12,alpha1_outer2_index,BITS12_ONE_MACRO)]))); + +ASSERT (alpha1_inner3_length = FACE_LEN1_MACRO => alpha1_inner3_energy = 
aminoacid_energies[alpha1_inner3_index]); +ASSERT (alpha1_inner3_length = FACE_LEN2_MACRO => alpha1_inner3_energy = BVPLUS(16,aminoacid_energies[alpha1_inner3_index], + aminoacid_energies[BVSUB(12,alpha1_inner3_index,BITS12_ONE_MACRO)])); + + +ASSERT (alpha1_outer3_length = FACE_LEN1_MACRO => alpha1_outer3_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha1_outer3_index])); +ASSERT (alpha1_outer3_length = FACE_LEN2_MACRO => alpha1_outer3_energy = BVSUB(16,BITS16_TWO_MACRO, BVPLUS(16,aminoacid_energies[alpha1_outer3_index], + aminoacid_energies[BVSUB(12,alpha1_outer3_index,BITS12_ONE_MACRO)]))); + +%%%%%%%%%%%%%%%%%%%%%%%%%% END OF ENERGY COMPUTATION ALPHA1 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + +%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA2 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ASSERT (alpha2_inner1_length = FACE_LEN1_MACRO => alpha2_inner1_energy = aminoacid_energies[alpha2_inner1_index]); +ASSERT (alpha2_inner1_length = FACE_LEN2_MACRO => alpha2_inner1_energy = BVPLUS(16,aminoacid_energies[alpha2_inner1_index], + aminoacid_energies[BVSUB(12,alpha2_inner1_index,BITS12_ONE_MACRO)])); + +ASSERT (alpha2_outer1_length = FACE_LEN1_MACRO => alpha2_outer1_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha2_outer1_index])); +ASSERT (alpha2_outer1_length = FACE_LEN2_MACRO => alpha2_outer1_energy = BVSUB(16,BITS16_TWO_MACRO,BVPLUS(16,aminoacid_energies[alpha2_outer1_index], + aminoacid_energies[BVSUB(12,alpha2_outer1_index,BITS12_ONE_MACRO)]))); + + +ASSERT (alpha2_inner2_length = FACE_LEN1_MACRO => alpha2_inner2_energy = aminoacid_energies[alpha2_inner2_index]); +ASSERT (alpha2_inner2_length = FACE_LEN2_MACRO => alpha2_inner2_energy = BVPLUS(16,aminoacid_energies[alpha2_inner2_index], + aminoacid_energies[BVSUB(12,alpha2_inner2_index,BITS12_ONE_MACRO)])); + + +ASSERT (alpha2_outer2_length = FACE_LEN1_MACRO => alpha2_outer2_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha2_outer2_index])); +ASSERT (alpha2_outer2_length = FACE_LEN2_MACRO => alpha2_outer2_energy = BVSUB(16,BITS16_TWO_MACRO,BVPLUS(16,aminoacid_energies[alpha2_outer2_index], + aminoacid_energies[BVSUB(12,alpha2_outer2_index,BITS12_ONE_MACRO)]))); + +ASSERT (alpha2_inner3_length = FACE_LEN1_MACRO => alpha2_inner3_energy = aminoacid_energies[alpha2_inner3_index]); +ASSERT (alpha2_inner3_length = FACE_LEN2_MACRO => alpha2_inner3_energy = BVPLUS(16,aminoacid_energies[alpha2_inner3_index], + aminoacid_energies[BVSUB(12,alpha2_inner3_index,BITS12_ONE_MACRO)])); + + +ASSERT (alpha2_outer3_length = FACE_LEN1_MACRO => alpha2_outer3_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha2_outer3_index])); +ASSERT (alpha2_outer3_length = FACE_LEN2_MACRO => alpha2_outer3_energy = BVSUB(16,BITS16_TWO_MACRO, BVPLUS(16,aminoacid_energies[alpha2_outer3_index], + aminoacid_energies[BVSUB(12,alpha2_outer3_index,BITS12_ONE_MACRO)]))); + +%%%%%%%%%%%%%%%%%%%%%%%%%% END OF ENERGY COMPUTATION ALPHA2 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + +%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA3 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +ASSERT (alpha3_inner1_length = FACE_LEN1_MACRO => alpha3_inner1_energy = aminoacid_energies[alpha3_inner1_index]); +ASSERT (alpha3_inner1_length = FACE_LEN2_MACRO => alpha3_inner1_energy = BVPLUS(16,aminoacid_energies[alpha3_inner1_index], + aminoacid_energies[BVSUB(12,alpha3_inner1_index,BITS12_ONE_MACRO)])); + +ASSERT (alpha3_outer1_length = FACE_LEN1_MACRO => alpha3_outer1_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha3_outer1_index])); +ASSERT (alpha3_outer1_length = 
+%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA3 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ASSERT (alpha3_inner1_length = FACE_LEN1_MACRO => alpha3_inner1_energy = aminoacid_energies[alpha3_inner1_index]);
+ASSERT (alpha3_inner1_length = FACE_LEN2_MACRO => alpha3_inner1_energy = BVPLUS(16,aminoacid_energies[alpha3_inner1_index],
+                                                  aminoacid_energies[BVSUB(12,alpha3_inner1_index,BITS12_ONE_MACRO)]));
+
+ASSERT (alpha3_outer1_length = FACE_LEN1_MACRO => alpha3_outer1_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha3_outer1_index]));
+ASSERT (alpha3_outer1_length = FACE_LEN2_MACRO => alpha3_outer1_energy = BVSUB(16,BITS16_TWO_MACRO,BVPLUS(16,aminoacid_energies[alpha3_outer1_index],
+                                                  aminoacid_energies[BVSUB(12,alpha3_outer1_index,BITS12_ONE_MACRO)])));
+
+
+ASSERT (alpha3_inner2_length = FACE_LEN1_MACRO => alpha3_inner2_energy = aminoacid_energies[alpha3_inner2_index]);
+ASSERT (alpha3_inner2_length = FACE_LEN2_MACRO => alpha3_inner2_energy = BVPLUS(16,aminoacid_energies[alpha3_inner2_index],
+                                                  aminoacid_energies[BVSUB(12,alpha3_inner2_index,BITS12_ONE_MACRO)]));
+
+
+ASSERT (alpha3_outer2_length = FACE_LEN1_MACRO => alpha3_outer2_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha3_outer2_index]));
+ASSERT (alpha3_outer2_length = FACE_LEN2_MACRO => alpha3_outer2_energy = BVSUB(16,BITS16_TWO_MACRO,BVPLUS(16,aminoacid_energies[alpha3_outer2_index],
+                                                  aminoacid_energies[BVSUB(12,alpha3_outer2_index,BITS12_ONE_MACRO)])));
+
+ASSERT (alpha3_inner3_length = FACE_LEN1_MACRO => alpha3_inner3_energy = aminoacid_energies[alpha3_inner3_index]);
+ASSERT (alpha3_inner3_length = FACE_LEN2_MACRO => alpha3_inner3_energy = BVPLUS(16,aminoacid_energies[alpha3_inner3_index],
+                                                  aminoacid_energies[BVSUB(12,alpha3_inner3_index,BITS12_ONE_MACRO)]));
+
+
+ASSERT (alpha3_outer3_length = FACE_LEN1_MACRO => alpha3_outer3_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha3_outer3_index]));
+ASSERT (alpha3_outer3_length = FACE_LEN2_MACRO => alpha3_outer3_energy = BVSUB(16,BITS16_TWO_MACRO, BVPLUS(16,aminoacid_energies[alpha3_outer3_index],
+                                                  aminoacid_energies[BVSUB(12,alpha3_outer3_index,BITS12_ONE_MACRO)])));
+
+%%%%%%%%%%%%%%%%%%%%%%%%%% END OF ENERGY COMPUTATION ALPHA3 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%% START OF ENERGY COMPUTATION ALPHA4 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ASSERT (alpha4_inner1_length = FACE_LEN1_MACRO => alpha4_inner1_energy = aminoacid_energies[alpha4_inner1_index]);
+ASSERT (alpha4_inner1_length = FACE_LEN2_MACRO => alpha4_inner1_energy = BVPLUS(16,aminoacid_energies[alpha4_inner1_index],
+                                                  aminoacid_energies[BVSUB(12,alpha4_inner1_index,BITS12_ONE_MACRO)]));
+
+ASSERT (alpha4_outer1_length = FACE_LEN1_MACRO => alpha4_outer1_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha4_outer1_index]));
+ASSERT (alpha4_outer1_length = FACE_LEN2_MACRO => alpha4_outer1_energy = BVSUB(16,BITS16_TWO_MACRO,BVPLUS(16,aminoacid_energies[alpha4_outer1_index],
+                                                  aminoacid_energies[BVSUB(12,alpha4_outer1_index,BITS12_ONE_MACRO)])));
+
+
+ASSERT (alpha4_inner2_length = FACE_LEN1_MACRO => alpha4_inner2_energy = aminoacid_energies[alpha4_inner2_index]);
+ASSERT (alpha4_inner2_length = FACE_LEN2_MACRO => alpha4_inner2_energy = BVPLUS(16,aminoacid_energies[alpha4_inner2_index],
+                                                  aminoacid_energies[BVSUB(12,alpha4_inner2_index,BITS12_ONE_MACRO)]));
+
+
+ASSERT (alpha4_outer2_length = FACE_LEN1_MACRO => alpha4_outer2_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha4_outer2_index]));
+ASSERT (alpha4_outer2_length = FACE_LEN2_MACRO => alpha4_outer2_energy = BVSUB(16,BITS16_TWO_MACRO,BVPLUS(16,aminoacid_energies[alpha4_outer2_index],
+                                                  aminoacid_energies[BVSUB(12,alpha4_outer2_index,BITS12_ONE_MACRO)])));
+
+ASSERT (alpha4_inner3_length = FACE_LEN1_MACRO => alpha4_inner3_energy = aminoacid_energies[alpha4_inner3_index]);
+ASSERT (alpha4_inner3_length = FACE_LEN2_MACRO => alpha4_inner3_energy = BVPLUS(16,aminoacid_energies[alpha4_inner3_index],
+                                                  aminoacid_energies[BVSUB(12,alpha4_inner3_index,BITS12_ONE_MACRO)]));
+
+
+ASSERT (alpha4_outer3_length = FACE_LEN1_MACRO => alpha4_outer3_energy = BVSUB(16,BITS16_ONE_MACRO,aminoacid_energies[alpha4_outer3_index]));
+ASSERT (alpha4_outer3_length = FACE_LEN2_MACRO => alpha4_outer3_energy = BVSUB(16,BITS16_TWO_MACRO, BVPLUS(16,aminoacid_energies[alpha4_outer3_index],
+                                                  aminoacid_energies[BVSUB(12,alpha4_outer3_index,BITS12_ONE_MACRO)])));
+
+%%%%%%%%%%%%%%%%%%%%%%%%%% END OF ENERGY COMPUTATION ALPHA4 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
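+% Contact energies pair matching inner faces (rungs) of adjacent helices:
+% 1-2, 2-3, 3-4 and 4-1. inner_energy sums the twelve contact terms and
+% outer_energy sums the twelve outer-face energies; the query below is
+% invalid (i.e. a model exists) exactly when inner_energy can reach
+% 0hex0010 = 16.
+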
+ASSERT (contact_energy12_zero = BVPLUS(16, alpha1_inner1_energy, alpha2_inner1_energy));
+ASSERT (contact_energy12_one = BVPLUS(16, alpha1_inner2_energy, alpha2_inner2_energy));
+ASSERT (contact_energy12_two = BVPLUS(16, alpha1_inner3_energy, alpha2_inner3_energy));
+
+
+ASSERT (contact_energy23_zero = BVPLUS(16, alpha2_inner1_energy, alpha3_inner1_energy));
+ASSERT (contact_energy23_one = BVPLUS(16, alpha2_inner2_energy, alpha3_inner2_energy));
+ASSERT (contact_energy23_two = BVPLUS(16, alpha2_inner3_energy, alpha3_inner3_energy));
+
+
+ASSERT (contact_energy34_zero = BVPLUS(16, alpha3_inner1_energy, alpha4_inner1_energy));
+ASSERT (contact_energy34_one = BVPLUS(16, alpha3_inner2_energy, alpha4_inner2_energy));
+ASSERT (contact_energy34_two = BVPLUS(16, alpha3_inner3_energy, alpha4_inner3_energy));
+
+ASSERT (contact_energy41_zero = BVPLUS(16, alpha4_inner1_energy, alpha1_inner1_energy));
+ASSERT (contact_energy41_one = BVPLUS(16, alpha4_inner2_energy, alpha1_inner2_energy));
+ASSERT (contact_energy41_two = BVPLUS(16, alpha4_inner3_energy, alpha1_inner3_energy));
+
+ASSERT (outer_energy = BVPLUS(16, alpha1_outer1_energy, alpha1_outer2_energy, alpha1_outer3_energy,
+                                  alpha2_outer1_energy, alpha2_outer2_energy, alpha2_outer3_energy,
+                                  alpha3_outer1_energy, alpha3_outer2_energy, alpha3_outer3_energy,
+                                  alpha4_outer1_energy, alpha4_outer2_energy, alpha4_outer3_energy));
+
+
+ASSERT (inner_energy = BVPLUS(16, contact_energy12_zero, contact_energy12_one, contact_energy12_two,
+                                  contact_energy23_zero, contact_energy23_one, contact_energy23_two,
+                                  contact_energy34_zero, contact_energy34_one, contact_energy34_two,
+                                  contact_energy41_zero, contact_energy41_one, contact_energy41_two));
+
+
+% final query
+ASSERT (BVGE(inner_energy, 0hex0010));
+QUERY FALSE;
\ No newline at end of file
--
2.47.3