Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add unbounded AES-GCM verification. #139

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
168 changes: 168 additions & 0 deletions SAW/proof/AES/AES-CTR32.saw
Original file line number Diff line number Diff line change
@@ -0,0 +1,168 @@
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/

// Using experimental proof command "crucible_llvm_verify_x86"
enable_experimental;


////////////////////////////////////////////////////////////////////////////////
// Specifications

// Specification for the x86 assembly routine `aes_hw_ctr32_encrypt_blocks`
// applied to exactly `len` whole AES blocks.  The functional behaviour is
// pinned to the Cryptol function `aes_hw_ctr32_encrypt_blocks`.
let aes_hw_ctr32_encrypt_blocks_spec len = do {
// Message length in bytes: `len` blocks of AES_BLOCK_SIZE bytes each.
let len' = eval_size {| len * AES_BLOCK_SIZE |};
// Precondition: the CPU capability word is set to the AVX+shrd feature vector.
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};

// Read-only symbolic input message and a writable output buffer of equal size.
(in_, in_ptr) <- ptr_to_fresh_readonly "in" (llvm_array len' (llvm_int 8));
out_ptr <- crucible_alloc (llvm_array len' (llvm_int 8));
// Read-only expanded AES key schedule (struct aes_key_st).
key_ptr <- crucible_alloc_readonly (llvm_struct "struct.aes_key_st");
key <- fresh_aes_key_st;
points_to_aes_key_st key_ptr key;
// Read-only initial counter block (AES_BLOCK_SIZE bytes).
(ivec, ivec_ptr) <- ptr_to_fresh_readonly "ivec" (llvm_array AES_BLOCK_SIZE (llvm_int 8));

// Call: aes_hw_ctr32_encrypt_blocks(in, out, len /* in blocks */, key, ivec).
crucible_execute_func [in_ptr, out_ptr, (crucible_term {{ `len : [64] }}), key_ptr, ivec_ptr];

// Postcondition: the output buffer holds the CTR32 encryption of the input.
crucible_points_to out_ptr (crucible_term {{ aes_hw_ctr32_encrypt_blocks in_ key ivec }});

// NOTE(review): the post-state section repeats `global_alloc_init`; confirm
// this is the intended way to assert OPENSSL_ia32cap_P is unchanged on return.
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};
};

// Alternative specification for `aes_hw_ctr32_encrypt_blocks` on a concrete
// number of `blocks`, phrased against the lower-level Cryptol function
// `aes_ctr32_encrypt_blocks` with the counter block split into an IV part
// (`take ivec`) and a 32-bit counter part (`drop ivec`) — widths are
// inferred by Cryptol from that function's type.
let aes_hw_ctr32_encrypt_blocks_spec' blocks = do {
// Message length in bytes.
let len' = eval_size {| blocks * AES_BLOCK_SIZE |};
// Precondition: CPU capability word set to the AVX+shrd feature vector.
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};

// Read-only input message and writable output buffer of equal size.
(in_, in_ptr) <- ptr_to_fresh_readonly "in" (llvm_array len' (llvm_int 8));
out_ptr <- crucible_alloc (llvm_array len' (llvm_int 8));
// Read-only expanded AES key schedule.
key_ptr <- crucible_alloc_readonly (llvm_struct "struct.aes_key_st");
key <- fresh_aes_key_st;
points_to_aes_key_st key_ptr key;
// Read-only initial counter block.
(ivec, ivec_ptr) <- ptr_to_fresh_readonly "ivec" (llvm_array AES_BLOCK_SIZE (llvm_int 8));

crucible_execute_func [in_ptr, out_ptr, (crucible_term {{ `blocks : [64] }}), key_ptr, ivec_ptr];

// Postcondition: output = blocks-wise CTR32 encryption of the input; the
// byte array is marshalled to/from the block representation with join/split.
crucible_points_to out_ptr (crucible_term {{ split`{each=8} (join (aes_ctr32_encrypt_blocks (join key) (join (take ivec)) (join (drop ivec)) (split (join in_)))) }});

// NOTE(review): see the matching note in aes_hw_ctr32_encrypt_blocks_spec
// about `global_alloc_init` appearing in the post-state.
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};
};

// SMT-array-based specification for `aes_hw_ctr32_encrypt_blocks` on a
// concrete number of `blocks`.  Unlike the llvm_array specs above, the
// input/output buffers are modelled as symbolic arrays so the result can be
// phrased with `aes_ctr32_encrypt_blocks_array`, which composes with the
// unbounded-length GCM proofs.
let aes_hw_ctr32_encrypt_blocks_array_spec blocks = do {
// Message length in bytes.
let len = eval_size {| blocks * AES_BLOCK_SIZE |};
// Precondition: CPU capability word set to the AVX+shrd feature vector.
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};

// Read-only input and writable output, both as fresh SMT arrays of `len` bytes.
(in_, in_ptr) <- ptr_to_fresh_array_readonly "in" {{ `len : [64] }};
(out, out_ptr) <- ptr_to_fresh_array "out" {{ `len : [64] }};
// Read-only expanded AES key schedule.
key_ptr <- crucible_alloc_readonly (llvm_struct "struct.aes_key_st");
key <- fresh_aes_key_st;
points_to_aes_key_st key_ptr key;
// Read-only initial counter block.
(ivec, ivec_ptr) <- ptr_to_fresh_readonly "ivec" (llvm_array AES_BLOCK_SIZE (llvm_int 8));

crucible_execute_func [in_ptr, out_ptr, (crucible_term {{ `blocks : [64] }}), key_ptr, ivec_ptr];

// Postcondition: the first `len` bytes of the output array hold the CTR32
// encryption (starting at block offset 0) of the input array.
crucible_points_to_array_prefix
out_ptr
{{ aes_ctr32_encrypt_blocks_array (join key) (join (take ivec)) (join (drop ivec)) in_ `blocks 0 out }}
{{ `len : [64] }};

// NOTE(review): see the note in aes_hw_ctr32_encrypt_blocks_spec about
// `global_alloc_init` appearing in the post-state.
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};
};

// Bounded-but-symbolic specification for `aes_hw_ctr32_encrypt_blocks`:
// the block count is a fresh 64-bit variable constrained to
// 0 < blocks < MIN_BULK_BLOCKS.  This single spec is later established by
// case-splitting over the concrete per-block-count overrides, and is the
// form consumed by the EVP_{En,De}cryptUpdate proofs.
let aes_hw_ctr32_encrypt_bounded_array_spec = do {
// Precondition: CPU capability word set to the AVX+shrd feature vector.
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};

// Symbolic block count and derived byte length.
blocks <- llvm_fresh_var "blocks" (llvm_int 64);
let len = {{ blocks * `AES_BLOCK_SIZE }};
// Only counts strictly between 0 and the bulk-encryption threshold are
// handled here; larger counts go through the aesni_gcm bulk routines.
crucible_precond {{ 0 < blocks }};
crucible_precond {{ blocks < `MIN_BULK_BLOCKS }};

// Read-only input and writable output as SMT arrays of (symbolic) `len` bytes.
(in_, in_ptr) <- ptr_to_fresh_array_readonly "in" len;
(out, out_ptr) <- ptr_to_fresh_array "out" len;

// Read-only expanded AES key schedule.
key_ptr <- crucible_alloc_readonly (llvm_struct "struct.aes_key_st");
key <- fresh_aes_key_st;
points_to_aes_key_st key_ptr key;
// Read-only initial counter block.
(ivec, ivec_ptr) <- ptr_to_fresh_readonly "ivec" (llvm_array AES_BLOCK_SIZE (llvm_int 8));

crucible_execute_func [in_ptr, out_ptr, crucible_term blocks, key_ptr, ivec_ptr];

// Postcondition: the first `len` bytes of the output array hold the CTR32
// encryption of the input array.
crucible_points_to_array_prefix
out_ptr
{{ aes_ctr32_encrypt_blocks_array (join key) (join (take ivec)) (join (drop ivec)) in_ blocks 0 out }}
len;

// NOTE(review): see the note in aes_hw_ctr32_encrypt_blocks_spec about
// `global_alloc_init` appearing in the post-state.
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};
};


////////////////////////////////////////////////////////////////////////////////
// Proof commands
/*
When verifying aes_hw_ctr32_encrypt_blocks, the binary analysis must locally
treat r11 as callee-preserved. This is necessary because this routine saves
the original stack pointer in r11 and then calls helper routines, preventing
the binary analysis from inferring that the return address is still on the stack
when the routine returns. The called helper routines do not modify r11.
*/

// Proof tactic shared by all aes_hw_ctr32_encrypt_blocks verifications:
// normalize with the Cryptol simpset and the slice lemmas, evaluate the goal
// while keeping the AES round primitives uninterpreted, rewrite with the
// add/xor-slice and aesenclast lemmas, then discharge with Yices (still
// treating AESRound/AESFinalRound as uninterpreted functions).
let aes_hw_ctr32_tactic = do {
simplify (cryptol_ss ());
simplify (addsimps slice_384_thms basic_ss);
goal_eval_unint ["AESRound", "AESFinalRound", "aesenc", "aesenclast"];
simplify (addsimps add_xor_slice_thms basic_ss);
simplify (addsimps aesenclast_thms basic_ss);
w4_unint_yices ["AESRound", "AESFinalRound"];
};

// Verify the assembly against the llvm_array spec for the two fixed block
// counts used by the bounded GCM proofs (the whole-blocks-remaining counts
// after the bulk encrypt/decrypt phase).  r11 must locally be treated as
// callee-preserved — see the comment above `aes_hw_ctr32_tactic`.
add_x86_preserved_reg "r11";
aes_hw_ctr32_encrypt_blocks_encrypt_ov <- llvm_verify_x86 m "../../build/x86/crypto/crypto_test" "aes_hw_ctr32_encrypt_blocks"
[]
true
(aes_hw_ctr32_encrypt_blocks_spec whole_blocks_after_bulk_encrypt)
aes_hw_ctr32_tactic;

aes_hw_ctr32_encrypt_blocks_decrypt_ov <- llvm_verify_x86 m "../../build/x86/crypto/crypto_test" "aes_hw_ctr32_encrypt_blocks"
[]
true
(aes_hw_ctr32_encrypt_blocks_spec whole_blocks_after_bulk_decrypt)
aes_hw_ctr32_tactic;
// Restore the default register-preservation assumptions.
default_x86_preserved_reg;

// For every concrete block count 1 .. MIN_BULK_BLOCKS-1, prove the "copy"
// lemma: overwriting `out` with the first 16*blocks bytes of the encryption
// result yields an array equal to the encryption result itself.  These
// lemmas are used below to combine the concrete overrides into the single
// bounded-array override.
aes_hw_ctr32_copy_thms <- for (eval_list {{ [ 1:[16] .. < MIN_BULK_BLOCKS ] }}) (\i -> do {
let blocks = eval_int i;
print (str_concat "aes_hw_ctr32 copy lemma: " (show blocks));
prove_theorem
(do {
// Discharge with Z3, keeping the per-block encryption uninterpreted.
w4_unint_z3 ["aes_ctr32_encrypt_block"];
})
(rewrite (cryptol_ss ()) {{ \key iv ctr in out ->
arrayEq
(arrayCopy out 0 (aes_ctr32_encrypt_blocks_array key iv ctr in `blocks 0 out) 0 `(16*blocks))
(aes_ctr32_encrypt_blocks_array key iv ctr in `blocks 0 out)
}});
});

// For every concrete block count 1 .. MIN_BULK_BLOCKS-1: first verify the
// assembly against the llvm_array spec (spec'), then refine that result to
// the SMT-array spec.  The resulting overrides cover all block counts the
// bounded spec admits.
aes_hw_ctr32_encrypt_blocks_concrete_ovs <- for (eval_list {{ [ 1:[16] .. < MIN_BULK_BLOCKS ] }}) (\i -> do {
let blocks = eval_int i;
print (str_concat "aes_hw_ctr32_encrypt blocks=" (show blocks));
// r11 must locally be treated as callee-preserved — see the comment above
// `aes_hw_ctr32_tactic`.
add_x86_preserved_reg "r11";
ov <- llvm_verify_x86 m "../../build/x86/crypto/crypto_test"
"aes_hw_ctr32_encrypt_blocks"
[]
true
(aes_hw_ctr32_encrypt_blocks_spec' blocks)
aes_hw_ctr32_tactic;
default_x86_preserved_reg;
// Lift the llvm_array-style override to the SMT-array-style spec.
llvm_refine_spec' m "aes_hw_ctr32_encrypt_blocks"
[ov]
(aes_hw_ctr32_encrypt_blocks_array_spec blocks)
(w4_unint_z3 ["aes_ctr32_encrypt_block"]);
});

// Combine the concrete per-block-count overrides into one override for the
// bounded spec (symbolic block count < MIN_BULK_BLOCKS).  The copy lemmas
// proved above rewrite away the arrayCopy introduced by the refinement.
aes_hw_ctr32_encrypt_blocks_bounded_array_ov <- llvm_refine_spec' m "aes_hw_ctr32_encrypt_blocks"
aes_hw_ctr32_encrypt_blocks_concrete_ovs
aes_hw_ctr32_encrypt_bounded_array_spec
(do {
simplify (addsimps aes_hw_ctr32_copy_thms (cryptol_ss ()));
w4_unint_z3 ["aes_ctr32_encrypt_blocks_array"];
});

145 changes: 107 additions & 38 deletions SAW/proof/AES/AES-GCM.saw
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ import "../../../cryptol-specs/Primitive/Symmetric/Cipher/Block/AES.cry";
import "../../../cryptol-specs/Primitive/Symmetric/Cipher/Authenticated/AES_256_GCM.cry";
import "../../spec/AES/X86.cry";
import "../../spec/AES/AES-GCM.cry";
import "../../spec/AES/AES-GCM-implementation.cry";
import "../../spec/AES/AES-GCM-unbounded.cry";


enable_experimental;
Expand Down Expand Up @@ -93,6 +95,21 @@ let aes_block_size = 1;
// the IV for AES-GCM consists of 12 32-bit integers
let aes_iv_len = 12;

// This computes the total number of message blocks that can be
// handled by a single AES/GCM mode session. The GCM counter is
// a 32-bit counter which starts at 1, and we need to leave a block at
// the end for the authentication tag. This gives us a total of
// slightly fewer than 2^^32 blocks we can handle.
let TOTAL_MESSAGE_BLOCKS = eval_size {| 2^^32 - 2 |};

// This is the minimum number of blocks that can be processed by the bulk
// encrypt/decrypt phase. This is due to the fact that the bulk encryption
// phase processes 6 block chunks, and has a pipeline setup which is three
// stages deep. Thus, 18 blocks is the minimum number of blocks it will
// process; fewer than that and it will simply rely on the separate AES/CTR32
// and GHASH routines.
let MIN_BULK_BLOCKS = 18;


/*
* Architecture features for the AVX+shrd code path
Expand All @@ -104,40 +121,9 @@ let {{ ia32cap = [0xffffffff, 0xffffffff, 0x3ffcffff, 0xffffffff] : [4][32] }};

include "AES.saw";
include "GHASH.saw";
include "AES-CTR32.saw";
include "AESNI-GCM.saw";

/*
When verifying aes_hw_ctr32_encrypt_blocks, the binary analysis must locally
treat r11 as callee-preserved. This is necessary because this routine saves
the original stack pointer in r11 and then calls helper routines, preventing
the binary analysis from inferring that the return address is still on the stack
when the routine returns. The called helper routines do not modify r11.
*/

let aes_hw_ctr32_tactic = do {
simplify (cryptol_ss ());
simplify (addsimps slice_384_thms basic_ss);
simplify (addsimps [cmp_sub_thm] empty_ss);
goal_eval_unint ["AESRound", "AESFinalRound", "aesenc", "aesenclast"];
simplify (addsimps add_xor_slice_thms basic_ss);
simplify (addsimps aesenclast_thms basic_ss);
w4_unint_yices ["AESRound", "AESFinalRound"];
};

add_x86_preserved_reg "r11";
aes_hw_ctr32_encrypt_blocks_encrypt_ov <- llvm_verify_x86 m "../../build/x86/crypto/crypto_test" "aes_hw_ctr32_encrypt_blocks"
[]
true
(aes_hw_ctr32_encrypt_blocks_spec whole_blocks_after_bulk_encrypt)
aes_hw_ctr32_tactic;

aes_hw_ctr32_encrypt_blocks_decrypt_ov <- llvm_verify_x86 m "../../build/x86/crypto/crypto_test" "aes_hw_ctr32_encrypt_blocks"
[]
true
(aes_hw_ctr32_encrypt_blocks_spec whole_blocks_after_bulk_decrypt)
aes_hw_ctr32_tactic;
default_x86_preserved_reg;


////////////////////////////////////////////////////////////////////////////////
// Specifications
Expand All @@ -158,6 +144,10 @@ let EVP_CIPH_FLAG_CUSTOM_CIPHER = 0x400;
let EVP_CIPH_FLAG_AEAD_CIPHER = 0x800;
let EVP_CIPH_CUSTOM_COPY = 0x1000;

// This is the total number of bytes that can be in the plain/ciphertext
// for AES-GCM (TOTAL_MESSAGE_BLOCKS whole blocks of AES_BLOCK_SIZE bytes).
let TOTAL_MESSAGE_MAX_LENGTH = eval_size {| TOTAL_MESSAGE_BLOCKS * AES_BLOCK_SIZE |};

let points_to_evp_cipher_st ptr = do {
crucible_points_to (crucible_elem ptr 0) (crucible_term {{ `NID_aes_256_gcm : [32] }});
crucible_points_to (crucible_elem ptr 1) (crucible_term {{ `aes_block_size : [32] }});
Expand Down Expand Up @@ -193,13 +183,21 @@ let fresh_aes_gcm_ctx len = do {
return {{ { key = key, iv = iv, Xi = Xi, len = `len } : AES_GCM_Ctx }};
};

// Build a fresh symbolic AES_GCM_Ctx whose length is symbolic except for its
// low bits: `len` is the concatenation of a fresh 60-bit value with the
// literal `mres`, so `len % 16 == mres` is fixed by construction (the widths
// force `mres` to be 4 bits here).  Used to case-split the Update proofs on
// the partial-block residue.
let fresh_aes_gcm_ctx' mres = do {
key <- fresh_aes_key_st;
iv <- crucible_fresh_var "iv" (llvm_array aes_iv_len (llvm_int 8));
Xi <- crucible_fresh_var "Xi" (llvm_array AES_BLOCK_SIZE (llvm_int 8));
len_60 <- llvm_fresh_var "ctx.len" (llvm_int 60);
let len = {{ (len_60 # `mres): [64] }};
return {{ { key = key, iv = iv, Xi = Xi, len = len } : AES_GCM_Ctx }};
};

let points_to_gcm128_key_st ptr ctx = do {
crucible_points_to_untyped (crucible_elem ptr 1) (crucible_term {{ get_Htable ctx }});
crucible_points_to_untyped (crucible_elem ptr 0) (crucible_term {{ get_H ctx }});
crucible_points_to (crucible_elem ptr 2) (crucible_global "gcm_gmult_avx");
crucible_points_to (crucible_elem ptr 3) (crucible_global "gcm_ghash_avx");
crucible_points_to (crucible_elem ptr 4) (crucible_global "aes_hw_encrypt");
crucible_points_to (crucible_elem ptr 5) (crucible_term {{ 1 : [8] }});
crucible_points_to_untyped (crucible_elem ptr 0) (crucible_term {{ get_Htable ctx.key }});
crucible_points_to (crucible_elem ptr 1) (crucible_global "gcm_gmult_avx");
crucible_points_to (crucible_elem ptr 2) (crucible_global "gcm_ghash_avx");
crucible_points_to (crucible_elem ptr 3) (crucible_global "aes_hw_encrypt");
crucible_points_to (crucible_elem ptr 4) (crucible_term {{ 1 : [8] }});
};

let points_to_GCM128_CONTEXT ptr ctx mres = do {
Expand Down Expand Up @@ -297,6 +295,77 @@ llvm_verify m "EVP_DecryptUpdate"
(EVP_CipherUpdate_spec {{ 0 : [32] }} evp_cipher_update_gcm_len evp_cipher_update_len)
evp_cipher_tactic;

enable_what4_eval;

// Verify EVP_EncryptUpdate against the array-based (unbounded-length) spec,
// using the bounded CTR32 and GHASH overrides plus the bulk aesni_gcm
// overrides.  The spec arguments {{ 1 }} 1 15 select the encrypt direction
// and, presumably, the residue bounds for the context — confirm against the
// definition of EVP_CipherUpdate_array_spec.
llvm_verify m "EVP_EncryptUpdate"
[ aes_gcm_from_cipher_ctx_ov
, aes_hw_encrypt_ov
, aes_hw_ctr32_encrypt_blocks_bounded_array_ov
, gcm_gmult_avx_ov
, gcm_ghash_avx_bounded_array_ov
, aesni_gcm_encrypt_array_ov
, aesni_gcm_decrypt_array_ov
]
true
(EVP_CipherUpdate_array_spec {{ 1 : [32] }} 1 15)
(do {
// Normalize, then repeatedly goal_eval while keeping the crypto primitives
// and the array-operation wrappers uninterpreted, interleaved with rewrites
// by the arithmetic/array/ite lemmas proved elsewhere in this development.
simplify (cryptol_ss ());
goal_eval_unint ["aesni_gcm_encrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod"];
simplify (addsimps [aesni_gcm_encrypt_Yi_thm] empty_ss);
goal_eval_unint ["aesni_gcm_encrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod"];
simplify (addsimps [bvand_bvudiv_thm, bvudiv_bvmul_bvudiv_thm, bvurem_16_append_thm] basic_ss);
simplify (addsimps [arrayLookupUnint_thm, arrayUpdateUnint_thm, arrayCopyUnint_thm, arrayConstantUnint_thm] empty_ss);
simplify (addsimps add_xor_slice_thms basic_ss);
goal_eval_unint ["aesni_gcm_encrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
simplify (addsimps [ite_bveq_0_thm, ite_bveq_1_thm, bveq_ite_bv8_0_thm, bveq_ite_bv8_1_thm, arrayeq_ite_0_thm, arrayeq_ite_1_thm, foo_thm, bar_thm] basic_ss);
goal_eval_unint ["aesni_gcm_encrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
// NOTE(review): print_goal is a debugging aid that dumps every proof goal;
// consider removing it to keep CI logs manageable.
print_goal;
// Dispatch each goal to the solver that handles it best, keyed on the
// spec-side goal tags.
is_out_post <- goal_has_some_tag ["output buffer postcondition"];
is_Xi_post <- goal_has_some_tag ["Xi postcondition"];
if is_out_post then do {
w4_unint_yices ["aesni_gcm_encrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
} else if is_Xi_post then do {
w4_unint_z3_using "qfufbv" ["aesni_gcm_encrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
} else do {
w4_unint_z3 ["aesni_gcm_encrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
};
});

// Verify EVP_DecryptUpdate against the array-based (unbounded-length) spec.
// Mirrors the EVP_EncryptUpdate proof above with direction {{ 0 }} and the
// decrypt-side lemma/uninterpreted-function set (aesni_gcm_decrypt).
llvm_verify m "EVP_DecryptUpdate"
[ aes_gcm_from_cipher_ctx_ov
, aes_hw_encrypt_ov
, aes_hw_ctr32_encrypt_blocks_bounded_array_ov
, gcm_gmult_avx_ov
, gcm_ghash_avx_bounded_array_ov
, aesni_gcm_encrypt_array_ov
, aesni_gcm_decrypt_array_ov
]
true
(EVP_CipherUpdate_array_spec {{ 0 : [32] }} 1 15)
(do {
// Same normalize / goal_eval_unint / rewrite pipeline as the encrypt proof,
// with the decrypt bulk routine kept uninterpreted instead.
simplify (cryptol_ss ());
goal_eval_unint ["aesni_gcm_decrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod"];
simplify (addsimps [aesni_gcm_decrypt_Yi_thm] empty_ss);
goal_eval_unint ["aesni_gcm_decrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod"];
simplify (addsimps [bvand_bvudiv_thm, bvudiv_bvmul_bvudiv_thm, bvurem_16_append_thm] basic_ss);
simplify (addsimps [arrayLookupUnint_thm, arrayUpdateUnint_thm, arrayCopyUnint_thm, arrayConstantUnint_thm] empty_ss);
simplify (addsimps add_xor_slice_thms basic_ss);
goal_eval_unint ["aesni_gcm_decrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
simplify (addsimps [ite_bveq_0_thm, ite_bveq_1_thm, bveq_ite_bv8_0_thm, bveq_ite_bv8_1_thm, arrayeq_ite_0_thm, arrayeq_ite_1_thm, foo_thm, bar_thm] basic_ss);
goal_eval_unint ["aesni_gcm_decrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
// NOTE(review): print_goal is a debugging aid; consider removing it to keep
// CI logs manageable.
print_goal;
// Dispatch each goal to the solver that handles it best, keyed on goal tags.
is_out_post <- goal_has_some_tag ["output buffer postcondition"];
is_Xi_post <- goal_has_some_tag ["Xi postcondition"];
if is_out_post then do {
w4_unint_yices ["aesni_gcm_decrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
} else if is_Xi_post then do {
w4_unint_z3_using "qfufbv" ["aesni_gcm_decrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
} else do {
w4_unint_z3 ["aesni_gcm_decrypt", "aes_ctr32_encrypt_blocks_array", "aes_hw_encrypt", "gcm_ghash_blocks_array", "gcm_polyval", "pmult", "pmod", "arrayLookupUnint", "arrayUpdateUnint", "arrayCopyUnint", "arrayConstantUnint"];
};
});

disable_what4_eval;
disable_what4_hash_consing;


Expand Down
19 changes: 0 additions & 19 deletions SAW/proof/AES/AES.saw
Original file line number Diff line number Diff line change
Expand Up @@ -106,25 +106,6 @@ let aes_hw_decrypt_in_place_spec = do {
};


let aes_hw_ctr32_encrypt_blocks_spec len = do {
let len' = eval_size {| len * AES_BLOCK_SIZE |};
global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};

(in_, in_ptr) <- ptr_to_fresh_readonly "in" (llvm_array len' (llvm_int 8));
out_ptr <- crucible_alloc (llvm_array len' (llvm_int 8));
key_ptr <- crucible_alloc_readonly (llvm_struct "struct.aes_key_st");
key <- fresh_aes_key_st;
points_to_aes_key_st key_ptr key;
(ivec, ivec_ptr) <- ptr_to_fresh_readonly "ivec" (llvm_array AES_BLOCK_SIZE (llvm_int 8));

crucible_execute_func [in_ptr, out_ptr, (crucible_term {{ `len : [64] }}), key_ptr, ivec_ptr];

crucible_points_to out_ptr (crucible_term {{ aes_hw_ctr32_encrypt_blocks in_ key ivec }});

global_alloc_init "OPENSSL_ia32cap_P" {{ ia32cap }};
};



////////////////////////////////////////////////////////////////////////////////
// Proof commands
Expand Down
Loading
Loading