Dataset columns and their type summaries:

| Column | Type summary |
|---|---|
| effect | stringclasses, 48 values |
| original_source_type | stringlengths, 0 to 23k |
| opens_and_abbrevs | listlengths, 2 to 92 |
| isa_cross_project_example | bool, 1 class |
| source_definition | stringlengths, 9 to 57.9k |
| partial_definition | stringlengths, 7 to 23.3k |
| is_div | bool, 2 classes |
| is_type | null |
| is_proof | bool, 2 classes |
| completed_definiton | stringlengths, 1 to 250k |
| dependencies | dict |
| effect_flags | sequencelengths, 0 to 2 |
| ideal_premises | sequencelengths, 0 to 236 |
| mutual_with | sequencelengths, 0 to 11 |
| file_context | stringlengths, 0 to 407k |
| interleaved | bool, 1 class |
| is_simply_typed | bool, 2 classes |
| file_name | stringlengths, 5 to 48 |
| vconfig | dict |
| is_simple_lemma | null |
| source_type | stringlengths, 10 to 23k |
| proof_features | sequencelengths, 0 to 1 |
| name | stringlengths, 8 to 95 |
| source | dict |
| verbose_type | stringlengths, 1 to 7.42k |
| source_range | dict |
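Each record below lists these fields in this order, separated by `|`: one F* definition per record, together with its source text, dependencies, surrounding file context, solver configuration (`vconfig`), and source range. As a rough illustration of how such records could be consumed, here is a minimal Python sketch; the file name `records.jsonl` and the newline-delimited JSON layout are assumptions for the example, not a documented loading recipe for this dataset.

```python
import json

# Minimal sketch, assuming the records are stored as newline-delimited JSON with
# the field names listed in the schema above. "records.jsonl" is a placeholder.
with open("records.jsonl", "r", encoding="utf-8") as f:
    records = [json.loads(line) for line in f if line.strip()]

for rec in records[:3]:
    # "name" is the fully qualified F* identifier, "effect" its computation effect.
    print(rec["name"], "|", rec["effect"])
    # "source_definition" holds the original F* source text of the definition.
    print(rec["source_definition"][:80], "...")
    # "source_range" gives the start/end line and column within "file_name".
    rng = rec["source_range"]
    print(rec["file_name"], "lines", rng["start_line"], "-", rng["end_line"])
```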
Prims.Tot | val to_uint8 (#n: nat{n <= 8}) (x: BV.bv_t n) : Tot (y: U8.t{U8.v y < pow2 n}) | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.BitVector",
"short_module": "BV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec to_uint8
(#n: nat { n <= 8 })
(x: BV.bv_t n)
: Tot (y: U8.t { U8.v y < pow2 n })
= if n = 0
then 0uy
else
let hi = to_uint8 #(n - 1) (Seq.slice x 0 (n - 1)) in
let hi' = hi `U8.mul` 2uy in
let (r: U8.t { U8.v r < 2 }) = if Seq.index x (n - 1) then 1uy else 0uy in
hi' `U8.add` r | val to_uint8 (#n: nat{n <= 8}) (x: BV.bv_t n) : Tot (y: U8.t{U8.v y < pow2 n})
let rec to_uint8 (#n: nat{n <= 8}) (x: BV.bv_t n) : Tot (y: U8.t{U8.v y < pow2 n}) = | false | null | false | if n = 0
then 0uy
else
let hi = to_uint8 #(n - 1) (Seq.slice x 0 (n - 1)) in
let hi' = hi `U8.mul` 2uy in
let r:r: U8.t{U8.v r < 2} = if Seq.index x (n - 1) then 1uy else 0uy in
hi' `U8.add` r | {
"checked_file": "LowParse.Spec.BitVector.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Int.fsti.checked",
"LowParse.Spec.Combinators.fsti.checked",
"LowParse.Spec.BoundedInt.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.BitVector.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.BitVector.fst"
} | [
"total"
] | [
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.BitVector.bv_t",
"Prims.op_Equality",
"Prims.int",
"FStar.UInt8.__uint_to_t",
"Prims.bool",
"FStar.UInt8.add",
"FStar.UInt8.t",
"Prims.op_LessThan",
"FStar.UInt8.v",
"FStar.Seq.Base.index",
"Prims.op_Subtraction",
"FStar.UInt8.mul",
"Prims.pow2",
"LowParse.Spec.BitVector.to_uint8",
"FStar.Seq.Base.slice"
] | [] | module LowParse.Spec.BitVector
open FStar.Mul
module BV = FStar.BitVector
module U8 = FStar.UInt8
module Seq = FStar.Seq
(* Big-endian conversion of a bit vector to a UInt8 *)
let rec to_uint8
(#n: nat { n <= 8 })
(x: BV.bv_t n) | false | false | LowParse.Spec.BitVector.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val to_uint8 (#n: nat{n <= 8}) (x: BV.bv_t n) : Tot (y: U8.t{U8.v y < pow2 n}) | [
"recursion"
] | LowParse.Spec.BitVector.to_uint8 | {
"file_name": "src/lowparse/LowParse.Spec.BitVector.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | x: FStar.BitVector.bv_t n -> y: FStar.UInt8.t{FStar.UInt8.v y < Prims.pow2 n} | {
"end_col": 18,
"end_line": 20,
"start_col": 2,
"start_line": 14
} |
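The record above defines `to_uint8`, a big-endian conversion from an F* bit vector of length `n <= 8` to a `UInt8`: the first `n - 1` bits are converted recursively, the result is doubled, and the last bit is added as the low-order bit. The snippet below is a plain-Python restatement of that recursion for illustration only; it models a bit vector as a list of booleans and is not part of the dataset.

```python
def to_uint8(x):
    """Big-endian fold of a bit vector (list of bools, len <= 8) into an int < 2**len(x).

    Mirrors the F* recursion: convert the prefix, shift it left by one bit,
    then add the final bit as the low-order bit.
    """
    assert len(x) <= 8
    if not x:
        return 0
    hi = to_uint8(x[:-1])               # value of the first n-1 bits
    return hi * 2 + (1 if x[-1] else 0)

print(to_uint8([True, False, True]))    # 0b101 == 5
```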
FStar.Pervasives.Lemma | val to_uint8_of_uint8 (n: nat{n <= 8}) (x: U8.t{U8.v x < pow2 n})
: Lemma (to_uint8 (of_uint8 n x) == x) | [
{
"abbrev": true,
"full_module": "FStar.Seq",
"short_module": "Seq"
},
{
"abbrev": true,
"full_module": "FStar.UInt8",
"short_module": "U8"
},
{
"abbrev": true,
"full_module": "FStar.BitVector",
"short_module": "BV"
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "LowParse.Spec",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec to_uint8_of_uint8
(n: nat { n <= 8 })
(x: U8.t { U8.v x < pow2 n })
: Lemma
(to_uint8 (of_uint8 n x) == x)
= if n = 0
then ()
else begin
assert (Seq.slice (of_uint8 n x) 0 (n - 1) `Seq.equal` of_uint8 (n - 1) (x `U8.div` 2uy));
to_uint8_of_uint8 (n - 1) (x `U8.div` 2uy)
end | val to_uint8_of_uint8 (n: nat{n <= 8}) (x: U8.t{U8.v x < pow2 n})
: Lemma (to_uint8 (of_uint8 n x) == x)
let rec to_uint8_of_uint8 (n: nat{n <= 8}) (x: U8.t{U8.v x < pow2 n})
: Lemma (to_uint8 (of_uint8 n x) == x) = | false | null | true | if n = 0
then ()
else
(assert ((Seq.slice (of_uint8 n x) 0 (n - 1)) `Seq.equal` (of_uint8 (n - 1) (x `U8.div` 2uy)));
to_uint8_of_uint8 (n - 1) (x `U8.div` 2uy)) | {
"checked_file": "LowParse.Spec.BitVector.fst.checked",
"dependencies": [
"prims.fst.checked",
"LowParse.Spec.Int.fsti.checked",
"LowParse.Spec.Combinators.fsti.checked",
"LowParse.Spec.BoundedInt.fsti.checked",
"FStar.UInt8.fsti.checked",
"FStar.UInt32.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.BitVector.fst.checked"
],
"interface_file": false,
"source_file": "LowParse.Spec.BitVector.fst"
} | [
"lemma"
] | [
"Prims.nat",
"Prims.b2t",
"Prims.op_LessThanOrEqual",
"FStar.UInt8.t",
"Prims.op_LessThan",
"FStar.UInt8.v",
"Prims.pow2",
"Prims.op_Equality",
"Prims.int",
"Prims.bool",
"LowParse.Spec.BitVector.to_uint8_of_uint8",
"Prims.op_Subtraction",
"FStar.UInt8.div",
"FStar.UInt8.__uint_to_t",
"Prims.unit",
"Prims._assert",
"FStar.Seq.Base.equal",
"FStar.Seq.Base.slice",
"LowParse.Spec.BitVector.of_uint8",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"Prims.l_or",
"LowParse.Spec.BitVector.to_uint8",
"Prims.Nil",
"FStar.Pervasives.pattern"
] | [] | module LowParse.Spec.BitVector
open FStar.Mul
module BV = FStar.BitVector
module U8 = FStar.UInt8
module Seq = FStar.Seq
(* Big-endian conversion of a bit vector to a UInt8 *)
let rec to_uint8
(#n: nat { n <= 8 })
(x: BV.bv_t n)
: Tot (y: U8.t { U8.v y < pow2 n })
= if n = 0
then 0uy
else
let hi = to_uint8 #(n - 1) (Seq.slice x 0 (n - 1)) in
let hi' = hi `U8.mul` 2uy in
let (r: U8.t { U8.v r < 2 }) = if Seq.index x (n - 1) then 1uy else 0uy in
hi' `U8.add` r
let rec of_uint8
(n: nat { n <= 8 })
(x: U8.t { U8.v x < pow2 n })
: Tot (BV.bv_t n)
= if n = 0
then Seq.empty
else
let hi = of_uint8 (n - 1) (x `U8.div` 2uy) in
Seq.snoc hi (x `U8.rem` 2uy = 1uy)
#push-options "--z3rlimit 32"
let rec to_uint8_of_uint8
(n: nat { n <= 8 })
(x: U8.t { U8.v x < pow2 n })
: Lemma | false | false | LowParse.Spec.BitVector.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 1,
"max_fuel": 8,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 32,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val to_uint8_of_uint8 (n: nat{n <= 8}) (x: U8.t{U8.v x < pow2 n})
: Lemma (to_uint8 (of_uint8 n x) == x) | [
"recursion"
] | LowParse.Spec.BitVector.to_uint8_of_uint8 | {
"file_name": "src/lowparse/LowParse.Spec.BitVector.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | n: Prims.nat{n <= 8} -> x: FStar.UInt8.t{FStar.UInt8.v x < Prims.pow2 n}
-> FStar.Pervasives.Lemma
(ensures LowParse.Spec.BitVector.to_uint8 (LowParse.Spec.BitVector.of_uint8 n x) == x) | {
"end_col": 5,
"end_line": 44,
"start_col": 2,
"start_line": 39
} |
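This record is the roundtrip lemma `to_uint8 (of_uint8 n x) == x`, proved by induction on `n` with a `Seq.slice`/`Seq.equal` step relating the prefix of `of_uint8 n x` to `of_uint8 (n - 1) (x / 2)`. The check below exercises the same roundtrip exhaustively in Python, restating the `to_uint8` sketch above and adding an inverse modeled on the F* `of_uint8`; it is a sanity check of the statement, not a substitute for the proof.

```python
def to_uint8(x):
    # Same big-endian fold as in the previous sketch.
    return 0 if not x else to_uint8(x[:-1]) * 2 + (1 if x[-1] else 0)

def of_uint8(n, v):
    """Inverse sketch modeled on the F* of_uint8: the n big-endian bits of v < 2**n."""
    assert 0 <= n <= 8 and 0 <= v < 2 ** n
    if n == 0:
        return []
    return of_uint8(n - 1, v // 2) + [v % 2 == 1]

# Exhaustive roundtrip check mirroring the lemma: to_uint8 (of_uint8 n x) == x
# for every n <= 8 and every x < 2**n.
for n in range(9):
    for v in range(2 ** n):
        assert to_uint8(of_uint8(n, v)) == v
print("roundtrip holds for all n <= 8")
```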
Prims.Tot | val is_total_or_gtotal : comp -> Tot bool | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c) | val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c = | false | null | false | Some? (get_total_or_gtotal_ret_type c) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.comp",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Reflection.Types.typ",
"FStar.InteractiveHelpers.ExploreTerm.get_total_or_gtotal_ret_type",
"Prims.bool"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_total_or_gtotal : comp -> Tot bool | [] | FStar.InteractiveHelpers.ExploreTerm.is_total_or_gtotal | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | c: FStar.Reflection.Types.comp -> Prims.bool | {
"end_col": 40,
"end_line": 131,
"start_col": 2,
"start_line": 131
} |
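This record and several of the following ones come from `FStar.InteractiveHelpers.ExploreTerm`; its `file_context` includes `effect_name_to_type`, which classifies a flattened effect name into one of `E_PURE`, `E_Pure`, `E_Stack`, `E_ST`, or `E_Unknown`. The Python dictionary below restates that lookup for illustration; it is a paraphrase of the F* code shown in the context, not part of the dataset.

```python
# Effect-name constants as they appear in the F* file_context.
EFFECT_TYPES = {
    "Prims.PURE": "E_PURE",
    "Prims.Pure": "E_Pure",
    "FStar.HyperStack.ST.Stack": "E_Stack",
    "FStar.HyperStack.ST.ST": "E_ST",
}

def effect_name_to_type(ename: str) -> str:
    # Anything not in the table falls through to E_Unknown, as in the F* source.
    return EFFECT_TYPES.get(ename, "E_Unknown")

print(effect_name_to_type("Prims.PURE"))  # E_PURE
print(effect_name_to_type("Div"))         # E_Unknown
```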
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let st_effect_qn = "FStar.HyperStack.ST.ST" | let st_effect_qn = | false | null | false | "FStar.HyperStack.ST.ST" | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure" | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val st_effect_qn : Prims.string | [] | FStar.InteractiveHelpers.ExploreTerm.st_effect_qn | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.string | {
"end_col": 43,
"end_line": 23,
"start_col": 19,
"start_line": 23
} |
|
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let pure_hoare_effect_qn = "Prims.Pure" | let pure_hoare_effect_qn = | false | null | false | "Prims.Pure" | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val pure_hoare_effect_qn : Prims.string | [] | FStar.InteractiveHelpers.ExploreTerm.pure_hoare_effect_qn | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.string | {
"end_col": 39,
"end_line": 21,
"start_col": 27,
"start_line": 21
} |
|
Prims.Tot | val get_total_or_gtotal_ret_type : comp -> Tot (option typ) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None | val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c = | false | null | false | match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.comp",
"FStar.Reflection.V1.Builtins.inspect_comp",
"FStar.Reflection.Types.typ",
"FStar.Pervasives.Native.Some",
"FStar.Reflection.V1.Data.comp_view",
"FStar.Pervasives.Native.None",
"FStar.Pervasives.Native.option"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ) | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val get_total_or_gtotal_ret_type : comp -> Tot (option typ) | [] | FStar.InteractiveHelpers.ExploreTerm.get_total_or_gtotal_ret_type | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | c: FStar.Reflection.Types.comp -> FStar.Pervasives.Native.option FStar.Reflection.Types.typ | {
"end_col": 13,
"end_line": 120,
"start_col": 2,
"start_line": 118
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let pure_effect_qn = "Prims.PURE" | let pure_effect_qn = | false | null | false | "Prims.PURE" | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val pure_effect_qn : Prims.string | [] | FStar.InteractiveHelpers.ExploreTerm.pure_effect_qn | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.string | {
"end_col": 33,
"end_line": 20,
"start_col": 21,
"start_line": 20
} |
|
FStar.Tactics.Effect.Tac | val get_type_info_from_type (ty: typ) : Tac type_info | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None | val get_type_info_from_type (ty: typ) : Tac type_info
let get_type_info_from_type (ty: typ) : Tac type_info = | true | null | false | match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b:binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.typ",
"FStar.Reflection.Types.bv",
"FStar.Reflection.Types.term",
"FStar.InteractiveHelpers.ExploreTerm.mk_type_info",
"FStar.Pervasives.Native.Some",
"FStar.InteractiveHelpers.ExploreTerm.type_info",
"FStar.Tactics.V1.Builtins.pack",
"FStar.Reflection.V1.Data.Tv_Abs",
"FStar.InteractiveHelpers.Base.prettify_term",
"FStar.Reflection.Types.binder",
"FStar.Reflection.V1.Derived.mk_binder",
"FStar.Reflection.V1.Data.term_view",
"FStar.Pervasives.Native.None",
"FStar.Tactics.V1.Builtins.inspect"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val get_type_info_from_type (ty: typ) : Tac type_info | [] | FStar.InteractiveHelpers.ExploreTerm.get_type_info_from_type | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | ty: FStar.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac FStar.InteractiveHelpers.ExploreTerm.type_info | {
"end_col": 24,
"end_line": 107,
"start_col": 2,
"start_line": 98
} |
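The record above, `get_type_info_from_type`, deconstructs an F* type: for a refinement type `x:t{phi x}` it returns the raw type `t` together with the refinement repackaged as the function `fun x -> phi x`, and for any other type it returns the type with no refinement. As a loose illustration only (not the F* reflection API), the Python sketch below models the same split on a toy representation of types.

```python
from dataclasses import dataclass
from typing import Callable, Optional, Union

@dataclass
class Base:
    name: str                        # e.g. "U8.t"

@dataclass
class Refine:
    base: Base                       # the underlying type
    pred: Callable[[object], bool]   # the refinement predicate

Typ = Union[Base, Refine]

@dataclass
class TypeInfo:
    ty: Base                                     # the type without refinement
    refin: Optional[Callable[[object], bool]]    # the refinement, if any

def get_type_info_from_type(ty: Typ) -> TypeInfo:
    """Toy analogue of the F* helper: peel one refinement off a type, if present."""
    if isinstance(ty, Refine):
        return TypeInfo(ty=ty.base, refin=ty.pred)
    return TypeInfo(ty=ty, refin=None)

# Toy model of 'y: U8.t{U8.v y < pow2 8}':
u8_lt_256 = Refine(Base("U8.t"), lambda y: 0 <= y < 2 ** 8)
info = get_type_info_from_type(u8_lt_256)
print(info.ty.name, info.refin(200), info.refin(300))  # U8.t True False
```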
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let unit_type_info = mk_type_info (`unit) None | let unit_type_info = | false | null | false | mk_type_info (`unit) None | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.InteractiveHelpers.ExploreTerm.mk_type_info",
"FStar.Pervasives.Native.None",
"FStar.Reflection.Types.term"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")" | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unit_type_info : FStar.InteractiveHelpers.ExploreTerm.type_info | [] | FStar.InteractiveHelpers.ExploreTerm.unit_type_info | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | FStar.InteractiveHelpers.ExploreTerm.type_info | {
"end_col": 46,
"end_line": 87,
"start_col": 21,
"start_line": 87
} |
|
FStar.Tactics.Effect.Tac | val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c | val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c = | true | null | false | subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.binder",
"FStar.Reflection.Types.term",
"FStar.Reflection.Types.comp",
"FStar.InteractiveHelpers.ExploreTerm.subst_bv_in_comp",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Reflection.Types.typ",
"FStar.Tactics.V1.Derived.binder_sort"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term' type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp | [] | FStar.InteractiveHelpers.ExploreTerm.subst_binder_in_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
e: FStar.Reflection.Types.env ->
b: FStar.Reflection.Types.binder ->
t: FStar.Reflection.Types.term ->
c: FStar.Reflection.Types.comp
-> FStar.Tactics.Effect.Tac FStar.Reflection.Types.comp | {
"end_col": 57,
"end_line": 201,
"start_col": 2,
"start_line": 201
} |
FStar.Tactics.Effect.Tac | val is_unit_type : typ -> Tac bool | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false | val is_unit_type : typ -> Tac bool
let is_unit_type ty = | true | null | false | match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.typ",
"FStar.Reflection.Types.fv",
"FStar.InteractiveHelpers.Base.fv_eq_name",
"FStar.Reflection.Const.unit_lid",
"FStar.Reflection.V1.Data.term_view",
"Prims.bool",
"FStar.Tactics.V1.Builtins.inspect"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val is_unit_type : typ -> Tac bool | [] | FStar.InteractiveHelpers.ExploreTerm.is_unit_type | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | ty: FStar.Reflection.Types.typ -> FStar.Tactics.Effect.Tac Prims.bool | {
"end_col": 14,
"end_line": 137,
"start_col": 2,
"start_line": 135
} |
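A minimal usage sketch for is_unit_type (illustration only; returns_unit is a hypothetical helper, not part of this record): it combines safe_tcc and get_comp_ret_type from the same file context to test whether a term's computed return type is Prims.unit.

let returns_unit (e : env) (t : term) : Tac bool =
  match safe_tcc e t with
  | Some c -> is_unit_type (get_comp_ret_type c)
  | None -> false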
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let stack_effect_qn = "FStar.HyperStack.ST.Stack" | let stack_effect_qn = | false | null | false | "FStar.HyperStack.ST.Stack" | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE" | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val stack_effect_qn : Prims.string | [] | FStar.InteractiveHelpers.ExploreTerm.stack_effect_qn | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | Prims.string | {
"end_col": 49,
"end_line": 22,
"start_col": 22,
"start_line": 22
} |
|
FStar.Tactics.Effect.Tac | val safe_tcc (e:env) (t:term) : Tac (option comp) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let safe_tcc e t =
try Some (tcc e t) with | _ -> None | val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t = | true | null | false | try Some (tcc e t) with | _ -> None | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.term",
"FStar.Tactics.V1.Derived.try_with",
"FStar.Pervasives.Native.option",
"FStar.Reflection.Types.comp",
"Prims.unit",
"FStar.Pervasives.Native.Some",
"FStar.Tactics.V1.Builtins.tcc",
"Prims.exn",
"FStar.Pervasives.Native.None"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp) | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val safe_tcc (e:env) (t:term) : Tac (option comp) | [] | FStar.InteractiveHelpers.ExploreTerm.safe_tcc | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | e: FStar.Reflection.Types.env -> t: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac (FStar.Pervasives.Native.option FStar.Reflection.Types.comp) | {
"end_col": 37,
"end_line": 95,
"start_col": 2,
"start_line": 95
} |
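A minimal usage sketch for safe_tcc (illustration only; has_computable_comp is a hypothetical helper): the wrapper returns None instead of raising when tcc cannot compute a computation type, so callers can simply branch on the option.

let has_computable_comp (e : env) (t : term) : Tac bool =
  Some? (safe_tcc e t)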
FStar.Tactics.Effect.Tac | val type_info_to_string : type_info -> Tac string | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")" | val type_info_to_string : type_info -> Tac string
let type_info_to_string info = | true | null | false | "Mktype_info (" ^ term_to_string info.ty ^ ") (" ^ option_to_string term_to_string info.refin ^ ")" | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.InteractiveHelpers.ExploreTerm.type_info",
"Prims.op_Hat",
"Prims.string",
"FStar.InteractiveHelpers.Base.option_to_string",
"FStar.Reflection.Types.term",
"FStar.Tactics.V1.Builtins.term_to_string",
"FStar.InteractiveHelpers.ExploreTerm.__proj__Mktype_info__item__refin",
"FStar.InteractiveHelpers.ExploreTerm.__proj__Mktype_info__item__ty"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val type_info_to_string : type_info -> Tac string | [] | FStar.InteractiveHelpers.ExploreTerm.type_info_to_string | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | info: FStar.InteractiveHelpers.ExploreTerm.type_info -> FStar.Tactics.Effect.Tac Prims.string | {
"end_col": 50,
"end_line": 85,
"start_col": 2,
"start_line": 83
} |
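A minimal usage sketch for type_info_to_string (illustration only; dump_nat_info is a hypothetical helper, assuming the opens listed in the record): it builds a type_info with mk_type_info and prints the rendered string with the print tactic primitive.

let dump_nat_info () : Tac unit =
  let info = mk_type_info (`nat) None in
  print (type_info_to_string info)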
Prims.Tot | val effect_name_to_type (ename : name) : Tot effect_type | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown | val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename: name) : Tot effect_type = | false | null | false | let ename = flatten_name ename in
if ename = pure_effect_qn
then E_PURE
else
if ename = pure_hoare_effect_qn
then E_Pure
else if ename = stack_effect_qn then E_Stack else if ename = st_effect_qn then E_ST else E_Unknown | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.name",
"Prims.op_Equality",
"Prims.string",
"FStar.InteractiveHelpers.ExploreTerm.pure_effect_qn",
"FStar.InteractiveHelpers.ExploreTerm.E_PURE",
"Prims.bool",
"FStar.InteractiveHelpers.ExploreTerm.pure_hoare_effect_qn",
"FStar.InteractiveHelpers.ExploreTerm.E_Pure",
"FStar.InteractiveHelpers.ExploreTerm.stack_effect_qn",
"FStar.InteractiveHelpers.ExploreTerm.E_Stack",
"FStar.InteractiveHelpers.ExploreTerm.st_effect_qn",
"FStar.InteractiveHelpers.ExploreTerm.E_ST",
"FStar.InteractiveHelpers.ExploreTerm.E_Unknown",
"FStar.InteractiveHelpers.ExploreTerm.effect_type",
"FStar.Reflection.V1.Derived.flatten_name"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val effect_name_to_type (ename : name) : Tot effect_type | [] | FStar.InteractiveHelpers.ExploreTerm.effect_name_to_type | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | ename: FStar.Reflection.Types.name -> FStar.InteractiveHelpers.ExploreTerm.effect_type | {
"end_col": 16,
"end_line": 65,
"start_col": 58,
"start_line": 59
} |
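A minimal usage sketch for effect_name_to_type (illustration only; comp_effect_type is a hypothetical helper, not the module's own converter): the C_Eff case feeds the effect name to effect_name_to_type, while the other comp views map directly to effect_type constructors.

let comp_effect_type (c : comp) : Tac effect_type =
  match inspect_comp c with
  | C_Total _ -> E_Total
  | C_GTotal _ -> E_GTotal
  | C_Lemma _ _ _ -> E_Lemma
  | C_Eff _ ename _ _ _ -> effect_name_to_type ename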
FStar.Tactics.Effect.Tac | val inst_comp_once : env -> comp -> term -> Tac comp | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end | val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t = | true | null | false | let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
match inspect ty' with
| Tv_Arrow b1 c1 -> subst_binder_in_comp e b1 t c1
| _ -> mfail "inst_comp_once: inconsistent state" | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.comp",
"FStar.Reflection.Types.term",
"FStar.Reflection.Types.binder",
"FStar.InteractiveHelpers.ExploreTerm.subst_binder_in_comp",
"FStar.Reflection.V1.Data.term_view",
"FStar.InteractiveHelpers.Base.mfail",
"FStar.Tactics.V1.Builtins.inspect",
"FStar.Reflection.Types.typ",
"FStar.InteractiveHelpers.ExploreTerm.unfold_until_arrow",
"FStar.InteractiveHelpers.ExploreTerm.get_comp_ret_type"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// a binder which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val inst_comp_once : env -> comp -> term -> Tac comp | [] | FStar.InteractiveHelpers.ExploreTerm.inst_comp_once | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | e: FStar.Reflection.Types.env -> c: FStar.Reflection.Types.comp -> t: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac FStar.Reflection.Types.comp | {
"end_col": 5,
"end_line": 263,
"start_col": 26,
"start_line": 255
} |
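A minimal usage sketch for inst_comp_once (illustration only; inst_twice is a hypothetical helper): applying it twice instantiates a computation whose return type is a two-argument arrow, which is what the recursive inst_comp in the same file does over a list of terms.

let inst_twice (e : env) (c : comp) (x y : term) : Tac comp =
  let c1 = inst_comp_once e c x in
  inst_comp_once e c1 y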
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let explorer (a : Type) =
a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view ->
Tac (a & ctrl_flag) | let explorer (a: Type) = | false | null | false | a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view -> Tac (a & ctrl_flag) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.InteractiveHelpers.Base.genv",
"Prims.list",
"FStar.Pervasives.Native.tuple2",
"FStar.Reflection.V1.Data.term_view",
"FStar.Pervasives.Native.option",
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.Tactics.Types.ctrl_flag"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// a binder which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists of binders:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
      mfail ("_flush_typ_or_comp: inconsistent state" ^
             "\n-comp: " ^ acomp_to_string c ^
             "\n-remaining binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None
/// Exploring a term
(*** Term exploration *)
/// Explore a term, correctly updating the environment when traversing abstractions
let convert_ctrl_flag (flag : ctrl_flag) =
match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort
/// TODO: for now I need to use universe 0 for type a because otherwise it doesn't
/// type check
/// ctrl_flag:
/// - Continue: continue exploring the term
/// - Skip: don't explore the sub-terms of this term
/// - Abort: stop exploration
/// TODO: we might want a more precise control (like: don't explore the type of the
/// ascription but explore its body)
/// Note that ``explore_term`` doesn't use the environment parameter besides pushing
/// binders and passing it to ``f``, which means that you can give it arbitrary
/// environments, ``explore_term`` itself won't fail (but the passed function might). | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val explorer : a: Type -> Type | [] | FStar.InteractiveHelpers.ExploreTerm.explorer | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | a: Type -> Type | {
"end_col": 21,
"end_line": 431,
"start_col": 2,
"start_line": 430
} |
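A minimal sketch of an explorer callback (illustration only; count_subterms is a hypothetical helper): the accumulator is threaded through every visited subterm, and Continue asks the traversal to keep going.

let count_subterms : explorer nat =
  fun acc _ _ _ _ -> (acc + 1, Continue)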
|
FStar.Tactics.Effect.Tac | val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)] | val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c = | true | null | false | apply_subst_in_comp e c [((b, sort), t)] | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.bv",
"FStar.Reflection.Types.typ",
"FStar.Reflection.Types.term",
"FStar.Reflection.Types.comp",
"FStar.InteractiveHelpers.Base.apply_subst_in_comp",
"Prims.Cons",
"FStar.Pervasives.Native.tuple2",
"FStar.Pervasives.Native.Mktuple2",
"Prims.Nil"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// a binder which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp | [] | FStar.InteractiveHelpers.ExploreTerm.subst_bv_in_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
e: FStar.Reflection.Types.env ->
b: FStar.Reflection.Types.bv ->
sort: FStar.Reflection.Types.typ ->
t: FStar.Reflection.Types.term ->
c: FStar.Reflection.Types.comp
-> FStar.Tactics.Effect.Tac FStar.Reflection.Types.comp | {
"end_col": 42,
"end_line": 197,
"start_col": 2,
"start_line": 197
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let mk_type_info = Mktype_info | let mk_type_info = | false | null | false | Mktype_info | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.InteractiveHelpers.ExploreTerm.Mktype_info"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the currently supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
} | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val mk_type_info : ty: FStar.Reflection.Types.typ -> refin: FStar.Pervasives.Native.option FStar.Reflection.Types.term
-> FStar.InteractiveHelpers.ExploreTerm.type_info | [] | FStar.InteractiveHelpers.ExploreTerm.mk_type_info | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | ty: FStar.Reflection.Types.typ -> refin: FStar.Pervasives.Native.option FStar.Reflection.Types.term
-> FStar.InteractiveHelpers.ExploreTerm.type_info | {
"end_col": 30,
"end_line": 79,
"start_col": 19,
"start_line": 79
} |
|
FStar.Tactics.Effect.Tac | val term_has_shadowed_variables : genv -> term -> Tac bool | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let term_has_shadowed_variables ge t =
let fvl = free_in t in
Some? (List.Tot.tryFind (bv_is_shadowed ge) fvl) | val term_has_shadowed_variables : genv -> term -> Tac bool
let term_has_shadowed_variables ge t = | true | null | false | let fvl = free_in t in
Some? (List.Tot.tryFind (bv_is_shadowed ge) fvl) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Reflection.Types.term",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.Reflection.Types.bv",
"FStar.List.Tot.Base.tryFind",
"FStar.InteractiveHelpers.Base.bv_is_shadowed",
"Prims.bool",
"Prims.list",
"FStar.InteractiveHelpers.ExploreTerm.free_in"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the currently supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as its first parameter
/// a binder which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
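(* A minimal sketch (hypothetical helper, relying only on the definitions
   above): pushing two binders onto an initially empty typ_or_comp just records
   them and bumps the counter; no substitution happens until a flush. *)
let _example_lazy_update (b1 b2 : binder) (ty : typ) (e : env) : Tac typ_or_comp =
  let tyc0 = TC_Typ ty [] 0 in
  let tyc1 = abs_update_typ_or_comp b1 tyc0 e in (* TC_Typ ty [b1] 1 *)
  abs_update_typ_or_comp b2 tyc1 e (* TC_Typ ty [b2; b1] 2 *)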
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists of binders:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaining binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
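(* Informally: flushing [TC_Typ (x0:a -> y0:b -> c) [y; x] 2] strips the two
   arrows, pairs [x0] with [x] and [y0] with [y], applies the accumulated
   substitution in one go, and returns a [TC_Comp] whose return type is [c]
   (with [x0] and [y0] replaced) and whose counter is back to 0. *)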
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None
/// Exploring a term
(*** Term exploration *)
/// Explore a term, correctly updating the environment when traversing abstractions
let convert_ctrl_flag (flag : ctrl_flag) =
match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort
/// TODO: for now I need to use universe 0 for type a because otherwise it doesn't
/// type check
/// ctrl_flag:
/// - Continue: continue exploring the term
/// - Skip: don't explore the sub-terms of this term
/// - Abort: stop exploration
/// TODO: we might want a more precise control (like: don't explore the type of the
/// ascription but explore its body)
/// Note that ``explore_term`` doesn't use the environment parameter besides pushing
/// binders and passing it to ``f``, which means that you can give it arbitrary
/// environments, ``explore_term`` itself won't fail (but the passed function might).
let explorer (a : Type) =
a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view ->
Tac (a & ctrl_flag)
// TODO: use more
let bind_expl (#a : Type) (x : a) (f1 f2 : a -> Tac (a & ctrl_flag)) : Tac (a & ctrl_flag) =
let x1, flag1 = f1 x in
if flag1 = Continue then
f2 x1
else x1, convert_ctrl_flag flag1
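(* For example, if [f1 x] returns [(x1, Skip)], [bind_expl] does not run [f2]:
   it returns [x1] together with [convert_ctrl_flag Skip = Continue], so the
   enclosing traversal keeps going instead of aborting. *)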
// TODO: change the signature to move the dbg flag
val explore_term :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge : genv
(* the list of terms traversed so far (first is most recent) with the environment
* at the time they were traversed *)
-> parents : list (genv & term_view)
-> c : option typ_or_comp
-> t:term ->
Tac (a & ctrl_flag)
val explore_pattern :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge:genv
-> pat:pattern ->
Tac (genv & a & ctrl_flag)
(* TODO: carry around the list of encompassing terms *)
let rec explore_term dbg dfs #a f x ge0 pl0 c0 t0 =
print_dbg dbg ("[> explore_term: " ^ term_construct t0 ^ ":\n" ^ term_to_string t0);
let tv0 = inspect t0 in
let x0, flag = f x ge0 pl0 c0 tv0 in
let pl1 = (ge0, tv0) :: pl0 in
if flag = Continue then
begin match tv0 with
| Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> x0, Continue
| Tv_App hd (a,qual) ->
(* Explore the argument - we update the target typ_or_comp when doing so.
* Note that the only way to get the correct target type is to deconstruct
* the type of the head *)
let a_c = safe_arg_typ_or_comp dbg ge0.env hd in
print_dbg dbg ("Tv_App: updated target typ_or_comp to:\n" ^
option_to_string typ_or_comp_to_string a_c);
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 a_c a in
(* Explore the head - no type information here: we can compute it,
* but it seems useless (or maybe use it only if it is not Total) *)
if flag1 = Continue then
explore_term dbg dfs f x1 ge0 pl1 None hd
else x1, convert_ctrl_flag flag1
| Tv_Abs br body ->
let ge1 = genv_push_binder ge0 br false None in
let c1 = abs_update_opt_typ_or_comp br c0 ge1.env in
explore_term dbg dfs f x0 ge1 pl1 c1 body
| Tv_Arrow br c0 -> x0, Continue (* TODO: we might want to explore that *)
| Tv_Type _ -> x0, Continue
| Tv_Refine bv sort ref ->
let bvv = inspect_bv bv in
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 None sort in
if flag1 = Continue then
let ge1 = genv_push_bv ge0 bv sort false None in
explore_term dbg dfs f x1 ge1 pl1 None ref
else x1, convert_ctrl_flag flag1
| Tv_Const _ -> x0, Continue
| Tv_Uvar _ _ -> x0, Continue
| Tv_Let recf attrs bv ty def body ->
(* Binding definition exploration - for the target computation: initially we
* used the type of the definition, however it is often unnecessarily complex.
* Now, we use the type of the binder used for the binding. *)
let def_c = Some (TC_Typ ty [] 0) in
let explore_def x = explore_term dbg dfs f x ge0 pl1 def_c def in
(* Exploration of the following instructions *)
let ge1 = genv_push_bv ge0 bv ty false (Some def) in
let explore_next x = explore_term dbg dfs f x ge1 pl1 c0 body in
(* Perform the exploration in the proper order *)
let expl1, expl2 = if dfs then explore_next, explore_def else explore_def, explore_next in
bind_expl x0 expl1 expl2
| Tv_Match scrutinee _ret_opt branches -> //AR: TODO: need to account for returns annotation here
(* Auxiliary function to explore the branches *)
let explore_branch (x_flag : a & ctrl_flag) (br : branch) : Tac (a & ctrl_flag)=
let x0, flag = x_flag in
if flag = Continue then
let pat, branch_body = br in
(* Explore the pattern *)
let ge1, x1, flag1 = explore_pattern dbg dfs #a f x0 ge0 pat in
if flag1 = Continue then
(* Explore the branch body *)
explore_term dbg dfs #a f x1 ge1 pl1 c0 branch_body
else x1, convert_ctrl_flag flag1
(* Don't convert the flag *)
else x0, flag
in
(* Explore the scrutinee *)
let scrut_c = safe_typ_or_comp dbg ge0.env scrutinee in
let x1 = explore_term dbg dfs #a f x0 ge0 pl1 scrut_c scrutinee in
(* Explore the branches *)
fold_left explore_branch x1 branches
| Tv_AscribedT e ty tac _ ->
let c1 = Some (TC_Typ ty [] 0) in
let x1, flag = explore_term dbg dfs #a f x0 ge0 pl1 None ty in
if flag = Continue then
explore_term dbg dfs #a f x1 ge0 pl1 c1 e
else x1, convert_ctrl_flag flag
| Tv_AscribedC e c1 tac _ ->
(* TODO: explore the comp *)
explore_term dbg dfs #a f x0 ge0 pl1 (Some (TC_Comp c1 [] 0)) e
| _ ->
(* Unknown *)
x0, Continue
end
else x0, convert_ctrl_flag flag
and explore_pattern dbg dfs #a f x ge0 pat =
print_dbg dbg ("[> explore_pattern:");
match pat with
| Pat_Constant _ -> ge0, x, Continue
| Pat_Cons fv us patterns ->
let explore_pat ge_x_flag pat =
let ge0, x, flag = ge_x_flag in
let pat1, _ = pat in
if flag = Continue then
explore_pattern dbg dfs #a f x ge0 pat1
else
(* Don't convert the flag *)
ge0, x, flag
in
fold_left explore_pat (ge0, x, Continue) patterns
| Pat_Var bv st ->
let ge1 = genv_push_bv ge0 bv (unseal st) false None in
ge1, x, Continue
| Pat_Dot_Term _ -> ge0, x, Continue
(*** Variables in a term *)
/// Returns the list of free variables contained in a term
val free_in : term -> Tac (list bv)
let free_in t =
let same_name (bv1 bv2 : bv) : Tac bool =
name_of_bv bv1 = name_of_bv bv2
in
let update_free (fl:list bv) (ge:genv) (pl:list (genv & term_view))
(c:option typ_or_comp) (tv:term_view) :
Tac (list bv & ctrl_flag) =
match tv with
| Tv_Var bv | Tv_BVar bv ->
(* Check if the binding was not introduced during the traversal *)
begin match genv_get_from_name ge (name_of_bv bv) with
| None ->
(* Check if we didn't already count the binding *)
let fl' = if Tactics.tryFind (same_name bv) fl then fl else bv :: fl in
fl', Continue
| Some _ -> fl, Continue
end
| _ -> fl, Continue
in
let e = top_env () in (* we actually don't care about the environment *)
let ge = mk_genv e [] [] in
List.Tot.rev (fst (explore_term false false update_free [] ge [] None t))
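(* For instance (informal): on a term such as [fun x -> x + y] where [y] comes
   from the enclosing environment, the traversal pushes [x] into the genv when
   it enters the abstraction, so [genv_get_from_name] filters it out and only
   [y] is reported as free. *)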
/// Returns the list of abstract variables appearing in a term, in the order in
/// which they were introduced in the context.
val abs_free_in : genv -> term -> Tac (list (bv & typ))
let abs_free_in ge t =
let fvl = free_in t in
let absl = List.rev (genv_abstract_bvs ge) in
let is_free_in_term bv =
Some? (List.Tot.find (bv_eq bv) fvl)
in
let absfree = List.Tot.concatMap
(fun (bv, ty) -> if is_free_in_term bv then [bv,ty] else []) absl
in
absfree
/// Returns the list of free shadowed variables appearing in a term.
val shadowed_free_in : genv -> term -> Tac (list bv)
let shadowed_free_in ge t =
let fvl = free_in t in
List.Tot.filter (fun bv -> bv_is_shadowed ge bv) fvl
/// Returns true if a term contains variables which are shadowed in a given environment | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val term_has_shadowed_variables : genv -> term -> Tac bool | [] | FStar.InteractiveHelpers.ExploreTerm.term_has_shadowed_variables | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | ge: FStar.InteractiveHelpers.Base.genv -> t: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac Prims.bool | {
"end_col": 50,
"end_line": 619,
"start_col": 38,
"start_line": 617
} |
FStar.Tactics.Effect.Tac | val safe_tc (e:env) (t:term) : Tac (option term) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let safe_tc e t =
try Some (tc e t) with | _ -> None | val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t = | true | null | false | try Some (tc e t) with | _ -> None | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.term",
"FStar.Tactics.V1.Derived.try_with",
"FStar.Pervasives.Native.option",
"Prims.unit",
"FStar.Pervasives.Native.Some",
"FStar.Tactics.V1.Builtins.tc",
"Prims.exn",
"FStar.Pervasives.Native.None"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term) | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val safe_tc (e:env) (t:term) : Tac (option term) | [] | FStar.InteractiveHelpers.ExploreTerm.safe_tc | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | e: FStar.Reflection.Types.env -> t: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac (FStar.Pervasives.Native.option FStar.Reflection.Types.term) | {
"end_col": 36,
"end_line": 91,
"start_col": 2,
"start_line": 91
} |
FStar.Tactics.Effect.Tac | val shadowed_free_in : genv -> term -> Tac (list bv) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let shadowed_free_in ge t =
let fvl = free_in t in
List.Tot.filter (fun bv -> bv_is_shadowed ge bv) fvl | val shadowed_free_in : genv -> term -> Tac (list bv)
let shadowed_free_in ge t = | true | null | false | let fvl = free_in t in
List.Tot.filter (fun bv -> bv_is_shadowed ge bv) fvl | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Reflection.Types.term",
"FStar.List.Tot.Base.filter",
"FStar.Reflection.Types.bv",
"FStar.InteractiveHelpers.Base.bv_is_shadowed",
"Prims.bool",
"Prims.list",
"FStar.InteractiveHelpers.ExploreTerm.free_in"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term' type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two list of binders:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaning binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None
/// Exploring a term
(*** Term exploration *)
/// Explore a term, correctly updating the environment when traversing abstractions
let convert_ctrl_flag (flag : ctrl_flag) =
match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort
/// TODO: for now I need to use universe 0 for type a because otherwise it doesn't
/// type check
/// ctrl_flag:
/// - Continue: continue exploring the term
/// - Skip: don't explore the sub-terms of this term
/// - Abort: stop exploration
/// TODO: we might want a more precise control (like: don't explore the type of the
/// ascription but explore its body)
/// Note that ``explore_term`` doesn't use the environment parameter besides pushing
/// binders and passing it to ``f``, which means that you can give it arbitrary
/// environments, ``explore_term`` itself won't fail (but the passed function might).
let explorer (a : Type) =
a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view ->
Tac (a & ctrl_flag)
// TODO: use more
let bind_expl (#a : Type) (x : a) (f1 f2 : a -> Tac (a & ctrl_flag)) : Tac (a & ctrl_flag) =
let x1, flag1 = f1 x in
if flag1 = Continue then
f2 x1
else x1, convert_ctrl_flag flag1
// TODO: change the signature to move the dbg flag
val explore_term :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge : genv
(* the list of terms traversed so far (first is most recent) with the environment
* at the time they were traversed *)
-> parents : list (genv & term_view)
-> c : option typ_or_comp
-> t:term ->
Tac (a & ctrl_flag)
val explore_pattern :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge:genv
-> pat:pattern ->
Tac (genv & a & ctrl_flag)
(* TODO: carry around the list of encompassing terms *)
let rec explore_term dbg dfs #a f x ge0 pl0 c0 t0 =
print_dbg dbg ("[> explore_term: " ^ term_construct t0 ^ ":\n" ^ term_to_string t0);
let tv0 = inspect t0 in
let x0, flag = f x ge0 pl0 c0 tv0 in
let pl1 = (ge0, tv0) :: pl0 in
if flag = Continue then
begin match tv0 with
| Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> x0, Continue
| Tv_App hd (a,qual) ->
(* Explore the argument - we update the target typ_or_comp when doing so.
* Note that the only way to get the correct target type is to deconstruct
* the type of the head *)
let a_c = safe_arg_typ_or_comp dbg ge0.env hd in
print_dbg dbg ("Tv_App: updated target typ_or_comp to:\n" ^
option_to_string typ_or_comp_to_string a_c);
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 a_c a in
(* Explore the head - no type information here: we can compute it,
* but it seems useless (or maybe use it only if it is not Total) *)
if flag1 = Continue then
explore_term dbg dfs f x1 ge0 pl1 None hd
else x1, convert_ctrl_flag flag1
| Tv_Abs br body ->
let ge1 = genv_push_binder ge0 br false None in
let c1 = abs_update_opt_typ_or_comp br c0 ge1.env in
explore_term dbg dfs f x0 ge1 pl1 c1 body
| Tv_Arrow br c0 -> x0, Continue (* TODO: we might want to explore that *)
| Tv_Type _ -> x0, Continue
| Tv_Refine bv sort ref ->
let bvv = inspect_bv bv in
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 None sort in
if flag1 = Continue then
let ge1 = genv_push_bv ge0 bv sort false None in
explore_term dbg dfs f x1 ge1 pl1 None ref
else x1, convert_ctrl_flag flag1
| Tv_Const _ -> x0, Continue
| Tv_Uvar _ _ -> x0, Continue
| Tv_Let recf attrs bv ty def body ->
(* Binding definition exploration - for the target computation: initially we
* used the type of the definition, however it is often unnecessarily complex.
* Now, we use the type of the binder used for the binding. *)
let def_c = Some (TC_Typ ty [] 0) in
let explore_def x = explore_term dbg dfs f x ge0 pl1 def_c def in
(* Exploration of the following instructions *)
let ge1 = genv_push_bv ge0 bv ty false (Some def) in
let explore_next x = explore_term dbg dfs f x ge1 pl1 c0 body in
(* Perform the exploration in the proper order *)
let expl1, expl2 = if dfs then explore_next, explore_def else explore_def, explore_next in
bind_expl x0 expl1 expl2
| Tv_Match scrutinee _ret_opt branches -> //AR: TODO: need to account for returns annotation here
(* Auxiliary function to explore the branches *)
let explore_branch (x_flag : a & ctrl_flag) (br : branch) : Tac (a & ctrl_flag)=
let x0, flag = x_flag in
if flag = Continue then
let pat, branch_body = br in
(* Explore the pattern *)
let ge1, x1, flag1 = explore_pattern dbg dfs #a f x0 ge0 pat in
if flag1 = Continue then
(* Explore the branch body *)
explore_term dbg dfs #a f x1 ge1 pl1 c0 branch_body
else x1, convert_ctrl_flag flag1
(* Don't convert the flag *)
else x0, flag
in
(* Explore the scrutinee *)
let scrut_c = safe_typ_or_comp dbg ge0.env scrutinee in
let x1 = explore_term dbg dfs #a f x0 ge0 pl1 scrut_c scrutinee in
(* Explore the branches *)
fold_left explore_branch x1 branches
| Tv_AscribedT e ty tac _ ->
let c1 = Some (TC_Typ ty [] 0) in
let x1, flag = explore_term dbg dfs #a f x0 ge0 pl1 None ty in
if flag = Continue then
explore_term dbg dfs #a f x1 ge0 pl1 c1 e
else x1, convert_ctrl_flag flag
| Tv_AscribedC e c1 tac _ ->
(* TODO: explore the comp *)
explore_term dbg dfs #a f x0 ge0 pl1 (Some (TC_Comp c1 [] 0)) e
| _ ->
(* Unknown *)
x0, Continue
end
else x0, convert_ctrl_flag flag
and explore_pattern dbg dfs #a f x ge0 pat =
print_dbg dbg ("[> explore_pattern:");
match pat with
| Pat_Constant _ -> ge0, x, Continue
| Pat_Cons fv us patterns ->
let explore_pat ge_x_flag pat =
let ge0, x, flag = ge_x_flag in
let pat1, _ = pat in
if flag = Continue then
explore_pattern dbg dfs #a f x ge0 pat1
else
(* Don't convert the flag *)
ge0, x, flag
in
fold_left explore_pat (ge0, x, Continue) patterns
| Pat_Var bv st ->
let ge1 = genv_push_bv ge0 bv (unseal st) false None in
ge1, x, Continue
| Pat_Dot_Term _ -> ge0, x, Continue
(*** Variables in a term *)
/// Returns the list of free variables contained in a term
val free_in : term -> Tac (list bv)
let free_in t =
let same_name (bv1 bv2 : bv) : Tac bool =
name_of_bv bv1 = name_of_bv bv2
in
let update_free (fl:list bv) (ge:genv) (pl:list (genv & term_view))
(c:option typ_or_comp) (tv:term_view) :
Tac (list bv & ctrl_flag) =
match tv with
| Tv_Var bv | Tv_BVar bv ->
(* Check if the binding was not introduced during the traversal *)
begin match genv_get_from_name ge (name_of_bv bv) with
| None ->
(* Check if we didn't already count the binding *)
let fl' = if Tactics.tryFind (same_name bv) fl then fl else bv :: fl in
fl', Continue
| Some _ -> fl, Continue
end
| _ -> fl, Continue
in
let e = top_env () in (* we actually don't care about the environment *)
let ge = mk_genv e [] [] in
List.Tot.rev (fst (explore_term false false update_free [] ge [] None t))
/// Returns the list of abstract variables appearing in a term, in the order in
/// which they were introduced in the context.
val abs_free_in : genv -> term -> Tac (list (bv & typ))
let abs_free_in ge t =
let fvl = free_in t in
let absl = List.rev (genv_abstract_bvs ge) in
let is_free_in_term bv =
Some? (List.Tot.find (bv_eq bv) fvl)
in
let absfree = List.Tot.concatMap
(fun (bv, ty) -> if is_free_in_term bv then [bv,ty] else []) absl
in
absfree
/// Returns the list of free shadowed variables appearing in a term. | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val shadowed_free_in : genv -> term -> Tac (list bv) | [] | FStar.InteractiveHelpers.ExploreTerm.shadowed_free_in | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | ge: FStar.InteractiveHelpers.Base.genv -> t: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac (Prims.list FStar.Reflection.Types.bv) | {
"end_col": 54,
"end_line": 613,
"start_col": 27,
"start_line": 611
} |
FStar.Tactics.Effect.Tac | val _abs_update_typ (b: binder) (ty: typ) (pl: list binder) (e: env) : Tac typ_or_comp | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err | val _abs_update_typ (b: binder) (ty: typ) (pl: list binder) (e: env) : Tac typ_or_comp
let _abs_update_typ (b: binder) (ty: typ) (pl: list binder) (e: env) : Tac typ_or_comp = | true | null | false | try
let ty' = unfold_until_arrow e ty in
match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> mfail "_abs_update_typ: inconsistent state"
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.binder",
"FStar.Reflection.Types.typ",
"Prims.list",
"FStar.Reflection.Types.env",
"FStar.Tactics.V1.Derived.try_with",
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"Prims.unit",
"FStar.Reflection.Types.comp",
"FStar.InteractiveHelpers.ExploreTerm.TC_Comp",
"Prims.Cons",
"FStar.InteractiveHelpers.ExploreTerm.subst_binder_in_comp",
"FStar.Reflection.Types.term",
"FStar.Tactics.V1.Builtins.pack",
"FStar.Reflection.V1.Data.Tv_Var",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Reflection.V1.Data.term_view",
"FStar.InteractiveHelpers.Base.mfail",
"FStar.Tactics.V1.Builtins.inspect",
"FStar.InteractiveHelpers.ExploreTerm.unfold_until_arrow",
"Prims.exn",
"Prims.string",
"Prims.op_Hat",
"FStar.Tactics.V1.Builtins.term_to_string",
"FStar.Tactics.Effect.raise"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term' type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case where we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp = | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val _abs_update_typ (b: binder) (ty: typ) (pl: list binder) (e: env) : Tac typ_or_comp | [] | FStar.InteractiveHelpers.ExploreTerm._abs_update_typ | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: FStar.Reflection.Types.binder ->
ty: FStar.Reflection.Types.typ ->
pl: Prims.list FStar.Reflection.Types.binder ->
e: FStar.Reflection.Types.env
-> FStar.Tactics.Effect.Tac FStar.InteractiveHelpers.ExploreTerm.typ_or_comp | {
"end_col": 20,
"end_line": 300,
"start_col": 2,
"start_line": 288
} |
FStar.Tactics.Effect.Tac | val abs_free_in : genv -> term -> Tac (list (bv & typ)) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let abs_free_in ge t =
let fvl = free_in t in
let absl = List.rev (genv_abstract_bvs ge) in
let is_free_in_term bv =
Some? (List.Tot.find (bv_eq bv) fvl)
in
let absfree = List.Tot.concatMap
(fun (bv, ty) -> if is_free_in_term bv then [bv,ty] else []) absl
in
absfree | val abs_free_in : genv -> term -> Tac (list (bv & typ))
let abs_free_in ge t = | true | null | false | let fvl = free_in t in
let absl = List.rev (genv_abstract_bvs ge) in
let is_free_in_term bv = Some? (List.Tot.find (bv_eq bv) fvl) in
let absfree =
List.Tot.concatMap (fun (bv, ty) -> if is_free_in_term bv then [bv, ty] else []) absl
in
absfree | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.InteractiveHelpers.Base.genv",
"FStar.Reflection.Types.term",
"Prims.list",
"FStar.Pervasives.Native.tuple2",
"FStar.Reflection.Types.bv",
"FStar.Reflection.Types.typ",
"FStar.List.Tot.Base.concatMap",
"Prims.Cons",
"FStar.Pervasives.Native.Mktuple2",
"Prims.Nil",
"Prims.bool",
"FStar.Pervasives.Native.uu___is_Some",
"Prims.b2t",
"FStar.InteractiveHelpers.Base.bv_eq",
"FStar.List.Tot.Base.find",
"FStar.List.Tot.Base.rev",
"FStar.InteractiveHelpers.Base.genv_abstract_bvs",
"FStar.InteractiveHelpers.ExploreTerm.free_in"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term' type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case where we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists of binders:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaining binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None
/// Exploring a term
(*** Term exploration *)
/// Explore a term, correctly updating the environment when traversing abstractions
let convert_ctrl_flag (flag : ctrl_flag) =
match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort
/// TODO: for now I need to use universe 0 for type a because otherwise it doesn't
/// type check
/// ctrl_flag:
/// - Continue: continue exploring the term
/// - Skip: don't explore the sub-terms of this term
/// - Abort: stop exploration
/// TODO: we might want a more precise control (like: don't explore the type of the
/// ascription but explore its body)
/// Note that ``explore_term`` doesn't use the environment parameter besides pushing
/// binders and passing it to ``f``, which means that you can give it arbitrary
/// environments, ``explore_term`` itself won't fail (but the passed function might).
let explorer (a : Type) =
a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view ->
Tac (a & ctrl_flag)
// TODO: use more
let bind_expl (#a : Type) (x : a) (f1 f2 : a -> Tac (a & ctrl_flag)) : Tac (a & ctrl_flag) =
let x1, flag1 = f1 x in
if flag1 = Continue then
f2 x1
else x1, convert_ctrl_flag flag1
// TODO: change the signature to move the dbg flag
val explore_term :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge : genv
(* the list of terms traversed so far (first is most recent) with the environment
* at the time they were traversed *)
-> parents : list (genv & term_view)
-> c : option typ_or_comp
-> t:term ->
Tac (a & ctrl_flag)
val explore_pattern :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge:genv
-> pat:pattern ->
Tac (genv & a & ctrl_flag)
(* TODO: carry around the list of encompassing terms *)
let rec explore_term dbg dfs #a f x ge0 pl0 c0 t0 =
print_dbg dbg ("[> explore_term: " ^ term_construct t0 ^ ":\n" ^ term_to_string t0);
let tv0 = inspect t0 in
let x0, flag = f x ge0 pl0 c0 tv0 in
let pl1 = (ge0, tv0) :: pl0 in
if flag = Continue then
begin match tv0 with
| Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> x0, Continue
| Tv_App hd (a,qual) ->
(* Explore the argument - we update the target typ_or_comp when doing so.
* Note that the only way to get the correct target type is to deconstruct
* the type of the head *)
let a_c = safe_arg_typ_or_comp dbg ge0.env hd in
print_dbg dbg ("Tv_App: updated target typ_or_comp to:\n" ^
option_to_string typ_or_comp_to_string a_c);
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 a_c a in
(* Explore the head - no type information here: we can compute it,
* but it seems useless (or maybe use it only if it is not Total) *)
if flag1 = Continue then
explore_term dbg dfs f x1 ge0 pl1 None hd
else x1, convert_ctrl_flag flag1
| Tv_Abs br body ->
let ge1 = genv_push_binder ge0 br false None in
let c1 = abs_update_opt_typ_or_comp br c0 ge1.env in
explore_term dbg dfs f x0 ge1 pl1 c1 body
| Tv_Arrow br c0 -> x0, Continue (* TODO: we might want to explore that *)
| Tv_Type _ -> x0, Continue
| Tv_Refine bv sort ref ->
let bvv = inspect_bv bv in
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 None sort in
if flag1 = Continue then
let ge1 = genv_push_bv ge0 bv sort false None in
explore_term dbg dfs f x1 ge1 pl1 None ref
else x1, convert_ctrl_flag flag1
| Tv_Const _ -> x0, Continue
| Tv_Uvar _ _ -> x0, Continue
| Tv_Let recf attrs bv ty def body ->
(* Binding definition exploration - for the target computation: initially we
* used the type of the definition, however it is often unnecessarily complex.
* Now, we use the type of the binder used for the binding. *)
let def_c = Some (TC_Typ ty [] 0) in
let explore_def x = explore_term dbg dfs f x ge0 pl1 def_c def in
(* Exploration of the following instructions *)
let ge1 = genv_push_bv ge0 bv ty false (Some def) in
let explore_next x = explore_term dbg dfs f x ge1 pl1 c0 body in
(* Perform the exploration in the proper order *)
let expl1, expl2 = if dfs then explore_next, explore_def else explore_def, explore_next in
bind_expl x0 expl1 expl2
| Tv_Match scrutinee _ret_opt branches -> //AR: TODO: need to account for returns annotation here
(* Auxiliary function to explore the branches *)
let explore_branch (x_flag : a & ctrl_flag) (br : branch) : Tac (a & ctrl_flag)=
let x0, flag = x_flag in
if flag = Continue then
let pat, branch_body = br in
(* Explore the pattern *)
let ge1, x1, flag1 = explore_pattern dbg dfs #a f x0 ge0 pat in
if flag1 = Continue then
(* Explore the branch body *)
explore_term dbg dfs #a f x1 ge1 pl1 c0 branch_body
else x1, convert_ctrl_flag flag1
(* Don't convert the flag *)
else x0, flag
in
(* Explore the scrutinee *)
let scrut_c = safe_typ_or_comp dbg ge0.env scrutinee in
let x1 = explore_term dbg dfs #a f x0 ge0 pl1 scrut_c scrutinee in
(* Explore the branches *)
fold_left explore_branch x1 branches
| Tv_AscribedT e ty tac _ ->
let c1 = Some (TC_Typ ty [] 0) in
let x1, flag = explore_term dbg dfs #a f x0 ge0 pl1 None ty in
if flag = Continue then
explore_term dbg dfs #a f x1 ge0 pl1 c1 e
else x1, convert_ctrl_flag flag
| Tv_AscribedC e c1 tac _ ->
(* TODO: explore the comp *)
explore_term dbg dfs #a f x0 ge0 pl1 (Some (TC_Comp c1 [] 0)) e
| _ ->
(* Unknown *)
x0, Continue
end
else x0, convert_ctrl_flag flag
and explore_pattern dbg dfs #a f x ge0 pat =
print_dbg dbg ("[> explore_pattern:");
match pat with
| Pat_Constant _ -> ge0, x, Continue
| Pat_Cons fv us patterns ->
let explore_pat ge_x_flag pat =
let ge0, x, flag = ge_x_flag in
let pat1, _ = pat in
if flag = Continue then
explore_pattern dbg dfs #a f x ge0 pat1
else
(* Don't convert the flag *)
ge0, x, flag
in
fold_left explore_pat (ge0, x, Continue) patterns
| Pat_Var bv st ->
let ge1 = genv_push_bv ge0 bv (unseal st) false None in
ge1, x, Continue
| Pat_Dot_Term _ -> ge0, x, Continue
(*** Variables in a term *)
/// Returns the list of free variables contained in a term
val free_in : term -> Tac (list bv)
let free_in t =
let same_name (bv1 bv2 : bv) : Tac bool =
name_of_bv bv1 = name_of_bv bv2
in
let update_free (fl:list bv) (ge:genv) (pl:list (genv & term_view))
(c:option typ_or_comp) (tv:term_view) :
Tac (list bv & ctrl_flag) =
match tv with
| Tv_Var bv | Tv_BVar bv ->
(* Check if the binding was not introduced during the traversal *)
begin match genv_get_from_name ge (name_of_bv bv) with
| None ->
(* Check if we didn't already count the binding *)
let fl' = if Tactics.tryFind (same_name bv) fl then fl else bv :: fl in
fl', Continue
| Some _ -> fl, Continue
end
| _ -> fl, Continue
in
let e = top_env () in (* we actually don't care about the environment *)
let ge = mk_genv e [] [] in
List.Tot.rev (fst (explore_term false false update_free [] ge [] None t))
/// Returns the list of abstract variables appearing in a term, in the order in
/// which they were introduced in the context. | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val abs_free_in : genv -> term -> Tac (list (bv & typ)) | [] | FStar.InteractiveHelpers.ExploreTerm.abs_free_in | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | ge: FStar.InteractiveHelpers.Base.genv -> t: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac (Prims.list (FStar.Reflection.Types.bv * FStar.Reflection.Types.typ)) | {
"end_col": 9,
"end_line": 607,
"start_col": 22,
"start_line": 598
} |
FStar.Tactics.Effect.Tac | val unfold_until_arrow : env -> typ -> Tac typ | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end | val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 = | true | null | false | if Tv_Arrow? (inspect ty0)
then ty0
else
let ty = norm_term_env e [] ty0 in
let unfold_fv (fv: fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
let ty' = norm_term_env e [delta_only [fvn]] ty in
match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
else ty'
| _ -> ty'
in
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
let hd, args = collect_app ty in
(match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0))
| Tv_Refine bv sort ref -> unfold_until_arrow e sort
| Tv_AscribedT body _ _ _ | Tv_AscribedC body _ _ _ -> unfold_until_arrow e body
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.typ",
"Prims.bool",
"FStar.Reflection.Types.binder",
"FStar.Reflection.Types.comp",
"FStar.Reflection.Types.fv",
"FStar.InteractiveHelpers.ExploreTerm.unfold_until_arrow",
"FStar.Reflection.Types.term",
"FStar.Reflection.V1.Data.argv",
"Prims.list",
"FStar.Reflection.V1.Derived.mk_app",
"FStar.Reflection.V1.Data.term_view",
"FStar.InteractiveHelpers.Base.mfail",
"Prims.string",
"Prims.op_Hat",
"FStar.Tactics.V1.Builtins.term_to_string",
"FStar.Tactics.V1.Builtins.inspect",
"FStar.Pervasives.Native.tuple2",
"FStar.Tactics.V1.SyntaxHelpers.collect_app",
"FStar.Reflection.Types.bv",
"FStar.Pervasives.Native.option",
"Prims.op_Equality",
"FStar.Reflection.V1.Derived.flatten_name",
"FStar.Reflection.V1.Builtins.inspect_fv",
"FStar.Tactics.V1.Builtins.norm_term_env",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.delta_only",
"Prims.Nil",
"FStar.Tactics.V1.Builtins.pack",
"FStar.Reflection.V1.Data.Tv_FVar",
"FStar.Reflection.V1.Data.uu___is_Tv_Arrow"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as its first parameter
/// a binder which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val unfold_until_arrow : env -> typ -> Tac typ | [
"recursion"
] | FStar.InteractiveHelpers.ExploreTerm.unfold_until_arrow | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | e: FStar.Reflection.Types.env -> ty0: FStar.Reflection.Types.typ
-> FStar.Tactics.Effect.Tac FStar.Reflection.Types.typ | {
"end_col": 7,
"end_line": 251,
"start_col": 2,
"start_line": 207
} |
FStar.Tactics.Effect.Tac | val comp_qualifier (c : comp) : Tac string | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff" | val comp_qualifier (c : comp) : Tac string
let comp_qualifier (c: comp) : Tac string = | true | null | false | match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff" | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.comp",
"FStar.Reflection.V1.Builtins.inspect_comp",
"FStar.Reflection.Types.typ",
"FStar.Reflection.Types.term",
"FStar.Reflection.V1.Data.universes",
"FStar.Reflection.Types.name",
"Prims.list",
"FStar.Reflection.V1.Data.argv",
"Prims.string"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1" | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val comp_qualifier (c : comp) : Tac string | [] | FStar.InteractiveHelpers.ExploreTerm.comp_qualifier | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | c: FStar.Reflection.Types.comp -> FStar.Tactics.Effect.Tac Prims.string | {
"end_col": 30,
"end_line": 35,
"start_col": 2,
"start_line": 31
} |
Prims.Tot | val params_of_typ_or_comp (c: typ_or_comp) : list binder | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl | val params_of_typ_or_comp (c: typ_or_comp) : list binder
let params_of_typ_or_comp (c: typ_or_comp) : list binder = | false | null | false | match c with | TC_Typ _ pl _ | TC_Comp _ pl _ -> pl | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.Reflection.Types.typ",
"Prims.list",
"FStar.Reflection.Types.binder",
"Prims.nat",
"FStar.Reflection.Types.comp"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes a first parameter
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
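(* A minimal sketch of the encoding: `TC_Comp c [b1] 1` records that one abstraction
   binder `b1` has been crossed but that the corresponding substitution in `c` is
   still pending; the `num_unflushed` counter is what makes the instantiation lazy. *)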
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp`` | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val params_of_typ_or_comp (c: typ_or_comp) : list binder | [] | FStar.InteractiveHelpers.ExploreTerm.params_of_typ_or_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | c: FStar.InteractiveHelpers.ExploreTerm.typ_or_comp -> Prims.list FStar.Reflection.Types.binder | {
"end_col": 40,
"end_line": 172,
"start_col": 2,
"start_line": 171
} |
Prims.Tot | val get_comp_ret_type : comp -> Tot typ | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit) | val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c = | false | null | false | match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty | C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.Reflection.Types.comp",
"FStar.Reflection.V1.Builtins.inspect_comp",
"FStar.Reflection.Types.typ",
"FStar.Reflection.V1.Data.universes",
"FStar.Reflection.Types.name",
"FStar.Reflection.Types.term",
"Prims.list",
"FStar.Reflection.V1.Data.argv"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val get_comp_ret_type : comp -> Tot typ | [] | FStar.InteractiveHelpers.ExploreTerm.get_comp_ret_type | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | c: FStar.Reflection.Types.comp -> FStar.Reflection.Types.typ | {
"end_col": 28,
"end_line": 127,
"start_col": 2,
"start_line": 124
} |
Prims.Tot | val effect_type_to_string : effect_type -> string | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown" | val effect_type_to_string : effect_type -> string
let effect_type_to_string ety = | false | null | false | match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown" | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.InteractiveHelpers.ExploreTerm.effect_type",
"Prims.string"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1" | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val effect_type_to_string : effect_type -> string | [] | FStar.InteractiveHelpers.ExploreTerm.effect_type_to_string | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | ety: FStar.InteractiveHelpers.ExploreTerm.effect_type -> Prims.string | {
"end_col": 28,
"end_line": 54,
"start_col": 2,
"start_line": 46
} |
FStar.Tactics.Effect.Tac | val get_type_info (e: env) (t: term) : Tac (option type_info) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty) | val get_type_info (e: env) (t: term) : Tac (option type_info)
let get_type_info (e: env) (t: term) : Tac (option type_info) = | true | null | false | match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.term",
"FStar.Pervasives.Native.None",
"FStar.InteractiveHelpers.ExploreTerm.type_info",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.Some",
"FStar.InteractiveHelpers.ExploreTerm.get_type_info_from_type",
"FStar.InteractiveHelpers.ExploreTerm.safe_tc"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1" | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val get_type_info (e: env) (t: term) : Tac (option type_info) | [] | FStar.InteractiveHelpers.ExploreTerm.get_type_info | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | e: FStar.Reflection.Types.env -> t: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac
(FStar.Pervasives.Native.option FStar.InteractiveHelpers.ExploreTerm.type_info) | {
"end_col": 48,
"end_line": 113,
"start_col": 2,
"start_line": 111
} |
FStar.Tactics.Effect.Tac | val inst_comp : env -> comp -> list term -> Tac comp | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl' | val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl = | true | null | false | match tl with
| [] -> c
| t :: tl' ->
let c' =
try inst_comp_once e c t
with
| MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl' | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.comp",
"Prims.list",
"FStar.Reflection.Types.term",
"FStar.InteractiveHelpers.ExploreTerm.inst_comp",
"FStar.Tactics.V1.Derived.try_with",
"Prims.unit",
"FStar.InteractiveHelpers.ExploreTerm.inst_comp_once",
"Prims.exn",
"Prims.string",
"FStar.InteractiveHelpers.Base.mfail",
"Prims.op_Hat",
"FStar.Tactics.Effect.raise"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes a first parameter
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val inst_comp : env -> comp -> list term -> Tac comp | [
"recursion"
] | FStar.InteractiveHelpers.ExploreTerm.inst_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
e: FStar.Reflection.Types.env ->
c: FStar.Reflection.Types.comp ->
tl: Prims.list FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac FStar.Reflection.Types.comp | {
"end_col": 22,
"end_line": 274,
"start_col": 2,
"start_line": 267
} |
Prims.Tot | val effect_type_is_pure : effect_type -> Tot bool | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false | val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype = | false | null | false | match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.InteractiveHelpers.ExploreTerm.effect_type",
"Prims.bool"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val effect_type_is_pure : effect_type -> Tot bool | [] | FStar.InteractiveHelpers.ExploreTerm.effect_type_is_pure | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | etype: FStar.InteractiveHelpers.ExploreTerm.effect_type -> Prims.bool | {
"end_col": 39,
"end_line": 71,
"start_col": 2,
"start_line": 69
} |
Prims.Tot | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let convert_ctrl_flag (flag : ctrl_flag) =
match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort | let convert_ctrl_flag (flag: ctrl_flag) = | false | null | false | match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.Tactics.Types.ctrl_flag",
"FStar.Tactics.Types.Continue",
"FStar.Tactics.Types.Abort"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes a first parameter
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
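(* A minimal sketch, assuming a type abbreviation `type t2 = nat -> Tot bool` is in
   scope: `unfold_until_arrow e (`t2)` unfolds the abbreviation and returns a term of
   shape `Tv_Arrow _ _`, while a type that can never reveal an arrow (e.g. `int`)
   makes the function fail with `mfail`. *)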
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
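(* A minimal sketch, assuming `e : env` and `c : comp` whose return type unfolds to
   `x:a -> y:b -> Tot r`: `inst_comp e c [t1; t2]` behaves like
   `inst_comp_once e (inst_comp_once e c t1) t2`, substituting `t1` and then `t2`
   for the successive arrow binders in the result type. *)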
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// When we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
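(* A minimal sketch of the lazy update: starting from `TC_Comp c [] 0` and diving
   under `fun x -> fun y -> body` yields `TC_Comp c [y_bind; x_bind] 2`; no
   substitution is performed here, the two pending ones are only applied when
   `flush_typ_or_comp` inspects the computation type. *)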
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists of binders:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaning binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
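(* A minimal sketch, assuming `tyc = TC_Comp c [y_bind; x_bind] 2` where the return
   type of `c` is an arrow with two parameters: flushing strips those two arrows,
   substitutes the stored binders for the arrow parameters in a single pass, and
   returns a `TC_Comp c' [y_bind; x_bind] 0` whose counter is zero. *)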
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None
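(* A minimal sketch, assuming type-checking `hd` succeeds: for an application
   `hd a` where `hd` has type `nat -> Tot bool`, `safe_arg_typ_or_comp dbg e hd`
   is expected to return `Some (TC_Typ (`nat) [] 0)`, i.e. the expected type of
   the argument `a`. *)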
/// Exploring a term
(*** Term exploration *)
/// Explore a term, correctly updating the environment when traversing abstractions | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val convert_ctrl_flag : flag: FStar.Tactics.Types.ctrl_flag -> FStar.Tactics.Types.ctrl_flag | [] | FStar.InteractiveHelpers.ExploreTerm.convert_ctrl_flag | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | flag: FStar.Tactics.Types.ctrl_flag -> FStar.Tactics.Types.ctrl_flag | {
"end_col": 18,
"end_line": 415,
"start_col": 2,
"start_line": 412
} |
|
Prims.Tot | val num_unflushed_of_typ_or_comp (c: typ_or_comp) : nat | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n | val num_unflushed_of_typ_or_comp (c: typ_or_comp) : nat
let num_unflushed_of_typ_or_comp (c: typ_or_comp) : nat = | false | null | false | match c with | TC_Typ _ _ n | TC_Comp _ _ n -> n | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [
"total"
] | [
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.Reflection.Types.typ",
"Prims.list",
"FStar.Reflection.Types.binder",
"Prims.nat",
"FStar.Reflection.Types.comp"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter a binder
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
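(* Editor's illustration, not part of the original file (the names [c0], [x1], [x2]
   are hypothetical): walking under the two abstractions of [fun x1 x2 -> body]
   only records the binders and bumps the counter,
     TC_Comp c0 []       0
     TC_Comp c0 [x1]     1
     TC_Comp c0 [x2; x1] 2
   and the two pending substitutions are applied in a single pass when the
   typ_or_comp is eventually flushed. *)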
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl | false | true | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val num_unflushed_of_typ_or_comp (c: typ_or_comp) : nat | [] | FStar.InteractiveHelpers.ExploreTerm.num_unflushed_of_typ_or_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | c: FStar.InteractiveHelpers.ExploreTerm.typ_or_comp -> Prims.nat | {
"end_col": 37,
"end_line": 176,
"start_col": 0,
"start_line": 175
} |
FStar.Tactics.Effect.Tac | val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1) | val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let abs_update_typ_or_comp (b: binder) (c: typ_or_comp) (e: env) : Tac typ_or_comp = | true | null | false | match c with
| TC_Typ v pl n -> TC_Typ v (b :: pl) (n + 1)
| TC_Comp v pl n -> TC_Comp v (b :: pl) (n + 1) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.binder",
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.typ",
"Prims.list",
"Prims.nat",
"FStar.InteractiveHelpers.ExploreTerm.TC_Typ",
"Prims.Cons",
"Prims.op_Addition",
"FStar.Reflection.Types.comp",
"FStar.InteractiveHelpers.ExploreTerm.TC_Comp"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter a binder
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
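(* Editor's illustration, not part of the original file: assuming a hypothetical
   abbreviation [let pred = nat -> Tot bool], [unfold_until_arrow e (`pred)] keeps
   unfolding the head until the arrow [nat -> Tot bool] is revealed, and it fails
   with [mfail] when no arrow can be uncovered. *)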
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
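(* Editor's sketch, not part of the original file (hypothetical types and terms):
   with [c = Tot (x:nat -> y:nat -> Tot (n:nat{n = x + y}))], [inst_comp e c [t1; t2]]
   peels the two arrows one at a time, substituting [x := t1] and [y := t2], and
   returns (up to unfolding) [Tot (n:nat{n = t1 + t2})]. *)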
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case where we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp | [] | FStar.InteractiveHelpers.ExploreTerm.abs_update_typ_or_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: FStar.Reflection.Types.binder ->
c: FStar.InteractiveHelpers.ExploreTerm.typ_or_comp ->
e: FStar.Reflection.Types.env
-> FStar.Tactics.Effect.Tac FStar.InteractiveHelpers.ExploreTerm.typ_or_comp | {
"end_col": 45,
"end_line": 312,
"start_col": 2,
"start_line": 303
} |
FStar.Tactics.Effect.Tac | val bind_expl (#a: Type) (x: a) (f1 f2: (a -> Tac (a & ctrl_flag))) : Tac (a & ctrl_flag) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let bind_expl (#a : Type) (x : a) (f1 f2 : a -> Tac (a & ctrl_flag)) : Tac (a & ctrl_flag) =
let x1, flag1 = f1 x in
if flag1 = Continue then
f2 x1
else x1, convert_ctrl_flag flag1 | val bind_expl (#a: Type) (x: a) (f1 f2: (a -> Tac (a & ctrl_flag))) : Tac (a & ctrl_flag)
let bind_expl (#a: Type) (x: a) (f1 f2: (a -> Tac (a & ctrl_flag))) : Tac (a & ctrl_flag) = | true | null | false | let x1, flag1 = f1 x in
if flag1 = Continue then f2 x1 else x1, convert_ctrl_flag flag1 | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Pervasives.Native.tuple2",
"FStar.Tactics.Types.ctrl_flag",
"Prims.op_Equality",
"FStar.Tactics.Types.Continue",
"Prims.bool",
"FStar.Pervasives.Native.Mktuple2",
"FStar.InteractiveHelpers.ExploreTerm.convert_ctrl_flag"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter a binder
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case where we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
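(* Editor's illustration, not part of the original file (hypothetical names): when
   diving under [fun x -> body] whose expected type is [y:ty -> body_type], the
   target [TC_Typ (y:ty -> body_type) [] 0] becomes [TC_Typ (y:ty -> body_type) [x] 1]:
   the substitution of [y] by [x] in [body_type] is deferred until the next flush. *)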
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists of binders:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaning binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
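(* Editor's sketch, not part of the original file (hypothetical values): flushing
   [TC_Comp c0 [x2; x1] 2] strips two arrows from the return type of [c0], pairs
   their formal parameters with [x1] and [x2], applies the whole substitution in a
   single [apply_subst_in_comp] call, and returns a [TC_Comp c' [x2; x1] 0] whose
   counter is back to zero. *)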
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None
/// Exploring a term
(*** Term exploration *)
/// Explore a term, correctly updating the environment when traversing abstractions
let convert_ctrl_flag (flag : ctrl_flag) =
match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort
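(* Editor's note, not part of the original file: this conversion is what keeps [Skip]
   local. When exploring a sub-term returns [Skip], the caller receives
   [convert_ctrl_flag Skip = Continue] and goes on with its remaining sub-terms,
   while [Abort] is propagated unchanged and stops the whole traversal. *)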
/// TODO: for now I need to use universe 0 for type a because otherwise it doesn't
/// type check
/// ctrl_flag:
/// - Continue: continue exploring the term
/// - Skip: don't explore the sub-terms of this term
/// - Abort: stop exploration
/// TODO: we might want a more precise control (like: don't explore the type of the
/// ascription but explore its body)
/// Note that ``explore_term`` doesn't use the environment parameter besides pushing
/// binders and passing it to ``f``, which means that you can give it arbitrary
/// environments: ``explore_term`` itself won't fail (but the passed function might).
let explorer (a : Type) =
a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view ->
Tac (a & ctrl_flag) | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val bind_expl (#a: Type) (x: a) (f1 f2: (a -> Tac (a & ctrl_flag))) : Tac (a & ctrl_flag) | [] | FStar.InteractiveHelpers.ExploreTerm.bind_expl | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
x: a ->
f1: (_: a -> FStar.Tactics.Effect.Tac (a * FStar.Tactics.Types.ctrl_flag)) ->
f2: (_: a -> FStar.Tactics.Effect.Tac (a * FStar.Tactics.Types.ctrl_flag))
-> FStar.Tactics.Effect.Tac (a * FStar.Tactics.Types.ctrl_flag) | {
"end_col": 34,
"end_line": 438,
"start_col": 92,
"start_line": 434
} |
FStar.Tactics.Effect.Tac | val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err | val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e = | true | null | false | match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with
| MetaAnalysis msg -> None
| err -> raise err | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.binder",
"FStar.Pervasives.Native.option",
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.Reflection.Types.env",
"FStar.Pervasives.Native.None",
"FStar.Tactics.V1.Derived.try_with",
"Prims.unit",
"FStar.Pervasives.Native.Some",
"FStar.InteractiveHelpers.ExploreTerm.abs_update_typ_or_comp",
"Prims.exn",
"Prims.string",
"FStar.Tactics.Effect.raise"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter a binder
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case where we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp) | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp) | [] | FStar.InteractiveHelpers.ExploreTerm.abs_update_opt_typ_or_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
b: FStar.Reflection.Types.binder ->
opt_c: FStar.Pervasives.Native.option FStar.InteractiveHelpers.ExploreTerm.typ_or_comp ->
e: FStar.Reflection.Types.env
-> FStar.Tactics.Effect.Tac
(FStar.Pervasives.Native.option FStar.InteractiveHelpers.ExploreTerm.typ_or_comp) | {
"end_col": 27,
"end_line": 324,
"start_col": 2,
"start_line": 317
} |
FStar.Tactics.Effect.Tac | val typ_or_comp_to_string (tyc: typ_or_comp) : Tac string | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed | val typ_or_comp_to_string (tyc: typ_or_comp) : Tac string
let typ_or_comp_to_string (tyc: typ_or_comp) : Tac string = | true | null | false | match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^
term_to_string v ^
") " ^ list_to_string (fun b -> name_of_binder b) pl ^ " " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^
acomp_to_string c ^
") " ^ list_to_string (fun b -> name_of_binder b) pl ^ " " ^ string_of_int num_unflushed | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.Reflection.Types.typ",
"Prims.list",
"FStar.Reflection.Types.binder",
"Prims.nat",
"Prims.op_Hat",
"Prims.string",
"Prims.string_of_int",
"FStar.InteractiveHelpers.Base.list_to_string",
"FStar.Tactics.V1.Derived.name_of_binder",
"FStar.Tactics.V1.Builtins.term_to_string",
"FStar.Reflection.Types.comp",
"FStar.InteractiveHelpers.Base.acomp_to_string"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter a binder
/// which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val typ_or_comp_to_string (tyc: typ_or_comp) : Tac string | [] | FStar.InteractiveHelpers.ExploreTerm.typ_or_comp_to_string | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | tyc: FStar.InteractiveHelpers.ExploreTerm.typ_or_comp -> FStar.Tactics.Effect.Tac Prims.string | {
"end_col": 37,
"end_line": 167,
"start_col": 2,
"start_line": 161
} |
FStar.Tactics.Effect.Tac | val explore_pattern :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge:genv
-> pat:pattern ->
Tac (genv & a & ctrl_flag) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec explore_term dbg dfs #a f x ge0 pl0 c0 t0 =
print_dbg dbg ("[> explore_term: " ^ term_construct t0 ^ ":\n" ^ term_to_string t0);
let tv0 = inspect t0 in
let x0, flag = f x ge0 pl0 c0 tv0 in
let pl1 = (ge0, tv0) :: pl0 in
if flag = Continue then
begin match tv0 with
| Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> x0, Continue
| Tv_App hd (a,qual) ->
(* Explore the argument - we update the target typ_or_comp when doing so.
* Note that the only way to get the correct target type is to deconstruct
* the type of the head *)
let a_c = safe_arg_typ_or_comp dbg ge0.env hd in
print_dbg dbg ("Tv_App: updated target typ_or_comp to:\n" ^
option_to_string typ_or_comp_to_string a_c);
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 a_c a in
(* Explore the head - no type information here: we can compute it,
* but it seems useless (or maybe use it only if it is not Total) *)
if flag1 = Continue then
explore_term dbg dfs f x1 ge0 pl1 None hd
else x1, convert_ctrl_flag flag1
| Tv_Abs br body ->
let ge1 = genv_push_binder ge0 br false None in
let c1 = abs_update_opt_typ_or_comp br c0 ge1.env in
explore_term dbg dfs f x0 ge1 pl1 c1 body
| Tv_Arrow br c0 -> x0, Continue (* TODO: we might want to explore that *)
| Tv_Type _ -> x0, Continue
| Tv_Refine bv sort ref ->
let bvv = inspect_bv bv in
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 None sort in
if flag1 = Continue then
let ge1 = genv_push_bv ge0 bv sort false None in
explore_term dbg dfs f x1 ge1 pl1 None ref
else x1, convert_ctrl_flag flag1
| Tv_Const _ -> x0, Continue
| Tv_Uvar _ _ -> x0, Continue
| Tv_Let recf attrs bv ty def body ->
(* Binding definition exploration - for the target computation: initially we
* used the type of the definition, however it is often unnecessarily complex.
* Now, we use the type of the binder used for the binding. *)
let def_c = Some (TC_Typ ty [] 0) in
let explore_def x = explore_term dbg dfs f x ge0 pl1 def_c def in
(* Exploration of the following instructions *)
let ge1 = genv_push_bv ge0 bv ty false (Some def) in
let explore_next x = explore_term dbg dfs f x ge1 pl1 c0 body in
(* Perform the exploration in the proper order *)
let expl1, expl2 = if dfs then explore_next, explore_def else explore_def, explore_next in
bind_expl x0 expl1 expl2
| Tv_Match scrutinee _ret_opt branches -> //AR: TODO: need to account for returns annotation here
(* Auxiliary function to explore the branches *)
let explore_branch (x_flag : a & ctrl_flag) (br : branch) : Tac (a & ctrl_flag)=
let x0, flag = x_flag in
if flag = Continue then
let pat, branch_body = br in
(* Explore the pattern *)
let ge1, x1, flag1 = explore_pattern dbg dfs #a f x0 ge0 pat in
if flag1 = Continue then
(* Explore the branch body *)
explore_term dbg dfs #a f x1 ge1 pl1 c0 branch_body
else x1, convert_ctrl_flag flag1
(* Don't convert the flag *)
else x0, flag
in
(* Explore the scrutinee *)
let scrut_c = safe_typ_or_comp dbg ge0.env scrutinee in
let x1 = explore_term dbg dfs #a f x0 ge0 pl1 scrut_c scrutinee in
(* Explore the branches *)
fold_left explore_branch x1 branches
| Tv_AscribedT e ty tac _ ->
let c1 = Some (TC_Typ ty [] 0) in
let x1, flag = explore_term dbg dfs #a f x0 ge0 pl1 None ty in
if flag = Continue then
explore_term dbg dfs #a f x1 ge0 pl1 c1 e
else x1, convert_ctrl_flag flag
| Tv_AscribedC e c1 tac _ ->
(* TODO: explore the comp *)
explore_term dbg dfs #a f x0 ge0 pl1 (Some (TC_Comp c1 [] 0)) e
| _ ->
(* Unknown *)
x0, Continue
end
else x0, convert_ctrl_flag flag
and explore_pattern dbg dfs #a f x ge0 pat =
print_dbg dbg ("[> explore_pattern:");
match pat with
| Pat_Constant _ -> ge0, x, Continue
| Pat_Cons fv us patterns ->
let explore_pat ge_x_flag pat =
let ge0, x, flag = ge_x_flag in
let pat1, _ = pat in
if flag = Continue then
explore_pattern dbg dfs #a f x ge0 pat1
else
(* Don't convert the flag *)
ge0, x, flag
in
fold_left explore_pat (ge0, x, Continue) patterns
| Pat_Var bv st ->
let ge1 = genv_push_bv ge0 bv (unseal st) false None in
ge1, x, Continue
| Pat_Dot_Term _ -> ge0, x, Continue | val explore_pattern :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge:genv
-> pat:pattern ->
Tac (genv & a & ctrl_flag)
let rec explore_pattern dbg dfs #a f x ge0 pat = | true | null | false | print_dbg dbg ("[> explore_pattern:");
match pat with
| Pat_Constant _ -> ge0, x, Continue
| Pat_Cons fv us patterns ->
let explore_pat ge_x_flag pat =
let ge0, x, flag = ge_x_flag in
let pat1, _ = pat in
if flag = Continue then explore_pattern dbg dfs #a f x ge0 pat1 else ge0, x, flag
in
fold_left explore_pat (ge0, x, Continue) patterns
| Pat_Var bv st ->
let ge1 = genv_push_bv ge0 bv (unseal st) false None in
ge1, x, Continue
| Pat_Dot_Term _ -> ge0, x, Continue | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"Prims.bool",
"FStar.InteractiveHelpers.ExploreTerm.explorer",
"FStar.InteractiveHelpers.Base.genv",
"FStar.Reflection.V1.Data.pattern",
"FStar.Reflection.V1.Data.vconst",
"FStar.Pervasives.Native.Mktuple3",
"FStar.Tactics.Types.ctrl_flag",
"FStar.Tactics.Types.Continue",
"FStar.Pervasives.Native.tuple3",
"FStar.Reflection.Types.fv",
"FStar.Pervasives.Native.option",
"FStar.Reflection.V1.Data.universes",
"Prims.list",
"FStar.Pervasives.Native.tuple2",
"FStar.Tactics.Util.fold_left",
"Prims.op_Equality",
"FStar.InteractiveHelpers.ExploreTerm.explore_pattern",
"FStar.Reflection.Types.bv",
"FStar.Sealed.sealed",
"FStar.Reflection.Types.typ",
"FStar.InteractiveHelpers.Base.genv_push_bv",
"FStar.Pervasives.Native.None",
"FStar.Reflection.Types.term",
"FStar.Tactics.Unseal.unseal",
"Prims.unit",
"FStar.InteractiveHelpers.Base.print_dbg"
] | [
"explore_term",
"explore_pattern"
] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the currently supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow's first parameter
/// is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
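(* A minimal sketch (not part of the original module): a typ_or_comp wrapping a
   plain type, with no parameters collected and nothing left to flush. *)
let _ex_tyc : typ_or_comp = TC_Typ (`nat) [] 0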
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
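(* Usage sketch (hypothetical): computing the typ_or_comp of a quoted literal in
   the top-level environment; on success this yields a TC_Comp with an empty
   parameter list and a zero unflushed counter. *)
let _ex_safe_typ_or_comp () : Tac (option typ_or_comp) =
  safe_typ_or_comp false (top_env ()) (`0)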
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
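(* Sketch (hypothetical): a type that is already an arrow is returned unchanged;
   a top-level abbreviation hiding an arrow would be unfolded first. *)
let _ex_unfold_arrow () : Tac typ =
  unfold_until_arrow (top_env ()) (`(nat -> nat))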
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
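(* Sketch (hypothetical): the update is lazy, so starting from TC_Typ (`nat) [] 0
   we simply get TC_Typ (`nat) [b] 1 back - the binder is recorded and the
   unflushed counter is bumped, without performing any substitution yet. *)
let _ex_abs_update (b:binder) (e:env) : Tac typ_or_comp =
  abs_update_typ_or_comp b (TC_Typ (`nat) [] 0) e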
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaning binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None
/// Exploring a term
(*** Term exploration *)
/// Explore a term, correctly updating the environment when traversing abstractions
let convert_ctrl_flag (flag : ctrl_flag) =
match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort
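(* For instance (illustration only): a Skip returned by the user callback prunes
   the sub-terms of the current node but lets the overall traversal go on. *)
let _ex_skip : ctrl_flag = convert_ctrl_flag Skip (* evaluates to Continue *)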
/// TODO: for now I need to use universe 0 for type a because otherwise it doesn't
/// type check
/// ctrl_flag:
/// - Continue: continue exploring the term
/// - Skip: don't explore the sub-terms of this term
/// - Abort: stop exploration
/// TODO: we might want a more precise control (like: don't explore the type of the
/// ascription but explore its body)
/// Note that ``explore_term`` doesn't use the environment parameter besides pushing
/// binders and passing it to ``f``, which means that you can give it arbitrary
/// environments; ``explore_term`` itself won't fail (but the passed function might).
let explorer (a : Type) =
a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view ->
Tac (a & ctrl_flag)
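(* A minimal explorer (illustration only, not part of the original module): it
   counts the visited nodes and never interrupts the traversal. *)
val _count_nodes : explorer nat
let _count_nodes acc _ge _parents _c _tv = acc + 1, Continue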
// TODO: use more
let bind_expl (#a : Type) (x : a) (f1 f2 : a -> Tac (a & ctrl_flag)) : Tac (a & ctrl_flag) =
let x1, flag1 = f1 x in
if flag1 = Continue then
f2 x1
else x1, convert_ctrl_flag flag1
// TODO: change the signature to move the dbg flag
val explore_term :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge : genv
(* the list of terms traversed so far (first is most recent) with the environment
* at the time they were traversed *)
-> parents : list (genv & term_view)
-> c : option typ_or_comp
-> t:term ->
Tac (a & ctrl_flag)
val explore_pattern :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge:genv
-> pat:pattern ->
Tac (genv & a & ctrl_flag)
(* TODO: carry around the list of encompassing terms *)
let rec explore_term dbg dfs #a f x ge0 pl0 c0 t0 =
print_dbg dbg ("[> explore_term: " ^ term_construct t0 ^ ":\n" ^ term_to_string t0);
let tv0 = inspect t0 in
let x0, flag = f x ge0 pl0 c0 tv0 in
let pl1 = (ge0, tv0) :: pl0 in
if flag = Continue then
begin match tv0 with
| Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> x0, Continue
| Tv_App hd (a,qual) ->
(* Explore the argument - we update the target typ_or_comp when doing so.
* Note that the only way to get the correct target type is to deconstruct
* the type of the head *)
let a_c = safe_arg_typ_or_comp dbg ge0.env hd in
print_dbg dbg ("Tv_App: updated target typ_or_comp to:\n" ^
option_to_string typ_or_comp_to_string a_c);
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 a_c a in
(* Explore the head - no type information here: we can compute it,
* but it seems useless (or maybe use it only if it is not Total) *)
if flag1 = Continue then
explore_term dbg dfs f x1 ge0 pl1 None hd
else x1, convert_ctrl_flag flag1
| Tv_Abs br body ->
let ge1 = genv_push_binder ge0 br false None in
let c1 = abs_update_opt_typ_or_comp br c0 ge1.env in
explore_term dbg dfs f x0 ge1 pl1 c1 body
| Tv_Arrow br c0 -> x0, Continue (* TODO: we might want to explore that *)
| Tv_Type _ -> x0, Continue
| Tv_Refine bv sort ref ->
let bvv = inspect_bv bv in
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 None sort in
if flag1 = Continue then
let ge1 = genv_push_bv ge0 bv sort false None in
explore_term dbg dfs f x1 ge1 pl1 None ref
else x1, convert_ctrl_flag flag1
| Tv_Const _ -> x0, Continue
| Tv_Uvar _ _ -> x0, Continue
| Tv_Let recf attrs bv ty def body ->
(* Binding definition exploration - for the target computation: initially we
* used the type of the definition, however it is often unnecessarily complex.
* Now, we use the type of the binder used for the binding. *)
let def_c = Some (TC_Typ ty [] 0) in
let explore_def x = explore_term dbg dfs f x ge0 pl1 def_c def in
(* Exploration of the following instructions *)
let ge1 = genv_push_bv ge0 bv ty false (Some def) in
let explore_next x = explore_term dbg dfs f x ge1 pl1 c0 body in
(* Perform the exploration in the proper order *)
let expl1, expl2 = if dfs then explore_next, explore_def else explore_def, explore_next in
bind_expl x0 expl1 expl2
| Tv_Match scrutinee _ret_opt branches -> //AR: TODO: need to account for returns annotation here
(* Auxiliary function to explore the branches *)
let explore_branch (x_flag : a & ctrl_flag) (br : branch) : Tac (a & ctrl_flag)=
let x0, flag = x_flag in
if flag = Continue then
let pat, branch_body = br in
(* Explore the pattern *)
let ge1, x1, flag1 = explore_pattern dbg dfs #a f x0 ge0 pat in
if flag1 = Continue then
(* Explore the branch body *)
explore_term dbg dfs #a f x1 ge1 pl1 c0 branch_body
else x1, convert_ctrl_flag flag1
(* Don't convert the flag *)
else x0, flag
in
(* Explore the scrutinee *)
let scrut_c = safe_typ_or_comp dbg ge0.env scrutinee in
let x1 = explore_term dbg dfs #a f x0 ge0 pl1 scrut_c scrutinee in
(* Explore the branches *)
fold_left explore_branch x1 branches
| Tv_AscribedT e ty tac _ ->
let c1 = Some (TC_Typ ty [] 0) in
let x1, flag = explore_term dbg dfs #a f x0 ge0 pl1 None ty in
if flag = Continue then
explore_term dbg dfs #a f x1 ge0 pl1 c1 e
else x1, convert_ctrl_flag flag
| Tv_AscribedC e c1 tac _ ->
(* TODO: explore the comp *)
explore_term dbg dfs #a f x0 ge0 pl1 (Some (TC_Comp c1 [] 0)) e
| _ ->
(* Unknown *)
x0, Continue
end
else x0, convert_ctrl_flag flag | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val explore_pattern :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge:genv
-> pat:pattern ->
Tac (genv & a & ctrl_flag) | [
"mutual recursion"
] | FStar.InteractiveHelpers.ExploreTerm.explore_pattern | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
dbg: Prims.bool ->
dfs: Prims.bool ->
f: FStar.InteractiveHelpers.ExploreTerm.explorer a ->
x: a ->
ge: FStar.InteractiveHelpers.Base.genv ->
pat: FStar.Reflection.V1.Data.pattern
-> FStar.Tactics.Effect.Tac
((FStar.InteractiveHelpers.Base.genv * a) * FStar.Tactics.Types.ctrl_flag) | {
"end_col": 38,
"end_line": 567,
"start_col": 2,
"start_line": 550
} |
FStar.Tactics.Effect.Tac | val _flush_typ_or_comp_comp
(dbg: bool)
(e: env)
(rem: list binder)
(inst: list ((bv & typ) & term))
(c: comp)
: Tac comp | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaning binders: " ^ list_to_string (fun b -> name_of_binder b) rem) | val _flush_typ_or_comp_comp
(dbg: bool)
(e: env)
(rem: list binder)
(inst: list ((bv & typ) & term))
(c: comp)
: Tac comp
let rec _flush_typ_or_comp_comp
(dbg: bool)
(e: env)
(rem: list binder)
(inst: list ((bv & typ) & term))
(c: comp)
: Tac comp = | true | null | false | let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] -> flush c inst
| b :: rem' ->
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg
e
rem'
(((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b))) :: inst)
c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^
      acomp_to_string c ^ "\n-remaining binders: " ^ list_to_string (fun b -> name_of_binder b) rem
) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"Prims.bool",
"FStar.Reflection.Types.env",
"Prims.list",
"FStar.Reflection.Types.binder",
"FStar.Pervasives.Native.tuple2",
"FStar.Reflection.Types.bv",
"FStar.Reflection.Types.typ",
"FStar.Reflection.Types.term",
"FStar.Reflection.Types.comp",
"FStar.InteractiveHelpers.ExploreTerm._flush_typ_or_comp_comp",
"Prims.Cons",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Tactics.V1.Builtins.pack",
"FStar.Reflection.V1.Data.Tv_Var",
"FStar.Reflection.V1.Derived.bv_of_binder",
"FStar.Tactics.V1.Derived.binder_sort",
"FStar.Reflection.V1.Data.term_view",
"FStar.InteractiveHelpers.Base.mfail",
"Prims.string",
"Prims.op_Hat",
"FStar.InteractiveHelpers.Base.list_to_string",
"FStar.Tactics.V1.Derived.name_of_binder",
"FStar.InteractiveHelpers.Base.acomp_to_string",
"FStar.Tactics.V1.Builtins.inspect",
"Prims.Nil",
"FStar.InteractiveHelpers.ExploreTerm.get_comp_ret_type",
"FStar.Reflection.V1.Data.uu___is_Tv_Arrow",
"FStar.InteractiveHelpers.Base.apply_subst_in_comp",
"FStar.List.Tot.Base.rev"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the currently supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow's first parameter
/// is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
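(* Sketch (hypothetical): if the return type of `c` unfolds to the arrow
   `nat -> nat -> Tot nat`, instantiating it with the terms `0` and `1`
   leaves the computation `Tot nat`. *)
let _ex_inst_comp (e:env) (c:comp) : Tac comp =
  inst_comp e c [(`0); (`1)]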
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val _flush_typ_or_comp_comp
(dbg: bool)
(e: env)
(rem: list binder)
(inst: list ((bv & typ) & term))
(c: comp)
: Tac comp | [
"recursion"
] | FStar.InteractiveHelpers.ExploreTerm._flush_typ_or_comp_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} |
dbg: Prims.bool ->
e: FStar.Reflection.Types.env ->
rem: Prims.list FStar.Reflection.Types.binder ->
inst:
Prims.list ((FStar.Reflection.Types.bv * FStar.Reflection.Types.typ) *
FStar.Reflection.Types.term) ->
c: FStar.Reflection.Types.comp
-> FStar.Tactics.Effect.Tac FStar.Reflection.Types.comp | {
"end_col": 86,
"end_line": 359,
"start_col": 53,
"start_line": 337
} |
FStar.Tactics.Effect.Tac | val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)}) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0) | val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t = | true | null | false | match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^ "\n-term: " ^ term_to_string t ^ "\n-comp: None");
None
| Some c ->
print_dbg dbg
("[> safe_typ_or_comp:" ^ "\n-term: " ^ term_to_string t ^ "\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"Prims.bool",
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.term",
"FStar.Pervasives.Native.None",
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.Pervasives.Native.option",
"Prims.l_imp",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.InteractiveHelpers.ExploreTerm.uu___is_TC_Comp",
"FStar.Pervasives.Native.__proj__Some__item__v",
"Prims.unit",
"FStar.InteractiveHelpers.Base.print_dbg",
"Prims.string",
"Prims.op_Hat",
"FStar.Tactics.V1.Builtins.term_to_string",
"FStar.Reflection.Types.comp",
"FStar.Pervasives.Native.Some",
"FStar.InteractiveHelpers.ExploreTerm.TC_Comp",
"Prims.Nil",
"FStar.Reflection.Types.binder",
"FStar.InteractiveHelpers.Base.acomp_to_string",
"FStar.InteractiveHelpers.ExploreTerm.safe_tcc"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the currently supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
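/// Illustration (added as a sketch, not part of the original module): Lemma is
/// treated as pure while Stack is effectful.
let _ex_lemma_is_pure : bool = effect_type_is_pure E_Lemma
let _ex_stack_is_pure : bool = effect_type_is_pure E_Stack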
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow's first parameter
/// is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)}) | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)}) | [] | FStar.InteractiveHelpers.ExploreTerm.safe_typ_or_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | dbg: Prims.bool -> e: FStar.Reflection.Types.env -> t: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac
(opt:
FStar.Pervasives.Native.option FStar.InteractiveHelpers.ExploreTerm.typ_or_comp
{Some? opt ==> TC_Comp? (Some?.v opt)}) | {
"end_col": 25,
"end_line": 193,
"start_col": 2,
"start_line": 183
} |
FStar.Tactics.Effect.Tac | val free_in : term -> Tac (list bv) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let free_in t =
let same_name (bv1 bv2 : bv) : Tac bool =
name_of_bv bv1 = name_of_bv bv2
in
let update_free (fl:list bv) (ge:genv) (pl:list (genv & term_view))
(c:option typ_or_comp) (tv:term_view) :
Tac (list bv & ctrl_flag) =
match tv with
| Tv_Var bv | Tv_BVar bv ->
(* Check if the binding was not introduced during the traversal *)
begin match genv_get_from_name ge (name_of_bv bv) with
| None ->
(* Check if we didn't already count the binding *)
let fl' = if Tactics.tryFind (same_name bv) fl then fl else bv :: fl in
fl', Continue
| Some _ -> fl, Continue
end
| _ -> fl, Continue
in
let e = top_env () in (* we actually don't care about the environment *)
let ge = mk_genv e [] [] in
List.Tot.rev (fst (explore_term false false update_free [] ge [] None t)) | val free_in : term -> Tac (list bv)
let free_in t = | true | null | false | let same_name (bv1 bv2: bv) : Tac bool = name_of_bv bv1 = name_of_bv bv2 in
let update_free
(fl: list bv)
(ge: genv)
(pl: list (genv & term_view))
(c: option typ_or_comp)
(tv: term_view)
: Tac (list bv & ctrl_flag) =
match tv with
| Tv_Var bv
| Tv_BVar bv ->
(match genv_get_from_name ge (name_of_bv bv) with
| None ->
let fl' = if Tactics.tryFind (same_name bv) fl then fl else bv :: fl in
fl', Continue
| Some _ -> fl, Continue)
| _ -> fl, Continue
in
let e = top_env () in
let ge = mk_genv e [] [] in
List.Tot.rev (fst (explore_term false false update_free [] ge [] None t)) | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"FStar.Reflection.Types.term",
"FStar.List.Tot.Base.rev",
"FStar.Reflection.Types.bv",
"Prims.list",
"FStar.Pervasives.Native.fst",
"FStar.Tactics.Types.ctrl_flag",
"FStar.Pervasives.Native.tuple2",
"FStar.InteractiveHelpers.ExploreTerm.explore_term",
"Prims.Nil",
"FStar.InteractiveHelpers.Base.genv",
"FStar.Reflection.V1.Data.term_view",
"FStar.Pervasives.Native.None",
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.InteractiveHelpers.Base.mk_genv",
"FStar.Pervasives.Native.tuple3",
"FStar.Reflection.Types.typ",
"Prims.bool",
"FStar.Reflection.Types.env",
"FStar.Tactics.V1.Builtins.top_env",
"FStar.Pervasives.Native.option",
"FStar.Pervasives.Native.Mktuple2",
"FStar.Tactics.Types.Continue",
"Prims.Cons",
"FStar.Tactics.Util.tryFind",
"FStar.InteractiveHelpers.Base.genv_get_from_name",
"Prims.string",
"FStar.Tactics.V1.Derived.name_of_bv",
"Prims.op_Equality"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// a binder which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
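(* Worked example (illustrative sketch): when exploring [fun x y -> body] against
 * a target type [a:ta -> b:tb -> Tot tr], abs_update_typ_or_comp (below) pushes
 * the two binders and leaves num_unflushed = 2 without touching the type;
 * flush_typ_or_comp later strips the two arrows and performs the substitution
 * a := x, b := y in [tr] in a single pass. *)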
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
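(* Example (illustrative, with hypothetical names): if [my_fun_t] is a top-level
 * abbreviation for [nat -> Tot nat], then [unfold_until_arrow e (`my_fun_t)]
 * unfolds the abbreviation and returns the underlying arrow type; on a type
 * that hides no arrow (e.g. [`nat]) the call eventually fails through mfail. *)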
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
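(* Example (illustrative, hypothetical names): for a computation
 * [Tot (x:a -> y:b -> Tot (p x y))], the call [inst_comp e c [t1; t2]] peels
 * one arrow per list element and returns [Tot (p t1 t2)]. *)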
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists of binders:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
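(* Trace sketch (illustrative): with remaining binders [x; y] and a computation
 * returning [a:ta -> b:tb -> Tot tr], the loop strips both arrows while
 * accumulating the pending substitution [(a, x); (b, y)], and only then calls
 * apply_subst_in_comp once on [tr]. *)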
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaning binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None
/// Exploring a term
(*** Term exploration *)
/// Explore a term, correctly updating the environment when traversing abstractions
let convert_ctrl_flag (flag : ctrl_flag) =
match flag with
| Continue -> Continue
| Skip -> Continue
| Abort -> Abort
/// TODO: for now I need to use universe 0 for type a because otherwise it doesn't
/// type check
/// ctrl_flag:
/// - Continue: continue exploring the term
/// - Skip: don't explore the sub-terms of this term
/// - Abort: stop exploration
/// TODO: we might want a more precise control (like: don't explore the type of the
/// ascription but explore its body)
/// Note that ``explore_term`` doesn't use the environment parameter besides pushing
/// binders and passing it to ``f``, which means that you can give it arbitrary
/// environments: ``explore_term`` itself won't fail (but the passed function might).
let explorer (a : Type) =
a -> genv -> list (genv & term_view) -> option typ_or_comp -> term_view ->
Tac (a & ctrl_flag)
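(* Minimal sketch of an explorer (hypothetical, not part of the original module):
 * it counts the sub-terms visited and never prunes the traversal; one would run
 * it as [fst (explore_term false false count_subterms 0 ge [] None t)] for a
 * suitable genv [ge]. *)
let count_subterms : explorer nat =
  fun count _ge _parents _opt_c _tv -> (count + 1, Continue)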
// TODO: use more
let bind_expl (#a : Type) (x : a) (f1 f2 : a -> Tac (a & ctrl_flag)) : Tac (a & ctrl_flag) =
let x1, flag1 = f1 x in
if flag1 = Continue then
f2 x1
else x1, convert_ctrl_flag flag1
// TODO: change the signature to move the dbg flag
val explore_term :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge : genv
(* the list of terms traversed so far (first is most recent) with the environment
* at the time they were traversed *)
-> parents : list (genv & term_view)
-> c : option typ_or_comp
-> t:term ->
Tac (a & ctrl_flag)
val explore_pattern :
dbg : bool
-> dfs : bool (* depth-first search *)
-> #a : Type0
-> f : explorer a
-> x : a
-> ge:genv
-> pat:pattern ->
Tac (genv & a & ctrl_flag)
(* TODO: carry around the list of encompassing terms *)
let rec explore_term dbg dfs #a f x ge0 pl0 c0 t0 =
print_dbg dbg ("[> explore_term: " ^ term_construct t0 ^ ":\n" ^ term_to_string t0);
let tv0 = inspect t0 in
let x0, flag = f x ge0 pl0 c0 tv0 in
let pl1 = (ge0, tv0) :: pl0 in
if flag = Continue then
begin match tv0 with
| Tv_Var _ | Tv_BVar _ | Tv_FVar _ -> x0, Continue
| Tv_App hd (a,qual) ->
(* Explore the argument - we update the target typ_or_comp when doing so.
* Note that the only way to get the correct target type is to deconstruct
* the type of the head *)
let a_c = safe_arg_typ_or_comp dbg ge0.env hd in
print_dbg dbg ("Tv_App: updated target typ_or_comp to:\n" ^
option_to_string typ_or_comp_to_string a_c);
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 a_c a in
(* Explore the head - no type information here: we can compute it,
* but it seems useless (or maybe use it only if it is not Total) *)
if flag1 = Continue then
explore_term dbg dfs f x1 ge0 pl1 None hd
else x1, convert_ctrl_flag flag1
| Tv_Abs br body ->
let ge1 = genv_push_binder ge0 br false None in
let c1 = abs_update_opt_typ_or_comp br c0 ge1.env in
explore_term dbg dfs f x0 ge1 pl1 c1 body
| Tv_Arrow br c0 -> x0, Continue (* TODO: we might want to explore that *)
| Tv_Type _ -> x0, Continue
| Tv_Refine bv sort ref ->
let bvv = inspect_bv bv in
let x1, flag1 = explore_term dbg dfs f x0 ge0 pl1 None sort in
if flag1 = Continue then
let ge1 = genv_push_bv ge0 bv sort false None in
explore_term dbg dfs f x1 ge1 pl1 None ref
else x1, convert_ctrl_flag flag1
| Tv_Const _ -> x0, Continue
| Tv_Uvar _ _ -> x0, Continue
| Tv_Let recf attrs bv ty def body ->
(* Binding definition exploration - for the target computation: initially we
       * used the type of the definition; however, it is often unnecessarily complex.
* Now, we use the type of the binder used for the binding. *)
let def_c = Some (TC_Typ ty [] 0) in
let explore_def x = explore_term dbg dfs f x ge0 pl1 def_c def in
(* Exploration of the following instructions *)
let ge1 = genv_push_bv ge0 bv ty false (Some def) in
let explore_next x = explore_term dbg dfs f x ge1 pl1 c0 body in
(* Perform the exploration in the proper order *)
let expl1, expl2 = if dfs then explore_next, explore_def else explore_def, explore_next in
bind_expl x0 expl1 expl2
| Tv_Match scrutinee _ret_opt branches -> //AR: TODO: need to account for returns annotation here
(* Auxiliary function to explore the branches *)
let explore_branch (x_flag : a & ctrl_flag) (br : branch) : Tac (a & ctrl_flag)=
let x0, flag = x_flag in
if flag = Continue then
let pat, branch_body = br in
(* Explore the pattern *)
let ge1, x1, flag1 = explore_pattern dbg dfs #a f x0 ge0 pat in
if flag1 = Continue then
(* Explore the branch body *)
explore_term dbg dfs #a f x1 ge1 pl1 c0 branch_body
else x1, convert_ctrl_flag flag1
(* Don't convert the flag *)
else x0, flag
in
(* Explore the scrutinee *)
let scrut_c = safe_typ_or_comp dbg ge0.env scrutinee in
let x1 = explore_term dbg dfs #a f x0 ge0 pl1 scrut_c scrutinee in
(* Explore the branches *)
fold_left explore_branch x1 branches
| Tv_AscribedT e ty tac _ ->
let c1 = Some (TC_Typ ty [] 0) in
let x1, flag = explore_term dbg dfs #a f x0 ge0 pl1 None ty in
if flag = Continue then
explore_term dbg dfs #a f x1 ge0 pl1 c1 e
else x1, convert_ctrl_flag flag
| Tv_AscribedC e c1 tac _ ->
(* TODO: explore the comp *)
explore_term dbg dfs #a f x0 ge0 pl1 (Some (TC_Comp c1 [] 0)) e
| _ ->
(* Unknown *)
x0, Continue
end
else x0, convert_ctrl_flag flag
and explore_pattern dbg dfs #a f x ge0 pat =
print_dbg dbg ("[> explore_pattern:");
match pat with
| Pat_Constant _ -> ge0, x, Continue
| Pat_Cons fv us patterns ->
let explore_pat ge_x_flag pat =
let ge0, x, flag = ge_x_flag in
let pat1, _ = pat in
if flag = Continue then
explore_pattern dbg dfs #a f x ge0 pat1
else
(* Don't convert the flag *)
ge0, x, flag
in
fold_left explore_pat (ge0, x, Continue) patterns
| Pat_Var bv st ->
let ge1 = genv_push_bv ge0 bv (unseal st) false None in
ge1, x, Continue
| Pat_Dot_Term _ -> ge0, x, Continue
(*** Variables in a term *)
/// Returns the list of free variables contained in a term | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val free_in : term -> Tac (list bv) | [] | FStar.InteractiveHelpers.ExploreTerm.free_in | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | t: FStar.Reflection.Types.term -> FStar.Tactics.Effect.Tac (Prims.list FStar.Reflection.Types.bv) | {
"end_col": 75,
"end_line": 593,
"start_col": 15,
"start_line": 572
} |
FStar.Tactics.Effect.Tac | val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)}) | [
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers.Base",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Tactics",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.List",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.InteractiveHelpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let safe_arg_typ_or_comp dbg e hd =
print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty) then
begin
print_dbg dbg "no need to unfold the type";
ty
end
else
begin
print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : "^ term_to_string ty);
ty
end
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None | val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)})
let safe_arg_typ_or_comp dbg e hd = | true | null | false | print_dbg dbg ("safe_arg_typ_or_comp: " ^ term_to_string hd);
match safe_tc e hd with
| None -> None
| Some ty ->
print_dbg dbg ("hd type: " ^ term_to_string ty);
let ty =
if Tv_Arrow? (inspect ty)
then
(print_dbg dbg "no need to unfold the type";
ty)
else
(print_dbg dbg "need to unfold the type";
let ty = unfold_until_arrow e ty in
print_dbg dbg ("result of unfolding : " ^ term_to_string ty);
ty)
in
match inspect ty with
| Tv_Arrow b c -> Some (TC_Typ (type_of_binder b) [] 0)
| _ -> None | {
"checked_file": "FStar.InteractiveHelpers.ExploreTerm.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Tactics.fst.checked",
"FStar.Reflection.Const.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.List.Tot.fst.checked",
"FStar.List.fst.checked",
"FStar.InteractiveHelpers.Base.fst.checked"
],
"interface_file": false,
"source_file": "FStar.InteractiveHelpers.ExploreTerm.fst"
} | [] | [
"Prims.bool",
"FStar.Reflection.Types.env",
"FStar.Reflection.Types.term",
"FStar.Pervasives.Native.None",
"FStar.InteractiveHelpers.ExploreTerm.typ_or_comp",
"FStar.Pervasives.Native.option",
"Prims.l_imp",
"Prims.b2t",
"FStar.Pervasives.Native.uu___is_Some",
"FStar.InteractiveHelpers.ExploreTerm.uu___is_TC_Typ",
"FStar.Pervasives.Native.__proj__Some__item__v",
"FStar.Reflection.Types.binder",
"FStar.Reflection.Types.comp",
"FStar.Pervasives.Native.Some",
"FStar.InteractiveHelpers.ExploreTerm.TC_Typ",
"FStar.Reflection.V1.Derived.type_of_binder",
"Prims.Nil",
"FStar.Reflection.V1.Data.term_view",
"FStar.Tactics.V1.Builtins.inspect",
"Prims.unit",
"FStar.InteractiveHelpers.Base.print_dbg",
"Prims.string",
"Prims.op_Hat",
"FStar.Tactics.V1.Builtins.term_to_string",
"FStar.Reflection.Types.typ",
"FStar.InteractiveHelpers.ExploreTerm.unfold_until_arrow",
"FStar.Reflection.V1.Data.uu___is_Tv_Arrow",
"FStar.InteractiveHelpers.ExploreTerm.safe_tc"
] | [] | module FStar.InteractiveHelpers.ExploreTerm
open FStar.List
open FStar.Tactics
open FStar.Mul
open FStar.InteractiveHelpers.Base
#push-options "--z3rlimit 15 --fuel 0 --ifuel 1"
(*** Types and effects *)
/// Define utilities to handle and carry types and effects
(**** Type analysis *)
/// Retrieve and deconstruct a type/effect
/// Some constants
//let prims_true_qn = "Prims.l_True"
//let prims_true_term = `Prims.l_True
let pure_effect_qn = "Prims.PURE"
let pure_hoare_effect_qn = "Prims.Pure"
let stack_effect_qn = "FStar.HyperStack.ST.Stack"
let st_effect_qn = "FStar.HyperStack.ST.ST"
/// Return the qualifier of a comp as a string
val comp_qualifier (c : comp) : Tac string
#push-options "--ifuel 1"
let comp_qualifier (c : comp) : Tac string =
match inspect_comp c with
| C_Total _ -> "C_Total"
| C_GTotal _ -> "C_GTotal"
| C_Lemma _ _ _ -> "C_Lemma"
| C_Eff _ _ _ _ _ -> "C_Eff"
#pop-options
/// Effect information: we list the current supported effects
type effect_type =
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure | E_Stack | E_ST | E_Unknown
val effect_type_to_string : effect_type -> string
#push-options "--ifuel 1"
let effect_type_to_string ety =
match ety with
| E_Total -> "E_Total"
| E_GTotal -> "E_GTotal"
| E_Lemma -> "E_Lemma"
| E_PURE -> "E_PURE"
| E_Pure -> "E_Pure"
| E_Stack -> "E_Stack"
| E_ST -> "E_ST"
| E_Unknown -> "E_Unknown"
#pop-options
val effect_name_to_type (ename : name) : Tot effect_type
let effect_name_to_type (ename : name) : Tot effect_type =
let ename = flatten_name ename in
if ename = pure_effect_qn then E_PURE
else if ename = pure_hoare_effect_qn then E_Pure
else if ename = stack_effect_qn then E_Stack
else if ename = st_effect_qn then E_ST
else E_Unknown
val effect_type_is_pure : effect_type -> Tot bool
let effect_type_is_pure etype =
match etype with
| E_Total | E_GTotal | E_Lemma | E_PURE | E_Pure -> true
| E_Stack | E_ST | E_Unknown -> false
/// Type information
noeq type type_info = {
ty : typ; (* the type without refinement *)
refin : option term;
}
let mk_type_info = Mktype_info
val type_info_to_string : type_info -> Tac string
let type_info_to_string info =
"Mktype_info (" ^
term_to_string info.ty ^ ") (" ^
option_to_string term_to_string info.refin ^ ")"
let unit_type_info = mk_type_info (`unit) None
val safe_tc (e:env) (t:term) : Tac (option term)
let safe_tc e t =
try Some (tc e t) with | _ -> None
val safe_tcc (e:env) (t:term) : Tac (option comp)
let safe_tcc e t =
try Some (tcc e t) with | _ -> None
let get_type_info_from_type (ty:typ) : Tac type_info =
match inspect ty with
| Tv_Refine bv sort refin ->
let raw_type = prettify_term false sort in
let b : binder = mk_binder bv sort in
let refin = prettify_term false refin in
let refin = pack (Tv_Abs b refin) in
mk_type_info raw_type (Some refin)
| _ ->
let ty = prettify_term false ty in
mk_type_info ty None
#push-options "--ifuel 1"
let get_type_info (e:env) (t:term) : Tac (option type_info) =
match safe_tc e t with
| None -> None
| Some ty -> Some (get_type_info_from_type ty)
#pop-options
val get_total_or_gtotal_ret_type : comp -> Tot (option typ)
let get_total_or_gtotal_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty -> Some ret_ty
| _ -> None
val get_comp_ret_type : comp -> Tot typ
let get_comp_ret_type c =
match inspect_comp c with
| C_Total ret_ty | C_GTotal ret_ty
| C_Eff _ _ ret_ty _ _ -> ret_ty
| C_Lemma _ _ _ -> (`unit)
val is_total_or_gtotal : comp -> Tot bool
let is_total_or_gtotal c =
Some? (get_total_or_gtotal_ret_type c)
val is_unit_type : typ -> Tac bool
let is_unit_type ty =
match inspect ty with
| Tv_FVar fv -> fv_eq_name fv Reflection.Const.unit_lid
| _ -> false
(**** typ_or_comp *)
/// This type is used to store typing information.
/// We use it mostly to track what the target type/computation is for a term,
/// while exploring this term. It is especially useful to generate post-conditions,
/// for example. We store the list of abstractions encountered so far at the
/// same time.
/// Note that in order to keep track of the type correctly, whenever we encounter
/// an abstraction in the term, we need to check that the term's type is an arrow,
/// in which case we need to do a substitution (the arrow takes as first parameter
/// a binder which is not the same as the abstraction's binder). As the substitution is costly
/// (we do it by using the normalizer, but the "final" return term is the whole
/// function's body type, which is often super big) we do it lazily: we count how
/// many parameters we have encountered and not substituted, and "flush" when we
/// really need to inspect the typ_or_comp.
// TODO: actually we only need to carry a comp (if typ: consider it total)
(* TODO: remove the instantiation: instantiate incrementally *)
noeq type typ_or_comp =
| TC_Typ : v:typ -> pl:list binder -> num_unflushed:nat -> typ_or_comp
| TC_Comp : v:comp -> pl:list binder -> num_unflushed:nat -> typ_or_comp
let typ_or_comp_to_string (tyc : typ_or_comp) : Tac string =
match tyc with
| TC_Typ v pl num_unflushed ->
"TC_Typ (" ^ term_to_string v ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
| TC_Comp c pl num_unflushed ->
"TC_Comp (" ^ acomp_to_string c ^ ") " ^ list_to_string (fun b -> name_of_binder b) pl ^
" " ^ string_of_int num_unflushed
/// Return the list of parameters stored in a ``typ_or_comp``
let params_of_typ_or_comp (c : typ_or_comp) : list binder =
match c with
| TC_Typ _ pl _ | TC_Comp _ pl _ -> pl
let num_unflushed_of_typ_or_comp (c : typ_or_comp) : nat =
match c with
| TC_Typ _ _ n | TC_Comp _ _ n -> n
/// Compute a ``typ_or_comp`` from the type of a term
// TODO: try to get a more precise comp
val safe_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Comp? (Some?.v opt)})
let safe_typ_or_comp dbg e t =
match safe_tcc e t with
| None ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: None");
None
| Some c ->
print_dbg dbg ("[> safe_typ_or_comp:" ^
"\n-term: " ^ term_to_string t ^
"\n-comp: " ^ acomp_to_string c);
Some (TC_Comp c [] 0)
val subst_bv_in_comp : env -> bv -> typ -> term -> comp -> Tac comp
let subst_bv_in_comp e b sort t c =
apply_subst_in_comp e c [((b, sort), t)]
val subst_binder_in_comp : env -> binder -> term -> comp -> Tac comp
let subst_binder_in_comp e b t c =
subst_bv_in_comp e (bv_of_binder b) (binder_sort b) t c
/// Utility for computations: unfold a type until it is of the form Tv_Arrow _ _,
/// fail otherwise
val unfold_until_arrow : env -> typ -> Tac typ
let rec unfold_until_arrow e ty0 =
if Tv_Arrow? (inspect ty0) then ty0
else
begin
(* Start by normalizing the term - note that this operation is expensive *)
let ty = norm_term_env e [] ty0 in
(* Helper to unfold top-level identifiers *)
let unfold_fv (fv : fv) : Tac term =
let ty = pack (Tv_FVar fv) in
let fvn = flatten_name (inspect_fv fv) in
(* unfold the top level binding, check that it has changed, and recurse *)
let ty' = norm_term_env e [delta_only [fvn]] ty in
(* I'm not confident about using eq_term here *)
begin match inspect ty' with
| Tv_FVar fv' ->
if flatten_name (inspect_fv fv') = fvn
then mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0) else ty'
| _ -> ty'
end
in
(* Inspect *)
match inspect ty with
| Tv_Arrow _ _ -> ty
| Tv_FVar fv ->
(* Try to unfold the top-level identifier and recurse *)
let ty' = unfold_fv fv in
unfold_until_arrow e ty'
| Tv_App _ _ ->
(* Strip all the parameters, try to unfold the head and recurse *)
let hd, args = collect_app ty in
begin match inspect hd with
| Tv_FVar fv ->
let hd' = unfold_fv fv in
let ty' = mk_app hd' args in
unfold_until_arrow e ty'
| _ -> mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
| Tv_Refine bv sort ref ->
unfold_until_arrow e sort
| Tv_AscribedT body _ _ _
| Tv_AscribedC body _ _ _ ->
unfold_until_arrow e body
| _ ->
(* Other situations: don't know what to do *)
mfail ("unfold_until_arrow: could not unfold: " ^ term_to_string ty0)
end
/// Instantiate a comp
val inst_comp_once : env -> comp -> term -> Tac comp
let inst_comp_once e c t =
let ty = get_comp_ret_type c in
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
subst_binder_in_comp e b1 t c1
| _ -> (* Inconsistent state *)
mfail "inst_comp_once: inconsistent state"
end
val inst_comp : env -> comp -> list term -> Tac comp
let rec inst_comp e c tl =
match tl with
| [] -> c
| t :: tl' ->
let c' = try inst_comp_once e c t
with | MetaAnalysis msg -> mfail ("inst_comp: error: " ^ msg)
| err -> raise err
in
inst_comp e c' tl'
/// Update the current ``typ_or_comp`` before going into the body of an abstraction.
/// Explanations:
/// In the case we dive into a term of the form:
/// [> (fun x -> body) : y:ty -> body_type
/// we need to substitute y with x in body_type to get the proper type for body.
/// Note that we checked, and in practice the binders are indeed different.
// TODO: actually, we updated it to do a lazy instantiation
val abs_update_typ_or_comp : binder -> typ_or_comp -> env -> Tac typ_or_comp
let _abs_update_typ (b:binder) (ty:typ) (pl:list binder) (e:env) :
Tac typ_or_comp =
(* Try to reveal an arrow *)
try
let ty' = unfold_until_arrow e ty in
begin match inspect ty' with
| Tv_Arrow b1 c1 ->
let c1' = subst_binder_in_comp e b1 (pack (Tv_Var (bv_of_binder b))) c1 in
TC_Comp c1' (b :: pl) 0
| _ -> (* Inconsistent state *)
mfail "_abs_update_typ: inconsistent state"
end
with
| MetaAnalysis msg ->
mfail ("_abs_update_typ: could not find an arrow in: " ^ term_to_string ty ^ ":\n" ^ msg)
| err -> raise err
let abs_update_typ_or_comp (b:binder) (c : typ_or_comp) (e:env) : Tac typ_or_comp =
match c with
(*| TC_Typ v pl n -> _abs_update_typ b v pl e
| TC_Comp v pl n ->
(* Note that the computation is not necessarily pure, in which case we might
* want to do something with the effect arguments (pre, post...) - for
* now we just ignore them *)
let ty = get_comp_ret_type v in
_abs_update_typ b ty pl e *)
| TC_Typ v pl n -> TC_Typ v (b::pl) (n+1)
| TC_Comp v pl n -> TC_Comp v (b::pl) (n+1)
val abs_update_opt_typ_or_comp : binder -> option typ_or_comp -> env ->
Tac (option typ_or_comp)
let abs_update_opt_typ_or_comp b opt_c e =
match opt_c with
| None -> None
| Some c ->
try
let c = abs_update_typ_or_comp b c e in
Some c
with | MetaAnalysis msg -> None
| err -> raise err
/// Flush the instantiation stored in a ``typ_or_comp``
val flush_typ_or_comp : bool -> env -> typ_or_comp ->
Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0})
/// Strip all the arrows we can without doing any instantiation. When we can't
/// strip arrows anymore, do the instantiation at once.
/// We keep track of two lists of binders:
/// - the remaining binders
/// - the instantiation corresponding to the arrows we have stripped so far, and
/// which will be applied all at once
let rec _flush_typ_or_comp_comp (dbg : bool) (e:env) (rem : list binder) (inst : list ((bv & typ) & term))
(c:comp) : Tac comp =
let flush c inst =
let inst = List.rev inst in
apply_subst_in_comp e c inst
in
match rem with
| [] ->
(* No more binders: flush *)
flush c inst
| b :: rem' ->
(* Check if the return type is an arrow, if not flush and normalize *)
let ty = get_comp_ret_type c in
let ty, inst' =
if Tv_Arrow? (inspect ty) then ty, inst
else get_comp_ret_type (flush c inst), []
in
match inspect ty with
| Tv_Arrow b' c' ->
_flush_typ_or_comp_comp dbg e rem' (((bv_of_binder b', binder_sort b'), pack (Tv_Var (bv_of_binder b)))::inst) c'
| _ ->
mfail ("_flush_typ_or_comp: inconsistent state" ^
"\n-comp: " ^ acomp_to_string c ^
"\n-remaning binders: " ^ list_to_string (fun b -> name_of_binder b) rem)
let flush_typ_or_comp dbg e tyc =
let flush_comp pl n c : Tac (tyc:typ_or_comp{num_unflushed_of_typ_or_comp tyc = 0}) =
let pl', _ = List.Tot.splitAt n pl in
let pl' = List.rev pl' in
let c = _flush_typ_or_comp_comp dbg e pl' [] c in
TC_Comp c pl 0
in
try begin match tyc with
| TC_Typ ty pl n ->
let c = pack_comp (C_Total ty) in
flush_comp pl n c
| TC_Comp c pl n -> flush_comp pl n c
end
with | MetaAnalysis msg ->
mfail ("flush_typ_or_comp failed on: " ^ typ_or_comp_to_string tyc ^ ":\n" ^ msg)
| err -> raise err
/// Compute the target ``typ_or_comp`` for an argument by the type of the head:
/// in `hd a`, if `hd` has type `t -> ...`, use `t`
val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)}) | false | false | FStar.InteractiveHelpers.ExploreTerm.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 0,
"initial_ifuel": 1,
"max_fuel": 0,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [],
"z3refresh": false,
"z3rlimit": 15,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val safe_arg_typ_or_comp : bool -> env -> term ->
Tac (opt:option typ_or_comp{Some? opt ==> TC_Typ? (Some?.v opt)}) | [] | FStar.InteractiveHelpers.ExploreTerm.safe_arg_typ_or_comp | {
"file_name": "ulib/experimental/FStar.InteractiveHelpers.ExploreTerm.fst",
"git_rev": "f4cbb7a38d67eeb13fbdb2f4fb8a44a65cbcdc1f",
"git_url": "https://github.com/FStarLang/FStar.git",
"project_name": "FStar"
} | dbg: Prims.bool -> e: FStar.Reflection.Types.env -> hd: FStar.Reflection.Types.term
-> FStar.Tactics.Effect.Tac
(opt:
FStar.Pervasives.Native.option FStar.InteractiveHelpers.ExploreTerm.typ_or_comp
{Some? opt ==> TC_Typ? (Some?.v opt)}) | {
"end_col": 15,
"end_line": 404,
"start_col": 2,
"start_line": 383
} |
Prims.Tot | val backend_flag : EverParse3d.Actions.Base.backend_flag_t | [
{
"abbrev": false,
"full_module": "EverParse3d.Actions",
"short_module": null
},
{
"abbrev": false,
"full_module": "EverParse3d.Actions",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let backend_flag = EverParse3d.Actions.Base.BackendFlagExtern | val backend_flag : EverParse3d.Actions.Base.backend_flag_t
let backend_flag = | false | null | false | EverParse3d.Actions.Base.BackendFlagExtern | {
"checked_file": "EverParse3d.Actions.BackendFlag.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.fsti.checked",
"EverParse3d.Actions.Base.fsti.checked"
],
"interface_file": true,
"source_file": "EverParse3d.Actions.BackendFlag.fst"
} | [
"total"
] | [
"EverParse3d.Actions.Base.BackendFlagExtern"
] | [] | false | true | EverParse3d.Actions.BackendFlag.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 2,
"max_fuel": 0,
"max_ifuel": 2,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": false,
"smtencoding_l_arith_repr": "boxwrap",
"smtencoding_nl_arith_repr": "boxwrap",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": true,
"z3cliopt": [
"smt.qi.eager_threshold=100"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 8,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val backend_flag : EverParse3d.Actions.Base.backend_flag_t | [] | EverParse3d.Actions.BackendFlag.backend_flag | {
"file_name": "src/3d/prelude/extern/EverParse3d.Actions.BackendFlag.fst",
"git_rev": "446a08ce38df905547cf20f28c43776b22b8087a",
"git_url": "https://github.com/project-everest/everparse.git",
"project_name": "everparse"
} | EverParse3d.Actions.Base.backend_flag_t | {
"end_col": 61,
"end_line": 2,
"start_col": 19,
"start_line": 2
} |
|
Prims.Tot | val print (name: string) (oprs: list instr_print_operand) : instr_print | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let print (name:string) (oprs:list instr_print_operand) : instr_print = Print name POpcode oprs | val print (name: string) (oprs: list instr_print_operand) : instr_print
let print (name: string) (oprs: list instr_print_operand) : instr_print = | false | null | false | Print name POpcode oprs | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.string",
"Prims.list",
"Vale.X64.Instruction_s.instr_print_operand",
"Vale.X64.Instruction_s.Print",
"Vale.X64.Instruction_s.POpcode",
"Vale.X64.Instruction_s.instr_print"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr]
let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args
[@instr_attr]
let rec instr_print_t (outs:list instr_out) (args:list instr_operand) : Type0 =
match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i)::outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _)::outs -> instr_print_t outs args
noeq type instr_t (outs:list instr_out) (args:list instr_operand) (havoc_flags:flag_havoc) = {
i_eval:instr_eval_t outs args;
i_printer:instr_print_t outs args;
// havoc_flags isn't used here, but we still need it in the type to track the semantics of each instr_t
}
let instr_eval #_ #_ #_ ins = ins.i_eval
let rec instr_printer_args
(args:list instr_operand)
(f:instr_print_t_args args) (oprs:instr_operands_t_args args)
: instr_print =
match args with
| [] -> f
| i::args ->
(
match i with
| IOpEx i ->
// REVIEW: triggers F* -> OCaml bug: let f:arrow (instr_operand_t i) (instr_print_t_args args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_args args
(coerce #(arrow (instr_operand_t i) (instr_print_t_args args)) #(instr_print_t_args ((IOpEx i)::args)) f o)
oprs
| IOpIm _ -> instr_printer_args args (coerce f) (coerce #(instr_operands_t_args args) oprs)
)
let rec instr_printer_outs
(outs:list instr_out) (args:list instr_operand)
(f:instr_print_t outs args) (oprs:instr_operands_t outs args)
: instr_print =
match outs with
| [] -> instr_printer_args args f oprs
// | (_, i)::outs ->
| (b, i)::outs ->
(
match i with
| IOpEx i ->
// let f:arrow (instr_operand_t i) (instr_print_t outs args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_outs outs args
(coerce #(arrow (instr_operand_t i) (instr_print_t outs args)) #(instr_print_t ((b, (IOpEx i))::outs) args) f o)
oprs
| IOpIm _ -> instr_printer_outs outs args (coerce f) (coerce #(instr_operands_t outs args) oprs)
)
let instr_printer #outs #args #_ ins oprs =
instr_printer_outs outs args ins.i_printer oprs
let make_ins
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(#f:normal (instr_eval_t outs args))
(print:normal (instr_print_t outs args))
: instr_dep outs args havoc_flags f =
{i_printer = print; i_eval = f} | false | true | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val print (name: string) (oprs: list instr_print_operand) : instr_print | [] | Vale.X64.Instruction_s.print | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | name: Prims.string -> oprs: Prims.list Vale.X64.Instruction_s.instr_print_operand
-> Vale.X64.Instruction_s.instr_print | {
"end_col": 95,
"end_line": 72,
"start_col": 72,
"start_line": 72
} |
Prims.Tot | val print_s (name: string) (oprs: list instr_print_operand) : instr_print | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let print_s (name:string) (oprs:list instr_print_operand) : instr_print = Print name PSuffix oprs | val print_s (name: string) (oprs: list instr_print_operand) : instr_print
let print_s (name: string) (oprs: list instr_print_operand) : instr_print = | false | null | false | Print name PSuffix oprs | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.string",
"Prims.list",
"Vale.X64.Instruction_s.instr_print_operand",
"Vale.X64.Instruction_s.Print",
"Vale.X64.Instruction_s.PSuffix",
"Vale.X64.Instruction_s.instr_print"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr]
let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args
[@instr_attr]
let rec instr_print_t (outs:list instr_out) (args:list instr_operand) : Type0 =
match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i)::outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _)::outs -> instr_print_t outs args
noeq type instr_t (outs:list instr_out) (args:list instr_operand) (havoc_flags:flag_havoc) = {
i_eval:instr_eval_t outs args;
i_printer:instr_print_t outs args;
// havoc_flags isn't used here, but we still need it in the type to track the semantics of each instr_t
}
let instr_eval #_ #_ #_ ins = ins.i_eval
let rec instr_printer_args
(args:list instr_operand)
(f:instr_print_t_args args) (oprs:instr_operands_t_args args)
: instr_print =
match args with
| [] -> f
| i::args ->
(
match i with
| IOpEx i ->
// REVIEW: triggers F* -> OCaml bug: let f:arrow (instr_operand_t i) (instr_print_t_args args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_args args
(coerce #(arrow (instr_operand_t i) (instr_print_t_args args)) #(instr_print_t_args ((IOpEx i)::args)) f o)
oprs
| IOpIm _ -> instr_printer_args args (coerce f) (coerce #(instr_operands_t_args args) oprs)
)
let rec instr_printer_outs
(outs:list instr_out) (args:list instr_operand)
(f:instr_print_t outs args) (oprs:instr_operands_t outs args)
: instr_print =
match outs with
| [] -> instr_printer_args args f oprs
// | (_, i)::outs ->
| (b, i)::outs ->
(
match i with
| IOpEx i ->
// let f:arrow (instr_operand_t i) (instr_print_t outs args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_outs outs args
(coerce #(arrow (instr_operand_t i) (instr_print_t outs args)) #(instr_print_t ((b, (IOpEx i))::outs) args) f o)
oprs
| IOpIm _ -> instr_printer_outs outs args (coerce f) (coerce #(instr_operands_t outs args) oprs)
)
let instr_printer #outs #args #_ ins oprs =
instr_printer_outs outs args ins.i_printer oprs
let make_ins
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(#f:normal (instr_eval_t outs args))
(print:normal (instr_print_t outs args))
: instr_dep outs args havoc_flags f =
{i_printer = print; i_eval = f} | false | true | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val print_s (name: string) (oprs: list instr_print_operand) : instr_print | [] | Vale.X64.Instruction_s.print_s | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | name: Prims.string -> oprs: Prims.list Vale.X64.Instruction_s.instr_print_operand
-> Vale.X64.Instruction_s.instr_print | {
"end_col": 97,
"end_line": 73,
"start_col": 74,
"start_line": 73
} |
Prims.Tot | val instr_eval
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(i:instr_t outs args havoc_flags)
// : normal (instr_eval_t outs args)
: norm [zeta; iota; delta_attr [`%instr_attr]] (instr_eval_t outs args) | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let instr_eval #_ #_ #_ ins = ins.i_eval | val instr_eval
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(i:instr_t outs args havoc_flags)
// : normal (instr_eval_t outs args)
: norm [zeta; iota; delta_attr [`%instr_attr]] (instr_eval_t outs args)
let instr_eval #_ #_ #_ ins = | false | null | false | ins.i_eval | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.list",
"Vale.X64.Instruction_s.instr_out",
"Vale.X64.Instruction_s.instr_operand",
"Vale.X64.Instruction_s.flag_havoc",
"Vale.X64.Instruction_s.instr_t",
"Vale.X64.Instruction_s.__proj__Mkinstr_t__item__i_eval",
"FStar.Pervasives.norm",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.zeta",
"FStar.Pervasives.iota",
"FStar.Pervasives.delta_attr",
"Prims.string",
"Prims.Nil",
"Vale.X64.Instruction_s.instr_eval_t"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr]
let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args
[@instr_attr]
let rec instr_print_t (outs:list instr_out) (args:list instr_operand) : Type0 =
match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i)::outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _)::outs -> instr_print_t outs args
noeq type instr_t (outs:list instr_out) (args:list instr_operand) (havoc_flags:flag_havoc) = {
i_eval:instr_eval_t outs args;
i_printer:instr_print_t outs args;
// havoc_flags isn't used here, but we still need it in the type to track the semantics of each instr_t
} | false | false | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val instr_eval
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(i:instr_t outs args havoc_flags)
// : normal (instr_eval_t outs args)
: norm [zeta; iota; delta_attr [`%instr_attr]] (instr_eval_t outs args) | [] | Vale.X64.Instruction_s.instr_eval | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | i: Vale.X64.Instruction_s.instr_t outs args havoc_flags
-> FStar.Pervasives.norm [
FStar.Pervasives.zeta;
FStar.Pervasives.iota;
FStar.Pervasives.delta_attr ["Vale.X64.Instruction_s.instr_attr"]
]
(Vale.X64.Instruction_s.instr_eval_t outs args) | {
"end_col": 40,
"end_line": 25,
"start_col": 30,
"start_line": 25
} |
Prims.Tot | val instr_printer
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(i:instr_t outs args havoc_flags) (oprs:normal (instr_operands_t outs args))
: instr_print | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let instr_printer #outs #args #_ ins oprs =
instr_printer_outs outs args ins.i_printer oprs | val instr_printer
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(i:instr_t outs args havoc_flags) (oprs:normal (instr_operands_t outs args))
: instr_print
let instr_printer #outs #args #_ ins oprs = | false | null | false | instr_printer_outs outs args ins.i_printer oprs | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.list",
"Vale.X64.Instruction_s.instr_out",
"Vale.X64.Instruction_s.instr_operand",
"Vale.X64.Instruction_s.flag_havoc",
"Vale.X64.Instruction_s.instr_t",
"Vale.X64.Instruction_s.normal",
"Vale.X64.Instruction_s.instr_operands_t",
"Vale.X64.Instruction_s.instr_printer_outs",
"Vale.X64.Instruction_s.__proj__Mkinstr_t__item__i_printer",
"Vale.X64.Instruction_s.instr_print"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr]
let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args
[@instr_attr]
let rec instr_print_t (outs:list instr_out) (args:list instr_operand) : Type0 =
match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i)::outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _)::outs -> instr_print_t outs args
noeq type instr_t (outs:list instr_out) (args:list instr_operand) (havoc_flags:flag_havoc) = {
i_eval:instr_eval_t outs args;
i_printer:instr_print_t outs args;
// havoc_flags isn't used here, but we still need it in the type to track the semantics of each instr_t
}
let instr_eval #_ #_ #_ ins = ins.i_eval
let rec instr_printer_args
(args:list instr_operand)
(f:instr_print_t_args args) (oprs:instr_operands_t_args args)
: instr_print =
match args with
| [] -> f
| i::args ->
(
match i with
| IOpEx i ->
// REVIEW: triggers F* -> OCaml bug: let f:arrow (instr_operand_t i) (instr_print_t_args args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_args args
(coerce #(arrow (instr_operand_t i) (instr_print_t_args args)) #(instr_print_t_args ((IOpEx i)::args)) f o)
oprs
| IOpIm _ -> instr_printer_args args (coerce f) (coerce #(instr_operands_t_args args) oprs)
)
let rec instr_printer_outs
(outs:list instr_out) (args:list instr_operand)
(f:instr_print_t outs args) (oprs:instr_operands_t outs args)
: instr_print =
match outs with
| [] -> instr_printer_args args f oprs
// | (_, i)::outs ->
| (b, i)::outs ->
(
match i with
| IOpEx i ->
// let f:arrow (instr_operand_t i) (instr_print_t outs args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_outs outs args
(coerce #(arrow (instr_operand_t i) (instr_print_t outs args)) #(instr_print_t ((b, (IOpEx i))::outs) args) f o)
oprs
| IOpIm _ -> instr_printer_outs outs args (coerce f) (coerce #(instr_operands_t outs args) oprs)
) | false | false | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val instr_printer
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(i:instr_t outs args havoc_flags) (oprs:normal (instr_operands_t outs args))
: instr_print | [] | Vale.X64.Instruction_s.instr_printer | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
i: Vale.X64.Instruction_s.instr_t outs args havoc_flags ->
oprs: Vale.X64.Instruction_s.normal (Vale.X64.Instruction_s.instr_operands_t outs args)
-> Vale.X64.Instruction_s.instr_print | {
"end_col": 49,
"end_line": 63,
"start_col": 2,
"start_line": 63
} |
Prims.Tot | val make_ins
(#outs: list instr_out)
(#args: list instr_operand)
(#havoc_flags: flag_havoc)
(#f: normal (instr_eval_t outs args))
(print: normal (instr_print_t outs args))
: instr_dep outs args havoc_flags f | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let make_ins
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(#f:normal (instr_eval_t outs args))
(print:normal (instr_print_t outs args))
: instr_dep outs args havoc_flags f =
{i_printer = print; i_eval = f} | val make_ins
(#outs: list instr_out)
(#args: list instr_operand)
(#havoc_flags: flag_havoc)
(#f: normal (instr_eval_t outs args))
(print: normal (instr_print_t outs args))
: instr_dep outs args havoc_flags f
let make_ins
(#outs: list instr_out)
(#args: list instr_operand)
(#havoc_flags: flag_havoc)
(#f: normal (instr_eval_t outs args))
(print: normal (instr_print_t outs args))
: instr_dep outs args havoc_flags f = | false | null | false | { i_printer = print; i_eval = f } | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.list",
"Vale.X64.Instruction_s.instr_out",
"Vale.X64.Instruction_s.instr_operand",
"Vale.X64.Instruction_s.flag_havoc",
"Vale.X64.Instruction_s.normal",
"Vale.X64.Instruction_s.instr_eval_t",
"Vale.X64.Instruction_s.instr_print_t",
"Vale.X64.Instruction_s.Mkinstr_t",
"Vale.X64.Instruction_s.instr_dep"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr]
let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args
[@instr_attr]
let rec instr_print_t (outs:list instr_out) (args:list instr_operand) : Type0 =
match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i)::outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _)::outs -> instr_print_t outs args
noeq type instr_t (outs:list instr_out) (args:list instr_operand) (havoc_flags:flag_havoc) = {
i_eval:instr_eval_t outs args;
i_printer:instr_print_t outs args;
// havoc_flags isn't used here, but we still need it in the type to track the semantics of each instr_t
}
let instr_eval #_ #_ #_ ins = ins.i_eval
let rec instr_printer_args
(args:list instr_operand)
(f:instr_print_t_args args) (oprs:instr_operands_t_args args)
: instr_print =
match args with
| [] -> f
| i::args ->
(
match i with
| IOpEx i ->
// REVIEW: triggers F* -> OCaml bug: let f:arrow (instr_operand_t i) (instr_print_t_args args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_args args
(coerce #(arrow (instr_operand_t i) (instr_print_t_args args)) #(instr_print_t_args ((IOpEx i)::args)) f o)
oprs
| IOpIm _ -> instr_printer_args args (coerce f) (coerce #(instr_operands_t_args args) oprs)
)
let rec instr_printer_outs
(outs:list instr_out) (args:list instr_operand)
(f:instr_print_t outs args) (oprs:instr_operands_t outs args)
: instr_print =
match outs with
| [] -> instr_printer_args args f oprs
// | (_, i)::outs ->
| (b, i)::outs ->
(
match i with
| IOpEx i ->
// let f:arrow (instr_operand_t i) (instr_print_t outs args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_outs outs args
(coerce #(arrow (instr_operand_t i) (instr_print_t outs args)) #(instr_print_t ((b, (IOpEx i))::outs) args) f o)
oprs
| IOpIm _ -> instr_printer_outs outs args (coerce f) (coerce #(instr_operands_t outs args) oprs)
)
let instr_printer #outs #args #_ ins oprs =
instr_printer_outs outs args ins.i_printer oprs
let make_ins
(#outs:list instr_out) (#args:list instr_operand) (#havoc_flags:flag_havoc)
(#f:normal (instr_eval_t outs args))
(print:normal (instr_print_t outs args)) | false | false | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val make_ins
(#outs: list instr_out)
(#args: list instr_operand)
(#havoc_flags: flag_havoc)
(#f: normal (instr_eval_t outs args))
(print: normal (instr_print_t outs args))
: instr_dep outs args havoc_flags f | [] | Vale.X64.Instruction_s.make_ins | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | print: Vale.X64.Instruction_s.normal (Vale.X64.Instruction_s.instr_print_t outs args)
-> Vale.X64.Instruction_s.instr_dep outs args havoc_flags f | {
"end_col": 32,
"end_line": 70,
"start_col": 3,
"start_line": 70
} |
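Note on the row above: make_ins is how concrete instructions are assembled downstream; the evaluator f is an implicit argument resolved from the expected instr_dep type, so only the printer is passed explicitly. A hypothetical usage sketch follows; the names eval_Example, out, op64, PreserveFlags, print and P64 are assumptions (in the style of this module's interface and its Vale.X64.Instructions_s clients), not taken from this record:

module Example.Instr
open Vale.X64.Instruction_s

// Assumed evaluator; its concrete arrow type is computed by instr_eval_t (sketch).
val eval_Example : instr_eval_t [out op64] [op64]
// Declaring the instr_dep type fixes the implicit #eval_Example expected by make_ins.
val ins_Example : instr_dep [out op64] [op64] PreserveFlags eval_Example
// Only the printer is supplied; dst and src are the explicit 64-bit operands.
let ins_Example = make_ins (fun dst src -> print "example" [P64 dst; P64 src])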
Prims.Tot | val instr_print_t (outs: list instr_out) (args: list instr_operand) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec instr_print_t (outs:list instr_out) (args:list instr_operand) : Type0 =
match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i)::outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _)::outs -> instr_print_t outs args | val instr_print_t (outs: list instr_out) (args: list instr_operand) : Type0
let rec instr_print_t (outs: list instr_out) (args: list instr_operand) : Type0 = | false | null | false | match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i) :: outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _) :: outs -> instr_print_t outs args | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.list",
"Vale.X64.Instruction_s.instr_out",
"Vale.X64.Instruction_s.instr_operand",
"Vale.X64.Instruction_s.instr_print_t_args",
"Vale.X64.Instruction_s.instr_operand_inout",
"Vale.X64.Instruction_s.instr_operand_explicit",
"Vale.X64.Instruction_s.arrow",
"Vale.X64.Instruction_s.instr_operand_t",
"Vale.X64.Instruction_s.instr_print_t",
"Vale.X64.Instruction_s.instr_operand_implicit"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr]
let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args
[@instr_attr] | false | true | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val instr_print_t (outs: list instr_out) (args: list instr_operand) : Type0 | [
"recursion"
] | Vale.X64.Instruction_s.instr_print_t | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
outs: Prims.list Vale.X64.Instruction_s.instr_out ->
args: Prims.list Vale.X64.Instruction_s.instr_operand
-> Type0 | {
"end_col": 49,
"end_line": 17,
"start_col": 2,
"start_line": 14
} |
Prims.Tot | val instr_print_t_args (args: list instr_operand) : Type0 | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args | val instr_print_t_args (args: list instr_operand) : Type0
let rec instr_print_t_args (args: list instr_operand) : Type0 = | false | null | false | match args with
| [] -> instr_print
| IOpEx i :: args -> arrow (instr_operand_t i) (instr_print_t_args args)
| IOpIm _ :: args -> instr_print_t_args args | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.list",
"Vale.X64.Instruction_s.instr_operand",
"Vale.X64.Instruction_s.instr_print",
"Vale.X64.Instruction_s.instr_operand_explicit",
"Vale.X64.Instruction_s.arrow",
"Vale.X64.Instruction_s.instr_operand_t",
"Vale.X64.Instruction_s.instr_print_t_args",
"Vale.X64.Instruction_s.instr_operand_implicit"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr] | false | true | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val instr_print_t_args (args: list instr_operand) : Type0 | [
"recursion"
] | Vale.X64.Instruction_s.instr_print_t_args | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | args: Prims.list Vale.X64.Instruction_s.instr_operand -> Type0 | {
"end_col": 46,
"end_line": 10,
"start_col": 2,
"start_line": 7
} |
Prims.Tot | val instr_printer_args
(args: list instr_operand)
(f: instr_print_t_args args)
(oprs: instr_operands_t_args args)
: instr_print | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec instr_printer_args
(args:list instr_operand)
(f:instr_print_t_args args) (oprs:instr_operands_t_args args)
: instr_print =
match args with
| [] -> f
| i::args ->
(
match i with
| IOpEx i ->
// REVIEW: triggers F* -> OCaml bug: let f:arrow (instr_operand_t i) (instr_print_t_args args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_args args
(coerce #(arrow (instr_operand_t i) (instr_print_t_args args)) #(instr_print_t_args ((IOpEx i)::args)) f o)
oprs
| IOpIm _ -> instr_printer_args args (coerce f) (coerce #(instr_operands_t_args args) oprs)
) | val instr_printer_args
(args: list instr_operand)
(f: instr_print_t_args args)
(oprs: instr_operands_t_args args)
: instr_print
let rec instr_printer_args
(args: list instr_operand)
(f: instr_print_t_args args)
(oprs: instr_operands_t_args args)
: instr_print = | false | null | false | match args with
| [] -> f
| i :: args ->
(match i with
| IOpEx i ->
let o, oprs = coerce oprs in
instr_printer_args args
(coerce #(arrow (instr_operand_t i) (instr_print_t_args args))
#(instr_print_t_args ((IOpEx i) :: args))
f
o)
oprs
| IOpIm _ -> instr_printer_args args (coerce f) (coerce #(instr_operands_t_args args) oprs)) | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.list",
"Vale.X64.Instruction_s.instr_operand",
"Vale.X64.Instruction_s.instr_print_t_args",
"Vale.X64.Instruction_s.instr_operands_t_args",
"Vale.X64.Instruction_s.instr_operand_explicit",
"Vale.X64.Instruction_s.instr_operand_t",
"Vale.X64.Instruction_s.instr_printer_args",
"Vale.X64.Instruction_s.coerce",
"Vale.X64.Instruction_s.arrow",
"Prims.Cons",
"Vale.X64.Instruction_s.IOpEx",
"Vale.X64.Instruction_s.instr_print",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.Instruction_s.instr_operand_implicit"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr]
let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args
[@instr_attr]
let rec instr_print_t (outs:list instr_out) (args:list instr_operand) : Type0 =
match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i)::outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _)::outs -> instr_print_t outs args
noeq type instr_t (outs:list instr_out) (args:list instr_operand) (havoc_flags:flag_havoc) = {
i_eval:instr_eval_t outs args;
i_printer:instr_print_t outs args;
// havoc_flags isn't used here, but we still need it in the type to track the semantics of each instr_t
}
let instr_eval #_ #_ #_ ins = ins.i_eval
let rec instr_printer_args
(args:list instr_operand)
(f:instr_print_t_args args) (oprs:instr_operands_t_args args) | false | false | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val instr_printer_args
(args: list instr_operand)
(f: instr_print_t_args args)
(oprs: instr_operands_t_args args)
: instr_print | [
"recursion"
] | Vale.X64.Instruction_s.instr_printer_args | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
args: Prims.list Vale.X64.Instruction_s.instr_operand ->
f: Vale.X64.Instruction_s.instr_print_t_args args ->
oprs: Vale.X64.Instruction_s.instr_operands_t_args args
-> Vale.X64.Instruction_s.instr_print | {
"end_col": 5,
"end_line": 42,
"start_col": 2,
"start_line": 31
} |
Prims.Tot | val instr_printer_outs
(outs: list instr_out)
(args: list instr_operand)
(f: instr_print_t outs args)
(oprs: instr_operands_t outs args)
: instr_print | [
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let rec instr_printer_outs
(outs:list instr_out) (args:list instr_operand)
(f:instr_print_t outs args) (oprs:instr_operands_t outs args)
: instr_print =
match outs with
| [] -> instr_printer_args args f oprs
// | (_, i)::outs ->
| (b, i)::outs ->
(
match i with
| IOpEx i ->
// let f:arrow (instr_operand_t i) (instr_print_t outs args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_outs outs args
(coerce #(arrow (instr_operand_t i) (instr_print_t outs args)) #(instr_print_t ((b, (IOpEx i))::outs) args) f o)
oprs
| IOpIm _ -> instr_printer_outs outs args (coerce f) (coerce #(instr_operands_t outs args) oprs)
) | val instr_printer_outs
(outs: list instr_out)
(args: list instr_operand)
(f: instr_print_t outs args)
(oprs: instr_operands_t outs args)
: instr_print
let rec instr_printer_outs
(outs: list instr_out)
(args: list instr_operand)
(f: instr_print_t outs args)
(oprs: instr_operands_t outs args)
: instr_print = | false | null | false | match outs with
| [] -> instr_printer_args args f oprs
| (b, i) :: outs ->
(match i with
| IOpEx i ->
let o, oprs = coerce oprs in
instr_printer_outs outs
args
(coerce #(arrow (instr_operand_t i) (instr_print_t outs args))
#(instr_print_t ((b, (IOpEx i)) :: outs) args)
f
o)
oprs
| IOpIm _ -> instr_printer_outs outs args (coerce f) (coerce #(instr_operands_t outs args) oprs)
) | {
"checked_file": "Vale.X64.Instruction_s.fst.checked",
"dependencies": [
"prims.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked"
],
"interface_file": true,
"source_file": "Vale.X64.Instruction_s.fst"
} | [
"total"
] | [
"Prims.list",
"Vale.X64.Instruction_s.instr_out",
"Vale.X64.Instruction_s.instr_operand",
"Vale.X64.Instruction_s.instr_print_t",
"Vale.X64.Instruction_s.instr_operands_t",
"Vale.X64.Instruction_s.instr_printer_args",
"Vale.X64.Instruction_s.instr_operand_inout",
"Vale.X64.Instruction_s.instr_operand_explicit",
"Vale.X64.Instruction_s.instr_operand_t",
"Vale.X64.Instruction_s.instr_printer_outs",
"Vale.X64.Instruction_s.coerce",
"Vale.X64.Instruction_s.arrow",
"Prims.Cons",
"FStar.Pervasives.Native.Mktuple2",
"Vale.X64.Instruction_s.IOpEx",
"Vale.X64.Instruction_s.instr_print",
"FStar.Pervasives.Native.tuple2",
"Vale.X64.Instruction_s.instr_operand_implicit"
] | [] | module Vale.X64.Instruction_s
open FStar.Mul
// only trusted specification files should friend this module
[@instr_attr]
let rec instr_print_t_args (args:list instr_operand) : Type0 =
match args with
| [] -> instr_print
| (IOpEx i)::args -> arrow (instr_operand_t i) (instr_print_t_args args)
| (IOpIm _)::args -> instr_print_t_args args
[@instr_attr]
let rec instr_print_t (outs:list instr_out) (args:list instr_operand) : Type0 =
match outs with
| [] -> instr_print_t_args args
| (_, IOpEx i)::outs -> arrow (instr_operand_t i) (instr_print_t outs args)
| (_, IOpIm _)::outs -> instr_print_t outs args
noeq type instr_t (outs:list instr_out) (args:list instr_operand) (havoc_flags:flag_havoc) = {
i_eval:instr_eval_t outs args;
i_printer:instr_print_t outs args;
// havoc_flags isn't used here, but we still need it in the type to track the semantics of each instr_t
}
let instr_eval #_ #_ #_ ins = ins.i_eval
let rec instr_printer_args
(args:list instr_operand)
(f:instr_print_t_args args) (oprs:instr_operands_t_args args)
: instr_print =
match args with
| [] -> f
| i::args ->
(
match i with
| IOpEx i ->
// REVIEW: triggers F* -> OCaml bug: let f:arrow (instr_operand_t i) (instr_print_t_args args) = coerce f in
let (o, oprs) = coerce oprs in instr_printer_args args
(coerce #(arrow (instr_operand_t i) (instr_print_t_args args)) #(instr_print_t_args ((IOpEx i)::args)) f o)
oprs
| IOpIm _ -> instr_printer_args args (coerce f) (coerce #(instr_operands_t_args args) oprs)
)
let rec instr_printer_outs
(outs:list instr_out) (args:list instr_operand)
(f:instr_print_t outs args) (oprs:instr_operands_t outs args) | false | false | Vale.X64.Instruction_s.fst | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val instr_printer_outs
(outs: list instr_out)
(args: list instr_operand)
(f: instr_print_t outs args)
(oprs: instr_operands_t outs args)
: instr_print | [
"recursion"
] | Vale.X64.Instruction_s.instr_printer_outs | {
"file_name": "vale/specs/hardware/Vale.X64.Instruction_s.fst",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
outs: Prims.list Vale.X64.Instruction_s.instr_out ->
args: Prims.list Vale.X64.Instruction_s.instr_operand ->
f: Vale.X64.Instruction_s.instr_print_t outs args ->
oprs: Vale.X64.Instruction_s.instr_operands_t outs args
-> Vale.X64.Instruction_s.instr_print | {
"end_col": 5,
"end_line": 60,
"start_col": 2,
"start_line": 48
} |
Prims.Tot | val va_wp_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (()))) | val va_wp_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRax va_s0)
in_b
(va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0)
out_b
(va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0)
Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` (va_get_reg64 rRdx va_s0) < pow2_64 /\
va_get_reg64 rRdi va_s0 + 16 `op_Multiply` (va_get_reg64 rRdx va_s0) < pow2_64 /\
l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\
va_get_reg64 rRdx va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\
va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\
aes_reqs alg
key
round_keys
keys_b
(va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0)
(va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\
(forall (va_x_mem: vale_heap) (va_x_rbx: nat64) (va_x_r11: nat64) (va_x_r10: nat64)
(va_x_xmm0: quad32) (va_x_xmm1: quad32) (va_x_xmm2: quad32) (va_x_xmm3: quad32)
(va_x_xmm4: quad32) (va_x_xmm5: quad32) (va_x_xmm6: quad32) (va_x_xmm11: quad32)
(va_x_xmm10: quad32) (va_x_heap1: vale_heap) (va_x_efl: Vale.X64.Flags.t).
let va_sM =
va_upd_flags va_x_efl
(va_upd_mem_heaplet 1
va_x_heap1
(va_upd_xmm 10
va_x_xmm10
(va_upd_xmm 11
va_x_xmm11
(va_upd_xmm 6
va_x_xmm6
(va_upd_xmm 5
va_x_xmm5
(va_upd_xmm 4
va_x_xmm4
(va_upd_xmm 3
va_x_xmm3
(va_upd_xmm 2
va_x_xmm2
(va_upd_xmm 1
va_x_xmm1
(va_upd_xmm 0
va_x_xmm0
(va_upd_reg64 rR10
va_x_r10
(va_upd_reg64 rR11
va_x_r11
(va_upd_reg64 rRbx
va_x_rbx
(va_upd_mem va_x_mem va_s0))))))))))))
))
in
va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b
(va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\
Vale.AES.GCTR.gctr_partial alg
(va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b)
key
(va_get_xmm 11 va_s0) /\
va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\
(va_get_reg64 rRdx va_sM == 0 ==>
Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b ==
Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b)) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.l_or",
"Vale.X64.Decls.buffers_disjoint128",
"Prims.eq2",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Vale.X64.Machine_s.rRdi",
"Prims.op_LessThan",
"Prims.op_Addition",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Prims.nat",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Vale.X64.Machine_s.pow2_32",
"Vale.Def.Words_s.four",
"Vale.Def.Types_s.nat32",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Words_s.Mkfour",
"Vale.AES.X64.GCMencryptOpt.aes_reqs",
"Vale.X64.Machine_s.rR8",
"Vale.X64.CPU_Features_s.pclmulqdq_enabled",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Memory.nat64",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Vale.X64.Decls.modifies_buffer128",
"Vale.AES.GCTR.gctr_partial",
"Vale.X64.Decls.s128",
"Vale.Def.Types_s.quad32",
"Vale.AES.GCTR.inc32lite",
"Prims.int",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Gctr_blocks128 | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
in_b: Vale.X64.Memory.buffer128 ->
out_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 62,
"end_line": 173,
"start_col": 2,
"start_line": 146
} |
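Note on the row above: va_wp_Gctr_blocks128 is the weakest-precondition form of the Gctr_blocks128 procedure; in these Vale-generated interfaces it is normally paired with a va_wpProof_ lemma and a va_quick_ wrapper, exactly as the interface text in this record does for Gctr_register. A sketch of that wrapper for Gctr_blocks128 (the va_wpProof_Gctr_blocks128 name and the exact modifies list are assumptions, modeled on the va_quick_Gctr_register wrapper and on the va_update_* frame of va_lemma_Gctr_blocks128 shown in this record, not copied from the dataset):

[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
  (round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
  // Modifies clause mirrors the va_update_* frame of va_lemma_Gctr_blocks128 (minus ok).
  (va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
    va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2;
    va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx;
    va_Mod_mem]) (va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b)
    (va_wpProof_Gctr_blocks128 alg in_b out_b key round_keys keys_b))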
Prims.Tot | val aes_reqs
(alg: algorithm)
(key: seq nat32)
(round_keys: seq quad32)
(keys_b: buffer128)
(key_ptr: int)
(heap0: vale_heap)
(layout: vale_heap_layout)
: prop0 | [
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys | val aes_reqs
(alg: algorithm)
(key: seq nat32)
(round_keys: seq quad32)
(keys_b: buffer128)
(key_ptr: int)
(heap0: vale_heap)
(layout: vale_heap_layout)
: prop0
let aes_reqs
(alg: algorithm)
(key: seq nat32)
(round_keys: seq quad32)
(keys_b: buffer128)
(key_ptr: int)
(heap0: vale_heap)
(layout: vale_heap_layout)
: prop0 = | false | null | false | aesni_enabled /\ avx_enabled /\ (alg = AES_128 \/ alg = AES_256) /\ is_aes_key_LE alg key /\
length (round_keys) == nr (alg) + 1 /\ round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\ s128 heap0 keys_b == round_keys | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"Prims.int",
"Vale.X64.InsBasic.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.CPU_Features_s.aesni_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Prims.l_or",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"Prims.eq2",
"FStar.Seq.Base.length",
"Prims.op_Addition",
"Vale.AES.AES_common_s.nr",
"Vale.Def.Types_s.quad32",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.s128",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0 | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val aes_reqs
(alg: algorithm)
(key: seq nat32)
(round_keys: seq quad32)
(keys_b: buffer128)
(key_ptr: int)
(heap0: vale_heap)
(layout: vale_heap_layout)
: prop0 | [] | Vale.AES.X64.GCMencryptOpt.aes_reqs | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
key_ptr: Prims.int ->
heap0: Vale.X64.InsBasic.vale_heap ->
layout: Vale.Arch.HeapImpl.vale_heap_layout
-> Vale.Def.Prop_s.prop0 | {
"end_col": 33,
"end_line": 55,
"start_col": 2,
"start_line": 49
} |
Prims.Tot | val va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (()))) | val va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(sse_enabled /\
va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
aes_reqs alg
key
round_keys
keys_b
(va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0)
(va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0: quad32)
(va_x_xmm1: quad32)
(va_x_xmm2: quad32)
(va_x_xmm8: quad32)
(va_x_efl: Vale.X64.Flags.t)
(va_x_r12: nat64).
let va_sM =
va_upd_reg64 rR12
va_x_r12
(va_upd_flags va_x_efl
(va_upd_xmm 8
va_x_xmm8
(va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 va_s0)))))
in
va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM)
) ==
Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0)
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8
va_s0)))
alg
key /\
va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))
alg
key
0) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.Def.Words_s.four",
"Vale.Def.Types_s.nat32",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Words_s.Mkfour",
"Vale.AES.X64.GCMencryptOpt.aes_reqs",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.Decls.va_get_mem_layout",
"Prims.l_Forall",
"Vale.X64.Flags.t",
"Vale.X64.Memory.nat64",
"Prims.l_imp",
"Vale.Def.Types_s.nat8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.create",
"Vale.AES.GCTR_s.gctr_encrypt_LE",
"Vale.Def.Types_s.le_quad32_to_bytes",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.Def.Types_s.quad32",
"Vale.AES.GCTR_s.gctr_encrypt_block",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_xmm"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32)) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Gctr_register | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 58,
"end_line": 91,
"start_col": 2,
"start_line": 80
} |
Prims.Tot | val va_quick_Gcm_make_length_quad: Prims.unit
-> (va_quickCode unit (va_code_Gcm_make_length_quad ())) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad) | val va_quick_Gcm_make_length_quad: Prims.unit
-> (va_quickCode unit (va_code_Gcm_make_length_quad ()))
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) = | false | null | false | (va_QProc (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad
va_wpProof_Gcm_make_length_quad) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.unit",
"Vale.X64.QuickCode.va_QProc",
"Vale.AES.X64.GCMencryptOpt.va_code_Gcm_make_length_quad",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rRax",
"Vale.X64.QuickCode.va_Mod_xmm",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_make_length_quad",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gcm_make_length_quad",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr] | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gcm_make_length_quad: Prims.unit
-> (va_quickCode unit (va_code_Gcm_make_length_quad ())) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Gcm_make_length_quad | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | _: Prims.unit
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Gcm_make_length_quad ()) | {
"end_col": 63,
"end_line": 230,
"start_col": 2,
"start_line": 229
} |
Prims.Tot | val va_quick_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
: (va_quickCode unit (va_code_Ghash_extra_bytes ())) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads)) | val va_quick_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
: (va_quickCode unit (va_code_Ghash_extra_bytes ()))
let va_quick_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
: (va_quickCode unit (va_code_Ghash_extra_bytes ())) = | false | null | false | (va_QProc (va_code_Ghash_extra_bytes ())
([
va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx
])
(va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads)
(va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE completed_quads)) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.buffer128",
"Prims.nat",
"Vale.X64.Decls.quad32",
"FStar.Seq.Base.seq",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Ghash_extra_bytes",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRcx",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Ghash_extra_bytes",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Ghash_extra_bytes",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ())) | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
: (va_quickCode unit (va_code_Ghash_extra_bytes ())) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Ghash_extra_bytes | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
hkeys_b: Vale.X64.Memory.buffer128 ->
total_bytes: Prims.nat ->
old_hash: Vale.X64.Decls.quad32 ->
h_LE: Vale.X64.Decls.quad32 ->
completed_quads: FStar.Seq.Base.seq Vale.X64.Decls.quad32
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Ghash_extra_bytes ()) | {
"end_col": 21,
"end_line": 313,
"start_col": 2,
"start_line": 309
} |
Prims.Tot | val va_wp_Gcm_make_length_quad (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (()))) | val va_wp_Gcm_make_length_quad (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0
let va_wp_Gcm_make_length_quad (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 = | false | null | false | (va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` (va_get_reg64 rR13 va_s0) < pow2_64 /\
8 `op_Multiply` (va_get_reg64 rR11 va_s0) < pow2_64) /\
(forall (va_x_xmm0: quad32) (va_x_rax: nat64) (va_x_efl: Vale.X64.Flags.t).
let va_sM =
va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax (va_upd_xmm 0 va_x_xmm0 va_s0))
in
va_get_ok va_sM /\
(8 `op_Multiply` (va_get_reg64 rR13 va_s0) < pow2_64 /\
8 `op_Multiply` (va_get_reg64 rR11 va_s0) < pow2_64 /\
va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0)
(8 `op_Multiply` (va_get_reg64 rR11 va_s0))
1)
(8 `op_Multiply` (va_get_reg64 rR13 va_s0))
0) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.op_LessThan",
"Prims.op_Multiply",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.pow2_64",
"Vale.X64.Machine_s.rR11",
"Prims.l_Forall",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.nat64",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Prims.eq2",
"Vale.Def.Types_s.quad32",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Types_s.insert_nat64",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_upd_xmm"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr] | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gcm_make_length_quad (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_make_length_quad | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | va_s0: Vale.X64.Decls.va_state -> va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 53,
"end_line": 220,
"start_col": 2,
"start_line": 213
} |
Prims.Tot | val va_quick_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_blocks128 alg)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b)) | val va_quick_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_blocks128 alg))
let va_quick_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_blocks128 alg)) = | false | null | false | (va_QProc (va_code_Gctr_blocks128 alg)
([
va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5;
va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10;
va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem
])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b)
(va_wpProof_Gctr_blocks128 alg in_b out_b key round_keys keys_b)) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Gctr_blocks128",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gctr_blocks128",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gctr_blocks128",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
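(* Editorial sketch, not part of the original interface: aes_reqs is an ordinary
   (non-opaque) conjunction, so any single conjunct can be recovered from it.
   The lemma name below is invented for illustration and assumes nothing beyond
   the definition above. *)
let aes_reqs_fixes_round_keys (alg:algorithm) (key:seq nat32) (round_keys:seq quad32)
  (keys_b:buffer128) (key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout)
  : Lemma (requires aes_reqs alg key round_keys keys_b key_ptr heap0 layout)
          (ensures round_keys == key_to_round_keys_LE alg key)
  = ()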
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
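(* Editorial sketch, not part of the original interface: one plausible way a caller's
   quick-code block could invoke the wrapper above.  The qblock / va_QBind / va_QEmpty
   combinators and va_range1 are assumed to behave as in other Vale-generated modules,
   the demo name is invented, and va_wp_Gctr_register's precondition must hold at the
   point of the call. *)
[@ "opaque_to_smt" va_qattr]
let va_qcode_Gctr_register_demo (va_mods:va_mods_t) (alg:algorithm) (key:(seq nat32))
  (round_keys:(seq quad32)) (keys_b:buffer128) =
  (qblock va_mods (fun (va_s:va_state) ->
    va_QBind va_range1 "demo call to Gctr_register"
      (va_quick_Gctr_register alg key round_keys keys_b)
      (fun (va_s:va_state) _ -> va_QEmpty (()))))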
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32)) | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gctr_blocks128
(alg: algorithm)
(in_b out_b: buffer128)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_blocks128 alg)) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Gctr_blocks128 | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
in_b: Vale.X64.Memory.buffer128 ->
out_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Gctr_blocks128 alg) | {
"end_col": 33,
"end_line": 192,
"start_col": 2,
"start_line": 188
} |
Prims.Tot | val va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b)) | val va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg))
let va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) = | false | null | false | (va_QProc (va_code_Gctr_register alg)
([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0])
(va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b)) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.AES.AES_common_s.algorithm",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.quad32",
"Vale.X64.Memory.buffer128",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Gctr_register",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR12",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_xmm",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gctr_register",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gctr_register",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32)) | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gctr_register
(alg: algorithm)
(key: (seq nat32))
(round_keys: (seq quad32))
(keys_b: buffer128)
: (va_quickCode unit (va_code_Gctr_register alg)) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Gctr_register | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
alg: Vale.AES.AES_common_s.algorithm ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
round_keys: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
keys_b: Vale.X64.Memory.buffer128
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Gctr_register alg) | {
"end_col": 57,
"end_line": 105,
"start_col": 2,
"start_line": 103
} |
Prims.Tot | val va_quick_Gcm_blocks_auth (auth_b abytes_b hkeys_b: buffer128) (h_LE: quad32)
: (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)) | val va_quick_Gcm_blocks_auth (auth_b abytes_b hkeys_b: buffer128) (h_LE: quad32)
: (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ()))
let va_quick_Gcm_blocks_auth (auth_b abytes_b hkeys_b: buffer128) (h_LE: quad32)
: (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) = | false | null | false | (va_QProc (va_code_Gcm_blocks_auth ())
([
va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15;
va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx
])
(va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.buffer128",
"Vale.X64.Decls.quad32",
"Vale.X64.QuickCode.va_QProc",
"FStar.Seq.Base.seq",
"Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_auth",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRdx",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_blocks_auth",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gcm_blocks_auth",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gcm_blocks_auth (auth_b abytes_b hkeys_b: buffer128) (h_LE: quad32)
: (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Gcm_blocks_auth | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
auth_b: Vale.X64.Memory.buffer128 ->
abytes_b: Vale.X64.Memory.buffer128 ->
hkeys_b: Vale.X64.Memory.buffer128 ->
h_LE: Vale.X64.Decls.quad32
-> Vale.X64.QuickCode.va_quickCode (FStar.Seq.Base.seq Vale.X64.Decls.quad32)
(Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_auth ()) | {
"end_col": 62,
"end_line": 407,
"start_col": 2,
"start_line": 403
} |
Prims.Tot | val va_wp_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (()))) | val va_wp_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\
va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 ==
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
old_hash
completed_quads) /\
Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR9 va_s0 - 32)
hkeys_b
8
(va_get_mem_layout va_s0)
Secret /\ FStar.Seq.Base.length #quad32 completed_quads == total_bytes `op_Division` 16 /\
total_bytes < 16 `op_Multiply` (FStar.Seq.Base.length #quad32 completed_quads) + 16 /\
va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes `op_Modulus` 16 =!= 0 /\
(0 < total_bytes /\
total_bytes < 16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes)) /\
16 `op_Multiply` (Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\
(forall (va_x_rcx: nat64) (va_x_r11: nat64) (va_x_xmm0: quad32) (va_x_xmm1: quad32)
(va_x_xmm2: quad32) (va_x_xmm3: quad32) (va_x_xmm4: quad32) (va_x_xmm5: quad32)
(va_x_xmm6: quad32) (va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_efl: Vale.X64.Flags.t).
let va_sM =
va_upd_flags va_x_efl
(va_upd_xmm 8
va_x_xmm8
(va_upd_xmm 7
va_x_xmm7
(va_upd_xmm 6
va_x_xmm6
(va_upd_xmm 5
va_x_xmm5
(va_upd_xmm 4
va_x_xmm4
(va_upd_xmm 3
va_x_xmm3
(va_upd_xmm 2
va_x_xmm2
(va_upd_xmm 1
va_x_xmm1
(va_upd_xmm 0
va_x_xmm0
(va_upd_reg64 rR11
va_x_r11
(va_upd_reg64 rRcx va_x_rcx va_s0)))))))))))
in
va_get_ok va_sM /\
(let raw_quads =
FStar.Seq.Base.append #quad32
completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0))
in
let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0
total_bytes
in
let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in
let input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in
total_bytes > 0 ==>
l_and (FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) ==
Vale.AES.GHash.ghash_incremental h_LE old_hash input_quads)) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.buffer128",
"Prims.nat",
"Vale.X64.Decls.quad32",
"FStar.Seq.Base.seq",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.pclmulqdq_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.Def.Words_s.four",
"Vale.Def.Types_s.nat32",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.quad32",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.AES.GHash.ghash_incremental0",
"Vale.AES.GHash.hkeys_reqs_priv",
"Vale.X64.Decls.s128",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.Decls.validSrcAddrs128",
"Prims.op_Subtraction",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.Arch.HeapTypes_s.Secret",
"Prims.int",
"FStar.Seq.Base.length",
"Prims.op_Division",
"Prims.op_LessThan",
"Prims.op_Addition",
"Prims.op_Multiply",
"Vale.X64.Machine_s.rR10",
"Prims.op_Modulus",
"Prims.l_not",
"Vale.AES.GCM_helpers.bytes_to_quad_size",
"Prims.l_Forall",
"Vale.X64.Memory.nat64",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Prims.op_GreaterThan",
"Vale.AES.GHash.ghash_incremental",
"Vale.Def.Types_s.le_bytes_to_seq_quad32",
"Vale.Def.Words_s.nat8",
"Vale.AES.GCTR_s.pad_to_128_bits",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.nat8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.append",
"FStar.Seq.Base.create",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rRcx"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Ghash_extra_bytes
(hkeys_b: buffer128)
(total_bytes: nat)
(old_hash h_LE: quad32)
(completed_quads: (seq quad32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Ghash_extra_bytes | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
hkeys_b: Vale.X64.Memory.buffer128 ->
total_bytes: Prims.nat ->
old_hash: Vale.X64.Decls.quad32 ->
h_LE: Vale.X64.Decls.quad32 ->
completed_quads: FStar.Seq.Base.seq Vale.X64.Decls.quad32 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 53,
"end_line": 294,
"start_col": 2,
"start_line": 269
} |
Prims.Tot | val va_quick_Save_registers (win: bool) : (va_quickCode unit (va_code_Save_registers win)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win)) | val va_quick_Save_registers (win: bool) : (va_quickCode unit (va_code_Save_registers win))
let va_quick_Save_registers (win: bool) : (va_quickCode unit (va_code_Save_registers win)) = | false | null | false | (va_QProc (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax])
(va_wp_Save_registers win)
(va_wpProof_Save_registers win)) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Save_registers",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rRax",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Save_registers",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Save_registers",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr] | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Save_registers (win: bool) : (va_quickCode unit (va_code_Save_registers win)) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Save_registers | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | win: Prims.bool
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Save_registers win) | {
"end_col": 9,
"end_line": 542,
"start_col": 2,
"start_line": 540
} |
Prims.Tot | val va_quick_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
: (va_quickCode unit (va_code_Restore_registers win)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15)) | val va_quick_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
: (va_quickCode unit (va_code_Restore_registers win))
let va_quick_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
: (va_quickCode unit (va_code_Restore_registers win)) = | false | null | false | (va_QProc (va_code_Restore_registers win)
([
va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15;
va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14;
va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax
])
(va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11
old_xmm12 old_xmm13 old_xmm14 old_xmm15)
(va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10
old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15)) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.bool",
"Prims.nat",
"Vale.X64.Decls.quad32",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Restore_registers",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRax",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Restore_registers",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Restore_registers",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
: (va_quickCode unit (va_code_Restore_registers win)) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Restore_registers | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
old_rsp: Prims.nat ->
old_xmm6: Vale.X64.Decls.quad32 ->
old_xmm7: Vale.X64.Decls.quad32 ->
old_xmm8: Vale.X64.Decls.quad32 ->
old_xmm9: Vale.X64.Decls.quad32 ->
old_xmm10: Vale.X64.Decls.quad32 ->
old_xmm11: Vale.X64.Decls.quad32 ->
old_xmm12: Vale.X64.Decls.quad32 ->
old_xmm13: Vale.X64.Decls.quad32 ->
old_xmm14: Vale.X64.Decls.quad32 ->
old_xmm15: Vale.X64.Decls.quad32
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Restore_registers win) | {
"end_col": 65,
"end_line": 718,
"start_col": 2,
"start_line": 711
} |
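Note: the body of va_quick_Restore_registers is truncated in the record above. A minimal sketch of a completion, assuming it follows the same va_QProc pattern as the other quick procedures in this interface (e.g. va_quick_Gctr_register) and reuses the modifier list from va_wpProof_Restore_registers, would be:

  (* Illustrative sketch only, not copied from the source; assumes the standard Vale va_QProc pattern. *)
  (va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
    va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
    va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
    va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
    va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax])
    (va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11
      old_xmm12 old_xmm13 old_xmm14 old_xmm15)
    (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10
      old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))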
Prims.Tot | val va_wp_Save_registers (win: bool) (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(()))) | val va_wp_Save_registers (win: bool) (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0
let va_wp_Save_registers (win: bool) (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 = | false | null | false | (va_get_ok va_s0 /\ sse_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
(forall (va_x_rax: nat64)
(va_x_rsp: nat64)
(va_x_stack: vale_stack)
(va_x_efl: Vale.X64.Flags.t)
(va_x_stackTaint: memtaint).
let va_sM =
va_upd_stackTaint va_x_stackTaint
(va_upd_flags va_x_efl
(va_upd_stack va_x_stack
(va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRax va_x_rax va_s0))))
in
va_get_ok va_sM /\
va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 -
8
`op_Multiply`
(8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) /\
Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM)
(8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0))
(va_get_stack va_sM)
Secret
(va_get_stackTaint va_sM) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0)
(va_get_stack va_s0)
(va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0)
(va_get_stackTaint va_sM) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 6 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 7 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 8 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 9 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 10 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 11 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 12 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 13 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 14 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) ==
Vale.Arch.Types.hi64 (va_get_xmm 15 va_sM)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack va_sM) ==
Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 0 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rRdi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 48 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_sM) ==
va_get_reg64 rR15 va_sM ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Prims.l_Forall",
"Vale.X64.Memory.nat64",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Flags.t",
"Vale.X64.Memory.memtaint",
"Prims.l_imp",
"Prims.int",
"Prims.op_Subtraction",
"Prims.op_Multiply",
"Prims.op_Addition",
"Vale.X64.Decls.va_if",
"Prims.l_not",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"Vale.X64.Stack_i.valid_stack_slot64s",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.va_get_stackTaint",
"Vale.X64.Stack_i.modifies_stack",
"Vale.X64.Stack_i.modifies_stacktaint",
"Vale.X64.Stack_i.load_stack64",
"Vale.Arch.Types.hi64",
"Vale.X64.Decls.va_get_xmm",
"Vale.Arch.Types.lo64",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rRax"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr] | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Save_registers (win: bool) (va_s0: va_state) (va_k: (va_state -> unit -> Type0)) : Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Save_registers | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 10,
"end_line": 530,
"start_col": 2,
"start_line": 474
} |
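Note: the record above gives only the weakest-precondition predicate for Save_registers. For context, a hedged sketch of the companion wp-proof declaration, assuming it mirrors the va_wpProof_* pattern visible in the file_context; the modifier list is inferred from the frame of va_lemma_Save_registers and is an assumption, not copied from the source:

  (* Hedged sketch; the exact frame may differ in the real interface. *)
  val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
    -> Ghost (va_state & va_fuel & unit)
    (requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
    (ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
      ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax])
      va_s0 va_k ((va_sM, va_f0, va_g))))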
Prims.Tot | val va_wp_Gcm_blocks_auth
(auth_b abytes_b hkeys_b: buffer128)
(h_LE: quad32)
(va_s0: va_state)
(va_k: (va_state -> (seq quad32) -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq)))) | val va_wp_Gcm_blocks_auth
(auth_b abytes_b hkeys_b: buffer128)
(h_LE: quad32)
(va_s0: va_state)
(va_k: (va_state -> (seq quad32) -> Type0))
: Type0
let va_wp_Gcm_blocks_auth
(auth_b abytes_b hkeys_b: buffer128)
(h_LE: quad32)
(va_s0: va_state)
(va_k: (va_state -> (seq quad32) -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(sse_enabled /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0)
auth_b
(va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0)
(va_get_reg64 rRbx va_s0)
abytes_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0)
(va_get_reg64 rR9 va_s0 - 32)
hkeys_b
8
(va_get_mem_layout va_s0)
Secret /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` (va_get_reg64 rRdx va_s0) < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
(va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) <= va_get_reg64 rRsi va_s0 /\
va_get_reg64 rRsi va_s0 <
va_mul_nat (va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\
(pclmulqdq_enabled /\ avx_enabled) /\
Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0 va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\
(forall (va_x_rdx: nat64) (va_x_r11: nat64) (va_x_r10: nat64) (va_x_rcx: nat64) (va_x_r15: nat64)
(va_x_efl: Vale.X64.Flags.t) (va_x_xmm0: quad32) (va_x_xmm1: quad32) (va_x_xmm2: quad32)
(va_x_xmm3: quad32) (va_x_xmm4: quad32) (va_x_xmm5: quad32) (va_x_xmm6: quad32)
(va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_xmm9: quad32) (auth_quad_seq: (seq quad32)).
let va_sM =
va_upd_xmm 9
va_x_xmm9
(va_upd_xmm 8
va_x_xmm8
(va_upd_xmm 7
va_x_xmm7
(va_upd_xmm 6
va_x_xmm6
(va_upd_xmm 5
va_x_xmm5
(va_upd_xmm 4
va_x_xmm4
(va_upd_xmm 3
va_x_xmm3
(va_upd_xmm 2
va_x_xmm2
(va_upd_xmm 1
va_x_xmm1
(va_upd_xmm 0
va_x_xmm0
(va_upd_flags va_x_efl
(va_upd_reg64 rR15
va_x_r15
(va_upd_reg64 rRcx
va_x_rcx
(va_upd_reg64 rR10
va_x_r10
(va_upd_reg64 rR11
va_x_r11
(va_upd_reg64 rRdx va_x_rdx va_s0)
))))))))))))))
in
va_get_ok va_sM /\
(va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\
va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
(let raw_auth_quads:(seq quad32) =
va_if (va_get_reg64 rRsi va_s0 >
((va_get_reg64 rRdx va_s0) `op_Multiply` 128)
`op_Division`
8)
(fun _ ->
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 7 va_s0) abytes_b))
(fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b)
in
let auth_input_bytes:(seq nat8) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads)
0
(va_get_reg64 rRsi va_s0)
in
let padded_auth_bytes:(seq nat8) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\
va_get_xmm 8 va_sM ==
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
auth_quad_seq))) ==>
va_k va_sM ((auth_quad_seq)))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Memory.buffer128",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.va_state",
"FStar.Seq.Base.seq",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.X64.Decls.va_get_mem_heaplet",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Machine_s.rRbx",
"Prims.op_Subtraction",
"Vale.X64.Machine_s.rR9",
"Prims.op_LessThan",
"Prims.op_Addition",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Prims.eq2",
"Prims.nat",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.int",
"Prims.op_LessThanOrEqual",
"Vale.X64.Decls.va_mul_nat",
"Prims.op_Division",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.CPU_Features_s.pclmulqdq_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.AES.GHash.hkeys_reqs_priv",
"Vale.X64.Decls.s128",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Prims.l_Forall",
"Vale.X64.Memory.nat64",
"Vale.X64.Flags.t",
"Prims.l_imp",
"Vale.Def.Types_s.nat64",
"Vale.X64.Machine_s.rR15",
"Vale.Def.Words_s.four",
"Vale.Def.Types_s.nat32",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.quad32",
"Vale.Def.Types_s.le_bytes_to_seq_quad32",
"Vale.AES.GHash.ghash_incremental0",
"Vale.Def.Words_s.nat8",
"Vale.AES.GCTR_s.pad_to_128_bits",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.nat8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"Vale.X64.Decls.va_if",
"Prims.op_GreaterThan",
"Prims.unit",
"FStar.Seq.Base.append",
"Prims.l_not",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR11"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gcm_blocks_auth
(auth_b abytes_b hkeys_b: buffer128)
(h_LE: quad32)
(va_s0: va_state)
(va_k: (va_state -> (seq quad32) -> Type0))
: Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_blocks_auth | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
auth_b: Vale.X64.Memory.buffer128 ->
abytes_b: Vale.X64.Memory.buffer128 ->
hkeys_b: Vale.X64.Memory.buffer128 ->
h_LE: Vale.X64.Decls.quad32 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: FStar.Seq.Base.seq Vale.X64.Decls.quad32 -> Type0)
-> Type0 | {
"end_col": 58,
"end_line": 390,
"start_col": 2,
"start_line": 358
} |
Prims.Tot | val va_wp_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (()))) | val va_wp_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0)
(8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0))
(va_get_stack va_s0)
Secret
(va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 ==
old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\
(win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\
(forall (va_x_rax: nat64) (va_x_rbx: nat64) (va_x_rbp: nat64) (va_x_rdi: nat64) (va_x_rsi: nat64)
(va_x_r12: nat64) (va_x_r13: nat64) (va_x_r14: nat64) (va_x_r15: nat64) (va_x_xmm6: quad32)
(va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_xmm9: quad32) (va_x_xmm10: quad32)
(va_x_xmm11: quad32) (va_x_xmm12: quad32) (va_x_xmm13: quad32) (va_x_xmm14: quad32)
(va_x_xmm15: quad32) (va_x_stack: vale_stack) (va_x_rsp: nat64) (va_x_efl: Vale.X64.Flags.t)
(va_x_stackTaint: memtaint).
let va_sM =
va_upd_stackTaint va_x_stackTaint
(va_upd_flags va_x_efl
(va_upd_reg64 rRsp
va_x_rsp
(va_upd_stack va_x_stack
(va_upd_xmm 15
va_x_xmm15
(va_upd_xmm 14
va_x_xmm14
(va_upd_xmm 13
va_x_xmm13
(va_upd_xmm 12
va_x_xmm12
(va_upd_xmm 11
va_x_xmm11
(va_upd_xmm 10
va_x_xmm10
(va_upd_xmm 9
va_x_xmm9
(va_upd_xmm 8
va_x_xmm8
(va_upd_xmm 7
va_x_xmm7
(va_upd_xmm 6
va_x_xmm6
(va_upd_reg64 rR15
va_x_r15
(va_upd_reg64 rR14
va_x_r14
(va_upd_reg64 rR13
va_x_r13
(va_upd_reg64 rR12
va_x_r12
(va_upd_reg64 rRsi
va_x_rsi
(va_upd_reg64 rRdi
va_x_rdi
(va_upd_reg64 rRbp
va_x_rbp
(va_upd_reg64
rRbx
va_x_rbx
(va_upd_reg64
rRax
va_x_rax
va_s0
))))
))))))))))))))))))
in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\
Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0)
(va_get_reg64 rRsp va_sM)
(va_get_stack va_s0)
(va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rRbx va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 +
va_if win (fun _ -> 160) (fun _ -> 0))
(va_get_stack va_s0) ==
va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM == old_xmm6) /\
(win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM == old_xmm8) /\
(win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM == old_xmm10) /\
(win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM == old_xmm12) /\
(win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM == old_xmm14) /\
(win ==> va_get_xmm 15 va_sM == old_xmm15) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.bool",
"Prims.nat",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.sse_enabled",
"Prims.eq2",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Stack_i.valid_stack_slot64s",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Prims.op_Addition",
"Vale.X64.Decls.va_if",
"Prims.int",
"Prims.op_Multiply",
"Prims.l_not",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.va_get_stackTaint",
"Prims.op_Subtraction",
"Prims.l_imp",
"Vale.Def.Words_s.nat64",
"Vale.X64.Stack_i.load_stack64",
"Vale.Arch.Types.hi64",
"Vale.Arch.Types.lo64",
"Prims.l_Forall",
"Vale.X64.Memory.nat64",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Flags.t",
"Vale.X64.Memory.memtaint",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"Vale.X64.Stack_i.modifies_stack",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.va_get_xmm",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Machine_s.rRax"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Restore_registers
(win: bool)
(old_rsp: nat)
(old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15:
quad32)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Restore_registers | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
old_rsp: Prims.nat ->
old_xmm6: Vale.X64.Decls.quad32 ->
old_xmm7: Vale.X64.Decls.quad32 ->
old_xmm8: Vale.X64.Decls.quad32 ->
old_xmm9: Vale.X64.Decls.quad32 ->
old_xmm10: Vale.X64.Decls.quad32 ->
old_xmm11: Vale.X64.Decls.quad32 ->
old_xmm12: Vale.X64.Decls.quad32 ->
old_xmm13: Vale.X64.Decls.quad32 ->
old_xmm14: Vale.X64.Decls.quad32 ->
old_xmm15: Vale.X64.Decls.quad32 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 82,
"end_line": 691,
"start_col": 2,
"start_line": 623
} |
Prims.Tot | val va_quick_Gcm_blocks_stdcall
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
: (va_quickCode unit (va_code_Gcm_blocks_stdcall win alg)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) : (va_quickCode unit
(va_code_Gcm_blocks_stdcall win alg)) =
(va_QProc (va_code_Gcm_blocks_stdcall win alg) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags;
va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3;
va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14;
va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;
va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks_stdcall win alg auth_b
auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b
out128_b len128_num inout_b plain_num scratch_b tag_b key) (va_wpProof_Gcm_blocks_stdcall win
alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b
len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b key)) | val va_quick_Gcm_blocks_stdcall
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
: (va_quickCode unit (va_code_Gcm_blocks_stdcall win alg))
let va_quick_Gcm_blocks_stdcall
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
: (va_quickCode unit (va_code_Gcm_blocks_stdcall win alg)) = | false | null | false | (va_QProc (va_code_Gcm_blocks_stdcall win alg)
([
va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;
va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;
va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5;
va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11;
va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp;
va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem
])
(va_wp_Gcm_blocks_stdcall win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b
in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b
tag_b key)
(va_wpProof_Gcm_blocks_stdcall win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key)) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.AES.GCM_s.supported_iv_LE",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_stdcall",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRax",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_blocks_stdcall",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Gcm_blocks_stdcall",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
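// aes_reqs: common precondition on the AES key material.  It requires AESNI and AVX,
// restricts alg to AES-128 or AES-256, asks that key be a valid little-endian AES key,
// and states that the nr(alg)+1 quadwords of keys_b at key_ptr in heap0 hold exactly the
// expanded round keys key_to_round_keys_LE alg key.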
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
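// Gctr_register encrypts a single counter block: given the byte-swap mask in xmm9 and the
// expanded key described by aes_reqs (key pointer in rR8), it replaces xmm8 with
// gctr_encrypt_block of the byte-reversed xmm8 under the counter value in xmm0.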
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
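// Gctr_blocks128 runs CTR mode over rRdx 128-bit blocks, reading in_b at rRax and writing
// out_b at rRdi in heaplet 1; the postcondition is gctr_partial over the two buffer views,
// with the counter xmm11 advanced by rRdx via inc32lite.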
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
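// Gcm_make_length_quad builds the GCM length block in xmm0: the byte counts in rR11 and
// rR13 are scaled to bit counts (8 * rR11 and 8 * rR13) and packed into the two 64-bit
// halves of the quadword (rR11's count at index 1, rR13's at index 0).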
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
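// Ghash_extra_bytes folds the trailing partial block (held in xmm0, with the remainder
// count total_bytes mod 16 in rR10) into the running GHASH state that xmm8 carries
// byte-reversed; old_hash, h_LE and completed_quads are ghost arguments describing the
// quadwords hashed so far.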
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
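// Gcm_blocks_auth GHashes the authenticated data: rRdx whole blocks from auth_b plus, when
// the byte count rRsi exceeds rRdx * 16, the partial block in abytes_b.  On exit xmm8 holds
// the byte-reversed intermediate hash, and the ghost result auth_quad_seq is the padded
// quadword sequence that was hashed.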
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
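// Save_registers spills rbx, rbp, rdi, rsi and r12-r15 to the stack and, when win is true,
// also xmm6-xmm15 (at the lower stack offsets); rRsp decreases by 8 * (8 + 20) bytes on
// Windows and by 8 * 8 bytes otherwise.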
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
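// Restore_registers undoes Save_registers: it reloads rbx, rbp, rdi, rsi and r12-r15 (and,
// when win is true, xmm6-xmm15) from the saved stack slots and returns rRsp to old_rsp,
// the initial stack pointer.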
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
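// Note (editorial comment, not part of the generated spec): the quick-code wrapper
// below packages the Restore_registers code, its list of modified locations, its
// weakest precondition (va_wp_Restore_registers), and the corresponding wp proof
// into a single va_QProc value for use by the quick-code framework.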
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))
//--
#reset-options "--z3rlimit 100"
//-- Gcm_blocks_stdcall
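// Note (editorial comment, not part of the generated spec): the declarations below
// describe the GCM-blocks stdcall wrapper: the code constructor and codegen-success
// check, the top-level precondition (va_req_Gcm_blocks_stdcall) and postcondition
// (va_ens_Gcm_blocks_stdcall), the correctness lemma signature, and the
// weakest-precondition form. The win flag selects between the Windows and
// Linux/System V argument registers and stack-slot offsets for the stdcall arguments.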
val va_code_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)))
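// Note (editorial comment, not part of the generated spec): va_ens_Gcm_blocks_stdcall
// below conjoins va_req_Gcm_blocks_stdcall with the postcondition: only tag_b, iv_b,
// scratch_b, out128x6_b, out128_b and inout_b may be modified in memory, the produced
// ciphertext and tag agree with Vale.AES.GCM_s.gcm_encrypt_LE on the assembled
// plaintext and authenticated data, and the callee-saved registers (and xmm6-xmm15 on
// Windows) are restored.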
let va_ens_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gcm_blocks_stdcall va_b0 va_s0 win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))))))))
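// Note (editorial comment, not part of the generated spec): the lemma signature below
// restates the same pre- and postconditions as a Ghost computation returning the
// final state and fuel.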
val va_lemma_Gcm_blocks_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> alg:algorithm ->
auth_b:buffer128 -> auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 ->
iv:supported_iv_LE -> hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 ->
out128x6_b:buffer128 -> len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 ->
len128_num:nat64 -> inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 ->
tag_b:buffer128 -> key:(seq nat32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0
/\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))))))))
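// Note (editorial comment, not part of the generated spec): va_wp_Gcm_blocks_stdcall
// below is the weakest-precondition form of the same contract; following the pattern
// of va_wp_Restore_registers above, it is expected to quantify over the state
// components the procedure may modify and to require the postcondition to imply the
// continuation va_k.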
[@ va_qattr]
let va_wp_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let
(auth_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx
va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR8 va_s0) in let
(xip:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR9 va_s0)
in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let
(in128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in
let (len128x6:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in
let (in128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in
let (out128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in
let (inout_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in
let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in
let (tag_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in
aesni_enabled /\ pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ movbe_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
auth_len == auth_num /\ auth_num_bytes == auth_bytes /\ len128x6 == len128x6_num /\ len128 ==
len128_num /\ plain_num_bytes == plain_num /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) auth_ptr auth_b auth_len (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) abytes_ptr abytes_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) iv_ptr iv_b 1
(va_get_mem_layout va_s0) Public /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128x6_ptr in128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128x6_ptr out128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128_ptr in128_b len128 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) inout_ptr inout_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) scratch_ptr scratch_b 9
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem
va_s0) tag_ptr tag_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128
tag_b ([keys_b; auth_b; abytes_b; iv_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b;
auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b; abytes_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64)
(va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64) (va_x_rsi:nat64)
(va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)
(va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_memLayout:vale_heap_layout)
(va_x_heap1:vale_heap) (va_x_heap2:vale_heap) (va_x_heap3:vale_heap) (va_x_heap4:vale_heap)
(va_x_heap5:vale_heap) (va_x_heap6:vale_heap) (va_x_efl:Vale.X64.Flags.t)
(va_x_stack:vale_stack) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_stack va_x_stack (va_upd_flags va_x_efl (va_upd_mem_heaplet 6
va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 4 va_x_heap4
(va_upd_mem_heaplet 3 va_x_heap3 (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_mem_layout va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14
va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11
(va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7
va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3
va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64
rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12
va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9
(va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64
rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx
va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0)
(fun _ -> va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (auth_len:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let (out128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let (len128x6:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in let (len128:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in let (scratch_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let (tag_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (())))
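// va_wpProof_Gcm_blocks_stdcall discharges the weakest precondition va_wp_Gcm_blocks_stdcall
// above against va_code_Gcm_blocks_stdcall: from a state satisfying the wp it produces the
// final state, the fuel, and the unit result required by va_t_ensure for the listed frame.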
val va_wpProof_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> auth_b:buffer128 ->
auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 -> iv:supported_iv_LE ->
hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->
len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 -> len128_num:nat64 ->
inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 -> tag_b:buffer128 -> key:(seq nat32)
-> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_stdcall win alg auth_b auth_bytes auth_num
keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num
inout_b plain_num scratch_b tag_b key va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_stdcall win alg)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;
va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;
va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm
4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64
rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64
rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem])
va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) : (va_quickCode unit | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Gcm_blocks_stdcall
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
: (va_quickCode unit (va_code_Gcm_blocks_stdcall win alg)) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Gcm_blocks_stdcall | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
alg: Vale.AES.AES_common_s.algorithm ->
auth_b: Vale.X64.Memory.buffer128 ->
auth_bytes: Vale.X64.Memory.nat64 ->
auth_num: Vale.X64.Memory.nat64 ->
keys_b: Vale.X64.Memory.buffer128 ->
iv_b: Vale.X64.Memory.buffer128 ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
hkeys_b: Vale.X64.Memory.buffer128 ->
abytes_b: Vale.X64.Memory.buffer128 ->
in128x6_b: Vale.X64.Memory.buffer128 ->
out128x6_b: Vale.X64.Memory.buffer128 ->
len128x6_num: Vale.X64.Memory.nat64 ->
in128_b: Vale.X64.Memory.buffer128 ->
out128_b: Vale.X64.Memory.buffer128 ->
len128_num: Vale.X64.Memory.nat64 ->
inout_b: Vale.X64.Memory.buffer128 ->
plain_num: Vale.X64.Memory.nat64 ->
scratch_b: Vale.X64.Memory.buffer128 ->
tag_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_stdcall win alg) | {
"end_col": 84,
"end_line": 1533,
"start_col": 2,
"start_line": 1521
} |
Prims.Tot | val va_quick_Compute_iv_stdcall
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
: (va_quickCode unit (va_code_Compute_iv_stdcall win)) | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_quick_Compute_iv_stdcall (win:bool) (iv:supported_iv_LE) (iv_b:buffer128) (num_bytes:nat64)
(len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit
(va_code_Compute_iv_stdcall win)) =
(va_QProc (va_code_Compute_iv_stdcall win) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags;
va_Mod_mem_heaplet 7; va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13;
va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7;
va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm
0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64
rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64
rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64
rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Compute_iv_stdcall win iv iv_b num_bytes len j0_b
iv_extra_b hkeys_b) (va_wpProof_Compute_iv_stdcall win iv iv_b num_bytes len j0_b iv_extra_b
hkeys_b)) | val va_quick_Compute_iv_stdcall
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
: (va_quickCode unit (va_code_Compute_iv_stdcall win))
let va_quick_Compute_iv_stdcall
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
: (va_quickCode unit (va_code_Compute_iv_stdcall win)) = | false | null | false | (va_QProc (va_code_Compute_iv_stdcall win)
([
va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_heaplet 7; va_Mod_mem_layout;
va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;
va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14;
va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9;
va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem
])
(va_wp_Compute_iv_stdcall win iv iv_b num_bytes len j0_b iv_extra_b hkeys_b)
(va_wpProof_Compute_iv_stdcall win iv iv_b num_bytes len j0_b iv_extra_b hkeys_b)) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.X64.QuickCode.va_QProc",
"Prims.unit",
"Vale.AES.X64.GCMencryptOpt.va_code_Compute_iv_stdcall",
"Prims.Cons",
"Vale.X64.QuickCode.mod_t",
"Vale.X64.QuickCode.va_Mod_stackTaint",
"Vale.X64.QuickCode.va_Mod_stack",
"Vale.X64.QuickCode.va_Mod_flags",
"Vale.X64.QuickCode.va_Mod_mem_heaplet",
"Vale.X64.QuickCode.va_Mod_mem_layout",
"Vale.X64.QuickCode.va_Mod_xmm",
"Vale.X64.QuickCode.va_Mod_reg64",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRax",
"Vale.X64.QuickCode.va_Mod_mem",
"Prims.Nil",
"Vale.AES.X64.GCMencryptOpt.va_wp_Compute_iv_stdcall",
"Vale.AES.X64.GCMencryptOpt.va_wpProof_Compute_iv_stdcall",
"Vale.X64.QuickCode.va_quickCode"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
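// aes_reqs collects the preconditions shared by the procedures below: AESNI/AVX support,
// alg restricted to AES_128 or AES_256, a valid little-endian key whose expansion equals
// round_keys, and keys_b at key_ptr in heap0 holding exactly those round keys.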
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
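// Gctr_blocks128: CTR-encrypts rRdx 128-bit blocks from in_b into out_b; the postcondition
// states gctr_partial for the processed blocks and that the counter in xmm11 is advanced by
// rRdx via inc32lite.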
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
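// Gcm_make_length_quad: builds in xmm0 the GCM length block, packing the bit counts
// 8 * rR13 and 8 * rR11 into the two 64-bit halves of the quadword (see the insert_nat64
// postcondition below).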
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
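// Ghash_extra_bytes: folds a trailing partial block into the incremental GHash state kept
// (byte-reversed) in xmm8; the leftover bytes in xmm0 are padded to a full 128-bit block
// with pad_to_128_bits before being hashed, as stated in the postcondition.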
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
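// Note (summary added for readability; derived from the declarations themselves): each
// procedure in this interface follows the same generated pattern -- va_code_* gives its code,
// va_codegen_success_* a va_pbool witnessing that the code can be emitted, va_lemma_* the
// Hoare-style correctness lemma, va_wp_* the weakest-precondition form of that contract,
// va_wpProof_* the proof relating the two, and va_quick_* the va_quickCode wrapper that
// packages them for Vale's quick-code framework.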
//-- Gcm_blocks_auth
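// In brief (from the contract below): folds the additional authenticated data into an
// incremental GHash under h_LE, starting from the zero hash.  auth_b holds rRdx full 128-bit
// blocks at rRdi; when the byte count in rRsi exceeds those blocks, the trailing partial block
// is taken from abytes_b.  The AAD bytes are padded to a 128-bit boundary, the padded quad32
// sequence is returned, and xmm8 ends up holding the byte-reversed running hash.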
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
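// In brief (from the contract below): prologue helper that stores rbx, rbp, rdi, rsi and
// r12-r15 into fresh stack slots and, when win is true, also spills xmm6-xmm15 as hi64/lo64
// pairs, lowering rsp by 8 * (8 + 20) bytes on Windows and by 8 * 8 bytes otherwise.  rax and
// the flags may be clobbered; the saved slots are marked Secret in the stack taint.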
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
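// In brief (from the contract below): epilogue helper, the inverse of Save_registers.  It
// reloads rbx, rbp, rdi, rsi and r12-r15 from the frame established below old_rsp, restores
// xmm6-xmm15 to their saved values when win is true, and resets rsp to old_rsp.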
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))
//--
#reset-options "--z3rlimit 100"
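// The stdcall wrapper below carries a much larger contract, so the SMT resource limit is
// raised for the declarations that follow.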
//-- Gcm_blocks_stdcall
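// In brief (from the contracts below): the exported AES-GCM encryption routine for AES-128 or
// AES-256, parameterized by `win` to select the Windows or System V AMD64 calling convention.
// Arguments arrive in rcx/rdx/r8/r9 plus caller stack slots on Windows, and in
// rdi/rsi/rdx/rcx/r8/r9 plus stack slots otherwise.  The postcondition equates the produced
// ciphertext and tag with Vale.AES.GCM_s.gcm_encrypt_LE on the corresponding byte sequences
// and states that the callee-saved registers (and xmm6-xmm15 on Windows) are preserved.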
val va_code_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)))
let va_ens_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gcm_blocks_stdcall va_b0 va_s0 win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))))))))
val va_lemma_Gcm_blocks_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> alg:algorithm ->
auth_b:buffer128 -> auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 ->
iv:supported_iv_LE -> hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 ->
out128x6_b:buffer128 -> len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 ->
len128_num:nat64 -> inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 ->
tag_b:buffer128 -> key:(seq nat32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0
/\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let
(auth_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx
va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR8 va_s0) in let
(xip:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR9 va_s0)
in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let
(in128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in
let (len128x6:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in
let (in128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in
let (out128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in
let (inout_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in
let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in
let (tag_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in
aesni_enabled /\ pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ movbe_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
auth_len == auth_num /\ auth_num_bytes == auth_bytes /\ len128x6 == len128x6_num /\ len128 ==
len128_num /\ plain_num_bytes == plain_num /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) auth_ptr auth_b auth_len (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) abytes_ptr abytes_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) iv_ptr iv_b 1
(va_get_mem_layout va_s0) Public /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128x6_ptr in128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128x6_ptr out128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128_ptr in128_b len128 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) inout_ptr inout_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) scratch_ptr scratch_b 9
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem
va_s0) tag_ptr tag_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128
tag_b ([keys_b; auth_b; abytes_b; iv_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b;
auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b; abytes_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64)
(va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64) (va_x_rsi:nat64)
(va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)
(va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_memLayout:vale_heap_layout)
(va_x_heap1:vale_heap) (va_x_heap2:vale_heap) (va_x_heap3:vale_heap) (va_x_heap4:vale_heap)
(va_x_heap5:vale_heap) (va_x_heap6:vale_heap) (va_x_efl:Vale.X64.Flags.t)
(va_x_stack:vale_stack) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_stack va_x_stack (va_upd_flags va_x_efl (va_upd_mem_heaplet 6
va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 4 va_x_heap4
(va_upd_mem_heaplet 3 va_x_heap3 (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_mem_layout va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14
va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11
(va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7
va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3
va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64
rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12
va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9
(va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64
rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx
va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0)
(fun _ -> va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (auth_len:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let (out128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let (len128x6:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in let (len128:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in let (scratch_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let (tag_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (())))
val va_wpProof_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> auth_b:buffer128 ->
auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 -> iv:supported_iv_LE ->
hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->
len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 -> len128_num:nat64 ->
inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 -> tag_b:buffer128 -> key:(seq nat32)
-> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_stdcall win alg auth_b auth_bytes auth_num
keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num
inout_b plain_num scratch_b tag_b key va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_stdcall win alg)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;
va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;
va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm
4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64
rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64
rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem])
va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) : (va_quickCode unit
(va_code_Gcm_blocks_stdcall win alg)) =
(va_QProc (va_code_Gcm_blocks_stdcall win alg) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags;
va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3;
va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14;
va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;
va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks_stdcall win alg auth_b
auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b
out128_b len128_num inout_b plain_num scratch_b tag_b key) (va_wpProof_Gcm_blocks_stdcall win
alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b
len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b key))
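// Note (informal summary, not generated by Vale): the declarations above follow the usual
// generated pattern for a Vale procedure -- va_req_/va_ens_ state the pre- and postcondition as
// propositions, va_lemma_Gcm_blocks_stdcall is the Hoare-style correctness lemma relating them,
// va_wp_Gcm_blocks_stdcall restates the same contract in weakest-precondition form,
// va_wpProof_Gcm_blocks_stdcall discharges that WP against the lemma, and
// va_quick_Gcm_blocks_stdcall packages the code, the frame (the va_Mod_* list) and the WP as a
// va_QProc for composition in the quick-code framework.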
//--
//-- Compute_iv_stdcall
val va_code_Compute_iv_stdcall : win:bool -> Tot va_code
val va_codegen_success_Compute_iv_stdcall : win:bool -> Tot va_pbool
let va_req_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128)
(hkeys_b:buffer128) : prop =
(va_require_total va_b0 (va_code_Compute_iv_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (j0_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ bytes_reg ==
num_bytes /\ len_reg == len /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) iv_ptr iv_b
len (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr iv_extra_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) j0_ptr j0_b 1 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) h_ptr hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128
iv_extra_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\ (Vale.X64.Decls.buffers_disjoint128 j0_b
iv_extra_b \/ j0_b == iv_extra_b) /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
iv_b == len /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\ iv_ptr
+ 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\ (va_mul_nat len (128 `op_Division`
8) <= num_bytes /\ num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\ (pclmulqdq_enabled /\
avx_enabled /\ sse_enabled) /\ Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128
(va_get_mem va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\ (let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b) in let (iv_bytes_LE:supported_iv_LE) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
iv_raw_quads) 0 num_bytes in iv_bytes_LE == iv)))
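// Note (informal summary, not generated by Vale): va_req_Compute_iv_stdcall roughly requires
// valid and suitably disjoint iv_b, iv_extra_b, j0_b and hkeys_b buffers, the usual stack/heap
// well-formedness conditions, len quadwords of IV material covering num_bytes bytes
// (16 `op_Multiply` len <= num_bytes < 16 `op_Multiply` len + 16), PCLMULQDQ/AVX/SSE support,
// hkeys_b satisfying Vale.AES.OptPublic.hkeys_reqs_pub with respect to
// Vale.Def.Types_s.reverse_bytes_quad32 h_LE, and the first num_bytes little-endian bytes of
// iv_b appended with iv_extra_b equal to the ghost argument iv.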
let va_ens_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128)
(hkeys_b:buffer128) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Compute_iv_stdcall va_b0 va_s0 win iv iv_b num_bytes len j0_b iv_extra_b hkeys_b /\
va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in
let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0
else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (j0_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv /\ Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem
va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==>
va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM ==
va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm
8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM
== va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==>
va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) /\ va_state_eq va_sM (va_update_stackTaint
va_sM (va_update_stack va_sM (va_update_flags va_sM (va_update_mem_heaplet 7 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))
val va_lemma_Compute_iv_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> iv:supported_iv_LE
-> iv_b:buffer128 -> num_bytes:nat64 -> len:nat64 -> j0_b:buffer128 -> iv_extra_b:buffer128 ->
hkeys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Compute_iv_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (j0_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ bytes_reg ==
num_bytes /\ len_reg == len /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) iv_ptr iv_b
len (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr iv_extra_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) j0_ptr j0_b 1 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) h_ptr hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128
iv_extra_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\ (Vale.X64.Decls.buffers_disjoint128 j0_b
iv_extra_b \/ j0_b == iv_extra_b) /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
iv_b == len /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\ iv_ptr
+ 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\ (va_mul_nat len (128 `op_Division`
8) <= num_bytes /\ num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\ (pclmulqdq_enabled /\
avx_enabled /\ sse_enabled) /\ Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128
(va_get_mem va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\ (let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b) in let (iv_bytes_LE:supported_iv_LE) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
iv_raw_quads) 0 num_bytes in iv_bytes_LE == iv))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (j0_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv /\ Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem
va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==>
va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM ==
va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm
8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM
== va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==>
va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) /\ va_state_eq va_sM (va_update_stackTaint
va_sM (va_update_stack va_sM (va_update_flags va_sM (va_update_mem_heaplet 7 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Compute_iv_stdcall (win:bool) (iv:supported_iv_LE) (iv_b:buffer128) (num_bytes:nat64)
(len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128) (hkeys_b:buffer128) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (len_reg:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (j0_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ bytes_reg ==
num_bytes /\ len_reg == len /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) iv_ptr iv_b
len (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr iv_extra_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) j0_ptr j0_b 1 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) h_ptr hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128
iv_extra_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\ (Vale.X64.Decls.buffers_disjoint128 j0_b
iv_extra_b \/ j0_b == iv_extra_b) /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
iv_b == len /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\ iv_ptr
+ 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\ (va_mul_nat len (128 `op_Division`
8) <= num_bytes /\ num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\ (pclmulqdq_enabled /\
avx_enabled /\ sse_enabled) /\ Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128
(va_get_mem va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\ (let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b) in let (iv_bytes_LE:supported_iv_LE) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
iv_raw_quads) 0 num_bytes in iv_bytes_LE == iv)) /\ (forall (va_x_mem:vale_heap)
(va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64)
(va_x_rsi:nat64) (va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64)
(va_x_r10:nat64) (va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32)
(va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32)
(va_x_xmm9:quad32) (va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32)
(va_x_xmm13:quad32) (va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_memLayout:vale_heap_layout)
(va_x_heap7:vale_heap) (va_x_efl:Vale.X64.Flags.t) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_flags va_x_efl (va_upd_mem_heaplet 7 va_x_heap7 (va_upd_mem_layout
va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13
(va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9
va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5
va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1
va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14
(va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11
(va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64
rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi
va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64 rRbx va_x_rbx
(va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))))))))))))))))))))))))))))))) in
va_get_ok va_sM /\ (let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (len_reg:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (j0_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv /\ Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem
va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==>
va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM ==
va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm
8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM
== va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==>
va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) ==> va_k va_sM (())))
val va_wpProof_Compute_iv_stdcall : win:bool -> iv:supported_iv_LE -> iv_b:buffer128 ->
num_bytes:nat64 -> len:nat64 -> j0_b:buffer128 -> iv_extra_b:buffer128 -> hkeys_b:buffer128 ->
va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Compute_iv_stdcall win iv iv_b num_bytes len j0_b
iv_extra_b hkeys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Compute_iv_stdcall win)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_heaplet 7; va_Mod_mem_layout;
va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10;
va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm
3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64
rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64
rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64
rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Compute_iv_stdcall (win:bool) (iv:supported_iv_LE) (iv_b:buffer128) (num_bytes:nat64)
(len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128) (hkeys_b:buffer128) : (va_quickCode unit | false | false | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_quick_Compute_iv_stdcall
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
: (va_quickCode unit (va_code_Compute_iv_stdcall win)) | [] | Vale.AES.X64.GCMencryptOpt.va_quick_Compute_iv_stdcall | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
iv_b: Vale.X64.Memory.buffer128 ->
num_bytes: Vale.X64.Memory.nat64 ->
len: Vale.X64.Memory.nat64 ->
j0_b: Vale.X64.Memory.buffer128 ->
iv_extra_b: Vale.X64.Memory.buffer128 ->
hkeys_b: Vale.X64.Memory.buffer128
-> Vale.X64.QuickCode.va_quickCode Prims.unit
(Vale.AES.X64.GCMencryptOpt.va_code_Compute_iv_stdcall win) | {
"end_col": 13,
"end_line": 1833,
"start_col": 2,
"start_line": 1824
} |
Prims.Tot | val va_ens_Compute_iv_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_ens_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128)
(hkeys_b:buffer128) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Compute_iv_stdcall va_b0 va_s0 win iv iv_b num_bytes len j0_b iv_extra_b hkeys_b /\
va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in
let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0
else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (j0_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv /\ Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem
va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==>
va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM ==
va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm
8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM
== va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==>
va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) /\ va_state_eq va_sM (va_update_stackTaint
va_sM (va_update_stack va_sM (va_update_flags va_sM (va_update_mem_heaplet 7 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))) | val va_ens_Compute_iv_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
(va_sM: va_state)
(va_fM: va_fuel)
: prop
let va_ens_Compute_iv_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
(va_sM: va_state)
(va_fM: va_fuel)
: prop = | false | null | false | (va_req_Compute_iv_stdcall va_b0 va_s0 win iv iv_b num_bytes len j0_b iv_extra_b hkeys_b /\
va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let iv_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let bytes_reg:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let len_reg:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let j0_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0)
in
let extra_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0)
in
let h_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0)
in
let h_LE:Vale.Def.Types_s.quad32 =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b
2
(va_get_mem va_s0))
in
Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) == Vale.AES.GCM_s.compute_iv_BE h_LE iv /\
Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem va_s0) (va_get_mem va_sM) /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\
(win ==> va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\
(win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\
(win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\
(win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\
(win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\
(win ==> va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\
(win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\
(win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 7 va_sM (va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14
va_sM (va_update_xmm 13 va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64
rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64
rRdx va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0)))))))))))))))))))))))))))))))))))))))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.X64.Decls.va_fuel",
"Prims.l_and",
"Vale.AES.X64.GCMencryptOpt.va_req_Compute_iv_stdcall",
"Vale.X64.Decls.va_ensure_total",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Types_s.quad32",
"Vale.X64.Decls.buffer128_read",
"Vale.X64.Decls.va_get_mem",
"Vale.AES.GCM_s.compute_iv_BE",
"Vale.X64.Decls.modifies_buffer128",
"Vale.Def.Types_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Prims.l_imp",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.va_get_xmm",
"Prims.l_not",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Stack_i.load_stack64",
"Prims.op_Addition",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.va_state_eq",
"Vale.X64.Decls.va_update_stackTaint",
"Vale.X64.Decls.va_update_stack",
"Vale.X64.Decls.va_update_flags",
"Vale.X64.Decls.va_update_mem_heaplet",
"Vale.X64.Decls.va_update_mem_layout",
"Vale.X64.Decls.va_update_xmm",
"Vale.X64.Decls.va_update_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_update_ok",
"Vale.X64.Decls.va_update_mem",
"Prims.prop"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
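// Computes the GHASH of the additional authenticated data (AAD).  The quads of
// auth_b, together with the partial block in abytes_b when rRsi holds more
// bytes than the rRdx full blocks of auth_b provide, are truncated to rRsi
// bytes, padded to a 128-bit boundary, and hashed incrementally starting from
// zero.  On exit rR15 holds the byte count, xmm8 the byte-reversed hash, and
// xmm9 the byte-swap constant expected by the other GHASH procedures; the
// padded quad sequence is returned as a ghost value.  As elsewhere in this
// interface, va_code_*/va_lemma_* give the code and its specification,
// va_wp_*/va_wpProof_* the weakest-precondition form, and va_quick_* the
// quick-code wrapper built from them.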
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
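// Prologue helper.  On Windows it stores xmm6-xmm15 on the stack as hi64/lo64
// pairs (160 bytes) followed by rbx, rbp, rdi, rsi and r12-r15 (64 bytes); on
// other platforms only the eight general-purpose registers are stored.  rRsp
// drops by 8 * (8 + 20) = 224 bytes on Windows and by 64 bytes otherwise, the
// saved slots are tainted Secret, and rRax is clobbered.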
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
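// Epilogue helper matching Save_registers: reloads rbx, rbp, rdi, rsi and
// r12-r15 from the frame laid out above, restores xmm6-xmm15 on Windows, and
// returns rRsp to its original value old_rsp (the init_rsp of the stack).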
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))
//--
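// The Gcm_blocks_stdcall specification below is large, so the SMT resource
// limit is raised before it.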
#reset-options "--z3rlimit 100"
//-- Gcm_blocks_stdcall
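// Top-level AES-GCM encryption entry point, specified against both the Windows
// and the System V AMD64 calling conventions (selected by win).  The first four
// arguments arrive in rcx/rdx/r8/r9 on Windows and rdi/rsi/rdx/rcx otherwise,
// the iv and hkeys pointers follow in the next argument slots (stack slots on
// Windows, r8/r9 otherwise), and the remaining pointers and lengths are read
// from the caller's stack as spelled out in va_req_Gcm_blocks_stdcall.  The
// postcondition ties the output buffers and the tag to
// Vale.AES.GCM_s.gcm_encrypt_LE on the corresponding plaintext and AAD byte
// sequences, and states that the callee-saved registers (and xmm6-xmm15 on
// Windows) are preserved.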
val va_code_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)))
let va_ens_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gcm_blocks_stdcall va_b0 va_s0 win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))))))))
val va_lemma_Gcm_blocks_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> alg:algorithm ->
auth_b:buffer128 -> auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 ->
iv:supported_iv_LE -> hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 ->
out128x6_b:buffer128 -> len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 ->
len128_num:nat64 -> inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 ->
tag_b:buffer128 -> key:(seq nat32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0
/\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))))))))
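// Note (editorial): the predicate below restates the stdcall contract above in
// weakest-precondition form. It takes the initial state va_s0 and a continuation
// va_k, universally quantifies over every register, xmm, heaplet, flag, stack and
// taint component the procedure may modify, rebuilds the final state va_sM with
// va_upd_*, and requires va_k to hold of that state. This is the shape consumed
// by Vale's quick-code framework.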
[@ va_qattr]
let va_wp_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let
(auth_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx
va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR8 va_s0) in let
(xip:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR9 va_s0)
in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let
(in128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in
let (len128x6:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in
let (in128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in
let (out128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in
let (inout_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in
let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in
let (tag_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in
aesni_enabled /\ pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ movbe_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
auth_len == auth_num /\ auth_num_bytes == auth_bytes /\ len128x6 == len128x6_num /\ len128 ==
len128_num /\ plain_num_bytes == plain_num /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) auth_ptr auth_b auth_len (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) abytes_ptr abytes_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) iv_ptr iv_b 1
(va_get_mem_layout va_s0) Public /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128x6_ptr in128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128x6_ptr out128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128_ptr in128_b len128 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) inout_ptr inout_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) scratch_ptr scratch_b 9
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem
va_s0) tag_ptr tag_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128
tag_b ([keys_b; auth_b; abytes_b; iv_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b;
auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b; abytes_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64)
(va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64) (va_x_rsi:nat64)
(va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)
(va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_memLayout:vale_heap_layout)
(va_x_heap1:vale_heap) (va_x_heap2:vale_heap) (va_x_heap3:vale_heap) (va_x_heap4:vale_heap)
(va_x_heap5:vale_heap) (va_x_heap6:vale_heap) (va_x_efl:Vale.X64.Flags.t)
(va_x_stack:vale_stack) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_stack va_x_stack (va_upd_flags va_x_efl (va_upd_mem_heaplet 6
va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 4 va_x_heap4
(va_upd_mem_heaplet 3 va_x_heap3 (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_mem_layout va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14
va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11
(va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7
va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3
va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64
rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12
va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9
(va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64
rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx
va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0)
(fun _ -> va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (auth_len:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let (out128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let (len128x6:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in let (len128:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in let (scratch_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let (tag_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (())))
val va_wpProof_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> auth_b:buffer128 ->
auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 -> iv:supported_iv_LE ->
hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->
len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 -> len128_num:nat64 ->
inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 -> tag_b:buffer128 -> key:(seq nat32)
-> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_stdcall win alg auth_b auth_bytes auth_num
keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num
inout_b plain_num scratch_b tag_b key va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_stdcall win alg)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;
va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;
va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm
4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64
rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64
rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem])
va_s0 va_k ((va_sM, va_f0, va_g))))
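// Note (editorial): va_quick_Gcm_blocks_stdcall below packages the procedure's
// code (va_code_Gcm_blocks_stdcall win alg), its write footprint (the va_Mod_*
// list), its weakest precondition (va_wp_Gcm_blocks_stdcall) and the matching
// soundness proof (va_wpProof_Gcm_blocks_stdcall) into a single va_QProc value,
// so quick-code callers can compose this procedure like any other step.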
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) : (va_quickCode unit
(va_code_Gcm_blocks_stdcall win alg)) =
(va_QProc (va_code_Gcm_blocks_stdcall win alg) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags;
va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3;
va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14;
va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;
va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks_stdcall win alg auth_b
auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b
out128_b len128_num inout_b plain_num scratch_b tag_b key) (va_wpProof_Gcm_blocks_stdcall win
alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b
len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b key))
//--
//-- Compute_iv_stdcall
val va_code_Compute_iv_stdcall : win:bool -> Tot va_code
val va_codegen_success_Compute_iv_stdcall : win:bool -> Tot va_pbool
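// Note (editorial): va_req_/va_ens_Compute_iv_stdcall below give the pre- and
// postcondition of the IV-derivation (J0 computation) entry point. As with
// Gcm_blocks_stdcall, the win flag selects the argument locations: rRcx, rRdx,
// rR8, rR9 plus stack slots on Windows, versus rRdi, rRsi, rRdx, rRcx, rR8, rR9
// under the System V convention.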
let va_req_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128)
(hkeys_b:buffer128) : prop =
(va_require_total va_b0 (va_code_Compute_iv_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (j0_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ bytes_reg ==
num_bytes /\ len_reg == len /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) iv_ptr iv_b
len (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr iv_extra_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) j0_ptr j0_b 1 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) h_ptr hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128
iv_extra_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\ (Vale.X64.Decls.buffers_disjoint128 j0_b
iv_extra_b \/ j0_b == iv_extra_b) /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
iv_b == len /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\ iv_ptr
+ 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\ (va_mul_nat len (128 `op_Division`
8) <= num_bytes /\ num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\ (pclmulqdq_enabled /\
avx_enabled /\ sse_enabled) /\ Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128
(va_get_mem va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\ (let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b) in let (iv_bytes_LE:supported_iv_LE) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
iv_raw_quads) 0 num_bytes in iv_bytes_LE == iv)))
let va_ens_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_ens_Compute_iv_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [] | Vale.AES.X64.GCMencryptOpt.va_ens_Compute_iv_stdcall | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
iv_b: Vale.X64.Memory.buffer128 ->
num_bytes: Vale.X64.Memory.nat64 ->
len: Vale.X64.Memory.nat64 ->
j0_b: Vale.X64.Memory.buffer128 ->
iv_extra_b: Vale.X64.Memory.buffer128 ->
hkeys_b: Vale.X64.Memory.buffer128 ->
va_sM: Vale.X64.Decls.va_state ->
va_fM: Vale.X64.Decls.va_fuel
-> Prims.prop | {
"end_col": 55,
"end_line": 1626,
"start_col": 2,
"start_line": 1583
} |
Prims.Tot | val va_req_Compute_iv_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
: prop | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_req_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128)
(hkeys_b:buffer128) : prop =
(va_require_total va_b0 (va_code_Compute_iv_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (j0_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ bytes_reg ==
num_bytes /\ len_reg == len /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) iv_ptr iv_b
len (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr iv_extra_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) j0_ptr j0_b 1 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) h_ptr hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128
iv_extra_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\ (Vale.X64.Decls.buffers_disjoint128 j0_b
iv_extra_b \/ j0_b == iv_extra_b) /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
iv_b == len /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\ iv_ptr
+ 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\ (va_mul_nat len (128 `op_Division`
8) <= num_bytes /\ num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\ (pclmulqdq_enabled /\
avx_enabled /\ sse_enabled) /\ Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128
(va_get_mem va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\ (let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b) in let (iv_bytes_LE:supported_iv_LE) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
iv_raw_quads) 0 num_bytes in iv_bytes_LE == iv))) | val va_req_Compute_iv_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
: prop
let va_req_Compute_iv_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
: prop = | false | null | false | (va_require_total va_b0 (va_code_Compute_iv_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let iv_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let bytes_reg:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let len_reg:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let j0_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0)
in
let extra_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0)
in
let h_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0)
in
let h_LE:Vale.Def.Types_s.quad32 =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b
2
(va_get_mem va_s0))
in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\ bytes_reg == num_bytes /\ len_reg == len /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
iv_ptr
iv_b
len
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr
iv_extra_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
j0_ptr
j0_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
h_ptr
hkeys_b
8
(va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\
Vale.X64.Decls.buffers_disjoint128 iv_extra_b hkeys_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\
(Vale.X64.Decls.buffers_disjoint128 j0_b iv_extra_b \/ j0_b == iv_extra_b) /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_b == len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\
iv_ptr + 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\
(va_mul_nat len (128 `op_Division` 8) <= num_bytes /\
num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled) /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
(let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b)
in
let iv_bytes_LE:supported_iv_LE =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes iv_raw_quads)
0
num_bytes
in
iv_bytes_LE == iv))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Prims.l_and",
"Vale.X64.Decls.va_require_total",
"Vale.AES.X64.GCMencryptOpt.va_code_Compute_iv_stdcall",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Vale.X64.Stack_i.valid_stack_slot64",
"Prims.op_Addition",
"Vale.Arch.HeapTypes_s.Public",
"Vale.X64.Decls.va_get_stackTaint",
"Prims.int",
"Prims.l_or",
"Prims.op_LessThanOrEqual",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_64",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Vale.X64.Decls.buffers_disjoint128",
"Prims.nat",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Vale.X64.Decls.va_mul_nat",
"Prims.op_Division",
"Vale.X64.CPU_Features_s.pclmulqdq_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Vale.AES.OptPublic.hkeys_reqs_pub",
"Vale.X64.Decls.s128",
"Vale.Def.Types_s.reverse_bytes_quad32",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.nat8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.quad32",
"FStar.Seq.Base.append",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.buffer128_read",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Stack_i.load_stack64",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Prims.prop"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))
//--
#reset-options "--z3rlimit 100"
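// Editorial note: the higher z3rlimit is presumably needed because the
// Gcm_blocks_stdcall specifications below are very large.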
//-- Gcm_blocks_stdcall
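// Editorial note: Gcm_blocks_stdcall is the top-level stdcall entry point for
// AES-GCM encryption. va_req_Gcm_blocks_stdcall below reads its arguments
// according to the calling convention selected by `win`: on Windows the first
// four come from rcx, rdx, r8, r9 and the remainder from stack slots starting
// at rsp + 40 (apparently skipping the 32-byte shadow space and the return
// address); in the System V case the first six come from rdi, rsi, rdx, rcx,
// r8, r9 and the rest from rsp + 8 upward.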
val va_code_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)))
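// Editorial note: va_ens_Gcm_blocks_stdcall states the postcondition: only
// tag_b, iv_b, scratch_b, out128x6_b, out128_b and inout_b may be modified;
// the concatenation of the output quadwords, truncated to plain_num_bytes,
// equals the ciphertext of Vale.AES.GCM_s.gcm_encrypt_LE for the given key,
// IV and authenticated data, with tag_b holding the corresponding tag; and
// the callee-saved registers (and, on Windows, xmm6-xmm15) are preserved.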
let va_ens_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gcm_blocks_stdcall va_b0 va_s0 win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))))))))
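// Editorial note: va_lemma_Gcm_blocks_stdcall is the top-level correctness
// lemma; its requires and ensures clauses below are inlined copies of
// va_req_Gcm_blocks_stdcall and va_ens_Gcm_blocks_stdcall above.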
val va_lemma_Gcm_blocks_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> alg:algorithm ->
auth_b:buffer128 -> auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 ->
iv:supported_iv_LE -> hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 ->
out128x6_b:buffer128 -> len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 ->
len128_num:nat64 -> inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 ->
tag_b:buffer128 -> key:(seq nat32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0
/\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))))))))
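// Informal reading of the clause above: the va_state_eq conjunct frames this postcondition,
// so the final state va_sM may differ from va_s0 only in the locations threaded through the
// va_update_* chain (the general-purpose registers, xmm 0-15, flags, stack, stackTaint,
// memory layout, heaplets 1-6, the ok flag, and memory).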
[@ va_qattr]
let va_wp_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let
(auth_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx
va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR8 va_s0) in let
(xip:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR9 va_s0)
in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let
(in128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in
let (len128x6:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in
let (in128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in
let (out128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in
let (inout_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in
let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in
let (tag_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in
aesni_enabled /\ pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ movbe_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
auth_len == auth_num /\ auth_num_bytes == auth_bytes /\ len128x6 == len128x6_num /\ len128 ==
len128_num /\ plain_num_bytes == plain_num /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) auth_ptr auth_b auth_len (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) abytes_ptr abytes_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) iv_ptr iv_b 1
(va_get_mem_layout va_s0) Public /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128x6_ptr in128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128x6_ptr out128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128_ptr in128_b len128 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) inout_ptr inout_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) scratch_ptr scratch_b 9
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem
va_s0) tag_ptr tag_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128
tag_b ([keys_b; auth_b; abytes_b; iv_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b;
auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b; abytes_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64)
(va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64) (va_x_rsi:nat64)
(va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)
(va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_memLayout:vale_heap_layout)
(va_x_heap1:vale_heap) (va_x_heap2:vale_heap) (va_x_heap3:vale_heap) (va_x_heap4:vale_heap)
(va_x_heap5:vale_heap) (va_x_heap6:vale_heap) (va_x_efl:Vale.X64.Flags.t)
(va_x_stack:vale_stack) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_stack va_x_stack (va_upd_flags va_x_efl (va_upd_mem_heaplet 6
va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 4 va_x_heap4
(va_upd_mem_heaplet 3 va_x_heap3 (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_mem_layout va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14
va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11
(va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7
va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3
va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64
rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12
va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9
(va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64
rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx
va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0)
(fun _ -> va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (auth_len:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let (out128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let (len128x6:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in let (len128:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in let (scratch_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let (tag_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (())))
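// Informally, va_wp_Gcm_blocks_stdcall is the weakest-precondition form of the contract above:
// it conjoins the procedure's precondition with a quantification over every final state va_sM
// obtained by updating only the listed registers, xmm registers, flags, stack, layout, and
// heaplets; whenever such a state satisfies the postcondition, the caller's continuation va_k
// must hold of it.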
val va_wpProof_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> auth_b:buffer128 ->
auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 -> iv:supported_iv_LE ->
hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->
len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 -> len128_num:nat64 ->
inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 -> tag_b:buffer128 -> key:(seq nat32)
-> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_stdcall win alg auth_b auth_bytes auth_num
keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num
inout_b plain_num scratch_b tag_b key va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_stdcall win alg)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;
va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;
va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm
4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64
rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64
rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem])
va_s0 va_k ((va_sM, va_f0, va_g))))
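// va_wpProof_Gcm_blocks_stdcall discharges the weakest precondition against the concrete code
// va_code_Gcm_blocks_stdcall and its modified-location list; the va_quick_ wrapper below
// packages the code, the modified locations, the wp, and this proof into a va_quickCode value
// via va_QProc.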
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) : (va_quickCode unit
(va_code_Gcm_blocks_stdcall win alg)) =
(va_QProc (va_code_Gcm_blocks_stdcall win alg) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags;
va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3;
va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14;
va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;
va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks_stdcall win alg auth_b
auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b
out128_b len128_num inout_b plain_num scratch_b tag_b key) (va_wpProof_Gcm_blocks_stdcall win
alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b
len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b key))
//--
//-- Compute_iv_stdcall
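// Compute_iv_stdcall follows the same generated pattern as Gcm_blocks_stdcall above: a concrete
// code value, a codegen-success check, and requires/ensures predicates parameterized over the
// win flag that selects between the Windows and non-Windows calling conventions.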
val va_code_Compute_iv_stdcall : win:bool -> Tot va_code
val va_codegen_success_Compute_iv_stdcall : win:bool -> Tot va_pbool
let va_req_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_req_Compute_iv_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
: prop | [] | Vale.AES.X64.GCMencryptOpt.va_req_Compute_iv_stdcall | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
iv_b: Vale.X64.Memory.buffer128 ->
num_bytes: Vale.X64.Memory.nat64 ->
len: Vale.X64.Memory.nat64 ->
j0_b: Vale.X64.Memory.buffer128 ->
iv_extra_b: Vale.X64.Memory.buffer128 ->
hkeys_b: Vale.X64.Memory.buffer128
-> Prims.prop | {
"end_col": 53,
"end_line": 1579,
"start_col": 2,
"start_line": 1543
} |
Prims.Tot | val va_ens_Gcm_blocks_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_ens_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gcm_blocks_stdcall va_b0 va_s0 win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))))))) | val va_ens_Gcm_blocks_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
(va_sM: va_state)
(va_fM: va_fuel)
: prop
let va_ens_Gcm_blocks_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
(va_sM: va_state)
(va_fM: va_fuel)
: prop = | false | null | false | (va_req_Gcm_blocks_stdcall va_b0 va_s0 win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let auth_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let auth_num_bytes:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let auth_len:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let keys_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0)
in
let iv_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0)
in
let xip:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0)
in
let abytes_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0))
in
let in128x6_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0))
in
let out128x6_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0))
in
let len128x6:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0))
in
let in128_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0))
in
let out128_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0))
in
let len128:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0))
in
let inout_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0))
in
let plain_num_bytes:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0))
in
let scratch_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0))
in
let tag_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0))
in
Vale.X64.Decls.modifies_mem (Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128
tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128
scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128
out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128
out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b))))))
(va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\
(let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in
let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b)
in
let auth_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads)
0
auth_num_bytes
in
let plain_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in128x6_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b))
(Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in
let plain_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads)
0
plain_num_bytes
in
let cipher_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) out128_b))
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b)
in
let cipher_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads)
0
plain_num_bytes
in
l_and (l_and (l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes ==
__proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
(Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)
iv
plain_bytes
auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b
0
(va_get_mem va_sM)) ==
__proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
(Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)
iv
plain_bytes
auth_bytes)) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\
(win ==> va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\
(win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\
(win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\
(win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\
(win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\
(win ==> va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\
(win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\
(win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM
(va_update_stackTaint va_sM
(va_update_stack va_sM
(va_update_flags va_sM
(va_update_mem_heaplet 6
va_sM
(va_update_mem_heaplet 5
va_sM
(va_update_mem_heaplet 4
va_sM
(va_update_mem_heaplet 3
va_sM
(va_update_mem_heaplet 2
va_sM
(va_update_mem_heaplet 1
va_sM
(va_update_mem_layout va_sM
(va_update_xmm 15
va_sM
(va_update_xmm 14
va_sM
(va_update_xmm 13
va_sM
(va_update_xmm 12
va_sM
(va_update_xmm 11
va_sM
(va_update_xmm 10
va_sM
(va_update_xmm 9
va_sM
(va_update_xmm 8
va_sM
(va_update_xmm 7
va_sM
(va_update_xmm 6
va_sM
(va_update_xmm 5
va_sM
(va_update_xmm
4
va_sM
(va_update_xmm
3
va_sM
(va_update_xmm
2
va_sM
(
va_update_xmm
1
va_sM
(
va_update_xmm
0
va_sM
(
va_update_reg64
rR15
va_sM
(
va_update_reg64
rR14
va_sM
(
va_update_reg64
rR13
va_sM
(
va_update_reg64
rR12
va_sM
(
va_update_reg64
rR11
va_sM
(
va_update_reg64
rR10
va_sM
(
va_update_reg64
rR9
va_sM
(
va_update_reg64
rR8
va_sM
(
va_update_reg64
rRbp
va_sM
(
va_update_reg64
rRsp
va_sM
(
va_update_reg64
rRsi
va_sM
(
va_update_reg64
rRdi
va_sM
(
va_update_reg64
rRdx
va_sM
(
va_update_reg64
rRcx
va_sM
(
va_update_reg64
rRbx
va_sM
(
va_update_reg64
rRax
va_sM
(
va_update_ok
va_sM
(
va_update_mem
va_sM
va_s0
)
)
)
)
)
)
)
)
)
)
)
)
)
)
)
)
)
)
)
)
))
))))))))))))
))))))))))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.AES.GCM_s.supported_iv_LE",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.va_fuel",
"Prims.l_and",
"Vale.AES.X64.GCMencryptOpt.va_req_Gcm_blocks_stdcall",
"Vale.X64.Decls.va_ensure_total",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.Decls.modifies_mem",
"Vale.X64.Decls.loc_union",
"Vale.X64.Decls.loc_buffer",
"Vale.X64.Memory.vuint128",
"Vale.X64.Decls.va_get_mem",
"Prims.op_LessThan",
"Vale.X64.Machine_s.pow2_32",
"FStar.Seq.Base.length",
"Vale.Def.Types_s.nat8",
"Vale.AES.AES_common_s.is_aes_key",
"Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE",
"Prims.eq2",
"FStar.Pervasives.Native.__proj__Mktuple2__item___1",
"Vale.AES.GCM_s.gcm_encrypt_LE",
"Vale.Def.Types_s.le_quad32_to_bytes",
"Vale.X64.Decls.buffer128_read",
"FStar.Pervasives.Native.__proj__Mktuple2__item___2",
"Vale.Def.Types_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Prims.l_imp",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rRdi",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.va_get_xmm",
"Prims.l_not",
"Vale.Def.Words_s.nat8",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"Vale.Def.Types_s.quad32",
"FStar.Seq.Base.append",
"Vale.X64.Decls.s128",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Stack_i.load_stack64",
"Prims.op_Addition",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Decls.va_state_eq",
"Vale.X64.Decls.va_update_stackTaint",
"Vale.X64.Decls.va_update_stack",
"Vale.X64.Decls.va_update_flags",
"Vale.X64.Decls.va_update_mem_heaplet",
"Vale.X64.Decls.va_update_mem_layout",
"Vale.X64.Decls.va_update_xmm",
"Vale.X64.Decls.va_update_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_update_ok",
"Vale.X64.Decls.va_update_mem",
"Prims.prop"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
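(* aes_reqs collects the shared AES preconditions used by the procedures below:
   AESNI/AVX must be enabled, alg must be AES_128 or AES_256, key must be a valid
   AES key, round_keys must be its key expansion (nr alg + 1 entries), and keys_b
   must be a valid Secret buffer at key_ptr whose contents equal round_keys. *)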
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
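(* Gctr_register encrypts the single block held in xmm8: on exit xmm8 equals
   gctr_encrypt_block (va_get_xmm 0 va_s0) (reverse_bytes_quad32 (va_get_xmm 8 va_s0)) alg key 0,
   and the framed locations are r12, the flags and xmm0, xmm1, xmm2, xmm8. *)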
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
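(* Gctr_blocks128 performs CTR-mode encryption of rdx 128-bit blocks from in_b into
   out_b: the output satisfies gctr_partial with the initial counter in xmm11, and
   xmm11 advances by inc32lite over rdx blocks.  Only out_b (in heaplet 1) is
   modified in memory. *)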
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
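(* Gcm_make_length_quad builds a length quadword: xmm0 becomes
   insert_nat64 (insert_nat64 (Mkfour 0 0 0 0) (8 * r11) 1) (8 * r13) 0,
   i.e. the bit lengths 8*r11 and 8*r13 packed into one quad32; only rax, xmm0
   and the flags are modified. *)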
//--
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
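(* Ghash_extra_bytes folds a final partial block into the GHash state: given that
   xmm8 holds the byte-reversed hash of completed_quads and r10 holds
   total_bytes % 16, it pads the trailing bytes taken from xmm0 to a 128-bit
   boundary and ensures xmm8 is the byte-reversed ghash_incremental of the padded
   input under h_LE. *)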
//--
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
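(* Gcm_blocks_auth hashes the additional authenticated data: it pads the rsi bytes
   of AAD taken from auth_b (plus abytes_b when rsi exceeds the full blocks in
   auth_b) to a 128-bit boundary and returns the resulting quad32 sequence, with
   xmm8 holding the byte-reversed ghash_incremental0 of that sequence starting from
   a zero hash. *)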
//--
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
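(* Save_registers pushes the callee-saved state onto the stack: rsp drops by
   8 * (8 + (if win then 20 else 0)) bytes, the eight callee-saved 64-bit registers
   (rbx, rbp, rdi, rsi, r12-r15) are stored at offset 160 on Windows and 0
   otherwise, and on Windows xmm6-xmm15 are spilled as hi64/lo64 pairs in the first
   160 bytes. *)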
//--
//-- Restore_registers
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))
//--
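(* Descriptive note on the contract above (summary only, not a new obligation):
   Restore_registers guarantees that on exit rsp equals old_rsp again, the
   initial rsp recorded in the stack is unchanged, and the stack is modified
   only between the entry and exit rsp. The callee-saved registers rbx, rbp,
   rdi, rsi and r12-r15 take the values saved in the stack slots at offsets
   0, 8, ..., 56 from the entry rsp (shifted by an extra 160 bytes on
   Windows), and on Windows xmm6-xmm15 are additionally restored to their
   old_xmm* values. *)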
#reset-options "--z3rlimit 100"
//-- Gcm_blocks_stdcall
val va_code_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)))
let va_ens_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_ens_Gcm_blocks_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
(va_sM: va_state)
(va_fM: va_fuel)
: prop | [] | Vale.AES.X64.GCMencryptOpt.va_ens_Gcm_blocks_stdcall | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
alg: Vale.AES.AES_common_s.algorithm ->
auth_b: Vale.X64.Memory.buffer128 ->
auth_bytes: Vale.X64.Memory.nat64 ->
auth_num: Vale.X64.Memory.nat64 ->
keys_b: Vale.X64.Memory.buffer128 ->
iv_b: Vale.X64.Memory.buffer128 ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
hkeys_b: Vale.X64.Memory.buffer128 ->
abytes_b: Vale.X64.Memory.buffer128 ->
in128x6_b: Vale.X64.Memory.buffer128 ->
out128x6_b: Vale.X64.Memory.buffer128 ->
len128x6_num: Vale.X64.Memory.nat64 ->
in128_b: Vale.X64.Memory.buffer128 ->
out128_b: Vale.X64.Memory.buffer128 ->
len128_num: Vale.X64.Memory.nat64 ->
inout_b: Vale.X64.Memory.buffer128 ->
plain_num: Vale.X64.Memory.nat64 ->
scratch_b: Vale.X64.Memory.buffer128 ->
tag_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
va_sM: Vale.X64.Decls.va_state ->
va_fM: Vale.X64.Decls.va_fuel
-> Prims.prop | {
"end_col": 60,
"end_line": 979,
"start_col": 2,
"start_line": 876
} |
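The Gcm_blocks_stdcall contract above (and the Compute_iv_stdcall one that follows) resolves every argument with a `win` selector: under the Windows x64 convention the first four arguments are taken from rcx, rdx, r8 and r9 and the remaining ones are loaded from the stack past the 32-byte shadow space and the 8-byte return address (hence offsets of the form rRsp + 32 + 8 + k and rRsp + 40 + k), while under the System V convention they come from rdi, rsi, rdx, rcx, r8 and r9 with any overflow arguments at rRsp + 8 + k. A minimal, self-contained F* sketch of that selection pattern, using illustrative names that are not part of the Vale development, is:

module ArgSelectSketch

(* Illustrative sketch only; the type and helpers below are made up for
   exposition and do not exist in Vale or HACL*. *)

type arg_source =
  | FromReg   : reg:string -> arg_source     (* argument arrives in a register *)
  | FromStack : offset:nat -> arg_source     (* argument is loaded from rsp + offset *)

(* First integer argument: rcx on Windows, rdi on System V. *)
let first_arg (win:bool) : arg_source =
  if win then FromReg "rcx" else FromReg "rdi"

(* Fifth integer argument: Windows has exhausted its argument registers and
   reads the stack past the 32-byte shadow space and the 8-byte return
   address; System V still has r8 available. *)
let fifth_arg (win:bool) : arg_source =
  if win then FromStack (32 + 8 + 0) else FromReg "r8"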
Prims.Tot | val va_wp_Compute_iv_stdcall
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Compute_iv_stdcall (win:bool) (iv:supported_iv_LE) (iv_b:buffer128) (num_bytes:nat64)
(len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128) (hkeys_b:buffer128) (va_s0:va_state)
(va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (len_reg:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (j0_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ bytes_reg ==
num_bytes /\ len_reg == len /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) iv_ptr iv_b
len (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr iv_extra_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) j0_ptr j0_b 1 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) h_ptr hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128
iv_extra_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\ (Vale.X64.Decls.buffers_disjoint128 j0_b
iv_extra_b \/ j0_b == iv_extra_b) /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
iv_b == len /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\ iv_ptr
+ 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\ (va_mul_nat len (128 `op_Division`
8) <= num_bytes /\ num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\ (pclmulqdq_enabled /\
avx_enabled /\ sse_enabled) /\ Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128
(va_get_mem va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\ (let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b) in let (iv_bytes_LE:supported_iv_LE) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
iv_raw_quads) 0 num_bytes in iv_bytes_LE == iv)) /\ (forall (va_x_mem:vale_heap)
(va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64)
(va_x_rsi:nat64) (va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64)
(va_x_r10:nat64) (va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32)
(va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32)
(va_x_xmm9:quad32) (va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32)
(va_x_xmm13:quad32) (va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_memLayout:vale_heap_layout)
(va_x_heap7:vale_heap) (va_x_efl:Vale.X64.Flags.t) (va_x_stack:vale_stack)
(va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint va_x_stackTaint (va_upd_stack
va_x_stack (va_upd_flags va_x_efl (va_upd_mem_heaplet 7 va_x_heap7 (va_upd_mem_layout
va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13
(va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9
va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5
va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1
va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14
(va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12 (va_upd_reg64 rR11 va_x_r11
(va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9 (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64
rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi
va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64 rRbx va_x_rbx
(va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem va_s0))))))))))))))))))))))))))))))))))))) in
va_get_ok va_sM /\ (let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (len_reg:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (j0_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv /\ Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem
va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==>
va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM ==
va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm
8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM
== va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==>
va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) ==> va_k va_sM (()))) | val va_wp_Compute_iv_stdcall
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Compute_iv_stdcall
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let iv_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let bytes_reg:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let len_reg:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let j0_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0)
in
let extra_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR8 va_s0)
in
let h_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR9 va_s0)
in
let h_LE:Vale.Def.Types_s.quad32 =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b
2
(va_get_mem va_s0))
in
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\ bytes_reg == num_bytes /\ len_reg == len /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
iv_ptr
iv_b
len
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr
iv_extra_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
j0_ptr
j0_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
h_ptr
hkeys_b
8
(va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\
Vale.X64.Decls.buffers_disjoint128 iv_extra_b hkeys_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\
(Vale.X64.Decls.buffers_disjoint128 j0_b iv_extra_b \/ j0_b == iv_extra_b) /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_b == len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\
iv_ptr + 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\
(va_mul_nat len (128 `op_Division` 8) <= num_bytes /\
num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled) /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
(let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b)
in
let iv_bytes_LE:supported_iv_LE =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes iv_raw_quads)
0
num_bytes
in
iv_bytes_LE == iv)) /\
(forall (va_x_mem: vale_heap) (va_x_rax: nat64) (va_x_rbx: nat64) (va_x_rcx: nat64)
(va_x_rdx: nat64) (va_x_rdi: nat64) (va_x_rsi: nat64) (va_x_rsp: nat64) (va_x_rbp: nat64)
(va_x_r8: nat64) (va_x_r9: nat64) (va_x_r10: nat64) (va_x_r11: nat64) (va_x_r12: nat64)
(va_x_r13: nat64) (va_x_r14: nat64) (va_x_r15: nat64) (va_x_xmm0: quad32) (va_x_xmm1: quad32)
(va_x_xmm2: quad32) (va_x_xmm3: quad32) (va_x_xmm4: quad32) (va_x_xmm5: quad32)
(va_x_xmm6: quad32) (va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_xmm9: quad32)
(va_x_xmm10: quad32) (va_x_xmm11: quad32) (va_x_xmm12: quad32) (va_x_xmm13: quad32)
(va_x_xmm14: quad32) (va_x_xmm15: quad32) (va_x_memLayout: vale_heap_layout)
(va_x_heap7: vale_heap) (va_x_efl: Vale.X64.Flags.t) (va_x_stack: vale_stack)
(va_x_stackTaint: memtaint).
let va_sM =
va_upd_stackTaint va_x_stackTaint
(va_upd_stack va_x_stack
(va_upd_flags va_x_efl
(va_upd_mem_heaplet 7
va_x_heap7
(va_upd_mem_layout va_x_memLayout
(va_upd_xmm 15
va_x_xmm15
(va_upd_xmm 14
va_x_xmm14
(va_upd_xmm 13
va_x_xmm13
(va_upd_xmm 12
va_x_xmm12
(va_upd_xmm 11
va_x_xmm11
(va_upd_xmm 10
va_x_xmm10
(va_upd_xmm 9
va_x_xmm9
(va_upd_xmm 8
va_x_xmm8
(va_upd_xmm 7
va_x_xmm7
(va_upd_xmm 6
va_x_xmm6
(va_upd_xmm 5
va_x_xmm5
(va_upd_xmm 4
va_x_xmm4
(va_upd_xmm 3
va_x_xmm3
(va_upd_xmm 2
va_x_xmm2
(va_upd_xmm 1
va_x_xmm1
(va_upd_xmm 0
va_x_xmm0
(va_upd_reg64
rR15
va_x_r15
(va_upd_reg64
rR14
va_x_r14
(va_upd_reg64
rR13
va_x_r13
(
va_upd_reg64
rR12
va_x_r12
(
va_upd_reg64
rR11
va_x_r11
(
va_upd_reg64
rR10
va_x_r10
(
va_upd_reg64
rR9
va_x_r9
(
va_upd_reg64
rR8
va_x_r8
(
va_upd_reg64
rRbp
va_x_rbp
(
va_upd_reg64
rRsp
va_x_rsp
(
va_upd_reg64
rRsi
va_x_rsi
(
va_upd_reg64
rRdi
va_x_rdi
(
va_upd_reg64
rRdx
va_x_rdx
(
va_upd_reg64
rRcx
va_x_rcx
(
va_upd_reg64
rRbx
va_x_rbx
(
va_upd_reg64
rRax
va_x_rax
(
va_upd_mem
va_x_mem
va_s0
)
)
)
)
)
)
)
)
)
)
)
)
)
)
))
))))))))))
)))))))))))
in
va_get_ok va_sM /\
(let iv_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let bytes_reg:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let len_reg:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let j0_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0)
in
let extra_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR8 va_s0)
in
let h_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR9 va_s0)
in
let h_LE:Vale.Def.Types_s.quad32 =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b
2
(va_get_mem va_s0))
in
Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv /\
Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem va_s0) (va_get_mem va_sM) /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\
(win ==> va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\
(win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\
(win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\
(win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\
(win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\
(win ==> va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\
(win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\
(win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.AES.GCM_s.supported_iv_LE",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Vale.X64.Stack_i.valid_stack_slot64",
"Prims.op_Addition",
"Vale.Arch.HeapTypes_s.Public",
"Vale.X64.Decls.va_get_stackTaint",
"Prims.int",
"Prims.l_or",
"Prims.op_LessThanOrEqual",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_64",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Vale.X64.Decls.buffers_disjoint128",
"Prims.nat",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Vale.X64.Decls.va_mul_nat",
"Prims.op_Division",
"Vale.X64.CPU_Features_s.pclmulqdq_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Vale.AES.OptPublic.hkeys_reqs_pub",
"Vale.X64.Decls.s128",
"Vale.Def.Types_s.reverse_bytes_quad32",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.nat8",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.seq",
"Vale.Def.Types_s.quad32",
"FStar.Seq.Base.append",
"Vale.X64.Decls.quad32",
"Vale.X64.Decls.buffer128_read",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Decls.va_if",
"Vale.X64.Stack_i.load_stack64",
"Prims.l_not",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.Def.Types_s.nat64",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.X64.Flags.t",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Memory.memtaint",
"Vale.AES.GCM_s.compute_iv_BE",
"Vale.X64.Decls.modifies_buffer128",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.va_get_xmm",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_mem_layout",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
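(* aes_reqs collects the AES-related preconditions shared by the procedures
   below: AESNI and AVX are available, alg is AES_128 or AES_256, key is a
   well-formed little-endian AES key, round_keys has nr(alg)+1 entries and is
   exactly key_to_round_keys_LE alg key, and keys_b is a valid Secret source
   buffer at key_ptr whose contents are those round keys. *)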
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
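// Folds the trailing partial block into the incremental GHASH state: the precondition
// requires total_bytes `op_Modulus` 16 =!= 0, and the postcondition pads the leftover
// bytes to a full 128-bit block (pad_to_128_bits) before extending ghash_incremental,
// with the running hash kept byte-reversed in xmm8.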
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
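// Hashes the additional authenticated data: the quadwords of auth_b (plus the abytes_b
// remainder when rRsi exceeds a whole number of 16-byte blocks) are sliced to rRsi bytes,
// padded to 128-bit blocks, and run through ghash_incremental0 from a zero hash; the
// result is returned both as the ghost sequence auth_quad_seq and, byte-reversed, in xmm8.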
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
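// Saves the callee-saved state required by the calling convention: rsp is lowered by
// 8 * (8 + (if win then 20 else 0)) bytes, xmm6-xmm15 are spilled as hi64/lo64 pairs when
// win is true, and rbx, rbp, rdi, rsi, r12-r15 are stored above them, slot by slot as the
// postcondition records.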
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
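// Counterpart of Save_registers: reloads rbx, rbp, rdi, rsi, r12-r15 (and xmm6-xmm15 on
// Windows) from the slots written by Save_registers and restores rsp to old_rsp, the
// initial stack pointer.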
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))
//--
#reset-options "--z3rlimit 100"
//-- Gcm_blocks_stdcall
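// Top-level stdcall entry point for the GCM block computation. va_req_Gcm_blocks_stdcall
// decodes the win/non-win argument passing (registers plus stack slots), then gathers the
// buffer-validity, disjointness, length, and key-schedule preconditions;
// va_ens_Gcm_blocks_stdcall bounds the memory footprint to the output buffers and relates
// the cipher bytes (assembled below from out128x6_b, out128_b, and inout_b) to the inputs.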
val va_code_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)))
let va_ens_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gcm_blocks_stdcall va_b0 va_s0 win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))))))))
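// va_lemma_Gcm_blocks_stdcall: the Hoare-style correctness lemma for the generated code.
// Its requires clause restates va_req_Gcm_blocks_stdcall inline, and it produces a final
// state and fuel satisfying the inlined form of va_ens_Gcm_blocks_stdcall above.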
val va_lemma_Gcm_blocks_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> alg:algorithm ->
auth_b:buffer128 -> auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 ->
iv:supported_iv_LE -> hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 ->
out128x6_b:buffer128 -> len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 ->
len128_num:nat64 -> inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 ->
tag_b:buffer128 -> key:(seq nat32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0
/\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))))))))
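// va_wp_Gcm_blocks_stdcall: weakest-precondition form of the same specification, consumed by
// the quick-code (va_quickCode) framework. The universally quantified block enumerates every
// register, xmm, heaplet, flag, stack and taint component the procedure may modify.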
[@ va_qattr]
let va_wp_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let
(auth_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx
va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR8 va_s0) in let
(xip:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR9 va_s0)
in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let
(in128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in
let (len128x6:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in
let (in128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in
let (out128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in
let (inout_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in
let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in
let (tag_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in
aesni_enabled /\ pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ movbe_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
auth_len == auth_num /\ auth_num_bytes == auth_bytes /\ len128x6 == len128x6_num /\ len128 ==
len128_num /\ plain_num_bytes == plain_num /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) auth_ptr auth_b auth_len (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) abytes_ptr abytes_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) iv_ptr iv_b 1
(va_get_mem_layout va_s0) Public /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128x6_ptr in128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128x6_ptr out128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128_ptr in128_b len128 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) inout_ptr inout_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) scratch_ptr scratch_b 9
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem
va_s0) tag_ptr tag_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128
tag_b ([keys_b; auth_b; abytes_b; iv_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b;
auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b; abytes_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64)
(va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64) (va_x_rsi:nat64)
(va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)
(va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_memLayout:vale_heap_layout)
(va_x_heap1:vale_heap) (va_x_heap2:vale_heap) (va_x_heap3:vale_heap) (va_x_heap4:vale_heap)
(va_x_heap5:vale_heap) (va_x_heap6:vale_heap) (va_x_efl:Vale.X64.Flags.t)
(va_x_stack:vale_stack) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_stack va_x_stack (va_upd_flags va_x_efl (va_upd_mem_heaplet 6
va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 4 va_x_heap4
(va_upd_mem_heaplet 3 va_x_heap3 (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_mem_layout va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14
va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11
(va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7
va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3
va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64
rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12
va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9
(va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64
rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx
va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0)
(fun _ -> va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (auth_len:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let (out128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let (len128x6:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in let (len128:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in let (scratch_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let (tag_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (())))
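// va_wpProof_Gcm_blocks_stdcall: soundness lemma connecting va_wp_Gcm_blocks_stdcall to the
// code and its modifies list, as required to construct the va_quickCode value below.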
val va_wpProof_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> auth_b:buffer128 ->
auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 -> iv:supported_iv_LE ->
hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 -> out128x6_b:buffer128 ->
len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 -> len128_num:nat64 ->
inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 -> tag_b:buffer128 -> key:(seq nat32)
-> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_stdcall win alg auth_b auth_bytes auth_num
keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num
inout_b plain_num scratch_b tag_b key va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_stdcall win alg)
([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags; va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5;
va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3; va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1;
va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm
4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64
rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64
rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp; va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64
rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx; va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem])
va_s0 va_k ((va_sM, va_f0, va_g))))
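// va_quick_Gcm_blocks_stdcall: packages the code, its modifies list, the weakest precondition
// and its wp proof into a single va_quickCode value via va_QProc, for use by quick-code callers.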
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) : (va_quickCode unit
(va_code_Gcm_blocks_stdcall win alg)) =
(va_QProc (va_code_Gcm_blocks_stdcall win alg) ([va_Mod_stackTaint; va_Mod_stack; va_Mod_flags;
va_Mod_mem_heaplet 6; va_Mod_mem_heaplet 5; va_Mod_mem_heaplet 4; va_Mod_mem_heaplet 3;
va_Mod_mem_heaplet 2; va_Mod_mem_heaplet 1; va_Mod_mem_layout; va_Mod_xmm 15; va_Mod_xmm 14;
va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12;
va_Mod_reg64 rR11; va_Mod_reg64 rR10; va_Mod_reg64 rR9; va_Mod_reg64 rR8; va_Mod_reg64 rRbp;
va_Mod_reg64 rRsp; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRdx; va_Mod_reg64 rRcx;
va_Mod_reg64 rRbx; va_Mod_reg64 rRax; va_Mod_mem]) (va_wp_Gcm_blocks_stdcall win alg auth_b
auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b len128x6_num in128_b
out128_b len128_num inout_b plain_num scratch_b tag_b key) (va_wpProof_Gcm_blocks_stdcall win
alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b abytes_b in128x6_b out128x6_b
len128x6_num in128_b out128_b len128_num inout_b plain_num scratch_b tag_b key))
//--
//-- Compute_iv_stdcall
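// Compute_iv_stdcall computes the initial counter block (stored in j0_b) from a supported IV
// held in iv_b/iv_extra_b and the GHASH key material in hkeys_b; its postcondition (not shown
// in this excerpt) is expected to relate j0_b to Vale.AES.GCM_s.compute_iv_BE, which the
// Gcm_blocks_stdcall precondition above assumes of iv_b. The declarations below follow the
// same req/ens/lemma/wp/quick pattern as Gcm_blocks_stdcall.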
val va_code_Compute_iv_stdcall : win:bool -> Tot va_code
val va_codegen_success_Compute_iv_stdcall : win:bool -> Tot va_pbool
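// va_req_Compute_iv_stdcall: precondition for the IV-computation wrapper: valid and disjoint
// iv_b/iv_extra_b/j0_b/hkeys_b buffers, length bounds on the IV, required CPU features
// (pclmulqdq/avx/sse), well-formed hkeys, and the requirement that the bytes of
// iv_b appended with iv_extra_b, truncated to num_bytes, equal the ghost IV argument.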
let va_req_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128)
(hkeys_b:buffer128) : prop =
(va_require_total va_b0 (va_code_Compute_iv_stdcall win) va_s0 /\ va_get_ok va_s0 /\ (let
(iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (j0_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ bytes_reg ==
num_bytes /\ len_reg == len /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) iv_ptr iv_b
len (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr iv_extra_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) j0_ptr j0_b 1 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) h_ptr hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128
iv_extra_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\ (Vale.X64.Decls.buffers_disjoint128 j0_b
iv_extra_b \/ j0_b == iv_extra_b) /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
iv_b == len /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\ iv_ptr
+ 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\ (va_mul_nat len (128 `op_Division`
8) <= num_bytes /\ num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\ (pclmulqdq_enabled /\
avx_enabled /\ sse_enabled) /\ Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128
(va_get_mem va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\ (let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b) in let (iv_bytes_LE:supported_iv_LE) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
iv_raw_quads) 0 num_bytes in iv_bytes_LE == iv)))
let va_ens_Compute_iv_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (iv:supported_iv_LE)
(iv_b:buffer128) (num_bytes:nat64) (len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128)
(hkeys_b:buffer128) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Compute_iv_stdcall va_b0 va_s0 win iv iv_b num_bytes len j0_b iv_extra_b hkeys_b /\
va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0) in
let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRdx va_s0
else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in let (j0_ptr:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in
let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv /\ Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem
va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==>
va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM ==
va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm
8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM
== va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==>
va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) /\ va_state_eq va_sM (va_update_stackTaint
va_sM (va_update_stack va_sM (va_update_flags va_sM (va_update_mem_heaplet 7 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))
val va_lemma_Compute_iv_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> iv:supported_iv_LE
-> iv_b:buffer128 -> num_bytes:nat64 -> len:nat64 -> j0_b:buffer128 -> iv_extra_b:buffer128 ->
hkeys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Compute_iv_stdcall win) va_s0 /\ va_get_ok va_s0 /\
(let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (j0_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64
rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\ bytes_reg ==
num_bytes /\ len_reg == len /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) iv_ptr iv_b
len (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
extra_ptr iv_extra_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) j0_ptr j0_b 1 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) h_ptr hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.buffers_disjoint128 iv_b iv_extra_b /\
Vale.X64.Decls.buffers_disjoint128 iv_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128
iv_extra_b hkeys_b /\ Vale.X64.Decls.buffers_disjoint128 j0_b iv_b /\
Vale.X64.Decls.buffers_disjoint128 j0_b hkeys_b /\ (Vale.X64.Decls.buffers_disjoint128 j0_b
iv_extra_b \/ j0_b == iv_extra_b) /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
iv_b == len /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 iv_extra_b == 1 /\ iv_ptr
+ 16 `op_Multiply` len < pow2_64 /\ h_ptr + 32 < pow2_64 /\ (va_mul_nat len (128 `op_Division`
8) <= num_bytes /\ num_bytes < va_mul_nat len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(0 < 8 `op_Multiply` num_bytes /\ 8 `op_Multiply` num_bytes < pow2_64) /\ (pclmulqdq_enabled /\
avx_enabled /\ sse_enabled) /\ Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128
(va_get_mem va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\ (let iv_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) iv_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) iv_extra_b) in let (iv_bytes_LE:supported_iv_LE) =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes
iv_raw_quads) 0 num_bytes in iv_bytes_LE == iv))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (bytes_reg:(va_int_range 0 18446744073709551615)) = (if win
then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (len_reg:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (j0_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (extra_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0) in let (h_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0) in let (h_LE:Vale.Def.Types_s.quad32) =
Vale.Def.Types_s.reverse_bytes_quad32 (Vale.X64.Decls.buffer128_read hkeys_b 2 (va_get_mem
va_s0)) in Vale.X64.Decls.buffer128_read j0_b 0 (va_get_mem va_sM) ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv /\ Vale.X64.Decls.modifies_buffer128 j0_b (va_get_mem
va_s0) (va_get_mem va_sM) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==>
va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\ (win ==> va_get_reg64 rRbp va_sM ==
va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\ (win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (win ==> va_get_reg64
rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==> va_get_xmm 8 va_sM == va_get_xmm
8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\ (win ==> va_get_xmm 10 va_sM
== va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\ (win ==>
va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==> va_get_xmm 13 va_sM == va_get_xmm 13
va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\ (win ==> va_get_xmm 15 va_sM
== va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (~win ==> va_get_reg64 rR12
va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13
va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\ (~win ==>
va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0)) /\ va_state_eq va_sM (va_update_stackTaint
va_sM (va_update_stack va_sM (va_update_flags va_sM (va_update_mem_heaplet 7 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Compute_iv_stdcall (win:bool) (iv:supported_iv_LE) (iv_b:buffer128) (num_bytes:nat64)
(len:nat64) (j0_b:buffer128) (iv_extra_b:buffer128) (hkeys_b:buffer128) (va_s0:va_state) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Compute_iv_stdcall
(win: bool)
(iv: supported_iv_LE)
(iv_b: buffer128)
(num_bytes len: nat64)
(j0_b iv_extra_b hkeys_b: buffer128)
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Compute_iv_stdcall | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
iv_b: Vale.X64.Memory.buffer128 ->
num_bytes: Vale.X64.Memory.nat64 ->
len: Vale.X64.Memory.nat64 ->
j0_b: Vale.X64.Memory.buffer128 ->
iv_extra_b: Vale.X64.Memory.buffer128 ->
hkeys_b: Vale.X64.Memory.buffer128 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 78,
"end_line": 1803,
"start_col": 2,
"start_line": 1716
} |
Prims.Tot | val va_req_Gcm_blocks_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
: prop | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv))) | val va_req_Gcm_blocks_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
: prop
let va_req_Gcm_blocks_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
: prop = | false | null | false | (va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\
(let auth_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRcx va_s0 else va_get_reg64 rRdi va_s0)
in
let auth_num_bytes:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0)
in
let auth_len:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0)
in
let keys_ptr:(va_int_range 0 18446744073709551615) =
(if win then va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0)
in
let iv_ptr:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)
else va_get_reg64 rR8 va_s0)
in
let xip:(va_int_range 0 18446744073709551615) =
(if win
then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)
else va_get_reg64 rR9 va_s0)
in
let abytes_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0))
in
let in128x6_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0))
in
let out128x6_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0))
in
let len128x6:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0))
in
let in128_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0))
in
let out128_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0))
in
let len128:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0))
in
let inout_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0))
in
let plain_num_bytes:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0))
in
let scratch_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0))
in
let tag_ptr:(va_int_range 0 18446744073709551615) =
(if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0))
in
aesni_enabled /\ pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ movbe_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 16)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 32)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 48)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 64)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 80)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 24)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 40)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 56)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 72)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 88)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes == auth_bytes /\
len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
auth_ptr
auth_b
auth_len
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr
abytes_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
iv_ptr
iv_b
1
(va_get_mem_layout va_s0)
Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128x6_ptr
in128x6_b
len128x6
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr
out128x6_b
len128x6
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128_ptr
in128_b
len128
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128_ptr
out128_b
len128
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr
inout_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
scratch_ptr
scratch_b
9
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
xip
hkeys_b
8
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
tag_ptr
tag_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.buffer_disjoints128 tag_b
([
keys_b; auth_b; abytes_b; iv_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
scratch_b; hkeys_b
]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b
([
keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b;
hkeys_b
]) /\
Vale.X64.Decls.buffer_disjoints128 scratch_b
([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 inout_b
([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b; abytes_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b
([keys_b; auth_b; abytes_b; hkeys_b; in128_b; inout_b]) /\
Vale.X64.Decls.buffer_disjoints128 in128x6_b
([keys_b; auth_b; abytes_b; hkeys_b; in128_b; inout_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\
auth_ptr + 16 `op_Multiply` auth_len < pow2_64 /\
in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\
inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b == len128x6 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == len128 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\ plain_num_bytes < pow2_32 /\
auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64 /\
len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 < pow2_32 /\
(va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) <=
plain_num_bytes /\
plain_num_bytes <
va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) +
128
`op_Division`
8) /\
(va_mul_nat auth_len (128 `op_Division` 8) <= auth_num_bytes /\
auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
keys_ptr
keys_b
(Vale.AES.AES_common_s.nr alg + 1)
(va_get_mem_layout va_s0)
Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg
key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\
(let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg
key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in
let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in
iv_BE == Vale.AES.GCM_s.compute_iv_BE h_LE iv))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Vale.X64.Decls.va_code",
"Vale.X64.Decls.va_state",
"Prims.bool",
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.AES.GCM_s.supported_iv_LE",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Prims.l_and",
"Vale.X64.Decls.va_require_total",
"Vale.AES.X64.GCMencryptOpt.va_code_Gcm_blocks_stdcall",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.aesni_enabled",
"Vale.X64.CPU_Features_s.pclmulqdq_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Vale.X64.CPU_Features_s.movbe_enabled",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Prims.l_not",
"Vale.X64.Stack_i.valid_stack_slot64",
"Prims.op_Addition",
"Vale.Arch.HeapTypes_s.Public",
"Vale.X64.Decls.va_get_stackTaint",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_64",
"Prims.op_LessThanOrEqual",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Vale.X64.Decls.buffer_disjoints128",
"Prims.Cons",
"Prims.Nil",
"Vale.X64.Decls.buffers_disjoint128",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.nat",
"Vale.X64.Machine_s.pow2_32",
"Vale.X64.Memory.buffer_addr",
"Prims.op_Modulus",
"Prims.op_GreaterThan",
"Vale.X64.Decls.va_mul_nat",
"Prims.op_Division",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"Vale.Def.Types_s.quad32",
"Vale.X64.Decls.buffer128_as_seq",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Vale.AES.AES_common_s.nr",
"Vale.AES.OptPublic.hkeys_reqs_pub",
"Vale.X64.Decls.s128",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.AES.AES_s.aes_encrypt_LE",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.compute_iv_BE",
"Vale.X64.Decls.buffer128_read",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Stack_i.load_stack64",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Prims.prop"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
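// Note: the Gctr_register block above follows the usual Vale interface pattern
// (va_code_*, va_lemma_*, va_wp_*, va_quick_*). Per its ensures clause, it encrypts the
// single block reverse_bytes_quad32(xmm8) under the counter in xmm0, leaving
// gctr_encrypt_block of that block in xmm8.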
//-- Gctr_blocks128
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
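// Note: Gctr_blocks128 above CTR-encrypts rRdx 128-bit blocks from in_b (at rRax) into
// out_b (at rRdi); on exit gctr_partial relates input and output under key, and the
// counter in xmm11 has advanced by rRdx (inc32lite).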
//-- Gcm_make_length_quad
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
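// Note: Gcm_make_length_quad above assembles the GCM length block in xmm0, packing the
// bit lengths 8*rR11 and 8*rR13 into its two 64-bit halves via insert_nat64.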
//-- Ghash_extra_bytes
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
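// Note: Ghash_extra_bytes above folds the trailing partial block (xmm0, i.e. the
// total_bytes mod 16 remaining bytes, padded to 128 bits) into the running GHASH,
// which is kept byte-reversed in xmm8.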
//-- Gcm_blocks_auth
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
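// Note: Gcm_blocks_auth above GHASHes the additional authenticated data starting from a
// zero hash: auth_b plus, when rRsi exceeds 16*rRdx, the extra block in abytes_b, truncated
// to rRsi bytes and padded to 128-bit blocks; the padded quad32 sequence is returned as the
// ghost result and the byte-reversed hash ends up in xmm8.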
//-- Save_registers
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
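// Note: Save_registers above spills the callee-saved state to the stack: rbx, rbp, rdi,
// rsi and r12-r15 always (64 bytes), plus xmm6-xmm15 on Windows (a further 160 bytes),
// lowering rsp accordingly and marking the slots Secret in the stack taint map.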
//-- Restore_registers
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))
//--
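// Note: Restore_registers above is the inverse of Save_registers: it reloads the saved
// general-purpose registers (and xmm6-xmm15 on Windows) from the stack and restores rsp
// to old_rsp.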
#reset-options "--z3rlimit 100"
//-- Gcm_blocks_stdcall
val va_code_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128) | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_req_Gcm_blocks_stdcall
(va_b0: va_code)
(va_s0: va_state)
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
: prop | [] | Vale.AES.X64.GCMencryptOpt.va_req_Gcm_blocks_stdcall | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
va_b0: Vale.X64.Decls.va_code ->
va_s0: Vale.X64.Decls.va_state ->
win: Prims.bool ->
alg: Vale.AES.AES_common_s.algorithm ->
auth_b: Vale.X64.Memory.buffer128 ->
auth_bytes: Vale.X64.Memory.nat64 ->
auth_num: Vale.X64.Memory.nat64 ->
keys_b: Vale.X64.Memory.buffer128 ->
iv_b: Vale.X64.Memory.buffer128 ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
hkeys_b: Vale.X64.Memory.buffer128 ->
abytes_b: Vale.X64.Memory.buffer128 ->
in128x6_b: Vale.X64.Memory.buffer128 ->
out128x6_b: Vale.X64.Memory.buffer128 ->
len128x6_num: Vale.X64.Memory.nat64 ->
in128_b: Vale.X64.Memory.buffer128 ->
out128_b: Vale.X64.Memory.buffer128 ->
len128_num: Vale.X64.Memory.nat64 ->
inout_b: Vale.X64.Memory.buffer128 ->
plain_num: Vale.X64.Memory.nat64 ->
scratch_b: Vale.X64.Memory.buffer128 ->
tag_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32
-> Prims.prop | {
"end_col": 43,
"end_line": 869,
"start_col": 2,
"start_line": 732
} |
Prims.Tot | val va_wp_Gcm_blocks_stdcall
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [
{
"abbrev": false,
"full_module": "Vale.Lib.Basic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.OptPublic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Lib.Meta",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt2",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESGCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AESopt",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Math.Poly2.Bits_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.CPU_Features_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GF128_Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCodes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.QuickCode",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsAes",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsStack",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsVector",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsMem",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.InsBasic",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Decls",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Stack_i",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Memory",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.X64.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_helpers",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Poly1305.Math",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GF128_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64.AES",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GHash_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCM",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.GCTR_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.AES_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.Types",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Types_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words.Seq_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Words_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Seq",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Opaque_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.AES.X64",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_wp_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) (va_s0:va_state) (va_k:(va_state ->
unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0) in let
(auth_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx
va_s0) (fun _ -> va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64
rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR8 va_s0) in let
(xip:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0)) (fun _ -> va_get_reg64 rR9 va_s0)
in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let
(in128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in
let (len128x6:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in
let (in128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in
let (out128_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in
let (inout_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in
let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in
let (tag_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _
-> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in
aesni_enabled /\ pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ movbe_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
auth_len == auth_num /\ auth_num_bytes == auth_bytes /\ len128x6 == len128x6_num /\ len128 ==
len128_num /\ plain_num_bytes == plain_num /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) auth_ptr auth_b auth_len (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) abytes_ptr abytes_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) iv_ptr iv_b 1
(va_get_mem_layout va_s0) Public /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128x6_ptr in128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128x6_ptr out128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128_ptr in128_b len128 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) out128_ptr out128_b len128 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) inout_ptr inout_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) scratch_ptr scratch_b 9
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem
va_s0) tag_ptr tag_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128
tag_b ([keys_b; auth_b; abytes_b; iv_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b;
auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b; abytes_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\ (forall (va_x_mem:vale_heap) (va_x_rax:nat64)
(va_x_rbx:nat64) (va_x_rcx:nat64) (va_x_rdx:nat64) (va_x_rdi:nat64) (va_x_rsi:nat64)
(va_x_rsp:nat64) (va_x_rbp:nat64) (va_x_r8:nat64) (va_x_r9:nat64) (va_x_r10:nat64)
(va_x_r11:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64) (va_x_r15:nat64)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_memLayout:vale_heap_layout)
(va_x_heap1:vale_heap) (va_x_heap2:vale_heap) (va_x_heap3:vale_heap) (va_x_heap4:vale_heap)
(va_x_heap5:vale_heap) (va_x_heap6:vale_heap) (va_x_efl:Vale.X64.Flags.t)
(va_x_stack:vale_stack) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_stack va_x_stack (va_upd_flags va_x_efl (va_upd_mem_heaplet 6
va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5 (va_upd_mem_heaplet 4 va_x_heap4
(va_upd_mem_heaplet 3 va_x_heap3 (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_mem_layout va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14
va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11
(va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7
va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3
va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64
rR15 va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12
va_x_r12 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9
(va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp (va_upd_reg64
rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx (va_upd_reg64 rRcx
va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax (va_upd_mem va_x_mem
va_s0)))))))))))))))))))))))))))))))))))))))))) in va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRcx va_s0)
(fun _ -> va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64
rRsi va_s0) in let (auth_len:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64
rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = va_if win (fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0)) (fun
_ -> va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = va_if win
(fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack
va_s0)) (fun _ -> va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 16) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 24) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let (out128x6_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 32) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let (len128x6:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 40) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 48) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 56) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in let (len128:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 64) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 72) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 80) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in let (scratch_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 88) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let (tag_ptr:(va_int_range 0
18446744073709551615)) = va_if win (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40 + 96) (va_get_stack va_s0)) (fun _ -> Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (()))) | val va_wp_Gcm_blocks_stdcall
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0
let va_wp_Gcm_blocks_stdcall
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 = | false | null | false | (va_get_ok va_s0 /\
(let auth_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let auth_num_bytes:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let auth_len:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let keys_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0)
in
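      (* The four bindings above come from register arguments (rRcx/rRdx/rR8/rR9 on
         Windows, rRdi/rRsi/rRdx/rRcx on Linux). The remaining parameters follow the
         two calling conventions: on Windows the 5th and 6th arguments are loaded from
         the stack above the 32-byte shadow space and the 8-byte return address
         (rRsp + 32 + 8 + ...) and later arguments at rRsp + 40 + ..., while on Linux
         the 5th and 6th arguments are still in registers (rR8/rR9) and stack
         arguments start directly above the return address (rRsp + 8 + ...). *)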
let iv_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR8 va_s0)
in
let xip:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR9 va_s0)
in
let abytes_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0))
in
let in128x6_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0))
in
let out128x6_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0))
in
let len128x6:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0))
in
let in128_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0))
in
let out128_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0))
in
let len128:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0))
in
let inout_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack va_s0))
in
let plain_num_bytes:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0))
in
let scratch_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0))
in
let tag_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0))
in
aesni_enabled /\ pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ movbe_enabled /\
va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Memory.is_initial_heap (va_get_mem_layout va_s0) (va_get_mem va_s0) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 16)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 32)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 48)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 64)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(~win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 80)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 8)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 24)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 40)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 56)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 72)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 88)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\
(win ==>
Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96)
(va_get_stack va_s0)
Public
(va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes == auth_bytes /\
len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
auth_ptr
auth_b
auth_len
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr
abytes_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
iv_ptr
iv_b
1
(va_get_mem_layout va_s0)
Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128x6_ptr
in128x6_b
len128x6
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr
out128x6_b
len128x6
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
in128_ptr
in128_b
len128
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128_ptr
out128_b
len128
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr
inout_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
scratch_ptr
scratch_b
9
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
xip
hkeys_b
8
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
tag_ptr
tag_b
1
(va_get_mem_layout va_s0)
Secret /\
Vale.X64.Decls.buffer_disjoints128 tag_b
([
keys_b; auth_b; abytes_b; iv_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
scratch_b; hkeys_b
]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b
([
keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b;
hkeys_b
]) /\
Vale.X64.Decls.buffer_disjoints128 scratch_b
([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 inout_b
([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b; abytes_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b
([keys_b; auth_b; abytes_b; hkeys_b; in128_b; inout_b]) /\
Vale.X64.Decls.buffer_disjoints128 in128x6_b
([keys_b; auth_b; abytes_b; hkeys_b; in128_b; inout_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\
auth_ptr + 16 `op_Multiply` auth_len < pow2_64 /\
in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
in128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\
inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b == len128x6 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == len128 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\ plain_num_bytes < pow2_32 /\
auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64 /\
len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 < pow2_32 /\
(va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) <=
plain_num_bytes /\
plain_num_bytes <
va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8) +
128
`op_Division`
8) /\
(va_mul_nat auth_len (128 `op_Division` 8) <= auth_num_bytes /\
auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128 `op_Division` 8) /\
(alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
keys_ptr
keys_b
(Vale.AES.AES_common_s.nr alg + 1)
(va_get_mem_layout va_s0)
Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg
key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\
(let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg
key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in
let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in
iv_BE == Vale.AES.GCM_s.compute_iv_BE h_LE iv)) /\
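     (* The conjuncts above constrain the entry state va_s0; the quantified formula
        below ranges over every register, xmm, flag, heaplet, stack and taint
        component the procedure may modify, constructs the corresponding exit state
        va_sM, and requires the continuation va_k to hold on va_sM whenever the
        stated postconditions do. *)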
(forall (va_x_mem: vale_heap) (va_x_rax: nat64) (va_x_rbx: nat64) (va_x_rcx: nat64)
(va_x_rdx: nat64) (va_x_rdi: nat64) (va_x_rsi: nat64) (va_x_rsp: nat64) (va_x_rbp: nat64)
(va_x_r8: nat64) (va_x_r9: nat64) (va_x_r10: nat64) (va_x_r11: nat64) (va_x_r12: nat64)
(va_x_r13: nat64) (va_x_r14: nat64) (va_x_r15: nat64) (va_x_xmm0: quad32) (va_x_xmm1: quad32)
(va_x_xmm2: quad32) (va_x_xmm3: quad32) (va_x_xmm4: quad32) (va_x_xmm5: quad32)
(va_x_xmm6: quad32) (va_x_xmm7: quad32) (va_x_xmm8: quad32) (va_x_xmm9: quad32)
(va_x_xmm10: quad32) (va_x_xmm11: quad32) (va_x_xmm12: quad32) (va_x_xmm13: quad32)
(va_x_xmm14: quad32) (va_x_xmm15: quad32) (va_x_memLayout: vale_heap_layout)
(va_x_heap1: vale_heap) (va_x_heap2: vale_heap) (va_x_heap3: vale_heap)
(va_x_heap4: vale_heap) (va_x_heap5: vale_heap) (va_x_heap6: vale_heap)
(va_x_efl: Vale.X64.Flags.t) (va_x_stack: vale_stack) (va_x_stackTaint: memtaint).
          let va_sM =
            va_upd_stackTaint va_x_stackTaint (va_upd_stack va_x_stack (va_upd_flags va_x_efl
              (va_upd_mem_heaplet 6 va_x_heap6 (va_upd_mem_heaplet 5 va_x_heap5
              (va_upd_mem_heaplet 4 va_x_heap4 (va_upd_mem_heaplet 3 va_x_heap3
              (va_upd_mem_heaplet 2 va_x_heap2 (va_upd_mem_heaplet 1 va_x_heap1
              (va_upd_mem_layout va_x_memLayout (va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14
              (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12 va_x_xmm12 (va_upd_xmm 11 va_x_xmm11
              (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
              (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5
              (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
              (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR15 va_x_r15
              (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
              (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR9 va_x_r9
              (va_upd_reg64 rR8 va_x_r8 (va_upd_reg64 rRbp va_x_rbp (va_upd_reg64 rRsp va_x_rsp
              (va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRdx va_x_rdx
              (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax
              (va_upd_mem va_x_mem va_s0))))))))))))))))))))))))))))))))))))))))))
          in
va_get_ok va_sM /\
(let auth_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRcx va_s0) (fun _ -> va_get_reg64 rRdi va_s0)
in
let auth_num_bytes:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rRdx va_s0) (fun _ -> va_get_reg64 rRsi va_s0)
in
let auth_len:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR8 va_s0) (fun _ -> va_get_reg64 rRdx va_s0)
in
let keys_ptr:(va_int_range 0 18446744073709551615) =
va_if win (fun _ -> va_get_reg64 rR9 va_s0) (fun _ -> va_get_reg64 rRcx va_s0)
in
let iv_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR8 va_s0)
in
let xip:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8)
(va_get_stack va_s0))
(fun _ -> va_get_reg64 rR9 va_s0)
in
let abytes_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)
)
in
let in128x6_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)
)
in
let out128x6_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16)
(va_get_stack va_s0))
in
let len128x6:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 40)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 24)
(va_get_stack va_s0))
in
let in128_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 48)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 32)
(va_get_stack va_s0))
in
let out128_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40)
(va_get_stack va_s0))
in
let len128:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 64)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 48)
(va_get_stack va_s0))
in
let inout_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 72)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 56)
(va_get_stack va_s0))
in
let plain_num_bytes:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64)
(va_get_stack va_s0))
in
let scratch_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72)
(va_get_stack va_s0))
in
let tag_ptr:(va_int_range 0 18446744073709551615) =
va_if win
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 96)
(va_get_stack va_s0))
(fun _ ->
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 80)
(va_get_stack va_s0))
in
Vale.X64.Decls.modifies_mem (Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128
tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128
scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128
out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128
out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b))))))
(va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\
(let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in
let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b)
in
let auth_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads)
0
auth_num_bytes
in
let plain_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_s0) in128x6_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b))
(Vale.X64.Decls.s128 (va_get_mem va_s0) inout_b)
in
let plain_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads)
0
plain_num_bytes
in
let cipher_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b)
(Vale.X64.Decls.s128 (va_get_mem va_sM) out128_b))
(Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b)
in
let cipher_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads)
0
plain_num_bytes
in
l_and (l_and (l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes <
pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes ==
__proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
(Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)
iv
plain_bytes
auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b
0
(va_get_mem va_sM)) ==
__proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
(Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)
iv
plain_bytes
auth_bytes)) /\ va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 /\
(win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(win ==> va_get_reg64 rRdi va_sM == va_get_reg64 rRdi va_s0) /\
(win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi va_s0) /\
(win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\
(win ==> va_get_xmm 6 va_sM == va_get_xmm 6 va_s0) /\
(win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\
(win ==> va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\
(win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0) /\
(win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\
(win ==> va_get_xmm 11 va_sM == va_get_xmm 11 va_s0) /\
(win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\
(win ==> va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\
(win ==> va_get_xmm 14 va_sM == va_get_xmm 14 va_s0) /\
(win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\
(~win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx va_s0) /\
(~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\
(~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\
(~win ==> va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\
(~win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14 va_s0) /\
(~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) ==>
va_k va_sM (()))) | {
"checked_file": "Vale.AES.X64.GCMencryptOpt.fsti.checked",
"dependencies": [
"Vale.X64.State.fsti.checked",
"Vale.X64.Stack_i.fsti.checked",
"Vale.X64.Stack.fsti.checked",
"Vale.X64.QuickCodes.fsti.checked",
"Vale.X64.QuickCode.fst.checked",
"Vale.X64.Memory.fsti.checked",
"Vale.X64.Machine_s.fst.checked",
"Vale.X64.InsVector.fsti.checked",
"Vale.X64.InsStack.fsti.checked",
"Vale.X64.InsMem.fsti.checked",
"Vale.X64.InsBasic.fsti.checked",
"Vale.X64.InsAes.fsti.checked",
"Vale.X64.Flags.fsti.checked",
"Vale.X64.Decls.fsti.checked",
"Vale.X64.CPU_Features_s.fst.checked",
"Vale.Poly1305.Math.fsti.checked",
"Vale.Math.Poly2.Bits_s.fsti.checked",
"Vale.Lib.Meta.fsti.checked",
"Vale.Def.Words_s.fsti.checked",
"Vale.Def.Words.Seq_s.fsti.checked",
"Vale.Def.Types_s.fst.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Def.Opaque_s.fsti.checked",
"Vale.Arch.Types.fsti.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.AES.X64.GHash.fsti.checked",
"Vale.AES.X64.GF128_Mul.fsti.checked",
"Vale.AES.X64.GCTR.fsti.checked",
"Vale.AES.X64.AESopt2.fsti.checked",
"Vale.AES.X64.AESopt.fsti.checked",
"Vale.AES.X64.AESGCM.fsti.checked",
"Vale.AES.X64.AES.fsti.checked",
"Vale.AES.OptPublic.fsti.checked",
"Vale.AES.GHash_s.fst.checked",
"Vale.AES.GHash.fsti.checked",
"Vale.AES.GF128_s.fsti.checked",
"Vale.AES.GF128.fsti.checked",
"Vale.AES.GCTR_s.fst.checked",
"Vale.AES.GCTR.fsti.checked",
"Vale.AES.GCM_s.fst.checked",
"Vale.AES.GCM_helpers.fsti.checked",
"Vale.AES.GCM.fsti.checked",
"Vale.AES.AES_s.fst.checked",
"Vale.AES.AES_common_s.fst.checked",
"prims.fst.checked",
"FStar.Seq.Base.fsti.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked"
],
"interface_file": false,
"source_file": "Vale.AES.X64.GCMencryptOpt.fsti"
} | [
"total"
] | [
"Prims.bool",
"Vale.AES.AES_common_s.algorithm",
"Vale.X64.Memory.buffer128",
"Vale.X64.Memory.nat64",
"Vale.AES.GCM_s.supported_iv_LE",
"FStar.Seq.Base.seq",
"Vale.X64.Memory.nat32",
"Vale.X64.Decls.va_state",
"Prims.unit",
"Prims.l_and",
"Prims.b2t",
"Vale.X64.Decls.va_get_ok",
"Vale.X64.CPU_Features_s.aesni_enabled",
"Vale.X64.CPU_Features_s.pclmulqdq_enabled",
"Vale.X64.CPU_Features_s.avx_enabled",
"Vale.X64.CPU_Features_s.sse_enabled",
"Vale.X64.CPU_Features_s.movbe_enabled",
"Prims.eq2",
"Vale.Def.Words_s.nat64",
"Vale.X64.Decls.va_get_reg64",
"Vale.X64.Machine_s.rRsp",
"Vale.X64.Stack_i.init_rsp",
"Vale.X64.Decls.va_get_stack",
"Vale.X64.Memory.is_initial_heap",
"Vale.X64.Decls.va_get_mem_layout",
"Vale.X64.Decls.va_get_mem",
"Prims.l_imp",
"Prims.l_not",
"Vale.X64.Stack_i.valid_stack_slot64",
"Prims.op_Addition",
"Vale.Arch.HeapTypes_s.Public",
"Vale.X64.Decls.va_get_stackTaint",
"Prims.int",
"Prims.l_or",
"Prims.op_GreaterThanOrEqual",
"Prims.op_LessThan",
"Vale.Def.Words_s.pow2_64",
"Prims.op_LessThanOrEqual",
"Vale.X64.Decls.validSrcAddrs128",
"Vale.Arch.HeapTypes_s.Secret",
"Vale.X64.Decls.validDstAddrs128",
"Vale.X64.Decls.buffer_disjoints128",
"Prims.Cons",
"Prims.Nil",
"Vale.X64.Decls.buffers_disjoint128",
"Prims.op_Multiply",
"Vale.X64.Machine_s.pow2_64",
"Vale.X64.Decls.buffer_length",
"Vale.X64.Memory.vuint128",
"Prims.nat",
"Vale.X64.Machine_s.pow2_32",
"Vale.X64.Memory.buffer_addr",
"Prims.op_Modulus",
"Prims.op_GreaterThan",
"Vale.X64.Decls.va_mul_nat",
"Prims.op_Division",
"Prims.op_Equality",
"Vale.AES.AES_common_s.AES_128",
"Vale.AES.AES_common_s.AES_256",
"Vale.AES.AES_s.is_aes_key_LE",
"Vale.Def.Types_s.quad32",
"Vale.X64.Decls.buffer128_as_seq",
"Vale.AES.AES_s.key_to_round_keys_LE",
"Vale.AES.AES_common_s.nr",
"Vale.AES.OptPublic.hkeys_reqs_pub",
"Vale.X64.Decls.s128",
"Vale.Def.Types_s.reverse_bytes_quad32",
"Vale.AES.AES_s.aes_encrypt_LE",
"Vale.Def.Words_s.Mkfour",
"Vale.Def.Types_s.nat32",
"Vale.AES.GCM_s.compute_iv_BE",
"Vale.X64.Decls.buffer128_read",
"Vale.X64.Decls.va_int_range",
"Vale.X64.Decls.va_if",
"Vale.X64.Stack_i.load_stack64",
"Vale.X64.Machine_s.rR9",
"Vale.X64.Machine_s.rR8",
"Vale.Def.Types_s.nat64",
"Vale.X64.Machine_s.rRcx",
"Vale.X64.Machine_s.rRdx",
"Vale.X64.Machine_s.rRsi",
"Vale.X64.Machine_s.rRdi",
"Prims.l_Forall",
"Vale.X64.InsBasic.vale_heap",
"Vale.X64.Decls.quad32",
"Vale.Arch.HeapImpl.vale_heap_layout",
"Vale.X64.Flags.t",
"Vale.X64.InsBasic.vale_stack",
"Vale.X64.Memory.memtaint",
"Vale.X64.Decls.modifies_mem",
"Vale.X64.Decls.loc_union",
"Vale.X64.Decls.loc_buffer",
"FStar.Seq.Base.length",
"Vale.Def.Types_s.nat8",
"Vale.AES.AES_common_s.is_aes_key",
"Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE",
"FStar.Pervasives.Native.__proj__Mktuple2__item___1",
"Vale.AES.GCM_s.gcm_encrypt_LE",
"Vale.Def.Types_s.le_quad32_to_bytes",
"FStar.Pervasives.Native.__proj__Mktuple2__item___2",
"Vale.X64.Machine_s.rRbx",
"Vale.X64.Machine_s.rRbp",
"Vale.X64.Machine_s.rR12",
"Vale.X64.Machine_s.rR13",
"Vale.X64.Machine_s.rR14",
"Vale.X64.Machine_s.rR15",
"Vale.X64.Decls.va_get_xmm",
"Vale.Def.Words_s.nat8",
"FStar.Seq.Base.slice",
"Vale.Def.Types_s.le_seq_quad32_to_bytes",
"FStar.Seq.Base.append",
"Vale.X64.State.vale_state",
"Vale.X64.Decls.va_upd_stackTaint",
"Vale.X64.Decls.va_upd_stack",
"Vale.X64.Decls.va_upd_flags",
"Vale.X64.Decls.va_upd_mem_heaplet",
"Vale.X64.Decls.va_upd_mem_layout",
"Vale.X64.Decls.va_upd_xmm",
"Vale.X64.Decls.va_upd_reg64",
"Vale.X64.Machine_s.rR11",
"Vale.X64.Machine_s.rR10",
"Vale.X64.Machine_s.rRax",
"Vale.X64.Decls.va_upd_mem"
] | [] | module Vale.AES.X64.GCMencryptOpt
open Vale.Def.Prop_s
open Vale.Def.Opaque_s
open FStar.Seq
open Vale.Def.Words_s
open Vale.Def.Words.Seq_s
open Vale.Def.Types_s
open Vale.Arch.Types
open Vale.Arch.HeapImpl
open Vale.AES.AES_s
open Vale.AES.GCTR_s
open Vale.AES.GCTR
open Vale.AES.GCM
open Vale.AES.GHash_s
open Vale.AES.GHash
open Vale.AES.GCM_s
open Vale.AES.X64.AES
open Vale.AES.GF128_s
open Vale.AES.GF128
open Vale.Poly1305.Math
open Vale.AES.GCM_helpers
open Vale.AES.X64.GHash
open Vale.AES.X64.GCTR
open Vale.X64.Machine_s
open Vale.X64.Memory
open Vale.X64.Stack_i
open Vale.X64.State
open Vale.X64.Decls
open Vale.X64.InsBasic
open Vale.X64.InsMem
open Vale.X64.InsVector
open Vale.X64.InsStack
open Vale.X64.InsAes
open Vale.X64.QuickCode
open Vale.X64.QuickCodes
open Vale.AES.X64.GF128_Mul
open Vale.X64.Stack
open Vale.X64.CPU_Features_s
open Vale.Math.Poly2.Bits_s
open Vale.AES.X64.AESopt
open Vale.AES.X64.AESGCM
open Vale.AES.X64.AESopt2
open Vale.Lib.Meta
open Vale.AES.OptPublic
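// aes_reqs below collects the key-schedule requirements shared by the procedures in this
// interface: the needed CPU features (AESNI, AVX), the algorithm being AES-128 or AES-256,
// the key being a valid little-endian AES key, and the buffer at key_ptr holding exactly
// key_to_round_keys_LE alg key.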
let aes_reqs
(alg:algorithm) (key:seq nat32) (round_keys:seq quad32) (keys_b:buffer128)
(key_ptr:int) (heap0:vale_heap) (layout:vale_heap_layout) : prop0
=
aesni_enabled /\ avx_enabled /\
(alg = AES_128 \/ alg = AES_256) /\
is_aes_key_LE alg key /\
length(round_keys) == nr(alg) + 1 /\
round_keys == key_to_round_keys_LE alg key /\
validSrcAddrs128 heap0 key_ptr keys_b (nr alg + 1) layout Secret /\
s128 heap0 keys_b == round_keys
//-- Gctr_register
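// Gctr_register encrypts a single block held in registers: the counter is in xmm 0, the
// (byte-reversed) input block in xmm 8, and per the postcondition xmm 8 ends up equal to
// gctr_encrypt_block of that counter/block pair.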
val va_code_Gctr_register : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_register : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_register : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> key:(seq nat32) ->
round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_register alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159
134810123 67438087 66051 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8 va_s0)
(va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) /\ va_state_eq va_sM (va_update_reg64 rR12 va_sM
(va_update_flags va_sM (va_update_xmm 8 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM
(va_update_xmm 0 va_sM (va_update_ok va_sM va_s0)))))))))
[@ va_qattr]
let va_wp_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ va_get_xmm 9 va_s0 == Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ aes_reqs alg key round_keys
keys_b (va_get_reg64 rR8 va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0)) /\
(forall (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm8:quad32)
(va_x_efl:Vale.X64.Flags.t) (va_x_r12:nat64) . let va_sM = va_upd_reg64 rR12 va_x_r12
(va_upd_flags va_x_efl (va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1
(va_upd_xmm 0 va_x_xmm0 va_s0))))) in va_get_ok va_sM /\
(Vale.Def.Types_s.le_seq_quad32_to_bytes (FStar.Seq.Base.create #quad32 1 (va_get_xmm 8 va_sM))
== Vale.AES.GCTR_s.gctr_encrypt_LE (va_get_xmm 0 va_s0) (Vale.Def.Types_s.le_quad32_to_bytes
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_s0))) alg key /\ va_get_xmm 8 va_sM ==
Vale.AES.GCTR_s.gctr_encrypt_block (va_get_xmm 0 va_s0) (Vale.Def.Types_s.reverse_bytes_quad32
(va_get_xmm 8 va_s0)) alg key 0) ==> va_k va_sM (())))
val va_wpProof_Gctr_register : alg:algorithm -> key:(seq nat32) -> round_keys:(seq quad32) ->
keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_register alg key round_keys keys_b va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_register alg) ([va_Mod_reg64
rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0]) va_s0 va_k
((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_register (alg:algorithm) (key:(seq nat32)) (round_keys:(seq quad32))
(keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_register alg)) =
(va_QProc (va_code_Gctr_register alg) ([va_Mod_reg64 rR12; va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0]) (va_wp_Gctr_register alg key round_keys keys_b)
(va_wpProof_Gctr_register alg key round_keys keys_b))
//--
//-- Gctr_blocks128
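// Gctr_blocks128 runs CTR mode over rRdx whole 128-bit blocks, reading in_b at rRax and
// writing out_b at rRdi; the postcondition states gctr_partial over the buffers and that
// the counter in xmm 11 advances by inc32lite with the block count.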
val va_code_Gctr_blocks128 : alg:algorithm -> Tot va_code
val va_codegen_success_Gctr_blocks128 : alg:algorithm -> Tot va_pbool
val va_lemma_Gctr_blocks128 : va_b0:va_code -> va_s0:va_state -> alg:algorithm -> in_b:buffer128 ->
out_b:buffer128 -> key:(seq nat32) -> round_keys:(seq quad32) -> keys_b:buffer128
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gctr_blocks128 alg) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b == out_b) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax va_s0) in_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b (va_get_reg64 rRdx va_s0)
(va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16 `op_Multiply` va_get_reg64
rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 <
pow2_64 /\ l_and (Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b) (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx va_s0 ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0) (va_get_mem_heaplet 1
va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b) key
(va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM == Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0)
(va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx va_sM == 0 ==> Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) out_b))
/\ va_state_eq va_sM (va_update_flags va_sM (va_update_mem_heaplet 1 va_sM (va_update_xmm 10
va_sM (va_update_xmm 11 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4
va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0
va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM (va_update_reg64 rRbx va_sM
(va_update_ok va_sM (va_update_mem va_sM va_s0))))))))))))))))))
[@ va_qattr]
let va_wp_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) (va_s0:va_state) (va_k:(va_state -> unit -> Type0))
: Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ (Vale.X64.Decls.buffers_disjoint128 in_b out_b \/ in_b ==
out_b) /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRax
va_s0) in_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validDstAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi va_s0) out_b
(va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\ va_get_reg64 rRax va_s0 + 16
`op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ va_get_reg64 rRdi va_s0 + 16 `op_Multiply`
va_get_reg64 rRdx va_s0 < pow2_64 /\ l_and (Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 in_b == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out_b)
(Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b < pow2_32) /\ va_get_reg64 rRdx
va_s0 == Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in_b /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_reg64 rRdx va_s0 < pow2_32 /\ aes_reqs alg key round_keys keys_b (va_get_reg64 rR8
va_s0) (va_get_mem_heaplet 0 va_s0) (va_get_mem_layout va_s0) /\ pclmulqdq_enabled) /\ (forall
(va_x_mem:vale_heap) (va_x_rbx:nat64) (va_x_r11:nat64) (va_x_r10:nat64) (va_x_xmm0:quad32)
(va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32)
(va_x_xmm6:quad32) (va_x_xmm11:quad32) (va_x_xmm10:quad32) (va_x_heap1:vale_heap)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_mem_heaplet 1
va_x_heap1 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 6 va_x_xmm6
(va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2
(va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0 (va_upd_reg64 rR10 va_x_r10 (va_upd_reg64 rR11
va_x_r11 (va_upd_reg64 rRbx va_x_rbx (va_upd_mem va_x_mem va_s0)))))))))))))) in va_get_ok
va_sM /\ (Vale.X64.Decls.modifies_buffer128 out_b (va_get_mem_heaplet 1 va_s0)
(va_get_mem_heaplet 1 va_sM) /\ Vale.AES.GCTR.gctr_partial alg (va_get_reg64 rRdx va_sM)
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_s0) in_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_sM) out_b) key (va_get_xmm 11 va_s0) /\ va_get_xmm 11 va_sM ==
Vale.AES.GCTR.inc32lite (va_get_xmm 11 va_s0) (va_get_reg64 rRdx va_s0) /\ (va_get_reg64 rRdx
va_sM == 0 ==> Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) out_b == Vale.X64.Decls.s128
(va_get_mem_heaplet 1 va_s0) out_b)) ==> va_k va_sM (())))
val va_wpProof_Gctr_blocks128 : alg:algorithm -> in_b:buffer128 -> out_b:buffer128 -> key:(seq
nat32) -> round_keys:(seq quad32) -> keys_b:buffer128 -> va_s0:va_state -> va_k:(va_state -> unit
-> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b va_s0
va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gctr_blocks128 alg) ([va_Mod_flags;
va_Mod_mem_heaplet 1; va_Mod_xmm 10; va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4;
va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRbx; va_Mod_mem]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gctr_blocks128 (alg:algorithm) (in_b:buffer128) (out_b:buffer128) (key:(seq nat32))
(round_keys:(seq quad32)) (keys_b:buffer128) : (va_quickCode unit (va_code_Gctr_blocks128 alg)) =
(va_QProc (va_code_Gctr_blocks128 alg) ([va_Mod_flags; va_Mod_mem_heaplet 1; va_Mod_xmm 10;
va_Mod_xmm 11; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm
1; va_Mod_xmm 0; va_Mod_reg64 rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRbx; va_Mod_mem])
(va_wp_Gctr_blocks128 alg in_b out_b key round_keys keys_b) (va_wpProof_Gctr_blocks128 alg in_b
out_b key round_keys keys_b))
//--
//-- Gcm_make_length_quad
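// Gcm_make_length_quad builds the GCM length quadword in xmm 0 from 8 `op_Multiply` rR11 and
// 8 `op_Multiply` rR13 (byte counts converted to bit counts, presumably the AAD and plaintext
// lengths) via the two insert_nat64 calls in the postcondition.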
val va_code_Gcm_make_length_quad : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_make_length_quad : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_make_length_quad : va_b0:va_code -> va_s0:va_state
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_make_length_quad ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply`
va_get_reg64 rR11 va_s0 < pow2_64)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (8
`op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 <
pow2_64 /\ va_get_xmm 0 va_sM == Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11
va_s0) 1) (8 `op_Multiply` va_get_reg64 rR13 va_s0) 0) /\ va_state_eq va_sM (va_update_flags
va_sM (va_update_reg64 rRax va_sM (va_update_xmm 0 va_sM (va_update_ok va_sM va_s0))))))
[@ va_qattr]
let va_wp_Gcm_make_length_quad (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ 8 `op_Multiply` va_get_reg64 rR13 va_s0 < pow2_64 /\ 8
`op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64) /\ (forall (va_x_xmm0:quad32) (va_x_rax:nat64)
(va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_reg64 rRax va_x_rax
(va_upd_xmm 0 va_x_xmm0 va_s0)) in va_get_ok va_sM /\ (8 `op_Multiply` va_get_reg64 rR13 va_s0
< pow2_64 /\ 8 `op_Multiply` va_get_reg64 rR11 va_s0 < pow2_64 /\ va_get_xmm 0 va_sM ==
Vale.Def.Types_s.insert_nat64 (Vale.Def.Types_s.insert_nat64 (Vale.Def.Words_s.Mkfour
#Vale.Def.Types_s.nat32 0 0 0 0) (8 `op_Multiply` va_get_reg64 rR11 va_s0) 1) (8 `op_Multiply`
va_get_reg64 rR13 va_s0) 0) ==> va_k va_sM (())))
val va_wpProof_Gcm_make_length_quad : va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Gcm_make_length_quad va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_make_length_quad ())
([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_make_length_quad () : (va_quickCode unit (va_code_Gcm_make_length_quad ())) =
(va_QProc (va_code_Gcm_make_length_quad ()) ([va_Mod_flags; va_Mod_reg64 rRax; va_Mod_xmm 0])
va_wp_Gcm_make_length_quad va_wpProof_Gcm_make_length_quad)
//--
//-- Ghash_extra_bytes
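// Ghash_extra_bytes folds the trailing partial block (total_bytes `op_Modulus` 16 bytes,
// held in xmm 0, with the remainder count in rR10) into the running GHASH state in xmm 8;
// the postcondition relates xmm 8 to ghash_incremental over the padded input quads.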
val va_code_Ghash_extra_bytes : va_dummy:unit -> Tot va_code
val va_codegen_success_Ghash_extra_bytes : va_dummy:unit -> Tot va_pbool
val va_lemma_Ghash_extra_bytes : va_b0:va_code -> va_s0:va_state -> hkeys_b:buffer128 ->
total_bytes:nat -> old_hash:quad32 -> h_LE:quad32 -> completed_quads:(seq quad32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Ghash_extra_bytes ()) va_s0 /\ va_get_ok va_s0 /\
(pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let raw_quads = FStar.Seq.Base.append #quad32 completed_quads (FStar.Seq.Base.create #quad32 1
(va_get_xmm 0 va_s0)) in let input_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads) 0 total_bytes in let padded_bytes =
Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let input_quads =
Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) /\ va_state_eq va_sM (va_update_flags va_sM (va_update_xmm 8 va_sM
(va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5 va_sM (va_update_xmm 4 va_sM
(va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1 va_sM (va_update_xmm 0 va_sM
(va_update_reg64 rR11 va_sM (va_update_reg64 rRcx va_sM (va_update_ok va_sM va_s0)))))))))))))))
[@ va_qattr]
let va_wp_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32) (h_LE:quad32)
(completed_quads:(seq quad32)) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (pclmulqdq_enabled /\ avx_enabled /\ sse_enabled /\ va_get_xmm 9 va_s0 ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\
va_get_xmm 8 va_s0 == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0
h_LE old_hash completed_quads) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128
(va_get_mem_heaplet 0 va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE) /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0 va_s0) (va_get_reg64 rR9 va_s0 - 32)
hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ FStar.Seq.Base.length #quad32 completed_quads ==
total_bytes `op_Division` 16 /\ total_bytes < 16 `op_Multiply` FStar.Seq.Base.length #quad32
completed_quads + 16 /\ va_get_reg64 rR10 va_s0 == total_bytes `op_Modulus` 16 /\ total_bytes
`op_Modulus` 16 =!= 0 /\ (0 < total_bytes /\ total_bytes < 16 `op_Multiply`
Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes) /\ 16 `op_Multiply`
(Vale.AES.GCM_helpers.bytes_to_quad_size total_bytes - 1) < total_bytes) /\ (forall
(va_x_rcx:nat64) (va_x_r11:nat64) (va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32)
(va_x_xmm3:quad32) (va_x_xmm4:quad32) (va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32)
(va_x_xmm8:quad32) (va_x_efl:Vale.X64.Flags.t) . let va_sM = va_upd_flags va_x_efl (va_upd_xmm
8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm
4 va_x_xmm4 (va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm
0 va_x_xmm0 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRcx va_x_rcx va_s0))))))))))) in
va_get_ok va_sM /\ (let raw_quads = FStar.Seq.Base.append #quad32 completed_quads
(FStar.Seq.Base.create #quad32 1 (va_get_xmm 0 va_s0)) in let input_bytes =
FStar.Seq.Base.slice #Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_quads)
0 total_bytes in let padded_bytes = Vale.AES.GCTR_s.pad_to_128_bits input_bytes in let
input_quads = Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_bytes in total_bytes > 0 ==> l_and
(FStar.Seq.Base.length #Vale.Def.Types_s.quad32 input_quads > 0)
(Vale.Def.Types_s.reverse_bytes_quad32 (va_get_xmm 8 va_sM) == Vale.AES.GHash.ghash_incremental
h_LE old_hash input_quads)) ==> va_k va_sM (())))
val va_wpProof_Ghash_extra_bytes : hkeys_b:buffer128 -> total_bytes:nat -> old_hash:quad32 ->
h_LE:quad32 -> completed_quads:(seq quad32) -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Ghash_extra_bytes ()) ([va_Mod_flags;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) va_s0 va_k ((va_sM,
va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Ghash_extra_bytes (hkeys_b:buffer128) (total_bytes:nat) (old_hash:quad32)
(h_LE:quad32) (completed_quads:(seq quad32)) : (va_quickCode unit (va_code_Ghash_extra_bytes ()))
=
(va_QProc (va_code_Ghash_extra_bytes ()) ([va_Mod_flags; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm
6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_reg64 rR11; va_Mod_reg64 rRcx]) (va_wp_Ghash_extra_bytes hkeys_b total_bytes old_hash
h_LE completed_quads) (va_wpProof_Ghash_extra_bytes hkeys_b total_bytes old_hash h_LE
completed_quads))
//--
//-- Gcm_blocks_auth
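// Gcm_blocks_auth GHASHes the additional authenticated data: rRdx full blocks from auth_b,
// plus the extra bytes in abytes_b when rRsi exceeds rRdx `op_Multiply` 16 bytes. It returns
// the padded quad32 sequence that was hashed and leaves the (byte-reversed) intermediate
// hash in xmm 8.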
val va_code_Gcm_blocks_auth : va_dummy:unit -> Tot va_code
val va_codegen_success_Gcm_blocks_auth : va_dummy:unit -> Tot va_pbool
val va_lemma_Gcm_blocks_auth : va_b0:va_code -> va_s0:va_state -> auth_b:buffer128 ->
abytes_b:buffer128 -> hkeys_b:buffer128 -> h_LE:quad32
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_require_total va_b0 (va_code_Gcm_blocks_auth ()) va_s0 /\ va_get_ok va_s0 /\
(sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0) (va_get_reg64 rRdi
va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE))))
(ensures (fun (va_sM, va_fM, auth_quad_seq) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = (if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) then FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b) else Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM)
auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64 rRsi va_s0) in let
(padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits auth_input_bytes in
auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes /\ va_get_xmm 8
va_sM == Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.GHash.ghash_incremental0 h_LE
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0) auth_quad_seq))) /\ va_state_eq va_sM
(va_update_xmm 9 va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM
(va_update_xmm 5 va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM
(va_update_xmm 1 va_sM (va_update_xmm 0 va_sM (va_update_flags va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rRcx va_sM (va_update_reg64 rR10 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rRdx va_sM (va_update_ok va_sM va_s0)))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128) (h_LE:quad32)
(va_s0:va_state) (va_k:(va_state -> (seq quad32) -> Type0)) : Type0 =
(va_get_ok va_s0 /\ (sse_enabled /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 1 va_s0)
(va_get_reg64 rRdi va_s0) auth_b (va_get_reg64 rRdx va_s0) (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 7 va_s0) (va_get_reg64 rRbx va_s0) abytes_b
1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem_heaplet 0
va_s0) (va_get_reg64 rR9 va_s0 - 32) hkeys_b 8 (va_get_mem_layout va_s0) Secret /\ va_get_reg64
rRdi va_s0 + 16 `op_Multiply` va_get_reg64 rRdx va_s0 < pow2_64 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 auth_b == va_get_reg64 rRdx va_s0 /\ Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 abytes_b == 1 /\ (va_mul_nat (va_get_reg64 rRdx va_s0) (128
`op_Division` 8) <= va_get_reg64 rRsi va_s0 /\ va_get_reg64 rRsi va_s0 < va_mul_nat
(va_get_reg64 rRdx va_s0) (128 `op_Division` 8) + 128 `op_Division` 8) /\ (pclmulqdq_enabled /\
avx_enabled) /\ Vale.AES.GHash.hkeys_reqs_priv (Vale.X64.Decls.s128 (va_get_mem_heaplet 0
va_s0) hkeys_b) (Vale.Def.Types_s.reverse_bytes_quad32 h_LE)) /\ (forall (va_x_rdx:nat64)
(va_x_r11:nat64) (va_x_r10:nat64) (va_x_rcx:nat64) (va_x_r15:nat64) (va_x_efl:Vale.X64.Flags.t)
(va_x_xmm0:quad32) (va_x_xmm1:quad32) (va_x_xmm2:quad32) (va_x_xmm3:quad32) (va_x_xmm4:quad32)
(va_x_xmm5:quad32) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(auth_quad_seq:(seq quad32)) . let va_sM = va_upd_xmm 9 va_x_xmm9 (va_upd_xmm 8 va_x_xmm8
(va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_xmm 5 va_x_xmm5 (va_upd_xmm 4 va_x_xmm4
(va_upd_xmm 3 va_x_xmm3 (va_upd_xmm 2 va_x_xmm2 (va_upd_xmm 1 va_x_xmm1 (va_upd_xmm 0 va_x_xmm0
(va_upd_flags va_x_efl (va_upd_reg64 rR15 va_x_r15 (va_upd_reg64 rRcx va_x_rcx (va_upd_reg64
rR10 va_x_r10 (va_upd_reg64 rR11 va_x_r11 (va_upd_reg64 rRdx va_x_rdx va_s0))))))))))))))) in
va_get_ok va_sM /\ (va_get_reg64 rR15 va_sM == va_get_reg64 rRsi va_sM /\ va_get_xmm 9 va_sM ==
Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 202182159 134810123 67438087 66051 /\ (let
(raw_auth_quads:(seq quad32)) = va_if (va_get_reg64 rRsi va_s0 > va_get_reg64 rRdx va_s0
`op_Multiply` 128 `op_Division` 8) (fun _ -> FStar.Seq.Base.append #Vale.X64.Decls.quad32
(Vale.X64.Decls.s128 (va_get_mem_heaplet 1 va_sM) auth_b) (Vale.X64.Decls.s128
(va_get_mem_heaplet 7 va_s0) abytes_b)) (fun _ -> Vale.X64.Decls.s128 (va_get_mem_heaplet 1
va_sM) auth_b) in let (auth_input_bytes:(seq nat8)) = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes raw_auth_quads) 0 (va_get_reg64
rRsi va_s0) in let (padded_auth_bytes:(seq nat8)) = Vale.AES.GCTR_s.pad_to_128_bits
auth_input_bytes in auth_quad_seq == Vale.Def.Types_s.le_bytes_to_seq_quad32 padded_auth_bytes
/\ va_get_xmm 8 va_sM == Vale.Def.Types_s.reverse_bytes_quad32
(Vale.AES.GHash.ghash_incremental0 h_LE (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0
0) auth_quad_seq))) ==> va_k va_sM ((auth_quad_seq))))
val va_wpProof_Gcm_blocks_auth : auth_b:buffer128 -> abytes_b:buffer128 -> hkeys_b:buffer128 ->
h_LE:quad32 -> va_s0:va_state -> va_k:(va_state -> (seq quad32) -> Type0)
-> Ghost (va_state & va_fuel & (seq quad32))
(requires (va_t_require va_s0 /\ va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9;
va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm
2; va_Mod_xmm 1; va_Mod_xmm 0; va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64
rR10; va_Mod_reg64 rR11; va_Mod_reg64 rRdx]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Gcm_blocks_auth (auth_b:buffer128) (abytes_b:buffer128) (hkeys_b:buffer128)
(h_LE:quad32) : (va_quickCode (seq quad32) (va_code_Gcm_blocks_auth ())) =
(va_QProc (va_code_Gcm_blocks_auth ()) ([va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6;
va_Mod_xmm 5; va_Mod_xmm 4; va_Mod_xmm 3; va_Mod_xmm 2; va_Mod_xmm 1; va_Mod_xmm 0;
va_Mod_flags; va_Mod_reg64 rR15; va_Mod_reg64 rRcx; va_Mod_reg64 rR10; va_Mod_reg64 rR11;
va_Mod_reg64 rRdx]) (va_wp_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE)
(va_wpProof_Gcm_blocks_auth auth_b abytes_b hkeys_b h_LE))
//--
//-- Save_registers
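// Save_registers spills the callee-saved state to the stack: rbx, rbp, rdi, rsi and r12-r15
// always, plus xmm6-xmm15 when win is true, lowering rsp by 8 `op_Multiply` (8 + 20) or
// 8 `op_Multiply` 8 bytes accordingly, with the slots marked Secret in the stack taint.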
val va_code_Save_registers : win:bool -> Tot va_code
val va_codegen_success_Save_registers : win:bool -> Tot va_pbool
val va_lemma_Save_registers : va_b0:va_code -> va_s0:va_state -> win:bool
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Save_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + (if win then (10
`op_Multiply` 2) else 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s
(va_get_reg64 rRsp va_sM) (8 + (if win then (10 `op_Multiply` 2) else 0)) (va_get_stack va_sM)
Secret (va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + (if win then 160 else 0)) (va_get_stack va_sM) == va_get_reg64
rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + (if win then 160
else 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM /\ va_state_eq va_sM
(va_update_stackTaint va_sM (va_update_flags va_sM (va_update_stack va_sM (va_update_reg64 rRsp
va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM va_s0))))))))
[@ va_qattr]
let va_wp_Save_registers (win:bool) (va_s0:va_state) (va_k:(va_state -> unit -> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ va_get_reg64 rRsp va_s0 == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ (forall (va_x_rax:nat64) (va_x_rsp:nat64) (va_x_stack:vale_stack)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_stack va_x_stack (va_upd_reg64 rRsp va_x_rsp
(va_upd_reg64 rRax va_x_rax va_s0)))) in va_get_ok va_sM /\ va_get_reg64 rRsp va_sM ==
va_get_reg64 rRsp va_s0 - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply` 2) (fun _
-> 0)) /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp
(va_get_stack va_s0) /\ Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_sM) (8 +
va_if win (fun _ -> 10 `op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_sM) Secret
(va_get_stackTaint va_sM) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_sM)
(va_get_reg64 rRsp va_s0) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.modifies_stacktaint (va_get_reg64 rRsp va_sM) (va_get_reg64 rRsp va_s0)
(va_get_stackTaint va_s0) (va_get_stackTaint va_sM) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 6
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 6 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 16) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 7
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 24) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 7 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 32) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 8
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 8 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 9
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 9 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 64) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 10
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 72) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 10 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 80) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 11
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 88) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 11 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 96) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 12
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 104) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 12 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 112) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 13
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 120) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 13 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 128) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 14
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 136) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 14 va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 144) (va_get_stack va_sM) == Vale.Arch.Types.hi64 (va_get_xmm 15
va_sM)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 152) (va_get_stack
va_sM) == Vale.Arch.Types.lo64 (va_get_xmm 15 va_sM)) /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 8 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 16 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 24 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rRsi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 32 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR12 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 40 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_sM) == va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_sM + 48 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) ==
va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_sM + 56 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_sM) == va_get_reg64 rR15 va_sM ==> va_k va_sM
(())))
val va_wpProof_Save_registers : win:bool -> va_s0:va_state -> va_k:(va_state -> unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Save_registers win va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Save_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack; va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) va_s0
va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Save_registers (win:bool) : (va_quickCode unit (va_code_Save_registers win)) =
(va_QProc (va_code_Save_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_stack;
va_Mod_reg64 rRsp; va_Mod_reg64 rRax]) (va_wp_Save_registers win) (va_wpProof_Save_registers
win))
//--
//-- Restore_registers
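// Restore_registers is the inverse of Save_registers: it reloads the saved callee-saved
// registers (and xmm6-xmm15 when win is true) from the stack slots written above and
// restores rsp to old_rsp.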
val va_code_Restore_registers : win:bool -> Tot va_code
val va_codegen_success_Restore_registers : win:bool -> Tot va_pbool
val va_lemma_Restore_registers : va_b0:va_code -> va_s0:va_state -> win:bool -> old_rsp:nat ->
old_xmm6:quad32 -> old_xmm7:quad32 -> old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 ->
old_xmm11:quad32 -> old_xmm12:quad32 -> old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Restore_registers win) va_s0 /\ va_get_ok va_s0 /\
sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + (if win then (10
`op_Multiply` 2) else 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\ va_get_reg64
rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + (if win then (10 `op_Multiply` 2) else 0)) /\ (win
==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 8) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 24) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 40) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 56) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 64) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 72) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 80) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 88) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 96) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 104) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 112) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 120) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 128) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 136) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 144) (va_get_stack va_s0) ==
Vale.Arch.Types.hi64 old_xmm15) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 152) (va_get_stack va_s0) == Vale.Arch.Types.lo64 old_xmm15)))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp (va_get_stack va_sM) ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Stack_i.modifies_stack (va_get_reg64
rRsp va_s0) (va_get_reg64 rRsp va_sM) (va_get_stack va_s0) (va_get_stack va_sM) /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRbx va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 8 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 16 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 24 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 40 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR13 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + (if win then 160 else 0))
(va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64
rRsp va_s0 + 56 + (if win then 160 else 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\
(win ==> va_get_xmm 6 va_sM == old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win
==> va_get_xmm 8 va_sM == old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==>
va_get_xmm 10 va_sM == old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==>
va_get_xmm 12 va_sM == old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==>
va_get_xmm 14 va_sM == old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) /\ va_state_eq
va_sM (va_update_stackTaint va_sM (va_update_flags va_sM (va_update_reg64 rRsp va_sM
(va_update_stack va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13 va_sM
(va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9 va_sM
(va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_reg64 rR15
va_sM (va_update_reg64 rR14 va_sM (va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM
(va_update_reg64 rRsi va_sM (va_update_reg64 rRdi va_sM (va_update_reg64 rRbp va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM
va_s0))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) (va_s0:va_state) (va_k:(va_state -> unit
-> Type0)) : Type0 =
(va_get_ok va_s0 /\ sse_enabled /\ old_rsp == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.valid_stack_slot64s (va_get_reg64 rRsp va_s0) (8 + va_if win (fun _ -> 10
`op_Multiply` 2) (fun _ -> 0)) (va_get_stack va_s0) Secret (va_get_stackTaint va_s0) /\
va_get_reg64 rRsp va_s0 == old_rsp - 8 `op_Multiply` (8 + va_if win (fun _ -> 10 `op_Multiply`
2) (fun _ -> 0)) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 0)
(va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm6) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm6) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 16) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm7) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm7) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 32) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm8) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm8) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 48) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm9) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm9) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 64) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm10) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 72) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm10) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 80) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm11) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 88) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm11) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 96) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm12) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 104) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm12) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 112) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm13) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 120) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm13) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 128) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm14) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 136) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm14) /\ (win ==> Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 144) (va_get_stack va_s0) == Vale.Arch.Types.hi64 old_xmm15) /\ (win ==>
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 152) (va_get_stack va_s0) ==
Vale.Arch.Types.lo64 old_xmm15) /\ (forall (va_x_rax:nat64) (va_x_rbx:nat64) (va_x_rbp:nat64)
(va_x_rdi:nat64) (va_x_rsi:nat64) (va_x_r12:nat64) (va_x_r13:nat64) (va_x_r14:nat64)
(va_x_r15:nat64) (va_x_xmm6:quad32) (va_x_xmm7:quad32) (va_x_xmm8:quad32) (va_x_xmm9:quad32)
(va_x_xmm10:quad32) (va_x_xmm11:quad32) (va_x_xmm12:quad32) (va_x_xmm13:quad32)
(va_x_xmm14:quad32) (va_x_xmm15:quad32) (va_x_stack:vale_stack) (va_x_rsp:nat64)
(va_x_efl:Vale.X64.Flags.t) (va_x_stackTaint:memtaint) . let va_sM = va_upd_stackTaint
va_x_stackTaint (va_upd_flags va_x_efl (va_upd_reg64 rRsp va_x_rsp (va_upd_stack va_x_stack
(va_upd_xmm 15 va_x_xmm15 (va_upd_xmm 14 va_x_xmm14 (va_upd_xmm 13 va_x_xmm13 (va_upd_xmm 12
va_x_xmm12 (va_upd_xmm 11 va_x_xmm11 (va_upd_xmm 10 va_x_xmm10 (va_upd_xmm 9 va_x_xmm9
(va_upd_xmm 8 va_x_xmm8 (va_upd_xmm 7 va_x_xmm7 (va_upd_xmm 6 va_x_xmm6 (va_upd_reg64 rR15
va_x_r15 (va_upd_reg64 rR14 va_x_r14 (va_upd_reg64 rR13 va_x_r13 (va_upd_reg64 rR12 va_x_r12
(va_upd_reg64 rRsi va_x_rsi (va_upd_reg64 rRdi va_x_rdi (va_upd_reg64 rRbp va_x_rbp
(va_upd_reg64 rRbx va_x_rbx (va_upd_reg64 rRax va_x_rax va_s0)))))))))))))))))))))) in
va_get_ok va_sM /\ va_get_reg64 rRsp va_sM == old_rsp /\ Vale.X64.Stack_i.init_rsp
(va_get_stack va_sM) == Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\
Vale.X64.Stack_i.modifies_stack (va_get_reg64 rRsp va_s0) (va_get_reg64 rRsp va_sM)
(va_get_stack va_s0) (va_get_stack va_sM) /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp
va_s0 + 0 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbx
va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + va_if win (fun _ -> 160)
(fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRbp va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 16 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rRdi va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 24 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rRsi va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR12 va_sM /\ Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + va_if win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) ==
va_get_reg64 rR13 va_sM /\ Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 48 + va_if
win (fun _ -> 160) (fun _ -> 0)) (va_get_stack va_s0) == va_get_reg64 rR14 va_sM /\
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 56 + va_if win (fun _ -> 160) (fun _
-> 0)) (va_get_stack va_s0) == va_get_reg64 rR15 va_sM /\ (win ==> va_get_xmm 6 va_sM ==
old_xmm6) /\ (win ==> va_get_xmm 7 va_sM == old_xmm7) /\ (win ==> va_get_xmm 8 va_sM ==
old_xmm8) /\ (win ==> va_get_xmm 9 va_sM == old_xmm9) /\ (win ==> va_get_xmm 10 va_sM ==
old_xmm10) /\ (win ==> va_get_xmm 11 va_sM == old_xmm11) /\ (win ==> va_get_xmm 12 va_sM ==
old_xmm12) /\ (win ==> va_get_xmm 13 va_sM == old_xmm13) /\ (win ==> va_get_xmm 14 va_sM ==
old_xmm14) /\ (win ==> va_get_xmm 15 va_sM == old_xmm15) ==> va_k va_sM (())))
val va_wpProof_Restore_registers : win:bool -> old_rsp:nat -> old_xmm6:quad32 -> old_xmm7:quad32 ->
old_xmm8:quad32 -> old_xmm9:quad32 -> old_xmm10:quad32 -> old_xmm11:quad32 -> old_xmm12:quad32 ->
old_xmm13:quad32 -> old_xmm14:quad32 -> old_xmm15:quad32 -> va_s0:va_state -> va_k:(va_state ->
unit -> Type0)
-> Ghost (va_state & va_fuel & unit)
(requires (va_t_require va_s0 /\ va_wp_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8
old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15 va_s0 va_k))
(ensures (fun (va_sM, va_f0, va_g) -> va_t_ensure (va_code_Restore_registers win)
([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp; va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm
14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11; va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8;
va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15; va_Mod_reg64 rR14; va_Mod_reg64 rR13;
va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi; va_Mod_reg64 rRbp; va_Mod_reg64 rRbx;
va_Mod_reg64 rRax]) va_s0 va_k ((va_sM, va_f0, va_g))))
[@ "opaque_to_smt" va_qattr]
let va_quick_Restore_registers (win:bool) (old_rsp:nat) (old_xmm6:quad32) (old_xmm7:quad32)
(old_xmm8:quad32) (old_xmm9:quad32) (old_xmm10:quad32) (old_xmm11:quad32) (old_xmm12:quad32)
(old_xmm13:quad32) (old_xmm14:quad32) (old_xmm15:quad32) : (va_quickCode unit
(va_code_Restore_registers win)) =
(va_QProc (va_code_Restore_registers win) ([va_Mod_stackTaint; va_Mod_flags; va_Mod_reg64 rRsp;
va_Mod_stack; va_Mod_xmm 15; va_Mod_xmm 14; va_Mod_xmm 13; va_Mod_xmm 12; va_Mod_xmm 11;
va_Mod_xmm 10; va_Mod_xmm 9; va_Mod_xmm 8; va_Mod_xmm 7; va_Mod_xmm 6; va_Mod_reg64 rR15;
va_Mod_reg64 rR14; va_Mod_reg64 rR13; va_Mod_reg64 rR12; va_Mod_reg64 rRsi; va_Mod_reg64 rRdi;
va_Mod_reg64 rRbp; va_Mod_reg64 rRbx; va_Mod_reg64 rRax]) (va_wp_Restore_registers win old_rsp
old_xmm6 old_xmm7 old_xmm8 old_xmm9 old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14
old_xmm15) (va_wpProof_Restore_registers win old_rsp old_xmm6 old_xmm7 old_xmm8 old_xmm9
old_xmm10 old_xmm11 old_xmm12 old_xmm13 old_xmm14 old_xmm15))
//--
#reset-options "--z3rlimit 100"
//-- Gcm_blocks_stdcall
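// Gcm_blocks_stdcall is the top-level stdcall entry point for GCM encryption; the requires
// clause below decodes the Windows vs. SysV calling convention, pulling the buffer pointers
// and lengths from registers and from Public-tainted stack slots.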
val va_code_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_code
val va_codegen_success_Gcm_blocks_stdcall : win:bool -> alg:algorithm -> Tot va_pbool
let va_req_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) : prop =
(va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0 /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv)))
let va_ens_Gcm_blocks_stdcall (va_b0:va_code) (va_s0:va_state) (win:bool) (alg:algorithm)
(auth_b:buffer128) (auth_bytes:nat64) (auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128)
(iv:supported_iv_LE) (hkeys_b:buffer128) (abytes_b:buffer128) (in128x6_b:buffer128)
(out128x6_b:buffer128) (len128x6_num:nat64) (in128_b:buffer128) (out128_b:buffer128)
(len128_num:nat64) (inout_b:buffer128) (plain_num:nat64) (scratch_b:buffer128) (tag_b:buffer128)
(key:(seq nat32)) (va_sM:va_state) (va_fM:va_fuel) : prop =
(va_req_Gcm_blocks_stdcall va_b0 va_s0 win alg auth_b auth_bytes auth_num keys_b iv_b iv hkeys_b
abytes_b in128x6_b out128x6_b len128x6_num in128_b out128_b len128_num inout_b plain_num
scratch_b tag_b key /\ va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\ (let
(auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0 else
va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let (auth_len:(va_int_range 0
18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else va_get_reg64 rRdx va_s0) in
let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR9 va_s0 else
va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 0) (va_get_stack va_s0) else
va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 32 + 8 + 8) (va_get_stack va_s0) else
va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0)) in
let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in let
(out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0)))))))))))))))))))))))))))))))))))))))))))))
val va_lemma_Gcm_blocks_stdcall : va_b0:va_code -> va_s0:va_state -> win:bool -> alg:algorithm ->
auth_b:buffer128 -> auth_bytes:nat64 -> auth_num:nat64 -> keys_b:buffer128 -> iv_b:buffer128 ->
iv:supported_iv_LE -> hkeys_b:buffer128 -> abytes_b:buffer128 -> in128x6_b:buffer128 ->
out128x6_b:buffer128 -> len128x6_num:nat64 -> in128_b:buffer128 -> out128_b:buffer128 ->
len128_num:nat64 -> inout_b:buffer128 -> plain_num:nat64 -> scratch_b:buffer128 ->
tag_b:buffer128 -> key:(seq nat32)
-> Ghost (va_state & va_fuel)
(requires (va_require_total va_b0 (va_code_Gcm_blocks_stdcall win alg) va_s0 /\ va_get_ok va_s0
/\ (let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in aesni_enabled /\ pclmulqdq_enabled
/\ avx_enabled /\ sse_enabled /\ movbe_enabled /\ va_get_reg64 rRsp va_s0 ==
Vale.X64.Stack_i.init_rsp (va_get_stack va_s0) /\ Vale.X64.Memory.is_initial_heap
(va_get_mem_layout va_s0) (va_get_mem va_s0) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 0) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 32) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 56) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(~win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (~win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 0) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 8) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 16) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 48) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 72) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ (win ==> Vale.X64.Stack_i.valid_stack_slot64
(va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) Public (va_get_stackTaint va_s0)) /\
(win ==> Vale.X64.Stack_i.valid_stack_slot64 (va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack
va_s0) Public (va_get_stackTaint va_s0)) /\ auth_len == auth_num /\ auth_num_bytes ==
auth_bytes /\ len128x6 == len128x6_num /\ len128 == len128_num /\ plain_num_bytes == plain_num
/\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) auth_ptr auth_b auth_len
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0)
abytes_ptr abytes_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) iv_ptr iv_b 1 (va_get_mem_layout va_s0) Public /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128x6_ptr in128x6_b len128x6
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
out128x6_ptr out128x6_b len128x6 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) in128_ptr in128_b len128 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) out128_ptr out128_b len128
(va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0)
inout_ptr inout_b 1 (va_get_mem_layout va_s0) Secret /\ Vale.X64.Decls.validDstAddrs128
(va_get_mem va_s0) scratch_ptr scratch_b 9 (va_get_mem_layout va_s0) Secret /\
Vale.X64.Decls.validSrcAddrs128 (va_get_mem va_s0) xip hkeys_b 8 (va_get_mem_layout va_s0)
Secret /\ Vale.X64.Decls.validDstAddrs128 (va_get_mem va_s0) tag_ptr tag_b 1 (va_get_mem_layout
va_s0) Secret /\ Vale.X64.Decls.buffer_disjoints128 tag_b ([keys_b; auth_b; abytes_b; iv_b;
in128x6_b; out128x6_b; in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 iv_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b;
in128_b; out128_b; inout_b; scratch_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128
scratch_b ([keys_b; auth_b; abytes_b; in128x6_b; out128x6_b; in128_b; out128_b; inout_b;
hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 inout_b ([keys_b; auth_b; abytes_b; in128x6_b;
out128x6_b; in128_b; out128_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 auth_b ([keys_b;
abytes_b; hkeys_b]) /\ Vale.X64.Decls.buffer_disjoints128 abytes_b ([keys_b; hkeys_b]) /\
Vale.X64.Decls.buffer_disjoints128 out128x6_b ([keys_b; auth_b; abytes_b; hkeys_b; in128_b;
inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128x6_b ([keys_b; auth_b; abytes_b; hkeys_b;
in128_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 out128_b ([keys_b; auth_b; abytes_b;
hkeys_b; in128x6_b; out128x6_b; inout_b]) /\ Vale.X64.Decls.buffer_disjoints128 in128_b
([keys_b; auth_b; abytes_b; hkeys_b; in128x6_b; out128x6_b; inout_b]) /\
(Vale.X64.Decls.buffers_disjoint128 in128x6_b out128x6_b \/ in128x6_b == out128x6_b) /\
(Vale.X64.Decls.buffers_disjoint128 in128_b out128_b \/ in128_b == out128_b) /\ auth_ptr + 16
`op_Multiply` auth_len < pow2_64 /\ in128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\
out128x6_ptr + 16 `op_Multiply` len128x6 < pow2_64 /\ in128_ptr + 16 `op_Multiply` len128 <
pow2_64 /\ out128_ptr + 16 `op_Multiply` len128 < pow2_64 /\ inout_ptr + 16 < pow2_64 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 auth_b == auth_len /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 abytes_b == 1 /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128x6_b ==
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 out128x6_b /\
Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b == Vale.X64.Decls.buffer_length
#Vale.X64.Memory.vuint128 out128_b /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128
in128x6_b == len128x6 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 in128_b ==
len128 /\ Vale.X64.Decls.buffer_length #Vale.X64.Memory.vuint128 inout_b == 1 /\
plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ xip + 32 < pow2_64 /\
Vale.X64.Memory.buffer_addr #Vale.X64.Memory.vuint128 keys_b (va_get_mem va_s0) + 128 < pow2_64
/\ len128x6 `op_Modulus` 6 == 0 /\ (len128x6 > 0 ==> len128x6 >= 18) /\ 12 + len128x6 + 6 <
pow2_32 /\ (va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat len128 (128 `op_Division` 8)
<= plain_num_bytes /\ plain_num_bytes < va_mul_nat len128x6 (128 `op_Division` 8) + va_mul_nat
len128 (128 `op_Division` 8) + 128 `op_Division` 8) /\ (va_mul_nat auth_len (128 `op_Division`
8) <= auth_num_bytes /\ auth_num_bytes < va_mul_nat auth_len (128 `op_Division` 8) + 128
`op_Division` 8) /\ (alg = AES_128 \/ alg = AES_256) /\ Vale.AES.AES_s.is_aes_key_LE alg key /\
Vale.X64.Decls.buffer128_as_seq (va_get_mem va_s0) keys_b ==
Vale.AES.AES_s.key_to_round_keys_LE alg key /\ Vale.X64.Decls.validSrcAddrs128 (va_get_mem
va_s0) keys_ptr keys_b (Vale.AES.AES_common_s.nr alg + 1) (va_get_mem_layout va_s0) Secret /\
Vale.AES.OptPublic.hkeys_reqs_pub (Vale.X64.Decls.s128 (va_get_mem va_s0) hkeys_b)
(Vale.Def.Types_s.reverse_bytes_quad32 (Vale.AES.AES_s.aes_encrypt_LE alg key
(Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0))) /\ (let h_LE =
Vale.AES.AES_s.aes_encrypt_LE alg key (Vale.Def.Words_s.Mkfour #Vale.Def.Types_s.nat32 0 0 0 0)
in let iv_BE = Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in iv_BE ==
Vale.AES.GCM_s.compute_iv_BE h_LE iv))))
(ensures (fun (va_sM, va_fM) -> va_ensure_total va_b0 va_s0 va_sM va_fM /\ va_get_ok va_sM /\
(let (auth_ptr:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rRcx va_s0
else va_get_reg64 rRdi va_s0) in let (auth_num_bytes:(va_int_range 0 18446744073709551615)) =
(if win then va_get_reg64 rRdx va_s0 else va_get_reg64 rRsi va_s0) in let
(auth_len:(va_int_range 0 18446744073709551615)) = (if win then va_get_reg64 rR8 va_s0 else
va_get_reg64 rRdx va_s0) in let (keys_ptr:(va_int_range 0 18446744073709551615)) = (if win then
va_get_reg64 rR9 va_s0 else va_get_reg64 rRcx va_s0) in let (iv_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 0) (va_get_stack va_s0) else va_get_reg64 rR8 va_s0) in let (xip:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
32 + 8 + 8) (va_get_stack va_s0) else va_get_reg64 rR9 va_s0) in let (abytes_ptr:(va_int_range
0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0
+ 40 + 16) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8
+ 0) (va_get_stack va_s0)) in let (in128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 24) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 8) (va_get_stack va_s0)) in
let (out128x6_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 32) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 16) (va_get_stack va_s0)) in let
(len128x6:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 40) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 24) (va_get_stack va_s0)) in let (in128_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 48) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
32) (va_get_stack va_s0)) in let (out128_ptr:(va_int_range 0 18446744073709551615)) = (if win
then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 56) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 40) (va_get_stack va_s0)) in
let (len128:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 64) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 48) (va_get_stack va_s0)) in let (inout_ptr:(va_int_range 0
18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 +
40 + 72) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 +
56) (va_get_stack va_s0)) in let (plain_num_bytes:(va_int_range 0 18446744073709551615)) = (if
win then Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 80) (va_get_stack va_s0)
else Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 64) (va_get_stack va_s0)) in
let (scratch_ptr:(va_int_range 0 18446744073709551615)) = (if win then
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 40 + 88) (va_get_stack va_s0) else
Vale.X64.Stack_i.load_stack64 (va_get_reg64 rRsp va_s0 + 8 + 72) (va_get_stack va_s0)) in let
(tag_ptr:(va_int_range 0 18446744073709551615)) = (if win then Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 40 + 96) (va_get_stack va_s0) else Vale.X64.Stack_i.load_stack64
(va_get_reg64 rRsp va_s0 + 8 + 80) (va_get_stack va_s0)) in Vale.X64.Decls.modifies_mem
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 tag_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 iv_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 scratch_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128x6_b)
(Vale.X64.Decls.loc_union (Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 out128_b)
(Vale.X64.Decls.loc_buffer #Vale.X64.Memory.vuint128 inout_b)))))) (va_get_mem va_s0)
(va_get_mem va_sM) /\ plain_num_bytes < pow2_32 /\ auth_num_bytes < pow2_32 /\ (let iv_BE =
Vale.X64.Decls.buffer128_read iv_b 0 (va_get_mem va_s0) in let auth_raw_quads =
FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0) auth_b)
(Vale.X64.Decls.s128 (va_get_mem va_s0) abytes_b) in let auth_bytes = FStar.Seq.Base.slice
#Vale.Def.Types_s.nat8 (Vale.Def.Types_s.le_seq_quad32_to_bytes auth_raw_quads) 0
auth_num_bytes in let plain_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32
(FStar.Seq.Base.append #Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_s0)
in128x6_b) (Vale.X64.Decls.s128 (va_get_mem va_s0) in128_b)) (Vale.X64.Decls.s128 (va_get_mem
va_s0) inout_b) in let plain_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes plain_raw_quads) 0 plain_num_bytes in let
cipher_raw_quads = FStar.Seq.Base.append #Vale.X64.Decls.quad32 (FStar.Seq.Base.append
#Vale.X64.Decls.quad32 (Vale.X64.Decls.s128 (va_get_mem va_sM) out128x6_b) (Vale.X64.Decls.s128
(va_get_mem va_sM) out128_b)) (Vale.X64.Decls.s128 (va_get_mem va_sM) inout_b) in let
cipher_bytes = FStar.Seq.Base.slice #Vale.Def.Types_s.nat8
(Vale.Def.Types_s.le_seq_quad32_to_bytes cipher_raw_quads) 0 plain_num_bytes in l_and (l_and
(l_and (l_and (FStar.Seq.Base.length #Vale.Def.Types_s.nat8 auth_bytes < pow2_32)
(FStar.Seq.Base.length #Vale.Def.Types_s.nat8 plain_bytes < pow2_32))
(Vale.AES.AES_common_s.is_aes_key alg (Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key)))
(cipher_bytes == __proj__Mktuple2__item___1 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8)
#(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)))
(Vale.Def.Types_s.le_quad32_to_bytes (Vale.X64.Decls.buffer128_read tag_b 0 (va_get_mem va_sM))
== __proj__Mktuple2__item___2 #(FStar.Seq.Base.seq Vale.Def.Types_s.nat8) #(FStar.Seq.Base.seq
Vale.Def.Types_s.nat8) (Vale.AES.GCM_s.gcm_encrypt_LE alg
(Vale.Def.Words.Seq_s.seq_nat32_to_seq_nat8_LE key) iv plain_bytes auth_bytes)) /\ va_get_reg64
rRsp va_sM == va_get_reg64 rRsp va_s0 /\ (win ==> va_get_reg64 rRbx va_sM == va_get_reg64 rRbx
va_s0) /\ (win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp va_s0) /\ (win ==> va_get_reg64
rRdi va_sM == va_get_reg64 rRdi va_s0) /\ (win ==> va_get_reg64 rRsi va_sM == va_get_reg64 rRsi
va_s0) /\ (win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (win ==> va_get_reg64
rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (win ==> va_get_reg64 rR14 va_sM == va_get_reg64 rR14
va_s0) /\ (win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0) /\ (win ==> va_get_xmm 6
va_sM == va_get_xmm 6 va_s0) /\ (win ==> va_get_xmm 7 va_sM == va_get_xmm 7 va_s0) /\ (win ==>
va_get_xmm 8 va_sM == va_get_xmm 8 va_s0) /\ (win ==> va_get_xmm 9 va_sM == va_get_xmm 9 va_s0)
/\ (win ==> va_get_xmm 10 va_sM == va_get_xmm 10 va_s0) /\ (win ==> va_get_xmm 11 va_sM ==
va_get_xmm 11 va_s0) /\ (win ==> va_get_xmm 12 va_sM == va_get_xmm 12 va_s0) /\ (win ==>
va_get_xmm 13 va_sM == va_get_xmm 13 va_s0) /\ (win ==> va_get_xmm 14 va_sM == va_get_xmm 14
va_s0) /\ (win ==> va_get_xmm 15 va_sM == va_get_xmm 15 va_s0) /\ (~win ==> va_get_reg64 rRbx
va_sM == va_get_reg64 rRbx va_s0) /\ (~win ==> va_get_reg64 rRbp va_sM == va_get_reg64 rRbp
va_s0) /\ (~win ==> va_get_reg64 rR12 va_sM == va_get_reg64 rR12 va_s0) /\ (~win ==>
va_get_reg64 rR13 va_sM == va_get_reg64 rR13 va_s0) /\ (~win ==> va_get_reg64 rR14 va_sM ==
va_get_reg64 rR14 va_s0) /\ (~win ==> va_get_reg64 rR15 va_sM == va_get_reg64 rR15 va_s0))) /\
va_state_eq va_sM (va_update_stackTaint va_sM (va_update_stack va_sM (va_update_flags va_sM
(va_update_mem_heaplet 6 va_sM (va_update_mem_heaplet 5 va_sM (va_update_mem_heaplet 4 va_sM
(va_update_mem_heaplet 3 va_sM (va_update_mem_heaplet 2 va_sM (va_update_mem_heaplet 1 va_sM
(va_update_mem_layout va_sM (va_update_xmm 15 va_sM (va_update_xmm 14 va_sM (va_update_xmm 13
va_sM (va_update_xmm 12 va_sM (va_update_xmm 11 va_sM (va_update_xmm 10 va_sM (va_update_xmm 9
va_sM (va_update_xmm 8 va_sM (va_update_xmm 7 va_sM (va_update_xmm 6 va_sM (va_update_xmm 5
va_sM (va_update_xmm 4 va_sM (va_update_xmm 3 va_sM (va_update_xmm 2 va_sM (va_update_xmm 1
va_sM (va_update_xmm 0 va_sM (va_update_reg64 rR15 va_sM (va_update_reg64 rR14 va_sM
(va_update_reg64 rR13 va_sM (va_update_reg64 rR12 va_sM (va_update_reg64 rR11 va_sM
(va_update_reg64 rR10 va_sM (va_update_reg64 rR9 va_sM (va_update_reg64 rR8 va_sM
(va_update_reg64 rRbp va_sM (va_update_reg64 rRsp va_sM (va_update_reg64 rRsi va_sM
(va_update_reg64 rRdi va_sM (va_update_reg64 rRdx va_sM (va_update_reg64 rRcx va_sM
(va_update_reg64 rRbx va_sM (va_update_reg64 rRax va_sM (va_update_ok va_sM (va_update_mem
va_sM va_s0))))))))))))))))))))))))))))))))))))))))))))))
[@ va_qattr]
let va_wp_Gcm_blocks_stdcall (win:bool) (alg:algorithm) (auth_b:buffer128) (auth_bytes:nat64)
(auth_num:nat64) (keys_b:buffer128) (iv_b:buffer128) (iv:supported_iv_LE) (hkeys_b:buffer128)
(abytes_b:buffer128) (in128x6_b:buffer128) (out128x6_b:buffer128) (len128x6_num:nat64)
(in128_b:buffer128) (out128_b:buffer128) (len128_num:nat64) (inout_b:buffer128) (plain_num:nat64)
(scratch_b:buffer128) (tag_b:buffer128) (key:(seq nat32)) (va_s0:va_state) (va_k:(va_state -> | false | true | Vale.AES.X64.GCMencryptOpt.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 100,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_wp_Gcm_blocks_stdcall
(win: bool)
(alg: algorithm)
(auth_b: buffer128)
(auth_bytes auth_num: nat64)
(keys_b iv_b: buffer128)
(iv: supported_iv_LE)
(hkeys_b abytes_b in128x6_b out128x6_b: buffer128)
(len128x6_num: nat64)
(in128_b out128_b: buffer128)
(len128_num: nat64)
(inout_b: buffer128)
(plain_num: nat64)
(scratch_b tag_b: buffer128)
(key: (seq nat32))
(va_s0: va_state)
(va_k: (va_state -> unit -> Type0))
: Type0 | [] | Vale.AES.X64.GCMencryptOpt.va_wp_Gcm_blocks_stdcall | {
"file_name": "obj/Vale.AES.X64.GCMencryptOpt.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
win: Prims.bool ->
alg: Vale.AES.AES_common_s.algorithm ->
auth_b: Vale.X64.Memory.buffer128 ->
auth_bytes: Vale.X64.Memory.nat64 ->
auth_num: Vale.X64.Memory.nat64 ->
keys_b: Vale.X64.Memory.buffer128 ->
iv_b: Vale.X64.Memory.buffer128 ->
iv: Vale.AES.GCM_s.supported_iv_LE ->
hkeys_b: Vale.X64.Memory.buffer128 ->
abytes_b: Vale.X64.Memory.buffer128 ->
in128x6_b: Vale.X64.Memory.buffer128 ->
out128x6_b: Vale.X64.Memory.buffer128 ->
len128x6_num: Vale.X64.Memory.nat64 ->
in128_b: Vale.X64.Memory.buffer128 ->
out128_b: Vale.X64.Memory.buffer128 ->
len128_num: Vale.X64.Memory.nat64 ->
inout_b: Vale.X64.Memory.buffer128 ->
plain_num: Vale.X64.Memory.nat64 ->
scratch_b: Vale.X64.Memory.buffer128 ->
tag_b: Vale.X64.Memory.buffer128 ->
key: FStar.Seq.Base.seq Vale.X64.Memory.nat32 ->
va_s0: Vale.X64.Decls.va_state ->
va_k: (_: Vale.X64.Decls.va_state -> _: Prims.unit -> Type0)
-> Type0 | {
"end_col": 21,
"end_line": 1492,
"start_col": 2,
"start_line": 1235
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vale_heap = M.vale_heap | let vale_heap = | false | null | false | M.vale_heap | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.vale_heap"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vale_heap : Type | [] | Vale.PPC64LE.Decls.vale_heap | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type | {
"end_col": 34,
"end_line": 26,
"start_col": 23,
"start_line": 26
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let heaplet_id = M.heaplet_id | let heaplet_id = | false | null | false | M.heaplet_id | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Memory.heaplet_id"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val heaplet_id : Type0 | [] | Vale.PPC64LE.Decls.heaplet_id | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 36,
"end_line": 28,
"start_col": 24,
"start_line": 28
} |
|
Prims.Tot | val va_get_cr0 (s: va_state) : cr0_t | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_cr0 (s:va_state) : cr0_t = s.cr0 | val va_get_cr0 (s: va_state) : cr0_t
let va_get_cr0 (s: va_state) : cr0_t = | false | null | false | s.cr0 | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.cr0_t"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_cr0 (s: va_state) : cr0_t | [] | Vale.PPC64LE.Decls.va_get_cr0 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Machine_s.cr0_t | {
"end_col": 62,
"end_line": 144,
"start_col": 57,
"start_line": 144
} |
Prims.Tot | val va_get_stack (s: va_state) : SI.vale_stack | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack | val va_get_stack (s: va_state) : SI.vale_stack
let va_get_stack (s: va_state) : SI.vale_stack = | false | null | false | VSS.stack_from_s s.ms_stack | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Stack_Sems.stack_from_s",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.PPC64LE.Stack_i.vale_stack"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_stack (s: va_state) : SI.vale_stack | [] | Vale.PPC64LE.Decls.va_get_stack | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Stack_i.vale_stack | {
"end_col": 94,
"end_line": 151,
"start_col": 67,
"start_line": 151
} |
FStar.Pervasives.Lemma | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]] | let va_reveal_opaque (s: string) = | false | null | true | norm_spec [zeta; delta_only [s]] | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"lemma"
] | [
"Prims.string",
"FStar.Pervasives.norm_spec",
"Prims.Cons",
"FStar.Pervasives.norm_step",
"FStar.Pervasives.zeta",
"FStar.Pervasives.delta_only",
"Prims.Nil",
"Prims.unit",
"Prims.l_True",
"Prims.squash",
"Prims.eq2",
"FStar.Pervasives.norm",
"FStar.Pervasives.pattern"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized? | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_reveal_opaque : s: Prims.string -> x: _
-> FStar.Pervasives.Lemma
(ensures FStar.Pervasives.norm [FStar.Pervasives.zeta; FStar.Pervasives.delta_only [s]] x == x) | [] | Vale.PPC64LE.Decls.va_reveal_opaque | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Prims.string -> x: _
-> FStar.Pervasives.Lemma
(ensures FStar.Pervasives.norm [FStar.Pervasives.zeta; FStar.Pervasives.delta_only [s]] x == x) | {
"end_col": 66,
"end_line": 44,
"start_col": 34,
"start_line": 44
} |
|
Prims.GTot | val buffer8_as_seq (m: vale_heap) (b: M.buffer8) : GTot (Seq.seq nat8) | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b | val buffer8_as_seq (m: vale_heap) (b: M.buffer8) : GTot (Seq.seq nat8)
let buffer8_as_seq (m: vale_heap) (b: M.buffer8) : GTot (Seq.seq nat8) = | false | null | false | M.buffer_as_seq m b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer8",
"Vale.PPC64LE.Memory.buffer_as_seq",
"Vale.PPC64LE.Memory.vuint8",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.nat8"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer8_as_seq (m: vale_heap) (b: M.buffer8) : GTot (Seq.seq nat8) | [] | Vale.PPC64LE.Decls.buffer8_as_seq | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer8
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.nat8) | {
"end_col": 97,
"end_line": 83,
"start_col": 78,
"start_line": 83
} |
Prims.Tot | val va_opr_code_Mem64 (h: heaplet_id) (r: reg) (n: int) (t: taint) : tmaddr | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t) | val va_opr_code_Mem64 (h: heaplet_id) (r: reg) (n: int) (t: taint) : tmaddr
let va_opr_code_Mem64 (h: heaplet_id) (r: reg) (n: int) (t: taint) : tmaddr = | false | null | false | ({ address = r; offset = n }, t) | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.heaplet_id",
"Vale.PPC64LE.Machine_s.reg",
"Prims.int",
"Vale.Arch.HeapTypes_s.taint",
"FStar.Pervasives.Native.Mktuple2",
"Vale.PPC64LE.Machine_s.maddr",
"Vale.PPC64LE.Machine_s.Mkmaddr",
"Vale.PPC64LE.Machine_s.tmaddr"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr] | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_opr_code_Mem64 (h: heaplet_id) (r: reg) (n: int) (t: taint) : tmaddr | [] | Vale.PPC64LE.Decls.va_opr_code_Mem64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} |
h: Vale.PPC64LE.Decls.heaplet_id ->
r: Vale.PPC64LE.Machine_s.reg ->
n: Prims.int ->
t: Vale.Arch.HeapTypes_s.taint
-> Vale.PPC64LE.Machine_s.tmaddr | {
"end_col": 30,
"end_line": 140,
"start_col": 2,
"start_line": 140
} |
Prims.Tot | val valid_stack (m: maddr) (t: taint) (s: state) : prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint | val valid_stack (m: maddr) (t: taint) (s: state) : prop0
let valid_stack (m: maddr) (t: taint) (s: state) : prop0 = | false | null | false | SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.maddr",
"Vale.Arch.HeapTypes_s.taint",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Stack_i.valid_taint_stack64",
"Vale.PPC64LE.State.eval_maddr",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr] | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_stack (m: maddr) (t: taint) (s: state) : prop0 | [] | Vale.PPC64LE.Decls.valid_stack | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Machine_s.maddr -> t: Vale.Arch.HeapTypes_s.taint -> s: Vale.PPC64LE.State.state
-> Vale.Def.Prop_s.prop0 | {
"end_col": 59,
"end_line": 124,
"start_col": 2,
"start_line": 124
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_hd = Cons?.hd | let va_hd = | false | null | false | Cons?.hd | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.__proj__Cons__item__hd",
"Prims.list",
"Prims.b2t",
"Prims.uu___is_Cons"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_hd : projectee: _: Prims.list _ {Cons? _} -> _ | [] | Vale.PPC64LE.Decls.va_hd | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | projectee: _: Prims.list _ {Cons? _} -> _ | {
"end_col": 27,
"end_line": 38,
"start_col": 19,
"start_line": 38
} |
|
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_codes = list va_code | let va_codes = | false | null | false | list va_code | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.list",
"Vale.PPC64LE.Decls.va_code"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0 | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_codes : Type0 | [] | Vale.PPC64LE.Decls.va_codes | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 34,
"end_line": 57,
"start_col": 22,
"start_line": 57
} |
|
Prims.Tot | val va_op_vec_opr_vec (v: vec) : vec_opr | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_op_vec_opr_vec (v:vec) : vec_opr = v | val va_op_vec_opr_vec (v: vec) : vec_opr
let va_op_vec_opr_vec (v: vec) : vec_opr = | false | null | false | v | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Decls.vec_opr"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_op_vec_opr_vec (v: vec) : vec_opr | [] | Vale.PPC64LE.Decls.va_op_vec_opr_vec | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | v: Vale.PPC64LE.Machine_s.vec -> Vale.PPC64LE.Decls.vec_opr | {
"end_col": 62,
"end_line": 133,
"start_col": 61,
"start_line": 133
} |
Prims.Tot | val va_update_ok (sM sK: va_state) : va_state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_update_ok (sM:va_state) (sK:va_state) : va_state = va_upd_ok sM.ok sK | val va_update_ok (sM sK: va_state) : va_state
let va_update_ok (sM sK: va_state) : va_state = | false | null | false | va_upd_ok sM.ok sK | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.va_upd_ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok }
[@va_qattr] let va_upd_cr0 (cr0:cr0_t) (s:state) : state = { s with cr0 = cr0 }
[@va_qattr] let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer }
[@va_qattr] let va_upd_reg (r:reg) (v:nat64) (s:state) : state = update_reg r v s
[@va_qattr] let va_upd_vec (x:vec) (v:quad32) (s:state) : state = update_vec x v s
[@va_qattr] let va_upd_mem (mem:vale_heap) (s:state) : state = { s with ms_heap = coerce (M.set_vale_heap (coerce s.ms_heap) mem) }
[@va_qattr] let va_upd_mem_layout (layout:vale_heap_layout) (s:state) : state = { s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_layout = layout }) }
[@va_qattr] let va_upd_mem_heaplet (n:heaplet_id) (h:vale_heap) (s:state) : state =
{ s with ms_heap = coerce ({ (coerce s.ms_heap) with vf_heaplets = Map16.upd (coerce s.ms_heap).vf_heaplets n h }) }
[@va_qattr] let va_upd_stack (stack:SI.vale_stack) (s:state) : state = { s with ms_stack = (VSS.stack_to_s stack) }
[@va_qattr] let va_upd_stackTaint (stackTaint:M.memtaint) (s:state) : state = { s with ms_stackTaint = stackTaint } | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_update_ok (sM sK: va_state) : va_state | [] | Vale.PPC64LE.Decls.va_update_ok | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | sM: Vale.PPC64LE.Decls.va_state -> sK: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Decls.va_state | {
"end_col": 95,
"end_line": 185,
"start_col": 77,
"start_line": 185
} |
Prims.GTot | val s64 (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b | val s64 (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64)
let s64 (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) = | false | null | false | buffer64_as_seq m b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Decls.buffer64_as_seq",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val s64 (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) | [] | Vale.PPC64LE.Decls.s64 | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer64
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.nat64) | {
"end_col": 88,
"end_line": 85,
"start_col": 69,
"start_line": 85
} |
Prims.Tot | val va_upd_xer (xer: xer_t) (s: state) : state | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_upd_xer (xer:xer_t) (s:state) : state = { s with xer = xer } | val va_upd_xer (xer: xer_t) (s: state) : state
let va_upd_xer (xer: xer_t) (s: state) : state = | false | null | false | { s with xer = xer } | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.xer_t",
"Vale.PPC64LE.State.state",
"Vale.PPC64LE.Machine_s.Mkstate",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ok",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__regs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__vecs",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__cr0",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_heap",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stack",
"Vale.PPC64LE.Machine_s.__proj__Mkstate__item__ms_stackTaint"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_dst_Mem64 (m:maddr) (s:va_state) = valid_mem m s
[@va_qattr] unfold let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_vec_opr (v:vec_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_heaplet (h:heaplet_id) (s:va_state) = True
[@va_qattr] let va_upd_ok (ok:bool) (s:state) : state = { s with ok = ok } | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_upd_xer (xer: xer_t) (s: state) : state | [] | Vale.PPC64LE.Decls.va_upd_xer | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | xer: Vale.PPC64LE.Machine_s.xer_t -> s: Vale.PPC64LE.State.state -> Vale.PPC64LE.State.state | {
"end_col": 77,
"end_line": 174,
"start_col": 61,
"start_line": 174
} |
Prims.Tot | val va_op_cmp_reg (r: reg) : cmp_opr | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_op_cmp_reg (r:reg) : cmp_opr = CReg r | val va_op_cmp_reg (r: reg) : cmp_opr
let va_op_cmp_reg (r: reg) : cmp_opr = | false | null | false | CReg r | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.reg",
"Vale.PPC64LE.Machine_s.CReg",
"Vale.PPC64LE.Machine_s.cmp_opr"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_op_cmp_reg (r: reg) : cmp_opr | [] | Vale.PPC64LE.Decls.va_op_cmp_reg | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | r: Vale.PPC64LE.Machine_s.reg -> Vale.PPC64LE.Machine_s.cmp_opr | {
"end_col": 63,
"end_line": 134,
"start_col": 57,
"start_line": 134
} |
Prims.Tot | val va_subscript (#a: eqtype) (#b: Type) (x: Map.t a b) (y: a) : Tot b | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y | val va_subscript (#a: eqtype) (#b: Type) (x: Map.t a b) (y: a) : Tot b
let va_subscript (#a: eqtype) (#b: Type) (x: Map.t a b) (y: a) : Tot b = | false | null | false | Map.sel x y | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Prims.eqtype",
"FStar.Map.t",
"FStar.Map.sel"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_subscript (#a: eqtype) (#b: Type) (x: Map.t a b) (y: a) : Tot b | [] | Vale.PPC64LE.Decls.va_subscript | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: FStar.Map.t a b -> y: a -> b | {
"end_col": 87,
"end_line": 36,
"start_col": 76,
"start_line": 36
} |
Prims.Tot | val va_op_heaplet_mem_heaplet (h: heaplet_id) : heaplet_id | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h | val va_op_heaplet_mem_heaplet (h: heaplet_id) : heaplet_id
let va_op_heaplet_mem_heaplet (h: heaplet_id) : heaplet_id = | false | null | false | h | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.heaplet_id"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_op_heaplet_mem_heaplet (h: heaplet_id) : heaplet_id | [] | Vale.PPC64LE.Decls.va_op_heaplet_mem_heaplet | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | h: Vale.PPC64LE.Decls.heaplet_id -> Vale.PPC64LE.Decls.heaplet_id | {
"end_col": 80,
"end_line": 136,
"start_col": 79,
"start_line": 136
} |
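Because va_op_heaplet_mem_heaplet is the identity on heaplet_id and is marked unfold, the wrapper disappears under normalization. A small sketch of that fact, assuming the Vale.PPC64LE development is built and on the include path (module and lemma names are hypothetical):

module Example_HeapletOpr
open Vale.PPC64LE.Decls

// Both sides of the equation normalize to the bare heaplet id `h`,
// so the proof is by reflexivity.
let wrapper_collapses (h:heaplet_id)
  : Lemma (va_op_heaplet_mem_heaplet (va_op_heaplet_mem_heaplet h) ==
           va_op_heaplet_mem_heaplet h)
  = ()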
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let vec_opr = vec | let vec_opr = | false | null | false | vec | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.vec"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val vec_opr : Type0 | [] | Vale.PPC64LE.Decls.vec_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | Type0 | {
"end_col": 24,
"end_line": 64,
"start_col": 21,
"start_line": 64
} |
|
Prims.GTot | val va_eval_vec_opr (s: va_state) (v: vec_opr) : GTot quad32 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s | val va_eval_vec_opr (s: va_state) (v: vec_opr) : GTot quad32
let va_eval_vec_opr (s: va_state) (v: vec_opr) : GTot quad32 = | false | null | false | eval_vec v s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.Decls.vec_opr",
"Vale.PPC64LE.State.eval_vec",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_eval_vec_opr (s: va_state) (v: vec_opr) : GTot quad32 | [] | Vale.PPC64LE.Decls.va_eval_vec_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | s: Vale.PPC64LE.Decls.va_state -> v: Vale.PPC64LE.Decls.vec_opr
-> Prims.GTot Vale.PPC64LE.Machine_s.quad32 | {
"end_col": 102,
"end_line": 159,
"start_col": 90,
"start_line": 159
} |
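va_eval_vec_opr and the getter va_get_vec from the same file both unfold to eval_vec, so they agree on every state and vector register. A sketch of that agreement, again assuming the Vale.PPC64LE modules are available (the module and lemma names are illustrative only):

module Example_EvalVec
open Vale.PPC64LE.Decls

// Both applications normalize to `eval_vec v s`; the equality is definitional.
let eval_matches_get (s:va_state) (v:vec_opr)
  : Lemma (va_eval_vec_opr s v == va_get_vec v s)
  = ()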
Prims.GTot | val buffer128_as_seq (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b | val buffer128_as_seq (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32)
let buffer128_as_seq (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) = | false | null | false | M.buffer_as_seq m b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer128",
"Vale.PPC64LE.Memory.buffer_as_seq",
"Vale.PPC64LE.Memory.vuint128",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer128_as_seq (m: vale_heap) (b: M.buffer128) : GTot (Seq.seq quad32) | [] | Vale.PPC64LE.Decls.buffer128_as_seq | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer128
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.quad32) | {
"end_col": 103,
"end_line": 86,
"start_col": 84,
"start_line": 86
} |
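buffer128_as_seq is only a wrapper around M.buffer_as_seq, and s128 in the same file wraps buffer128_as_seq in turn, so the whole chain is definitional. A minimal sketch (hypothetical module name; checking it requires the Vale tree):

module Example_Seq128
module M = Vale.PPC64LE.Memory
open Vale.PPC64LE.Decls

// s128, buffer128_as_seq and M.buffer_as_seq denote the same sequence;
// every step is an unfold alias, so reflexivity closes the proof.
let aliases_agree (m:vale_heap) (b:M.buffer128)
  : Lemma (s128 m b == M.buffer_as_seq m b)
  = ()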
Prims.Tot | val valid_src_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i | val valid_src_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0
let valid_src_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 = | false | null | false | M.valid_buffer_read m b i | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.Arch.HeapTypes_s.base_typ",
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer",
"Prims.int",
"Vale.PPC64LE.Memory.valid_buffer_read",
"Vale.Def.Prop_s.prop0"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val valid_src_addr (#t: M.base_typ) (m: vale_heap) (b: M.buffer t) (i: int) : prop0 | [] | Vale.PPC64LE.Decls.valid_src_addr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer t -> i: Prims.int
-> Vale.Def.Prop_s.prop0 | {
"end_col": 114,
"end_line": 88,
"start_col": 89,
"start_line": 88
} |
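valid_src_addr is the guard under which buffer reads are performed throughout these files. The sketch below shows the typical shape of such a guard as a ghost read of element 0; the wrapper name is made up and no such helper is claimed to exist in the Vale sources:

module Example_ValidRead
module M = Vale.PPC64LE.Memory
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.Decls

// A ghost read of the first 64-bit word, permitted only when index 0 is a
// valid source address; the postcondition simply restates the read.
let read_first_word (m:vale_heap) (b:M.buffer64)
  : Ghost nat64
    (requires valid_src_addr m b 0)
    (ensures fun v -> v == buffer64_read b 0 m)
  = buffer64_read b 0 m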
Prims.GTot | val buffer64_as_seq (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b | val buffer64_as_seq (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64)
let buffer64_as_seq (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) = | false | null | false | M.buffer_as_seq m b | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"sometrivial"
] | [
"Vale.PPC64LE.Decls.vale_heap",
"Vale.PPC64LE.Memory.buffer64",
"Vale.PPC64LE.Memory.buffer_as_seq",
"Vale.PPC64LE.Memory.vuint64",
"FStar.Seq.Base.seq",
"Vale.PPC64LE.Machine_s.nat64"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b | false | false | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val buffer64_as_seq (m: vale_heap) (b: M.buffer64) : GTot (Seq.seq nat64) | [] | Vale.PPC64LE.Decls.buffer64_as_seq | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | m: Vale.PPC64LE.Decls.vale_heap -> b: Vale.PPC64LE.Memory.buffer64
-> Prims.GTot (FStar.Seq.Base.seq Vale.PPC64LE.Machine_s.nat64) | {
"end_col": 100,
"end_line": 84,
"start_col": 81,
"start_line": 84
} |
Prims.Tot | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_is_src_vec_opr (v:vec_opr) (s:va_state) = True | let va_is_src_vec_opr (v: vec_opr) (s: va_state) = | false | null | false | True | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Decls.vec_opr",
"Vale.PPC64LE.Decls.va_state",
"Prims.l_True",
"Prims.logical"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer
[@va_qattr] unfold let va_get_reg (r:reg) (s:va_state) : nat64 = eval_reg r s
[@va_qattr] unfold let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s
[@va_qattr] unfold let va_get_mem (s:va_state) : vale_heap = M.get_vale_heap (coerce s.ms_heap)
[@va_qattr] unfold let va_get_mem_layout (s:va_state) : vale_heap_layout = (coerce s.ms_heap).vf_layout
[@va_qattr] unfold let va_get_mem_heaplet (n:heaplet_id) (s:va_state) : vale_heap = Map16.sel (coerce s.ms_heap).vf_heaplets n
[@va_qattr] unfold let va_get_stack (s:va_state) : SI.vale_stack = VSS.stack_from_s s.ms_stack
[@va_qattr] unfold let va_get_stackTaint (s:va_state) : M.memtaint = s.ms_stackTaint
// Evaluation
[@va_qattr] unfold let va_eval_reg (s:va_state) (r:reg) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_Mem64 (s:va_state) (m:maddr) : GTot nat64 = eval_mem (eval_maddr m s) s
[@va_qattr] unfold let va_eval_reg_opr (s:va_state) (r:reg_opr) : GTot nat64 = eval_reg r s
[@va_qattr] unfold let va_eval_cmp_opr (s:va_state) (o:cmp_opr) : GTot nat64 = eval_cmp_opr o s
[@va_qattr] unfold let va_eval_vec_opr (s:va_state) (v:vec_opr) : GTot quad32 = eval_vec v s
[@va_qattr] unfold let va_eval_heaplet (s:va_state) (h:heaplet_id) : vale_heap = va_get_mem_heaplet h s
// Predicates
[@va_qattr] unfold let va_is_src_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_dst_reg_opr (r:reg_opr) (s:va_state) = True
[@va_qattr] unfold let va_is_src_Mem64 (m:maddr) (s:va_state) = valid_mem m s | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_is_src_vec_opr : v: Vale.PPC64LE.Decls.vec_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | [] | Vale.PPC64LE.Decls.va_is_src_vec_opr | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | v: Vale.PPC64LE.Decls.vec_opr -> s: Vale.PPC64LE.Decls.va_state -> Prims.logical | {
"end_col": 72,
"end_line": 167,
"start_col": 68,
"start_line": 167
} |
|
Prims.Tot | val va_get_vec (x: vec) (s: va_state) : quad32 | [
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_Sems",
"short_module": "VSS"
},
{
"abbrev": true,
"full_module": "Vale.Lib.Map16",
"short_module": "Map16"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Stack_i",
"short_module": "SI"
},
{
"abbrev": true,
"full_module": "Vale.PPC64LE.Memory",
"short_module": "M"
},
{
"abbrev": false,
"full_module": "Vale.Arch.Heap",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapImpl",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Arch.HeapTypes_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.State",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE.Machine_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.Def.Prop_s",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Mul",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "Vale.PPC64LE",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar.Pervasives",
"short_module": null
},
{
"abbrev": false,
"full_module": "Prims",
"short_module": null
},
{
"abbrev": false,
"full_module": "FStar",
"short_module": null
}
] | false | let va_get_vec (x:vec) (s:va_state) : quad32 = eval_vec x s | val va_get_vec (x: vec) (s: va_state) : quad32
let va_get_vec (x: vec) (s: va_state) : quad32 = | false | null | false | eval_vec x s | {
"checked_file": "Vale.PPC64LE.Decls.fsti.checked",
"dependencies": [
"Vale.PPC64LE.State.fsti.checked",
"Vale.PPC64LE.Stack_Sems.fsti.checked",
"Vale.PPC64LE.Stack_i.fsti.checked",
"Vale.PPC64LE.Memory.fsti.checked",
"Vale.PPC64LE.Machine_s.fst.checked",
"Vale.Lib.Map16.fsti.checked",
"Vale.Def.Prop_s.fst.checked",
"Vale.Arch.HeapTypes_s.fst.checked",
"Vale.Arch.HeapImpl.fsti.checked",
"Vale.Arch.Heap.fsti.checked",
"prims.fst.checked",
"FStar.Seq.fst.checked",
"FStar.Pervasives.Native.fst.checked",
"FStar.Pervasives.fsti.checked",
"FStar.Mul.fst.checked",
"FStar.Map.fsti.checked",
"FStar.All.fst.checked"
],
"interface_file": false,
"source_file": "Vale.PPC64LE.Decls.fsti"
} | [
"total"
] | [
"Vale.PPC64LE.Machine_s.vec",
"Vale.PPC64LE.Decls.va_state",
"Vale.PPC64LE.State.eval_vec",
"Vale.PPC64LE.Machine_s.quad32"
] | [] | module Vale.PPC64LE.Decls
// This interface should hide all of Semantics_s.
// (It should not refer to Semantics_s, directly or indirectly.)
// It should not refer to StateLemmas_i or Print_s,
// because they refer to Semantics_s.
// Regs_i and State_i are ok, because they do not refer to Semantics_s.
open FStar.Mul
open Vale.Def.Prop_s
open Vale.PPC64LE.Machine_s
open Vale.PPC64LE.State
open Vale.Arch.HeapTypes_s
open Vale.Arch.HeapImpl
open Vale.Arch.Heap
module M = Vale.PPC64LE.Memory
module SI = Vale.PPC64LE.Stack_i
module Map16 = Vale.Lib.Map16
module VSS = Vale.PPC64LE.Stack_Sems
val same_heap_types : squash (vale_full_heap == heap_impl)
unfold let coerce (#b #a:Type) (x:a{a == b}) : b = x
unfold let from_heap_impl (heap:heap_impl) : vale_full_heap = coerce heap
unfold let vale_heap = M.vale_heap
unfold let vale_full_heap = M.vale_full_heap
unfold let heaplet_id = M.heaplet_id
val xer_ov (xer:xer_t) : bool
val xer_ca (xer:xer_t) : bool
val update_xer_ov (xer:xer_t) (new_xer_ov:bool) : xer_t
val update_xer_ca (xer:xer_t) (new_xer_ca:bool) : xer_t
//unfold let va_subscript = Map.sel
unfold let va_subscript (#a:eqtype) (#b:Type) (x:Map.t a b) (y:a) : Tot b = Map.sel x y
unfold let va_update = Map.upd
unfold let va_hd = Cons?.hd
//unfold let va_tl = Cons?.tl // F* inlines "let ... = va_tl ..." more than we'd like; revised definition below suppresses this
// REVIEW: FStar.Pervasives.reveal_opaque doesn't include zeta, so it fails for recursive functions
// REVIEW: why is x' necessary to keep x from being normalized?
[@va_qattr] unfold let va_reveal_eq (#ax:Type) (s:string) (x x':ax) = norm [zeta; delta_only [s]] #ax x == x'
let va_reveal_opaque (s:string) = norm_spec [zeta; delta_only [s]]
// hide 'if' so that x and y get fully normalized
let va_if (#a:Type) (b:bool) (x:(_:unit{b}) -> a) (y:(_:unit{~b}) -> a) : a =
if b then x () else y ()
// Type aliases
let va_int_at_least (k:int) = i:int{i >= k}
let va_int_at_most (k:int) = i:int{i <= k}
let va_int_range (k1 k2:int) = i:int{k1 <= i /\ i <= k2}
val ins : Type0
val ocmp : Type0
unfold let va_code = precode ins ocmp
unfold let va_codes = list va_code
let va_tl (cs:va_codes) : Ghost va_codes (requires Cons? cs) (ensures fun tl -> tl == Cons?.tl cs) = Cons?.tl cs
unfold let va_state = state
val va_fuel : Type0
unfold let reg_opr = reg
unfold let va_operand_reg_opr = reg
unfold let va_operand_Mem64 = maddr
unfold let vec_opr = vec
unfold let va_operand_vec_opr = vec
unfold let va_operand_heaplet = heaplet_id
val va_pbool : Type0
val va_ttrue (_:unit) : va_pbool
val va_ffalse (reason:string) : va_pbool
val va_pbool_and (x y:va_pbool) : va_pbool
val mul_nat_helper (x y:nat) : Lemma (x * y >= 0)
[@va_qattr] unfold let va_mul_nat (x y:nat) : nat =
mul_nat_helper x y;
x * y
[@va_qattr] unfold let va_expand_state (s:state) : state = s
unfold let buffer_readable (#t:M.base_typ) (h:vale_heap) (b:M.buffer t) : GTot prop0 = M.buffer_readable #t h b
unfold let buffer_writeable (#t:M.base_typ) (b:M.buffer t) : GTot prop0 = M.buffer_writeable #t b
unfold let buffer_length (#t:M.base_typ) (b:M.buffer t) = M.buffer_length #t b
unfold let buffer8_as_seq (m:vale_heap) (b:M.buffer8) : GTot (Seq.seq nat8) = M.buffer_as_seq m b
unfold let buffer64_as_seq (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = M.buffer_as_seq m b
unfold let s64 (m:vale_heap) (b:M.buffer64) : GTot (Seq.seq nat64) = buffer64_as_seq m b
unfold let buffer128_as_seq (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = M.buffer_as_seq m b
unfold let s128 (m:vale_heap) (b:M.buffer128) : GTot (Seq.seq quad32) = buffer128_as_seq m b
unfold let valid_src_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_read m b i
unfold let valid_dst_addr (#t:M.base_typ) (m:vale_heap) (b:M.buffer t) (i:int) : prop0 = M.valid_buffer_write m b i
unfold let buffer64_read (b:M.buffer64) (i:int) (h:vale_heap) : GTot nat64 = M.buffer_read b i h
unfold let buffer128_read (b:M.buffer128) (i:int) (h:vale_heap) : GTot quad32 = M.buffer_read b i h
unfold let modifies_mem (s:M.loc) (h1 h2:vale_heap) : GTot prop0 = M.modifies s h1 h2
unfold let loc_buffer(#t:M.base_typ) (b:M.buffer t) = M.loc_buffer #t b
unfold let locs_disjoint = M.locs_disjoint
unfold let loc_union = M.loc_union
unfold let valid_addr_mem (r:reg) (n:int) (s:state) : prop0 = valid_mem ({ address=r; offset=n }) s
let valid_buf_maddr64 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer64) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf64 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 8 * index
let valid_buf_maddr128 (addr:int) (s_mem:vale_heap) (layout:vale_heap_layout) (b:M.buffer128) (index:int) (t:taint) : prop0 =
valid_src_addr s_mem b index /\
M.valid_taint_buf128 b s_mem layout.vl_taint t /\
addr == M.buffer_addr b s_mem + 16 * index
let valid_mem_operand64 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer64) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr64 addr s_mem layout b index t
let valid_mem_operand128 (addr:int) (t:taint) (s_mem:vale_heap) (layout:vale_heap_layout) : prop0 =
exists (b:M.buffer128) (index:int).{:pattern (M.valid_buffer_read s_mem b index)}
valid_buf_maddr128 addr s_mem layout b index t
[@va_qattr]
let valid_mem_addr (tm:tmaddr) (s:state) : prop0 =
let (m, t) = tm in
valid_maddr m s /\
valid_mem_operand64 (eval_maddr m s) t (M.get_vale_heap (coerce s.ms_heap)) (coerce s.ms_heap).vf_layout
[@va_qattr]
let valid_stack (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack64 (eval_maddr m s) t s.ms_stackTaint
[@va_qattr]
let valid_stack128 (m:maddr) (t:taint) (s:state) : prop0 =
SI.valid_taint_stack128 (eval_maddr m s) t s.ms_stackTaint
// Constructors
val va_fuel_default : unit -> va_fuel
[@va_qattr] unfold let va_op_reg_opr_reg (r:reg) : reg_opr = r
[@va_qattr] unfold let va_op_vec_opr_vec (v:vec) : vec_opr = v
[@va_qattr] unfold let va_op_cmp_reg (r:reg) : cmp_opr = CReg r
[@va_qattr] unfold let va_const_cmp (n:imm16) : cmp_opr = CImm n
[@va_qattr] unfold let va_op_heaplet_mem_heaplet (h:heaplet_id) : heaplet_id = h
[@va_qattr]
unfold let va_opr_code_Mem64 (h:heaplet_id) (r:reg) (n:int) (t:taint) : tmaddr =
({ address=r; offset=n }, t)
// Getters
[@va_qattr] unfold let va_get_ok (s:va_state) : bool = s.ok
[@va_qattr] unfold let va_get_cr0 (s:va_state) : cr0_t = s.cr0
[@va_qattr] unfold let va_get_xer (s:va_state) : xer_t = s.xer | false | true | Vale.PPC64LE.Decls.fsti | {
"detail_errors": false,
"detail_hint_replay": false,
"initial_fuel": 2,
"initial_ifuel": 0,
"max_fuel": 1,
"max_ifuel": 1,
"no_plugins": false,
"no_smt": false,
"no_tactics": false,
"quake_hi": 1,
"quake_keep": false,
"quake_lo": 1,
"retry": false,
"reuse_hint_for": null,
"smtencoding_elim_box": true,
"smtencoding_l_arith_repr": "native",
"smtencoding_nl_arith_repr": "wrapped",
"smtencoding_valid_elim": false,
"smtencoding_valid_intro": true,
"tcnorm": true,
"trivial_pre_for_unannotated_effectful_fns": false,
"z3cliopt": [
"smt.arith.nl=false",
"smt.QI.EAGER_THRESHOLD=100",
"smt.CASE_SPLIT=3"
],
"z3refresh": false,
"z3rlimit": 5,
"z3rlimit_factor": 1,
"z3seed": 0,
"z3smtopt": [],
"z3version": "4.8.5"
} | null | val va_get_vec (x: vec) (s: va_state) : quad32 | [] | Vale.PPC64LE.Decls.va_get_vec | {
"file_name": "vale/code/arch/ppc64le/Vale.PPC64LE.Decls.fsti",
"git_rev": "12c5e9539c7e3c366c26409d3b86493548c4483e",
"git_url": "https://github.com/hacl-star/hacl-star.git",
"project_name": "hacl-star"
} | x: Vale.PPC64LE.Machine_s.vec -> s: Vale.PPC64LE.Decls.va_state -> Vale.PPC64LE.Machine_s.quad32 | {
"end_col": 78,
"end_line": 147,
"start_col": 66,
"start_line": 147
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.