Skip to content

Commit

Permalink
Merge changes Ie0b72d5b,I304c04e2
Browse files Browse the repository at this point in the history
* changes:
  Clear stack with saved SIMD registers
  ipsec_xvalid: clear scratch SIMD registers after setting patterns
  • Loading branch information
tkanteck authored and Gerrit Code Review committed Oct 24, 2019
2 parents 0230220 + 3a1bc6a commit 07e0d44
Show file tree
Hide file tree
Showing 10 changed files with 251 additions and 2 deletions.
34 changes: 34 additions & 0 deletions LibTestApp/ipsec_xvalid.c
Original file line number Diff line number Diff line change
Expand Up @@ -1133,6 +1133,29 @@ perform_safe_checks(MB_MGR *mgr, const enum arch_type_e arch,
return 0;
}

/*
 * Zero the scratch (caller-saved) SIMD registers for the given
 * architecture, so no sensitive data survives in them across calls.
 *
 * @param arch architecture whose register-clearing routine to invoke
 *
 * Terminates the process if the architecture value is not recognized.
 */
static void
clear_scratch_simd(const enum arch_type_e arch)
{
        if (arch == ARCH_SSE || arch == ARCH_AESNI_EMU) {
                clear_scratch_xmms_sse();
        } else if (arch == ARCH_AVX) {
                clear_scratch_xmms_avx();
        } else if (arch == ARCH_AVX2) {
                clear_scratch_ymms();
        } else if (arch == ARCH_AVX512) {
                clear_scratch_zmms();
        } else {
                fprintf(stderr, "Invalid architecture\n");
                exit(EXIT_FAILURE);
        }
}

/* Performs test using AES_HMAC or DOCSIS */
static int
do_test(MB_MGR *enc_mb_mgr, const enum arch_type_e enc_arch,
Expand Down Expand Up @@ -1203,6 +1226,9 @@ do_test(MB_MGR *enc_mb_mgr, const enum arch_type_e enc_arch,
if (safe_check) {
uint8_t *rsp_ptr;

/* Clear scratch registers before expanding keys to prevent
* other functions from storing sensitive data in stack */
clear_scratch_simd(enc_arch);
if (prepare_keys(enc_mb_mgr, enc_keys, key, params, 0) < 0)
goto exit;

Expand Down Expand Up @@ -1252,6 +1278,10 @@ do_test(MB_MGR *enc_mb_mgr, const enum arch_type_e enc_arch,
/* Randomize memory for input digest */
generate_random_buf(in_digest, tag_size);

/* Clear scratch registers before submitting job to prevent
* other functions from storing sensitive data in stack */
if (safe_check)
clear_scratch_simd(enc_arch);
job = IMB_SUBMIT_JOB(enc_mb_mgr);

if (!job)
Expand Down Expand Up @@ -1294,6 +1324,10 @@ do_test(MB_MGR *enc_mb_mgr, const enum arch_type_e enc_arch,
buf_size, tag_size, DECRYPT, dec_keys, iv) < 0)
goto exit;

/* Clear scratch registers before submitting job to prevent
* other functions from storing sensitive data in stack */
if (safe_check)
clear_scratch_simd(dec_arch);
job = IMB_SUBMIT_JOB(dec_mb_mgr);

if (!job)
Expand Down
115 changes: 115 additions & 0 deletions LibTestApp/misc.asm
Original file line number Diff line number Diff line change
Expand Up @@ -134,3 +134,118 @@ dump_zmms:
%endrep

ret

;-----------------------------------------------------------------------
; void clear_scratch_xmms_sse(void)
;
; Zeroes the scratch (caller-saved) XMM registers using SSE encodings.
;   Linux   (SysV AMD64): all of XMM0-XMM15 are volatile -> clear 16
;   Windows (x64 ABI)   : only XMM0-XMM5 are volatile    -> clear 6
; Clobbers only the registers it clears; flags untouched, no stack use.
;-----------------------------------------------------------------------
MKGLOBAL(clear_scratch_xmms_sse,function,internal)
clear_scratch_xmms_sse:

%ifdef LINUX
%assign NUM_SCRATCH_XMMS 16
%else
%assign NUM_SCRATCH_XMMS 6
%endif ; LINUX

%assign reg_idx 0
%rep NUM_SCRATCH_XMMS
        pxor    xmm %+ reg_idx, xmm %+ reg_idx
%assign reg_idx (reg_idx + 1)
%endrep

        ret

;-----------------------------------------------------------------------
; void clear_scratch_xmms_avx(void)
;
; Zeroes the scratch (caller-saved) XMM registers using AVX encodings.
;   Linux   (SysV AMD64): all XMM registers are volatile; a single
;                         vzeroall wipes XMM0-XMM15 (full YMM width).
;   Windows (x64 ABI)   : only XMM0-XMM5 are volatile, so just those
;                         six are cleared; XMM6-XMM15 keep their values
;                         for the caller to restore.
; Note: intended to be called before the XMM6-XMM15 restore on Windows.
;-----------------------------------------------------------------------
MKGLOBAL(clear_scratch_xmms_avx,function,internal)
clear_scratch_xmms_avx:

%ifdef LINUX
        vzeroall
%else
        vpxor   xmm0, xmm0
        vpxor   xmm1, xmm1
        vpxor   xmm2, xmm2
        vpxor   xmm3, xmm3
        vpxor   xmm4, xmm4
        vpxor   xmm5, xmm5
%endif ; LINUX

        ret

;-----------------------------------------------------------------------
; void clear_scratch_ymms(void)
;
; Zeroes the scratch (caller-saved) YMM registers.
;   Linux   (SysV AMD64): all YMM registers are volatile; vzeroall
;                         wipes YMM0-YMM15 in one instruction.
;   Windows (x64 ABI)   : YMM0-YMM5 are fully volatile and are cleared
;                         here. The upper 128 bits of YMM6-YMM15 are
;                         volatile too, but their lower 128 bits must
;                         survive for the caller's XMM6-XMM15 restore —
;                         a restore whose VEX-encoded loads also zero
;                         those upper halves, so they need no clearing
;                         here.
; Note: intended to be called before the XMM6-XMM15 restore on Windows.
;-----------------------------------------------------------------------
MKGLOBAL(clear_scratch_ymms,function,internal)
clear_scratch_ymms:

%ifdef LINUX
        vzeroall
%else
        vpxor   ymm0, ymm0
        vpxor   ymm1, ymm1
        vpxor   ymm2, ymm2
        vpxor   ymm3, ymm3
        vpxor   ymm4, ymm4
        vpxor   ymm5, ymm5
%endif ; LINUX

        ret

;
; void clear_scratch_zmms(void)
;
; This function clears all scratch (caller-saved) ZMM registers.
;
; It should be called before restoring the XMM registers
; for Windows (XMM6-XMM15). YMM forms of the XOR are used
; on purpose, since XOR'ing YMM registers is faster
; than XOR'ing ZMM registers, and an EVEX-encoded write to
; a YMM register also clears the upper 256 bits of the
; corresponding ZMM register.
;
MKGLOBAL(clear_scratch_zmms,function,internal)
clear_scratch_zmms:

; On Linux, all ZMM registers are scratch registers
%ifdef LINUX
vzeroall
;; vzeroall only clears the first 16 ZMM registers (ZMM0-ZMM15);
;; ZMM16-ZMM31 must be cleared explicitly below
%assign i 16
%rep 16
vpxorq ymm %+ i, ymm %+ i
%assign i (i+1)
%endrep
; On Windows, ZMM0-ZMM5 and ZMM16-ZMM31 registers are scratch registers.
; ZMM6-ZMM15 upper 384 bits are scratch registers too, but
; the lower 128 bits are to be restored after calling this function,
; and that restore clears the upper bits too, so only the fully
; volatile registers are XOR'ed here.
%else
%assign i 0
%rep 6
vpxorq ymm %+ i, ymm %+ i
%assign i (i+1)
%endrep

%assign i 16
%rep 16
vpxorq ymm %+ i, ymm %+ i
%assign i (i+1)
%endrep
%endif ; LINUX

ret
6 changes: 6 additions & 0 deletions LibTestApp/misc.h
Original file line number Diff line number Diff line change
Expand Up @@ -49,4 +49,10 @@ void dump_xmms_avx(void);
void dump_ymms(void);
void dump_zmms(void);

/* Functions to clear all scratch SIMD registers */
void clear_scratch_xmms_sse(void);
void clear_scratch_xmms_avx(void);
void clear_scratch_ymms(void);
void clear_scratch_zmms(void);

#endif /* XVALIDAPP_MISC_H */
10 changes: 9 additions & 1 deletion avx/sha1_one_block_avx.asm
Original file line number Diff line number Diff line change
Expand Up @@ -477,9 +477,17 @@ loop3_5:
vmovdqa xmm7, [rsp + 1 * 16]
vmovdqa xmm6, [rsp + 0 * 16]

mov rsp,[_RSP]
%ifdef SAFE_DATA
;; Clear potential sensitive data stored in stack
vpxor xmm0, xmm0
vmovdqa [rsp + 0 * 16], xmm0
vmovdqa [rsp + 1 * 16], xmm0
vmovdqa [rsp + 2 * 16], xmm0
%endif

mov rsp,[_RSP]
%endif ;; LINUX

pop r13
pop r12
pop rdi
Expand Down
12 changes: 12 additions & 0 deletions avx/sha256_one_block_avx.asm
Original file line number Diff line number Diff line change
Expand Up @@ -519,7 +519,19 @@ done_hash:
vmovdqa xmm11,[rsp + _XMM_SAVE + 5*16]
vmovdqa xmm12,[rsp + _XMM_SAVE + 6*16]
vmovdqa xmm13,[rsp + _XMM_SAVE + 7*16]
%ifdef SAFE_DATA
;; Clear potential sensitive data stored in stack
vpxor xmm0, xmm0
vmovdqa [rsp + _XMM_SAVE + 0 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 1 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 2 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 3 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 4 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 5 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 6 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 7 * 16], xmm0
%endif
%endif ;; LINUX

add rsp, STACK_size

Expand Down
12 changes: 12 additions & 0 deletions avx/sha512_one_block_avx.asm
Original file line number Diff line number Diff line change
Expand Up @@ -439,7 +439,19 @@ done_hash:
vmovdqa xmm11,[rsp + _XMM_SAVE + 5*16]
vmovdqa xmm12,[rsp + _XMM_SAVE + 6*16]
vmovdqa xmm13,[rsp + _XMM_SAVE + 7*16]
%ifdef SAFE_DATA
;; Clear potential sensitive data stored in stack
vpxor xmm0, xmm0
vmovdqa [rsp + _XMM_SAVE + 0 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 1 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 2 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 3 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 4 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 5 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 6 * 16], xmm0
vmovdqa [rsp + _XMM_SAVE + 7 * 16], xmm0
%endif
%endif ;; LINUX

add rsp, STACK_size

Expand Down
30 changes: 30 additions & 0 deletions include/save_xmms.asm
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,21 @@ restore_xmms:
movdqa xmm13, [ARG1 + 7*16]
movdqa xmm14, [ARG1 + 8*16]
movdqa xmm15, [ARG1 + 9*16]
%ifdef SAFE_DATA
;; Clear potential sensitive data stored in stack
pxor xmm0, xmm0
movdqa [ARG1 + 0 * 16], xmm0
movdqa [ARG1 + 1 * 16], xmm0
movdqa [ARG1 + 2 * 16], xmm0
movdqa [ARG1 + 3 * 16], xmm0
movdqa [ARG1 + 4 * 16], xmm0
movdqa [ARG1 + 5 * 16], xmm0
movdqa [ARG1 + 6 * 16], xmm0
movdqa [ARG1 + 7 * 16], xmm0
movdqa [ARG1 + 8 * 16], xmm0
movdqa [ARG1 + 9 * 16], xmm0
%endif

ret


Expand Down Expand Up @@ -95,6 +110,21 @@ restore_xmms_avx:
vmovdqa xmm13, [ARG1 + 7*16]
vmovdqa xmm14, [ARG1 + 8*16]
vmovdqa xmm15, [ARG1 + 9*16]

%ifdef SAFE_DATA
;; Clear potential sensitive data stored in stack
vpxor xmm0, xmm0
vmovdqa [ARG1 + 0 * 16], xmm0
vmovdqa [ARG1 + 1 * 16], xmm0
vmovdqa [ARG1 + 2 * 16], xmm0
vmovdqa [ARG1 + 3 * 16], xmm0
vmovdqa [ARG1 + 4 * 16], xmm0
vmovdqa [ARG1 + 5 * 16], xmm0
vmovdqa [ARG1 + 6 * 16], xmm0
vmovdqa [ARG1 + 7 * 16], xmm0
vmovdqa [ARG1 + 8 * 16], xmm0
vmovdqa [ARG1 + 9 * 16], xmm0
%endif
ret

%ifdef LINUX
Expand Down
10 changes: 9 additions & 1 deletion sse/sha1_one_block_sse.asm
Original file line number Diff line number Diff line change
Expand Up @@ -488,9 +488,17 @@ loop3_5:
movdqa xmm7, [rsp + 1 * 16]
movdqa xmm6, [rsp + 0 * 16]

mov rsp, [_RSP]
%ifdef SAFE_DATA
;; Clear potential sensitive data stored in stack
pxor xmm0, xmm0
movdqa [rsp + 0 * 16], xmm0
movdqa [rsp + 1 * 16], xmm0
movdqa [rsp + 2 * 16], xmm0
%endif

mov rsp, [_RSP]
%endif ;; LINUX

pop r13
pop r12
pop rdi
Expand Down
11 changes: 11 additions & 0 deletions sse/sha256_one_block_sse.asm
Original file line number Diff line number Diff line change
Expand Up @@ -479,7 +479,18 @@ done_hash:
movdqa xmm10,[rsp + _XMM_SAVE + 4*16]
movdqa xmm11,[rsp + _XMM_SAVE + 5*16]
movdqa xmm12,[rsp + _XMM_SAVE + 6*16]
%ifdef SAFE_DATA
;; Clear potential sensitive data stored in stack
pxor xmm0, xmm0
movdqa [rsp + _XMM_SAVE + 0 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 1 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 2 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 3 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 4 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 5 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 6 * 16], xmm0
%endif
%endif ;; LINUX

add rsp, STACK_size

Expand Down
13 changes: 13 additions & 0 deletions sse/sha512_one_block_sse.asm
Original file line number Diff line number Diff line change
Expand Up @@ -445,7 +445,20 @@ done_hash:
movdqa xmm11,[rsp + _XMM_SAVE + 5*16]
movdqa xmm12,[rsp + _XMM_SAVE + 6*16]
movdqa xmm13,[rsp + _XMM_SAVE + 7*16]

%ifdef SAFE_DATA
;; Clear potential sensitive data stored in stack
pxor xmm0, xmm0
movdqa [rsp + _XMM_SAVE + 0 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 1 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 2 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 3 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 4 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 5 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 6 * 16], xmm0
movdqa [rsp + _XMM_SAVE + 7 * 16], xmm0
%endif
%endif ;; LINUX

add rsp, STACK_size

Expand Down

0 comments on commit 07e0d44

Please sign in to comment.