Commit 11cb0bb
Rollup merge of rust-lang#98126 - fortanix:raoul/mitigate_stale_data_vulnerability, r=cuviper
Mitigate MMIO stale data vulnerability

Intel publicly disclosed the MMIO stale data vulnerability on June 14. To mitigate this vulnerability, compiler changes are required for the `x86_64-fortanix-unknown-sgx` target. cc: `@jethrogb`
2 parents 7cf4f09 + 6a6910e commit 11cb0bb

File tree

3 files changed: +142 -10 lines changed


library/std/src/sys/sgx/abi/usercalls/alloc.rs

+110 -10
@@ -1,13 +1,16 @@
 #![allow(unused)]
 
+use crate::arch::asm;
 use crate::cell::UnsafeCell;
+use crate::cmp;
+use crate::convert::TryInto;
 use crate::mem;
 use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut};
 use crate::ptr::{self, NonNull};
 use crate::slice;
 use crate::slice::SliceIndex;
 
-use super::super::mem::is_user_range;
+use super::super::mem::{is_enclave_range, is_user_range};
 use fortanix_sgx_abi::*;
 
 /// A type that can be safely read from or written to userspace.
@@ -210,7 +213,9 @@ where
         unsafe {
             // Mustn't call alloc with size 0.
             let ptr = if size > 0 {
-                rtunwrap!(Ok, super::alloc(size, T::align_of())) as _
+                // `copy_to_userspace` is more efficient when data is 8-byte aligned
+                let alignment = cmp::max(T::align_of(), 8);
+                rtunwrap!(Ok, super::alloc(size, alignment)) as _
             } else {
                 T::align_of() as _ // dangling pointer ok for size 0
             };
@@ -225,13 +230,9 @@ where
     /// Copies `val` into freshly allocated space in user memory.
     pub fn new_from_enclave(val: &T) -> Self {
         unsafe {
-            let ret = Self::new_uninit_bytes(mem::size_of_val(val));
-            ptr::copy(
-                val as *const T as *const u8,
-                ret.0.as_ptr() as *mut u8,
-                mem::size_of_val(val),
-            );
-            ret
+            let mut user = Self::new_uninit_bytes(mem::size_of_val(val));
+            user.copy_from_enclave(val);
+            user
         }
     }
 
@@ -304,6 +305,105 @@ where
     }
 }
 
+/// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`
+///
+/// This function mitigates stale data vulnerabilities by ensuring all writes to untrusted memory are either:
+///  - preceded by the VERW instruction and followed by the MFENCE; LFENCE instruction sequence
+///  - or are in multiples of 8 bytes, aligned to an 8-byte boundary
+///
+/// # Panics
+/// This function panics if:
+///
+/// * The `src` pointer is null
+/// * The `dst` pointer is null
+/// * The `src` memory range is not in enclave memory
+/// * The `dst` memory range is not in user memory
+///
+/// # References
+///  - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00615.html
+///  - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/technical-documentation/processor-mmio-stale-data-vulnerabilities.html#inpage-nav-3-2-2
+pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+    unsafe fn copy_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+        unsafe {
+            let mut seg_sel: u16 = 0;
+            for off in 0..len {
+                asm!("
+                    mov %ds, ({seg_sel})
+                    verw ({seg_sel})
+                    movb {val}, ({dst})
+                    mfence
+                    lfence
+                    ",
+                    val = in(reg_byte) *src.offset(off as isize),
+                    dst = in(reg) dst.offset(off as isize),
+                    seg_sel = in(reg) &mut seg_sel,
+                    options(nostack, att_syntax)
+                );
+            }
+        }
+    }
+
+    unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
+        unsafe {
+            asm!(
+                "rep movsq (%rsi), (%rdi)",
+                inout("rcx") len / 8 => _,
+                inout("rdi") dst => _,
+                inout("rsi") src => _,
+                options(att_syntax, nostack, preserves_flags)
+            );
+        }
+    }
+    assert!(!src.is_null());
+    assert!(!dst.is_null());
+    assert!(is_enclave_range(src, len));
+    assert!(is_user_range(dst, len));
+    assert!(len < isize::MAX as usize);
+    assert!(!(src as usize).overflowing_add(len).1);
+    assert!(!(dst as usize).overflowing_add(len).1);
+
+    if len < 8 {
+        // Can't align on 8 byte boundary: copy safely byte per byte
+        unsafe {
+            copy_bytewise_to_userspace(src, dst, len);
+        }
+    } else if len % 8 == 0 && dst as usize % 8 == 0 {
+        // Copying 8-byte aligned quadwords: copy quad word per quad word
+        unsafe {
+            copy_aligned_quadwords_to_userspace(src, dst, len);
+        }
+    } else {
+        // Split copies into three parts:
+        // +--------+
+        // | small0 | Chunk smaller than 8 bytes
+        // +--------+
+        // |   big  | Chunk 8-byte aligned, and size a multiple of 8 bytes
+        // +--------+
+        // | small1 | Chunk smaller than 8 bytes
+        // +--------+
+
+        unsafe {
+            // Copy small0
+            let small0_size = (8 - dst as usize % 8) as u8;
+            let small0_src = src;
+            let small0_dst = dst;
+            copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);
+
+            // Copy big
+            let small1_size = ((len - small0_size as usize) % 8) as u8;
+            let big_size = len - small0_size as usize - small1_size as usize;
+            let big_src = src.offset(small0_size as _);
+            let big_dst = dst.offset(small0_size as _);
+            copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);
+
+            // Copy small1
+            let small1_src = src.offset(big_size as isize + small0_size as isize);
+            let small1_dst = dst.offset(big_size as isize + small0_size as isize);
+            copy_bytewise_to_userspace(small1_src, small1_dst, small1_size as _);
+        }
+    }
+}
+
 #[unstable(feature = "sgx_platform", issue = "56975")]
 impl<T: ?Sized> UserRef<T>
 where
@@ -352,7 +452,7 @@ where
     pub fn copy_from_enclave(&mut self, val: &T) {
         unsafe {
             assert_eq!(mem::size_of_val(val), mem::size_of_val(&*self.0.get()));
-            ptr::copy(
+            copy_to_userspace(
                 val as *const T as *const u8,
                 self.0.get() as *mut T as *mut u8,
                 mem::size_of_val(val),
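
Note on the unaligned branch added above: the small0/big/small1 sizes are plain arithmetic on the destination pointer and the length. The following standalone sketch (a hypothetical helper, not part of this patch, with the VERW/MFENCE/LFENCE writes omitted) mirrors that arithmetic so it can be sanity-checked outside the enclave:

    // Hypothetical sketch of the split arithmetic used by `copy_to_userspace`
    // in the unaligned case; not part of the patch.
    fn split_unaligned_copy(dst: usize, len: usize) -> (usize, usize, usize) {
        // This branch is only reached when len >= 8 and the copy is not
        // already an aligned multiple of 8 bytes.
        assert!(len >= 8 && (len % 8 != 0 || dst % 8 != 0));
        let small0 = 8 - dst % 8;        // bytewise prefix until `dst` is 8-byte aligned
        let small1 = (len - small0) % 8; // bytewise suffix after the aligned middle
        let big = len - small0 - small1; // aligned middle, a multiple of 8 bytes
        (small0, big, small1)
    }

    fn main() {
        // e.g. dst % 8 == 3 and len == 20: 5 bytewise + 8 quadword + 7 bytewise bytes
        assert_eq!(split_unaligned_copy(0x1003, 20), (5, 8, 7));
    }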

library/std/src/sys/sgx/abi/usercalls/mod.rs

+2
@@ -6,6 +6,8 @@ use crate::time::{Duration, Instant};
 pub(crate) mod alloc;
 #[macro_use]
 pub(crate) mod raw;
+#[cfg(test)]
+mod tests;
 
 use self::raw::*;

library/std/src/sys/sgx/abi/usercalls/tests.rs (new file)

+30
@@ -0,0 +1,30 @@
+use super::alloc::copy_to_userspace;
+use super::alloc::User;
+
+#[test]
+fn test_copy_function() {
+    let mut src = [0u8; 100];
+    let mut dst = User::<[u8]>::uninitialized(100);
+
+    for i in 0..src.len() {
+        src[i] = i as _;
+    }
+
+    for size in 0..48 {
+        // For all possible alignment
+        for offset in 0..8 {
+            // overwrite complete dst
+            dst.copy_from_enclave(&[0u8; 100]);
+
+            // Copy src[0..size] to dst + offset
+            unsafe { copy_to_userspace(src.as_ptr(), dst.as_mut_ptr().offset(offset), size) };
+
+            // Verify copy
+            for byte in 0..size {
+                unsafe {
+                    assert_eq!(*dst.as_ptr().offset(offset + byte as isize), src[byte as usize]);
+                }
+            }
+        }
+    }
+}
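
The test sweeps copy sizes 0..48 and destination offsets 0..8; since `User::<[u8]>::uninitialized` now allocates with at least 8-byte alignment, this covers every destination alignment and therefore all three branches of `copy_to_userspace`. A minimal sketch, assuming a hypothetical `copy_path` helper that is not in the patch, makes the branch selection explicit:

    // Hypothetical helper (not part of the patch) classifying which path of
    // `copy_to_userspace` a given destination alignment and length take.
    #[derive(Debug, PartialEq)]
    enum CopyPath {
        Bytewise,         // len < 8: one VERW + MFENCE/LFENCE sequence per byte
        AlignedQuadwords, // len % 8 == 0 and dst 8-byte aligned: rep movsq
        Split,            // bytewise prefix + quadword middle + bytewise suffix
    }

    fn copy_path(dst_align: usize, len: usize) -> CopyPath {
        if len < 8 {
            CopyPath::Bytewise
        } else if len % 8 == 0 && dst_align % 8 == 0 {
            CopyPath::AlignedQuadwords
        } else {
            CopyPath::Split
        }
    }

    fn main() {
        assert_eq!(copy_path(0, 7), CopyPath::Bytewise);
        assert_eq!(copy_path(0, 16), CopyPath::AlignedQuadwords);
        assert_eq!(copy_path(3, 20), CopyPath::Split);
    }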
