Commit 06ba69d

Rollup merge of rust-lang#52391 - Amanieu:volatile_unaligned, r=alexcrichton
Add unaligned volatile intrinsics

Surprisingly enough, it turns out that unaligned volatile loads are actually useful for certain (very niche) types of lock-free code. I included unaligned volatile stores for completeness, but I currently do not know of any use cases for them.

These are only exposed as intrinsics for now. If they turn out to be useful in practice, we can work towards stabilizing them.

r? @alexcrichton
2 parents 487e961 + 303306c commit 06ba69d
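
As a rough usage sketch (not part of this commit): on a nightly compiler with the `core_intrinsics` feature, the new intrinsics can be called through a pointer that carries no alignment guarantee. The byte buffer and offset below are made up purely for illustration.

#![feature(core_intrinsics)]

use std::intrinsics::{unaligned_volatile_load, unaligned_volatile_store};

fn main() {
    // An 8-byte buffer; offset 1 is not guaranteed to be 4-byte aligned for u32.
    let mut buf = [0u8; 8];
    unsafe {
        // Treat the possibly misaligned offset as a *mut u32.
        let p = buf.as_mut_ptr().add(1) as *mut u32;
        // Volatile accesses that do not assume alignment.
        unaligned_volatile_store(p, 0xdead_beef_u32);
        assert_eq!(unaligned_volatile_load(p), 0xdead_beef_u32);
    }
}

(For non-volatile unaligned accesses, the stable ptr::read_unaligned / ptr::write_unaligned are the usual tools; the intrinsics here add volatile semantics on top.)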

6 files changed: +44 −8 lines


src/libcore/intrinsics.rs

Lines changed: 9 additions & 0 deletions
@@ -1085,6 +1085,15 @@ extern "rust-intrinsic" {
     /// [`std::ptr::write_volatile`](../../std/ptr/fn.write_volatile.html).
     pub fn volatile_store<T>(dst: *mut T, val: T);
 
+    /// Perform a volatile load from the `src` pointer
+    /// The pointer is not required to be aligned.
+    #[cfg(not(stage0))]
+    pub fn unaligned_volatile_load<T>(src: *const T) -> T;
+    /// Perform a volatile store to the `dst` pointer.
+    /// The pointer is not required to be aligned.
+    #[cfg(not(stage0))]
+    pub fn unaligned_volatile_store<T>(dst: *mut T, val: T);
+
     /// Returns the square root of an `f32`
     pub fn sqrtf32(x: f32) -> f32;
     /// Returns the square root of an `f64`

src/librustc_codegen_llvm/builder.rs

Lines changed: 7 additions & 1 deletion
@@ -54,6 +54,7 @@ bitflags! {
     pub struct MemFlags: u8 {
         const VOLATILE = 1 << 0;
         const NONTEMPORAL = 1 << 1;
+        const UNALIGNED = 1 << 2;
     }
 }
 
@@ -602,7 +603,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         let ptr = self.check_store(val, ptr);
         unsafe {
             let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
-            llvm::LLVMSetAlignment(store, align.abi() as c_uint);
+            let align = if flags.contains(MemFlags::UNALIGNED) {
+                1
+            } else {
+                align.abi() as c_uint
+            };
+            llvm::LLVMSetAlignment(store, align);
             if flags.contains(MemFlags::VOLATILE) {
                 llvm::LLVMSetVolatile(store, llvm::True);
             }

src/librustc_codegen_llvm/intrinsic.rs

Lines changed: 12 additions & 2 deletions
@@ -234,15 +234,20 @@ pub fn codegen_intrinsic_call<'a, 'tcx>(bx: &Builder<'a, 'tcx>,
             memset_intrinsic(bx, true, substs.type_at(0),
                              args[0].immediate(), args[1].immediate(), args[2].immediate())
         }
-        "volatile_load" => {
+        "volatile_load" | "unaligned_volatile_load" => {
             let tp_ty = substs.type_at(0);
             let mut ptr = args[0].immediate();
             if let PassMode::Cast(ty) = fn_ty.ret.mode {
                 ptr = bx.pointercast(ptr, ty.llvm_type(cx).ptr_to());
             }
             let load = bx.volatile_load(ptr);
+            let align = if name == "unaligned_volatile_load" {
+                1
+            } else {
+                cx.align_of(tp_ty).abi() as u32
+            };
             unsafe {
-                llvm::LLVMSetAlignment(load, cx.align_of(tp_ty).abi() as u32);
+                llvm::LLVMSetAlignment(load, align);
             }
             to_immediate(bx, load, cx.layout_of(tp_ty))
         },
@@ -251,6 +256,11 @@ pub fn codegen_intrinsic_call<'a, 'tcx>(bx: &Builder<'a, 'tcx>,
             args[1].val.volatile_store(bx, dst);
             return;
         },
+        "unaligned_volatile_store" => {
+            let dst = args[0].deref(bx.cx);
+            args[1].val.unaligned_volatile_store(bx, dst);
+            return;
+        },
         "prefetch_read_data" | "prefetch_write_data" |
         "prefetch_read_instruction" | "prefetch_write_instruction" => {
             let expect = cx.get_intrinsic(&("llvm.prefetch"));

src/librustc_codegen_llvm/mir/operand.rs

Lines changed: 4 additions & 0 deletions
@@ -276,6 +276,10 @@ impl<'a, 'tcx> OperandValue {
         self.store_with_flags(bx, dest, MemFlags::VOLATILE);
     }
 
+    pub fn unaligned_volatile_store(self, bx: &Builder<'a, 'tcx>, dest: PlaceRef<'tcx>) {
+        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
+    }
+
     pub fn nontemporal_store(self, bx: &Builder<'a, 'tcx>, dest: PlaceRef<'tcx>) {
         self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
     }

src/librustc_typeck/check/intrinsic.rs

Lines changed: 2 additions & 2 deletions
@@ -270,9 +270,9 @@ pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             "roundf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32),
             "roundf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64),
 
-            "volatile_load" =>
+            "volatile_load" | "unaligned_volatile_load" =>
                 (1, vec![ tcx.mk_imm_ptr(param(0)) ], param(0)),
-            "volatile_store" =>
+            "volatile_store" | "unaligned_volatile_store" =>
                 (1, vec![ tcx.mk_mut_ptr(param(0)), param(0) ], tcx.mk_nil()),
 
             "ctpop" | "ctlz" | "ctlz_nonzero" | "cttz" | "cttz_nonzero" |

src/test/run-make-fulldeps/volatile-intrinsics/main.rs

Lines changed: 10 additions & 3 deletions
@@ -10,17 +10,24 @@
 
 #![feature(core_intrinsics, volatile)]
 
-use std::intrinsics::{volatile_load, volatile_store};
+use std::intrinsics::{
+    unaligned_volatile_load, unaligned_volatile_store, volatile_load, volatile_store,
+};
 use std::ptr::{read_volatile, write_volatile};
 
 pub fn main() {
     unsafe {
-        let mut i : isize = 1;
+        let mut i: isize = 1;
         volatile_store(&mut i, 2);
         assert_eq!(volatile_load(&i), 2);
     }
     unsafe {
-        let mut i : isize = 1;
+        let mut i: isize = 1;
+        unaligned_volatile_store(&mut i, 2);
+        assert_eq!(unaligned_volatile_load(&i), 2);
+    }
+    unsafe {
+        let mut i: isize = 1;
         write_volatile(&mut i, 2);
         assert_eq!(read_volatile(&i), 2);
     }
