use crate::llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope};
use crate::llvm::{self, False, BasicBlock};
use crate::common::Funclet;
use crate::context::CodegenCx;
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
use syntax::symbol::LocalInternedString;
use rustc_codegen_ssa::common::{IntPredicate, TypeKind, RealPredicate};
use rustc_codegen_ssa::MemFlags;
use libc::{c_uint, c_char};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{self, Align, Size, TyLayout};
use rustc::hir::def_id::DefId;
use rustc::session::config;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::base::to_immediate;
use rustc_codegen_ssa::mir::operand::{OperandValue, OperandRef};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_target::spec::{HasTargetSpec, Target};
use std::borrow::Cow;
use std::ffi::CStr;
use std::ops::{Deref, Range};
use std::ptr;
use std::iter::TrustedLen;

// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll, 'tcx> {
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    pub cx: &'a CodegenCx<'ll, 'tcx>,
}

impl Drop for Builder<'a, 'll, 'tcx> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}

// FIXME(eddyb) use a checked constructor when they become `const fn`.
const EMPTY_C_STR: &CStr = unsafe {
    CStr::from_bytes_with_nul_unchecked(b"\0")
};

/// Empty string, to be used where LLVM expects an instruction name, indicating
/// that the instruction is to be left unnamed (i.e. numbered, in textual IR).
// FIXME(eddyb) pass `&CStr` directly to FFI once it's a thin pointer.
const UNNAMED: *const c_char = EMPTY_C_STR.as_ptr();

impl BackendTypes for Builder<'_, 'll, 'tcx> {
    type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
    type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
    type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
    type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;

    type DIScope = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIScope;
}

impl ty::layout::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &ty::layout::TargetDataLayout {
        self.cx.data_layout()
    }
}

impl ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.cx.tcx
    }
}

impl ty::layout::HasParamEnv<'tcx> for Builder<'_, '_, 'tcx> {
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.cx.param_env()
    }
}

impl HasTargetSpec for Builder<'_, '_, 'tcx> {
    fn target_spec(&self) -> &Target {
        &self.cx.target_spec()
    }
}

impl ty::layout::LayoutOf for Builder<'_, '_, 'tcx> {
    type Ty = Ty<'tcx>;
    type TyLayout = TyLayout<'tcx>;

    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
        self.cx.layout_of(ty)
    }
}

impl Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.cx
    }
}

impl HasCodegen<'tcx> for Builder<'_, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;
}

macro_rules! builder_methods_for_value_instructions {
    ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
                llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
            }
        })+
    }
}
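// Illustrative sketch (not generated code): with the macro above, an entry such
// as `add(a, b) => LLVMBuildAdd` expands to roughly
//
//     fn add(&mut self, a: &'ll Value, b: &'ll Value) -> &'ll Value {
//         unsafe { llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED) }
//     }
//
// i.e. each listed method is a thin wrapper over one LLVM C API call that
// leaves the resulting instruction unnamed.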

impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
    fn new_block<'b>(
        cx: &'a CodegenCx<'ll, 'tcx>,
        llfn: &'ll Value,
        name: &'b str
    ) -> Self {
        let mut bx = Builder::with_cx(cx);
        let llbb = unsafe {
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(
                cx.llcx,
                llfn,
                name.as_ptr()
            )
        };
        bx.position_at_end(llbb);
        bx
    }

    fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe {
            llvm::LLVMCreateBuilderInContext(cx.llcx)
        };
        Builder {
            llbuilder,
            cx,
        }
    }

    fn build_sibling_block<'b>(&self, name: &'b str) -> Self {
        Builder::new_block(self.cx, self.llfn(), name)
    }

    fn llbb(&self) -> &'ll BasicBlock {
        unsafe {
            llvm::LLVMGetInsertBlock(self.llbuilder)
        }
    }

    fn position_at_end(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    fn ret_void(&mut self) {
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    fn br(&mut self, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    fn cond_br(
        &mut self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    fn switch(
        &mut self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)> + TrustedLen,
    ) {
        let switch = unsafe {
            llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint)
        };
        for (on_val, dest) in cases {
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe {
                llvm::LLVMAddCase(switch, on_val, dest)
            }
        }
    }

    fn invoke(
        &mut self,
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("Invoke {:?} with args ({:?})",
               llfn,
               args);

        let args = self.check_call("invoke", llfn, args);
        let bundle = funclet.map(|funclet| funclet.bundle());
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(self.llbuilder,
                                      llfn,
                                      args.as_ptr(),
                                      args.len() as c_uint,
                                      then,
                                      catch,
                                      bundle,
                                      UNNAMED)
        }
    }

    fn unreachable(&mut self) {
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    builder_methods_for_value_instructions! {
        add(a, b) => LLVMBuildAdd,
        fadd(a, b) => LLVMBuildFAdd,
        sub(a, b) => LLVMBuildSub,
        fsub(a, b) => LLVMBuildFSub,
        mul(a, b) => LLVMBuildMul,
        fmul(a, b) => LLVMBuildFMul,
        udiv(a, b) => LLVMBuildUDiv,
        exactudiv(a, b) => LLVMBuildExactUDiv,
        sdiv(a, b) => LLVMBuildSDiv,
        exactsdiv(a, b) => LLVMBuildExactSDiv,
        fdiv(a, b) => LLVMBuildFDiv,
        urem(a, b) => LLVMBuildURem,
        srem(a, b) => LLVMBuildSRem,
        frem(a, b) => LLVMBuildFRem,
        shl(a, b) => LLVMBuildShl,
        lshr(a, b) => LLVMBuildLShr,
        ashr(a, b) => LLVMBuildAShr,
        and(a, b) => LLVMBuildAnd,
        or(a, b) => LLVMBuildOr,
        xor(a, b) => LLVMBuildXor,
        neg(x) => LLVMBuildNeg,
        fneg(x) => LLVMBuildFNeg,
        not(x) => LLVMBuildNot,
        unchecked_sadd(x, y) => LLVMBuildNSWAdd,
        unchecked_uadd(x, y) => LLVMBuildNUWAdd,
        unchecked_ssub(x, y) => LLVMBuildNSWSub,
        unchecked_usub(x, y) => LLVMBuildNUWSub,
        unchecked_smul(x, y) => LLVMBuildNSWMul,
        unchecked_umul(x, y) => LLVMBuildNUWMul,
    }

    fn fadd_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fsub_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fmul_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn fdiv_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn frem_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'_>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        use syntax::ast::IntTy::*;
        use syntax::ast::UintTy::*;
        use rustc::ty::{Int, Uint};

        let new_sty = match ty.sty {
            Int(Isize) => Int(self.tcx.sess.target.isize_ty),
            Uint(Usize) => Uint(self.tcx.sess.target.usize_ty),
            ref t @ Uint(_) | ref t @ Int(_) => t.clone(),
            _ => panic!("tried to get overflow intrinsic for op applied to non-int type")
        };

        let name = match oop {
            OverflowOp::Add => match new_sty {
                Int(I8) => "llvm.sadd.with.overflow.i8",
                Int(I16) => "llvm.sadd.with.overflow.i16",
                Int(I32) => "llvm.sadd.with.overflow.i32",
                Int(I64) => "llvm.sadd.with.overflow.i64",
                Int(I128) => "llvm.sadd.with.overflow.i128",

                Uint(U8) => "llvm.uadd.with.overflow.i8",
                Uint(U16) => "llvm.uadd.with.overflow.i16",
                Uint(U32) => "llvm.uadd.with.overflow.i32",
                Uint(U64) => "llvm.uadd.with.overflow.i64",
                Uint(U128) => "llvm.uadd.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Sub => match new_sty {
                Int(I8) => "llvm.ssub.with.overflow.i8",
                Int(I16) => "llvm.ssub.with.overflow.i16",
                Int(I32) => "llvm.ssub.with.overflow.i32",
                Int(I64) => "llvm.ssub.with.overflow.i64",
                Int(I128) => "llvm.ssub.with.overflow.i128",

                Uint(U8) => "llvm.usub.with.overflow.i8",
                Uint(U16) => "llvm.usub.with.overflow.i16",
                Uint(U32) => "llvm.usub.with.overflow.i32",
                Uint(U64) => "llvm.usub.with.overflow.i64",
                Uint(U128) => "llvm.usub.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Mul => match new_sty {
                Int(I8) => "llvm.smul.with.overflow.i8",
                Int(I16) => "llvm.smul.with.overflow.i16",
                Int(I32) => "llvm.smul.with.overflow.i32",
                Int(I64) => "llvm.smul.with.overflow.i64",
                Int(I128) => "llvm.smul.with.overflow.i128",

                Uint(U8) => "llvm.umul.with.overflow.i8",
                Uint(U16) => "llvm.umul.with.overflow.i16",
                Uint(U32) => "llvm.umul.with.overflow.i32",
                Uint(U64) => "llvm.umul.with.overflow.i64",
                Uint(U128) => "llvm.umul.with.overflow.i128",

                _ => unreachable!(),
            },
        };

        let intrinsic = self.get_intrinsic(&name);
        let res = self.call(intrinsic, &[lhs, rhs], None);
        (
            self.extract_value(res, 0),
            self.extract_value(res, 1),
        )
    }
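    // Example (sketch): `checked_binop(OverflowOp::Add, <u32 ty>, a, b)` selects
    // `llvm.uadd.with.overflow.i32`; that intrinsic returns `{ i32, i1 }`, and the
    // two `extract_value` calls above split it into `(result, overflowed)`.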

    fn alloca(&mut self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe {
            llvm::LLVMGetFirstBasicBlock(self.llfn())
        });
        bx.dynamic_alloca(ty, name, align)
    }

    fn dynamic_alloca(&mut self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildAlloca(self.llbuilder, ty, UNNAMED)
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildAlloca(self.llbuilder, ty,
                                      name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

    fn array_alloca(&mut self,
                    ty: &'ll Type,
                    len: &'ll Value,
                    name: &str,
                    align: Align) -> &'ll Value {
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, UNNAMED)
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len,
                                           name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

    fn load(&mut self, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }

    fn volatile_load(&mut self, ptr: &'ll Value) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
            llvm::LLVMSetVolatile(load, llvm::True);
            load
        }
    }

    fn atomic_load(
        &mut self,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                UNNAMED,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }

    fn load_operand(
        &mut self,
        place: PlaceRef<'tcx, &'ll Value>
    ) -> OperandRef<'tcx, &'ll Value> {
        debug!("PlaceRef::load: {:?}", place);

        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());

        if place.layout.is_zst() {
            return OperandRef::new_zst(self, place.layout);
        }

        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: &layout::Scalar
        ) {
            let vr = scalar.valid_range.clone();
            match scalar.value {
                layout::Int(..) => {
                    let range = scalar.valid_range_exclusive(bx);
                    if range.start != range.end {
                        bx.range_metadata(load, range);
                    }
                }
                layout::Pointer if vr.start() < vr.end() && !vr.contains(&0) => {
                    bx.nonnull_metadata(load);
                }
                _ => {}
            }
        }

        let val = if let Some(llextra) = place.llextra {
            OperandValue::Ref(place.llval, Some(llextra), place.align)
        } else if place.layout.is_llvm_immediate() {
            let mut const_llval = None;
            unsafe {
                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
                    if llvm::LLVMIsGlobalConstant(global) == llvm::True {
                        const_llval = llvm::LLVMGetInitializer(global);
                    }
                }
            }
            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(place.llval, place.align);
                if let layout::Abi::Scalar(ref scalar) = place.layout.abi {
                    scalar_load_metadata(self, load, scalar);
                }
                load
            });
            OperandValue::Immediate(to_immediate(self, llval, place.layout))
        } else if let layout::Abi::ScalarPair(ref a, ref b) = place.layout.abi {
            let b_offset = a.value.size(self).align_to(b.value.align(self).abi);

            let mut load = |i, scalar: &layout::Scalar, align| {
                let llptr = self.struct_gep(place.llval, i as u64);
                let load = self.load(llptr, align);
                scalar_load_metadata(self, load, scalar);
                if scalar.is_bool() {
                    self.trunc(load, self.type_i1())
                } else {
                    load
                }
            };

            OperandValue::Pair(
                load(0, a, place.align),
                load(1, b, place.align.restrict_for_offset(b_offset)),
            )
        } else {
            OperandValue::Ref(place.llval, None, place.align)
        };

        OperandRef { val, layout: place.layout }
    }

    fn write_operand_repeatedly(
        mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) -> Self {
        let zero = self.const_usize(0);
        let count = self.const_usize(count);
        let start = dest.project_index(&mut self, zero).llval;
        let end = dest.project_index(&mut self, count).llval;

        let mut header_bx = self.build_sibling_block("repeat_loop_header");
        let mut body_bx = self.build_sibling_block("repeat_loop_body");
        let next_bx = self.build_sibling_block("repeat_loop_next");

        self.br(header_bx.llbb());
        let current = header_bx.phi(self.val_ty(start), &[start], &[self.llbb()]);

        let keep_going = header_bx.icmp(IntPredicate::IntNE, current, end);
        header_bx.cond_br(keep_going, body_bx.llbb(), next_bx.llbb());

        let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
        cg_elem.val.store(&mut body_bx,
                          PlaceRef::new_sized(current, cg_elem.layout, align));

        let next = body_bx.inbounds_gep(current, &[self.const_usize(1)]);
        body_bx.br(header_bx.llbb());
        header_bx.add_incoming_to_phi(current, next, body_bx.llbb());

        next_bx
    }
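    // The loop emitted above has roughly this shape (illustrative sketch; block
    // names match the sibling blocks created with `build_sibling_block`):
    //
    //     repeat_loop_header:
    //         current = phi [ start, <entry> ], [ next, repeat_loop_body ]
    //         br (current != end), repeat_loop_body, repeat_loop_next
    //     repeat_loop_body:
    //         store cg_elem -> current
    //         next = inbounds_gep current, 1
    //         br repeat_loop_header
    //     repeat_loop_next:
    //         ...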

    fn range_metadata(&mut self, load: &'ll Value, range: Range<u128>) {
        if self.sess().target.target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks a i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            let llty = self.cx.val_ty(load);
            let v = [
                self.cx.const_uint_big(llty, range.start),
                self.cx.const_uint_big(llty, range.end)
            ];

            llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx,
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }
    }

    fn nonnull_metadata(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }

    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

    fn store_with_flags(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align = if flags.contains(MemFlags::UNALIGNED) {
                1
            } else {
                align.bytes() as c_uint
            };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.const_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

    fn atomic_store(&mut self, val: &'ll Value, ptr: &'ll Value,
                    order: rustc_codegen_ssa::common::AtomicOrdering, size: Size) {
        debug!("Store {:?} -> {:?}", val, ptr);
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }

    fn gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, UNNAMED)
        }
    }

    fn inbounds_gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, UNNAMED)
        }
    }

    fn struct_gep(&mut self, ptr: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, UNNAMED)
        }
    }

    /* Casts */
    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
        unsafe {
            llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed)
        }
    }

    fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /* Comparisons */
    fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        let op = llvm::IntPredicate::from_generic(op);
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED)
        }
    }

    fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED)
        }
    }

    /* Miscellaneous instructions */
    fn memcpy(&mut self, dst: &'ll Value, dst_align: Align,
              src: &'ll Value, src_align: Align,
              size: &'ll Value, flags: MemFlags) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memcpy.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.type_i8p());
        let src = self.pointercast(src, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemCpy(self.llbuilder, dst, dst_align.bytes() as c_uint,
                                      src, src_align.bytes() as c_uint, size, is_volatile);
        }
    }

    fn memmove(&mut self, dst: &'ll Value, dst_align: Align,
               src: &'ll Value, src_align: Align,
               size: &'ll Value, flags: MemFlags) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memmove.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.type_i8p());
        let src = self.pointercast(src, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemMove(self.llbuilder, dst, dst_align.bytes() as c_uint,
                                       src, src_align.bytes() as c_uint, size, is_volatile);
        }
    }

    fn memset(
        &mut self,
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) {
        let ptr_width = &self.sess().target.target.target_pointer_width;
        let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
        let llintrinsicfn = self.get_intrinsic(&intrinsic_key);
        let ptr = self.pointercast(ptr, self.type_i8p());
        let align = self.const_u32(align.bytes() as u32);
        let volatile = self.const_bool(flags.contains(MemFlags::VOLATILE));
        self.call(llintrinsicfn, &[ptr, fill_byte, size, align, volatile], None);
    }

    fn select(
        &mut self, cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED)
        }
    }

    #[allow(dead_code)]
    fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED)
        }
    }

    fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED)
        }
    }

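    // `vector_splat` below uses the usual LLVM splat idiom: insert `elt` into
    // lane 0 of an undef vector, then shuffle it with an all-zero mask (a null
    // `<N x i32>` constant), so every lane of the result copies lane 0.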
    fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
            let vec_i32_ty = self.type_vector(self.type_i32(), num_elts as u64);
            self.shuffle_vector(vec, undef, self.const_null(vec_i32_ty))
        }
    }

    fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED)
        }
    }

    fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value,
                    idx: u64) -> &'ll Value {
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
                                       UNNAMED)
        }
    }

    fn landing_pad(&mut self, ty: &'ll Type, pers_fn: &'ll Value,
                   num_clauses: usize) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn,
                                      num_clauses as c_uint, UNNAMED)
        }
    }

    fn set_cleanup(&mut self, landing_pad: &'ll Value) {
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }

    fn resume(&mut self, exn: &'ll Value) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }

    fn cleanup_pad(&mut self,
                   parent: Option<&'ll Value>,
                   args: &[&'ll Value]) -> Funclet<'ll> {
        let name = const_cstr!("cleanuppad");
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(self.llbuilder,
                                          parent,
                                          args.len() as c_uint,
                                          args.as_ptr(),
                                          name.as_ptr())
        };
        Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
    }

    fn cleanup_ret(
        &mut self, funclet: &Funclet<'ll>,
        unwind: Option<&'ll BasicBlock>,
    ) -> &'ll Value {
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind)
        };
        ret.expect("LLVM does not have support for cleanupret")
    }

    fn catch_pad(&mut self,
                 parent: &'ll Value,
                 args: &[&'ll Value]) -> Funclet<'ll> {
        let name = const_cstr!("catchpad");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
                                        args.len() as c_uint, args.as_ptr(),
                                        name.as_ptr())
        };
        Funclet::new(ret.expect("LLVM does not have support for catchpad"))
    }

    fn catch_switch(
        &mut self,
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
        num_handlers: usize,
    ) -> &'ll Value {
        let name = const_cstr!("catchswitch");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
                                           num_handlers as c_uint,
                                           name.as_ptr())
        };
        ret.expect("LLVM does not have support for catchswitch")
    }

    fn add_handler(&mut self, catch_switch: &'ll Value, handler: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }

    fn set_personality_fn(&mut self, personality: &'ll Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }

    // Atomic Operations
    fn atomic_cmpxchg(
        &mut self,
        dst: &'ll Value,
        cmp: &'ll Value,
        src: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        failure_order: rustc_codegen_ssa::common::AtomicOrdering,
        weak: bool,
    ) -> &'ll Value {
        let weak = if weak { llvm::True } else { llvm::False };
        unsafe {
            llvm::LLVMRustBuildAtomicCmpXchg(
                self.llbuilder,
                dst,
                cmp,
                src,
                AtomicOrdering::from_generic(order),
                AtomicOrdering::from_generic(failure_order),
                weak
            )
        }
    }
    fn atomic_rmw(
        &mut self,
        op: rustc_codegen_ssa::common::AtomicRmwBinOp,
        dst: &'ll Value,
        src: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
                AtomicOrdering::from_generic(order),
                False)
        }
    }

    fn atomic_fence(
        &mut self,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        scope: rustc_codegen_ssa::common::SynchronizationScope
    ) {
        unsafe {
            llvm::LLVMRustBuildAtomicFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
                SynchronizationScope::from_generic(scope)
            );
        }
    }

    fn set_invariant_load(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }

    fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
    }

    fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
    }

    fn call(
        &mut self,
        llfn: &'ll Value,
        args: &[&'ll Value],
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
        debug!("Call {:?} with args ({:?})",
               llfn,
               args);

        let args = self.check_call("call", llfn, args);
        let bundle = funclet.map(|funclet| funclet.bundle());
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundle, UNNAMED
            )
        }
    }

    fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }

    unsafe fn delete_basic_block(&mut self, bb: &'ll BasicBlock) {
        llvm::LLVMDeleteBasicBlock(bb);
    }

    fn do_not_inline(&mut self, llret: &'ll Value) {
        llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
    }
}

impl StaticBuilderMethods for Builder<'a, 'll, 'tcx> {
    fn get_static(&mut self, def_id: DefId) -> &'ll Value {
        // Forward to the `get_static` method of `CodegenCx`
        self.cx().get_static(def_id)
    }

    fn static_panic_msg(
        &mut self,
        msg: Option<LocalInternedString>,
        filename: LocalInternedString,
        line: Self::Value,
        col: Self::Value,
        kind: &str,
    ) -> Self::Value {
        let align = self.tcx.data_layout.aggregate_align.abi
            .max(self.tcx.data_layout.i32_align.abi)
            .max(self.tcx.data_layout.pointer_align.abi);

        let filename = self.const_str_slice(filename);

        let with_msg_components;
        let without_msg_components;

        let components = if let Some(msg) = msg {
            let msg = self.const_str_slice(msg);
            with_msg_components = [msg, filename, line, col];
            &with_msg_components as &[_]
        } else {
            without_msg_components = [filename, line, col];
            &without_msg_components as &[_]
        };

        let struct_ = self.const_struct(&components, false);
        self.static_addr_of(struct_, align, Some(kind))
    }
}

impl Builder<'a, 'll, 'tcx> {
    pub fn llfn(&self) -> &'ll Value {
        unsafe {
            llvm::LLVMGetBasicBlockParent(self.llbb())
        }
    }

    fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }

    pub fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs) }
    }

    pub fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs) }
    }

    pub fn insert_element(
        &mut self, vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED)
        }
    }

    pub fn shuffle_vector(
        &mut self,
        v1: &'ll Value,
        v2: &'ll Value,
        mask: &'ll Value,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED)
        }
    }

    pub fn vector_reduce_fadd_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmul_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
    }
    pub fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
    }
    pub fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
    }
    pub fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
    }
    pub fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
    }
    pub fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    pub fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    pub fn vector_reduce_fmin_fast(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmax_fast(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
    }
    pub fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
    }

    pub fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    pub fn catch_ret(&mut self, funclet: &Funclet<'ll>, unwind: &'ll BasicBlock) -> &'ll Value {
        let ret = unsafe {
            llvm::LLVMRustBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind)
        };
        ret.expect("LLVM does not have support for catchret")
    }

    fn check_store(&mut self, val: &'ll Value, ptr: &'ll Value) -> &'ll Value {
        let dest_ptr_ty = self.cx.val_ty(ptr);
        let stored_ty = self.cx.val_ty(val);
        let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);

        assert_eq!(self.cx.type_kind(dest_ptr_ty), TypeKind::Pointer);

        if dest_ptr_ty == stored_ptr_ty {
            ptr
        } else {
            debug!("Type mismatch in store. \
                    Expected {:?}, got {:?}; inserting bitcast",
                   dest_ptr_ty, stored_ptr_ty);
            self.bitcast(ptr, stored_ptr_ty)
        }
    }

    fn check_call<'b>(&mut self,
                      typ: &str,
                      llfn: &'ll Value,
                      args: &'b [&'ll Value]) -> Cow<'b, [&'ll Value]> {
        let mut fn_ty = self.cx.val_ty(llfn);
        // Strip off pointers
        while self.cx.type_kind(fn_ty) == TypeKind::Pointer {
            fn_ty = self.cx.element_type(fn_ty);
        }

        assert!(self.cx.type_kind(fn_ty) == TypeKind::Function,
                "builder::{} not passed a function, but {:?}", typ, fn_ty);

        let param_tys = self.cx.func_params_types(fn_ty);

        let all_args_match = param_tys.iter()
            .zip(args.iter().map(|&v| self.val_ty(v)))
            .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);

        if all_args_match {
            return Cow::Borrowed(args);
        }

        let casted_args: Vec<_> = param_tys.into_iter()
            .zip(args.iter())
            .enumerate()
            .map(|(i, (expected_ty, &actual_val))| {
                let actual_ty = self.val_ty(actual_val);
                if expected_ty != actual_ty {
                    debug!("Type mismatch in function call of {:?}. \
                            Expected {:?} for param {}, got {:?}; injecting bitcast",
                           llfn, expected_ty, i, actual_ty);
                    self.bitcast(actual_val, expected_ty)
                } else {
                    actual_val
                }
            })
            .collect();

        Cow::Owned(casted_args)
    }
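    // For instance (hypothetical types), if a declared parameter is `i8*` but the
    // supplied argument is some `%T*`, the debug message above fires and a
    // `bitcast` of the argument to `i8*` is injected before the call is built.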

    pub fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED)
        }
    }

    fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
        if self.cx.sess().opts.optimize == config::OptLevel::No {
            return;
        }

        let size = size.bytes();
        if size == 0 {
            return;
        }

        let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);

        let ptr = self.pointercast(ptr, self.cx.type_i8p());
        self.call(lifetime_intrinsic, &[self.cx.const_u64(size), ptr], None);
    }

    fn phi(&mut self, ty: &'ll Type, vals: &[&'ll Value], bbs: &[&'ll BasicBlock]) -> &'ll Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = unsafe {
            llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED)
        };
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }

    fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }
}