Trait rustc_codegen_ssa::traits::builder::BuilderMethods
pub trait BuilderMethods<'a, 'tcx>:
HasCodegen<'tcx>
+ CoverageInfoBuilderMethods<'tcx>
+ DebugInfoBuilderMethods
+ ArgAbiMethods<'tcx>
+ AbiBuilderMethods<'tcx>
+ IntrinsicCallMethods<'tcx>
+ AsmBuilderMethods<'tcx>
+ StaticBuilderMethods
+ HasParamEnv<'tcx>
+ HasTargetSpec {
// Required methods
fn build(cx: &'a Self::CodegenCx, llbb: Self::BasicBlock) -> Self;
fn cx(&self) -> &Self::CodegenCx;
fn llbb(&self) -> Self::BasicBlock;
fn set_span(&mut self, span: Span);
fn append_block(
cx: &'a Self::CodegenCx,
llfn: Self::Function,
name: &str,
) -> Self::BasicBlock;
fn append_sibling_block(&mut self, name: &str) -> Self::BasicBlock;
fn switch_to_block(&mut self, llbb: Self::BasicBlock);
fn ret_void(&mut self);
fn ret(&mut self, v: Self::Value);
fn br(&mut self, dest: Self::BasicBlock);
fn cond_br(
&mut self,
cond: Self::Value,
then_llbb: Self::BasicBlock,
else_llbb: Self::BasicBlock,
);
fn switch(
&mut self,
v: Self::Value,
else_llbb: Self::BasicBlock,
cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock)>,
);
fn invoke(
&mut self,
llty: Self::Type,
fn_attrs: Option<&CodegenFnAttrs>,
fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
llfn: Self::Value,
args: &[Self::Value],
then: Self::BasicBlock,
catch: Self::BasicBlock,
funclet: Option<&Self::Funclet>,
instance: Option<Instance<'tcx>>,
) -> Self::Value;
fn unreachable(&mut self);
fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fadd_algebraic(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn sub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fsub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fsub_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fsub_algebraic(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn mul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fmul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fmul_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fmul_algebraic(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn exactudiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn exactsdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fdiv_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fdiv_algebraic(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn frem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn frem_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn frem_algebraic(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn shl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn lshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn ashr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn unchecked_sadd(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn unchecked_uadd(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn unchecked_ssub(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn unchecked_usub(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn unchecked_smul(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn unchecked_umul(
&mut self,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn and(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn or(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn xor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn neg(&mut self, v: Self::Value) -> Self::Value;
fn fneg(&mut self, v: Self::Value) -> Self::Value;
fn not(&mut self, v: Self::Value) -> Self::Value;
fn checked_binop(
&mut self,
oop: OverflowOp,
ty: Ty<'_>,
lhs: Self::Value,
rhs: Self::Value,
) -> (Self::Value, Self::Value);
fn from_immediate(&mut self, val: Self::Value) -> Self::Value;
fn to_immediate_scalar(
&mut self,
val: Self::Value,
scalar: Scalar,
) -> Self::Value;
fn alloca(&mut self, size: Size, align: Align) -> Self::Value;
fn dynamic_alloca(&mut self, size: Self::Value, align: Align) -> Self::Value;
fn load(
&mut self,
ty: Self::Type,
ptr: Self::Value,
align: Align,
) -> Self::Value;
fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;
fn atomic_load(
&mut self,
ty: Self::Type,
ptr: Self::Value,
order: AtomicOrdering,
size: Size,
) -> Self::Value;
fn load_operand(
&mut self,
place: PlaceRef<'tcx, Self::Value>,
) -> OperandRef<'tcx, Self::Value>;
fn write_operand_repeatedly(
&mut self,
elem: OperandRef<'tcx, Self::Value>,
count: u64,
dest: PlaceRef<'tcx, Self::Value>,
);
fn range_metadata(&mut self, load: Self::Value, range: WrappingRange);
fn nonnull_metadata(&mut self, load: Self::Value);
fn store(
&mut self,
val: Self::Value,
ptr: Self::Value,
align: Align,
) -> Self::Value;
fn store_with_flags(
&mut self,
val: Self::Value,
ptr: Self::Value,
align: Align,
flags: MemFlags,
) -> Self::Value;
fn atomic_store(
&mut self,
val: Self::Value,
ptr: Self::Value,
order: AtomicOrdering,
size: Size,
);
fn gep(
&mut self,
ty: Self::Type,
ptr: Self::Value,
indices: &[Self::Value],
) -> Self::Value;
fn inbounds_gep(
&mut self,
ty: Self::Type,
ptr: Self::Value,
indices: &[Self::Value],
) -> Self::Value;
fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn fptoui_sat(
&mut self,
val: Self::Value,
dest_ty: Self::Type,
) -> Self::Value;
fn fptosi_sat(
&mut self,
val: Self::Value,
dest_ty: Self::Type,
) -> Self::Value;
fn fptoui(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn fptosi(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn uitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn sitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn fptrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn fpext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn ptrtoint(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn inttoptr(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn bitcast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn intcast(
&mut self,
val: Self::Value,
dest_ty: Self::Type,
is_signed: bool,
) -> Self::Value;
fn pointercast(
&mut self,
val: Self::Value,
dest_ty: Self::Type,
) -> Self::Value;
fn icmp(
&mut self,
op: IntPredicate,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn fcmp(
&mut self,
op: RealPredicate,
lhs: Self::Value,
rhs: Self::Value,
) -> Self::Value;
fn memcpy(
&mut self,
dst: Self::Value,
dst_align: Align,
src: Self::Value,
src_align: Align,
size: Self::Value,
flags: MemFlags,
);
fn memmove(
&mut self,
dst: Self::Value,
dst_align: Align,
src: Self::Value,
src_align: Align,
size: Self::Value,
flags: MemFlags,
);
fn memset(
&mut self,
ptr: Self::Value,
fill_byte: Self::Value,
size: Self::Value,
align: Align,
flags: MemFlags,
);
fn select(
&mut self,
cond: Self::Value,
then_val: Self::Value,
else_val: Self::Value,
) -> Self::Value;
fn va_arg(&mut self, list: Self::Value, ty: Self::Type) -> Self::Value;
fn extract_element(
&mut self,
vec: Self::Value,
idx: Self::Value,
) -> Self::Value;
fn vector_splat(&mut self, num_elts: usize, elt: Self::Value) -> Self::Value;
fn extract_value(&mut self, agg_val: Self::Value, idx: u64) -> Self::Value;
fn insert_value(
&mut self,
agg_val: Self::Value,
elt: Self::Value,
idx: u64,
) -> Self::Value;
fn set_personality_fn(&mut self, personality: Self::Value);
fn cleanup_landing_pad(
&mut self,
pers_fn: Self::Value,
) -> (Self::Value, Self::Value);
fn filter_landing_pad(
&mut self,
pers_fn: Self::Value,
) -> (Self::Value, Self::Value);
fn resume(&mut self, exn0: Self::Value, exn1: Self::Value);
fn cleanup_pad(
&mut self,
parent: Option<Self::Value>,
args: &[Self::Value],
) -> Self::Funclet;
fn cleanup_ret(
&mut self,
funclet: &Self::Funclet,
unwind: Option<Self::BasicBlock>,
);
fn catch_pad(
&mut self,
parent: Self::Value,
args: &[Self::Value],
) -> Self::Funclet;
fn catch_switch(
&mut self,
parent: Option<Self::Value>,
unwind: Option<Self::BasicBlock>,
handlers: &[Self::BasicBlock],
) -> Self::Value;
fn atomic_cmpxchg(
&mut self,
dst: Self::Value,
cmp: Self::Value,
src: Self::Value,
order: AtomicOrdering,
failure_order: AtomicOrdering,
weak: bool,
) -> (Self::Value, Self::Value);
fn atomic_rmw(
&mut self,
op: AtomicRmwBinOp,
dst: Self::Value,
src: Self::Value,
order: AtomicOrdering,
) -> Self::Value;
fn atomic_fence(
&mut self,
order: AtomicOrdering,
scope: SynchronizationScope,
);
fn set_invariant_load(&mut self, load: Self::Value);
fn lifetime_start(&mut self, ptr: Self::Value, size: Size);
fn lifetime_end(&mut self, ptr: Self::Value, size: Size);
fn instrprof_increment(
&mut self,
fn_name: Self::Value,
hash: Self::Value,
num_counters: Self::Value,
index: Self::Value,
);
fn call(
&mut self,
llty: Self::Type,
fn_attrs: Option<&CodegenFnAttrs>,
fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
llfn: Self::Value,
args: &[Self::Value],
funclet: Option<&Self::Funclet>,
instance: Option<Instance<'tcx>>,
) -> Self::Value;
fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value);
// Provided methods
fn to_immediate(
&mut self,
val: Self::Value,
layout: TyAndLayout<'_>,
) -> Self::Value { ... }
fn load_from_place(
&mut self,
ty: Self::Type,
place: PlaceValue<Self::Value>,
) -> Self::Value { ... }
fn store_to_place(
&mut self,
val: Self::Value,
place: PlaceValue<Self::Value>,
) -> Self::Value { ... }
fn store_to_place_with_flags(
&mut self,
val: Self::Value,
place: PlaceValue<Self::Value>,
flags: MemFlags,
) -> Self::Value { ... }
fn ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value { ... }
fn inbounds_ptradd(
&mut self,
ptr: Self::Value,
offset: Self::Value,
) -> Self::Value { ... }
fn cast_float_to_int(
&mut self,
signed: bool,
x: Self::Value,
dest_ty: Self::Type,
) -> Self::Value { ... }
fn typed_place_copy(
&mut self,
dst: PlaceValue<Self::Value>,
src: PlaceValue<Self::Value>,
layout: TyAndLayout<'tcx>,
) { ... }
fn typed_place_copy_with_flags(
&mut self,
dst: PlaceValue<Self::Value>,
src: PlaceValue<Self::Value>,
layout: TyAndLayout<'tcx>,
flags: MemFlags,
) { ... }
fn typed_place_swap(
&mut self,
left: PlaceValue<Self::Value>,
right: PlaceValue<Self::Value>,
layout: TyAndLayout<'tcx>,
) { ... }
}
Required Methods
fn build(cx: &'a Self::CodegenCx, llbb: Self::BasicBlock) -> Self
fn cx(&self) -> &Self::CodegenCx
fn llbb(&self) -> Self::BasicBlock
fn set_span(&mut self, span: Span)
fn append_block( cx: &'a Self::CodegenCx, llfn: Self::Function, name: &str, ) -> Self::BasicBlock
fn append_sibling_block(&mut self, name: &str) -> Self::BasicBlock
fn switch_to_block(&mut self, llbb: Self::BasicBlock)
fn ret_void(&mut self)
fn ret(&mut self, v: Self::Value)
fn br(&mut self, dest: Self::BasicBlock)
fn cond_br( &mut self, cond: Self::Value, then_llbb: Self::BasicBlock, else_llbb: Self::BasicBlock, )
fn switch( &mut self, v: Self::Value, else_llbb: Self::BasicBlock, cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock)>, )
fn invoke( &mut self, llty: Self::Type, fn_attrs: Option<&CodegenFnAttrs>, fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>, llfn: Self::Value, args: &[Self::Value], then: Self::BasicBlock, catch: Self::BasicBlock, funclet: Option<&Self::Funclet>, instance: Option<Instance<'tcx>>, ) -> Self::Value
fn unreachable(&mut self)
fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fadd_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn sub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fsub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fsub_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fsub_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn mul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fmul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fmul_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fmul_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn exactudiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn exactsdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fdiv_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn fdiv_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn frem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn frem_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn frem_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn shl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
Generate a left-shift. Both operands must have the same size. The right operand must be interpreted as unsigned and can be assumed to be less than the size of the left operand.
fn lshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
Generate a logical right-shift. Both operands must have the same size. The right operand must be interpreted as unsigned and can be assumed to be less than the size of the left operand.
fn ashr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
Generate an arithmetic right-shift. Both operands must have the same size. The right operand must be interpreted as unsigned and can be assumed to be less than the size of the left operand.
fn unchecked_sadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn unchecked_uadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn unchecked_ssub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn unchecked_usub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn unchecked_smul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn unchecked_umul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn and(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn or(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn xor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value
fn neg(&mut self, v: Self::Value) -> Self::Value
fn fneg(&mut self, v: Self::Value) -> Self::Value
fn not(&mut self, v: Self::Value) -> Self::Value
fn checked_binop( &mut self, oop: OverflowOp, ty: Ty<'_>, lhs: Self::Value, rhs: Self::Value, ) -> (Self::Value, Self::Value)
fn from_immediate(&mut self, val: Self::Value) -> Self::Value
fn to_immediate_scalar( &mut self, val: Self::Value, scalar: Scalar, ) -> Self::Value
fn alloca(&mut self, size: Size, align: Align) -> Self::Value
fn dynamic_alloca(&mut self, size: Self::Value, align: Align) -> Self::Value
fn load( &mut self, ty: Self::Type, ptr: Self::Value, align: Align, ) -> Self::Value
fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value
fn atomic_load( &mut self, ty: Self::Type, ptr: Self::Value, order: AtomicOrdering, size: Size, ) -> Self::Value
fn load_operand( &mut self, place: PlaceRef<'tcx, Self::Value>, ) -> OperandRef<'tcx, Self::Value>
fn write_operand_repeatedly( &mut self, elem: OperandRef<'tcx, Self::Value>, count: u64, dest: PlaceRef<'tcx, Self::Value>, )
Called for Rvalue::Repeat when the elem is neither a ZST nor optimizable using memset.
fn range_metadata(&mut self, load: Self::Value, range: WrappingRange)
fn nonnull_metadata(&mut self, load: Self::Value)
fn store( &mut self, val: Self::Value, ptr: Self::Value, align: Align, ) -> Self::Value
fn store_with_flags( &mut self, val: Self::Value, ptr: Self::Value, align: Align, flags: MemFlags, ) -> Self::Value
fn atomic_store( &mut self, val: Self::Value, ptr: Self::Value, order: AtomicOrdering, size: Size, )
fn gep( &mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value], ) -> Self::Value
fn inbounds_gep( &mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value], ) -> Self::Value
fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn fptoui_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn fptosi_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn fptoui(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn fptosi(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn uitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn sitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn fptrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn fpext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn ptrtoint(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn inttoptr(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn bitcast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn intcast( &mut self, val: Self::Value, dest_ty: Self::Type, is_signed: bool, ) -> Self::Value
fn pointercast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn icmp( &mut self, op: IntPredicate, lhs: Self::Value, rhs: Self::Value, ) -> Self::Value
fn fcmp( &mut self, op: RealPredicate, lhs: Self::Value, rhs: Self::Value, ) -> Self::Value
fn memcpy( &mut self, dst: Self::Value, dst_align: Align, src: Self::Value, src_align: Align, size: Self::Value, flags: MemFlags, )
fn memmove( &mut self, dst: Self::Value, dst_align: Align, src: Self::Value, src_align: Align, size: Self::Value, flags: MemFlags, )
fn memset( &mut self, ptr: Self::Value, fill_byte: Self::Value, size: Self::Value, align: Align, flags: MemFlags, )
fn select( &mut self, cond: Self::Value, then_val: Self::Value, else_val: Self::Value, ) -> Self::Value
fn va_arg(&mut self, list: Self::Value, ty: Self::Type) -> Self::Value
fn extract_element(&mut self, vec: Self::Value, idx: Self::Value) -> Self::Value
fn vector_splat(&mut self, num_elts: usize, elt: Self::Value) -> Self::Value
fn extract_value(&mut self, agg_val: Self::Value, idx: u64) -> Self::Value
fn insert_value( &mut self, agg_val: Self::Value, elt: Self::Value, idx: u64, ) -> Self::Value
fn set_personality_fn(&mut self, personality: Self::Value)
fn cleanup_landing_pad( &mut self, pers_fn: Self::Value, ) -> (Self::Value, Self::Value)
fn filter_landing_pad( &mut self, pers_fn: Self::Value, ) -> (Self::Value, Self::Value)
fn resume(&mut self, exn0: Self::Value, exn1: Self::Value)
fn cleanup_pad( &mut self, parent: Option<Self::Value>, args: &[Self::Value], ) -> Self::Funclet
fn cleanup_ret( &mut self, funclet: &Self::Funclet, unwind: Option<Self::BasicBlock>, )
fn catch_pad( &mut self, parent: Self::Value, args: &[Self::Value], ) -> Self::Funclet
fn catch_switch( &mut self, parent: Option<Self::Value>, unwind: Option<Self::BasicBlock>, handlers: &[Self::BasicBlock], ) -> Self::Value
fn atomic_cmpxchg( &mut self, dst: Self::Value, cmp: Self::Value, src: Self::Value, order: AtomicOrdering, failure_order: AtomicOrdering, weak: bool, ) -> (Self::Value, Self::Value)
fn atomic_rmw( &mut self, op: AtomicRmwBinOp, dst: Self::Value, src: Self::Value, order: AtomicOrdering, ) -> Self::Value
fn atomic_fence(&mut self, order: AtomicOrdering, scope: SynchronizationScope)
fn set_invariant_load(&mut self, load: Self::Value)
fn lifetime_start(&mut self, ptr: Self::Value, size: Size)
Called for StorageLive
fn lifetime_end(&mut self, ptr: Self::Value, size: Size)
Called for StorageDead
fn instrprof_increment( &mut self, fn_name: Self::Value, hash: Self::Value, num_counters: Self::Value, index: Self::Value, )
fn call( &mut self, llty: Self::Type, fn_attrs: Option<&CodegenFnAttrs>, fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>, llfn: Self::Value, args: &[Self::Value], funclet: Option<&Self::Funclet>, instance: Option<Instance<'tcx>>, ) -> Self::Value
fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value
fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value)
Provided Methods
fn to_immediate( &mut self, val: Self::Value, layout: TyAndLayout<'_>, ) -> Self::Value
fn load_from_place( &mut self, ty: Self::Type, place: PlaceValue<Self::Value>, ) -> Self::Value
fn store_to_place( &mut self, val: Self::Value, place: PlaceValue<Self::Value>, ) -> Self::Value
fn store_to_place_with_flags( &mut self, val: Self::Value, place: PlaceValue<Self::Value>, flags: MemFlags, ) -> Self::Value
fn ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value
fn inbounds_ptradd( &mut self, ptr: Self::Value, offset: Self::Value, ) -> Self::Value
fn cast_float_to_int( &mut self, signed: bool, x: Self::Value, dest_ty: Self::Type, ) -> Self::Value
fn typed_place_copy( &mut self, dst: PlaceValue<Self::Value>, src: PlaceValue<Self::Value>, layout: TyAndLayout<'tcx>, )
Typed copy for non-overlapping places.
Has a default implementation in terms of memcpy, but specific backends
can override to do something smarter if possible.
(For example, typed load-stores with alias metadata.)
fn typed_place_copy_with_flags( &mut self, dst: PlaceValue<Self::Value>, src: PlaceValue<Self::Value>, layout: TyAndLayout<'tcx>, flags: MemFlags, )
fn typed_place_swap( &mut self, left: PlaceValue<Self::Value>, right: PlaceValue<Self::Value>, layout: TyAndLayout<'tcx>, )
Typed swap for non-overlapping places.
Avoids allocas for Immediates and ScalarPairs.
FIXME: Maybe do something smarter for Ref types too?
For now, the typed_swap intrinsic just doesn’t call this for those
cases (in non-debug), preferring the fallback body instead.