```rust
pub trait BuilderMethods<'a, 'tcx>:
    HasCodegen<'tcx>
    + CoverageInfoBuilderMethods<'tcx>
    + DebugInfoBuilderMethods
    + ArgAbiMethods<'tcx>
    + AbiBuilderMethods<'tcx>
    + IntrinsicCallMethods<'tcx>
    + AsmBuilderMethods<'tcx>
    + StaticBuilderMethods
    + HasParamEnv<'tcx>
    + HasTargetSpec
{
    // 120 methods in total; the required and provided methods are listed individually below.
}
```
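
Backend-agnostic codegen is written generically against this trait, so the same MIR lowering can drive any backend that implements it (the in-tree LLVM backend, for example). A minimal, hypothetical sketch of the usual pattern, assuming the trait and its associated types are in scope and that handle types such as Value and BasicBlock are Copy, as they are for the LLVM backend:

```rust
// Hypothetical helper (name ours): create a block in `llfn`, position a
// builder there, and emit `return a + b`.
fn emit_add_and_ret<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    llfn: Bx::Function,
    a: Bx::Value,
    b: Bx::Value,
) {
    // `append_block` adds a fresh basic block to `llfn`; `build` returns a
    // builder positioned at the end of that (still empty) block.
    let llbb = Bx::append_block(cx, llfn, "start");
    let mut bx = Bx::build(cx, llbb);
    // Integer addition, then a `ret` terminator closing the block.
    let sum = bx.add(a, b);
    bx.ret(sum);
}
```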

Required Methods§

fn build(cx: &'a Self::CodegenCx, llbb: Self::BasicBlock) -> Self

fn cx(&self) -> &Self::CodegenCx

fn llbb(&self) -> Self::BasicBlock

fn set_span(&mut self, span: Span)

fn append_block( cx: &'a Self::CodegenCx, llfn: Self::Function, name: &str ) -> Self::BasicBlock

fn append_sibling_block(&mut self, name: &str) -> Self::BasicBlock

fn switch_to_block(&mut self, llbb: Self::BasicBlock)

fn ret_void(&mut self)

fn ret(&mut self, v: Self::Value)

fn br(&mut self, dest: Self::BasicBlock)

fn cond_br( &mut self, cond: Self::Value, then_llbb: Self::BasicBlock, else_llbb: Self::BasicBlock )

fn switch( &mut self, v: Self::Value, else_llbb: Self::BasicBlock, cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock)> )
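
A switch emits a multi-way branch: each case pairs a u128 discriminant value with a target block, and else_llbb is taken when no case matches. A hypothetical sketch (helper name ours; block handles are assumed to be Copy, as they are for the in-tree LLVM backend):

```rust
// Hypothetical sketch: emit a three-way branch on an integer discriminant.
fn emit_three_way<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(bx: &mut Bx, discr: Bx::Value) {
    let bb_zero = bx.append_sibling_block("case.0");
    let bb_one = bx.append_sibling_block("case.1");
    let bb_other = bx.append_sibling_block("otherwise");
    // `cases` is any ExactSizeIterator of (discriminant, target) pairs;
    // `bb_other` is the fallthrough target.
    bx.switch(discr, bb_other, [(0u128, bb_zero), (1u128, bb_one)].into_iter());
    // Each arm is then filled in by repositioning the same builder.
    bx.switch_to_block(bb_zero);
    bx.unreachable(); // arm bodies elided
    bx.switch_to_block(bb_one);
    bx.unreachable();
    bx.switch_to_block(bb_other);
    bx.unreachable();
}
```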

fn invoke( &mut self, llty: Self::Type, fn_attrs: Option<&CodegenFnAttrs>, fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>, llfn: Self::Value, args: &[Self::Value], then: Self::BasicBlock, catch: Self::BasicBlock, funclet: Option<&Self::Funclet>, instance: Option<Instance<'tcx>> ) -> Self::Value

fn unreachable(&mut self)

fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fadd_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn sub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fsub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fsub_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fsub_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn mul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fmul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fmul_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fmul_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn exactudiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn exactsdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fdiv_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn fdiv_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn frem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn frem_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn frem_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn shl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn lshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn ashr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn unchecked_sadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn unchecked_uadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn unchecked_ssub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn unchecked_usub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn unchecked_smul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn unchecked_umul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn and(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn or(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn xor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value

fn neg(&mut self, v: Self::Value) -> Self::Value

fn fneg(&mut self, v: Self::Value) -> Self::Value

fn not(&mut self, v: Self::Value) -> Self::Value

fn checked_binop( &mut self, oop: OverflowOp, ty: Ty<'_>, lhs: Self::Value, rhs: Self::Value ) -> (Self::Value, Self::Value)
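
checked_binop returns the operation's result together with a boolean overflow flag, which the caller typically branches on. A hypothetical sketch (helper name and panic-block plumbing ours), assuming OverflowOp and Ty are in scope:

```rust
// Hypothetical sketch: overflow-checked addition that diverts to a panic block.
fn emit_checked_add<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    ty: Ty<'tcx>,
    lhs: Bx::Value,
    rhs: Bx::Value,
    panic_bb: Bx::BasicBlock,
) -> Bx::Value {
    // `overflowed` is an i1-style boolean value.
    let (val, overflowed) = bx.checked_binop(OverflowOp::Add, ty, lhs, rhs);
    let ok_bb = bx.append_sibling_block("add.ok");
    bx.cond_br(overflowed, panic_bb, ok_bb);
    // Continue emitting the happy path in the "ok" block.
    bx.switch_to_block(ok_bb);
    val
}
```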

fn from_immediate(&mut self, val: Self::Value) -> Self::Value

fn to_immediate_scalar( &mut self, val: Self::Value, scalar: Scalar ) -> Self::Value

fn alloca(&mut self, ty: Self::Type, align: Align) -> Self::Value

fn byte_array_alloca(&mut self, len: Self::Value, align: Align) -> Self::Value

fn load( &mut self, ty: Self::Type, ptr: Self::Value, align: Align ) -> Self::Value

fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value

fn atomic_load( &mut self, ty: Self::Type, ptr: Self::Value, order: AtomicOrdering, size: Size ) -> Self::Value

fn load_operand( &mut self, place: PlaceRef<'tcx, Self::Value> ) -> OperandRef<'tcx, Self::Value>

fn write_operand_repeatedly( &mut self, elem: OperandRef<'tcx, Self::Value>, count: u64, dest: PlaceRef<'tcx, Self::Value> )

Called for Rvalue::Repeat when the elem is neither a ZST nor optimizable using memset.

fn range_metadata(&mut self, load: Self::Value, range: WrappingRange)

fn nonnull_metadata(&mut self, load: Self::Value)

fn store( &mut self, val: Self::Value, ptr: Self::Value, align: Align ) -> Self::Value

fn store_with_flags( &mut self, val: Self::Value, ptr: Self::Value, align: Align, flags: MemFlags ) -> Self::Value

fn atomic_store( &mut self, val: Self::Value, ptr: Self::Value, order: AtomicOrdering, size: Size )

fn gep( &mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value] ) -> Self::Value

fn inbounds_gep( &mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value] ) -> Self::Value
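
gep and inbounds_gep compute element addresses without touching memory; pairing inbounds_gep with load is the usual way to read an indexed element. A hypothetical sketch (helper name ours; the element type and alignment are assumed to come from the Rust type's layout, and handle types are assumed to be Copy as in the in-tree backends):

```rust
// Hypothetical sketch: load `ptr[idx]` given the element type and alignment.
fn emit_index_load<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    elem_ty: Bx::Type,
    elem_align: Align,
    ptr: Bx::Value,
    idx: Bx::Value,
) -> Bx::Value {
    // "inbounds" promises the computed address stays within the allocation.
    let elem_ptr = bx.inbounds_gep(elem_ty, ptr, &[idx]);
    bx.load(elem_ty, elem_ptr, elem_align)
}
```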

fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn fptoui_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn fptosi_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn fptoui(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn fptosi(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn uitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn sitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn fptrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn fpext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn ptrtoint(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn inttoptr(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn bitcast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn intcast( &mut self, val: Self::Value, dest_ty: Self::Type, is_signed: bool ) -> Self::Value

fn pointercast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn icmp( &mut self, op: IntPredicate, lhs: Self::Value, rhs: Self::Value ) -> Self::Value

fn fcmp( &mut self, op: RealPredicate, lhs: Self::Value, rhs: Self::Value ) -> Self::Value

fn memcpy( &mut self, dst: Self::Value, dst_align: Align, src: Self::Value, src_align: Align, size: Self::Value, flags: MemFlags )

fn memmove( &mut self, dst: Self::Value, dst_align: Align, src: Self::Value, src_align: Align, size: Self::Value, flags: MemFlags )

fn memset( &mut self, ptr: Self::Value, fill_byte: Self::Value, size: Self::Value, align: Align, flags: MemFlags )
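
The three bulk memory operations above (memcpy, memmove, memset) take explicit source and destination alignments plus a MemFlags bit set rather than relying on type information. A hypothetical sketch of a plain, non-volatile copy:

```rust
// Hypothetical sketch: copy `size` bytes between two non-overlapping buffers.
fn emit_copy_bytes<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    dst: Bx::Value,
    src: Bx::Value,
    size: Bx::Value,
    align: Align,
) {
    // MemFlags::empty() requests a plain (non-volatile, aligned) copy.
    bx.memcpy(dst, align, src, align, size, MemFlags::empty());
}
```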

fn select( &mut self, cond: Self::Value, then_val: Self::Value, else_val: Self::Value ) -> Self::Value

fn va_arg(&mut self, list: Self::Value, ty: Self::Type) -> Self::Value

fn extract_element(&mut self, vec: Self::Value, idx: Self::Value) -> Self::Value

fn vector_splat(&mut self, num_elts: usize, elt: Self::Value) -> Self::Value

fn extract_value(&mut self, agg_val: Self::Value, idx: u64) -> Self::Value

fn insert_value( &mut self, agg_val: Self::Value, elt: Self::Value, idx: u64 ) -> Self::Value

fn set_personality_fn(&mut self, personality: Self::Value)

fn cleanup_landing_pad( &mut self, pers_fn: Self::Value ) -> (Self::Value, Self::Value)

fn filter_landing_pad( &mut self, pers_fn: Self::Value ) -> (Self::Value, Self::Value)

fn resume(&mut self, exn0: Self::Value, exn1: Self::Value)

fn cleanup_pad( &mut self, parent: Option<Self::Value>, args: &[Self::Value] ) -> Self::Funclet

fn cleanup_ret( &mut self, funclet: &Self::Funclet, unwind: Option<Self::BasicBlock> )

fn catch_pad( &mut self, parent: Self::Value, args: &[Self::Value] ) -> Self::Funclet

fn catch_switch( &mut self, parent: Option<Self::Value>, unwind: Option<Self::BasicBlock>, handlers: &[Self::BasicBlock] ) -> Self::Value

fn atomic_cmpxchg( &mut self, dst: Self::Value, cmp: Self::Value, src: Self::Value, order: AtomicOrdering, failure_order: AtomicOrdering, weak: bool ) -> (Self::Value, Self::Value)

fn atomic_rmw( &mut self, op: AtomicRmwBinOp, dst: Self::Value, src: Self::Value, order: AtomicOrdering ) -> Self::Value
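
atomic_rmw performs a read-modify-write and returns the previously stored value, while atomic_cmpxchg above additionally reports success through its second return value. A hypothetical sketch of a sequentially consistent fetch-and-add (helper name ours; AtomicRmwBinOp and AtomicOrdering are assumed to be the enums used by the other signatures on this page):

```rust
// Hypothetical sketch: a sequentially consistent fetch-and-add on `dst`.
fn emit_fetch_add<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    dst: Bx::Value,
    increment: Bx::Value,
) -> Bx::Value {
    // Returns the value that was stored at `dst` before the addition.
    bx.atomic_rmw(
        AtomicRmwBinOp::AtomicAdd,
        dst,
        increment,
        AtomicOrdering::SequentiallyConsistent,
    )
}
```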

fn atomic_fence(&mut self, order: AtomicOrdering, scope: SynchronizationScope)

fn set_invariant_load(&mut self, load: Self::Value)

fn lifetime_start(&mut self, ptr: Self::Value, size: Size)

Called for StorageLive

fn lifetime_end(&mut self, ptr: Self::Value, size: Size)

Called for StorageDead
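
Together with alloca, these lifetime markers let a backend shrink a stack slot's live range to the span between a local's StorageLive and StorageDead. A hypothetical sketch (helper name ours; the slot's size and alignment are assumed to come from the local's layout):

```rust
// Hypothetical sketch: a stack slot whose live range mirrors StorageLive/StorageDead.
fn emit_scoped_slot<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    ty: Bx::Type,
    size: Size,
    align: Align,
    init: Bx::Value,
) {
    let slot = bx.alloca(ty, align);
    bx.lifetime_start(slot, size); // StorageLive
    bx.store(init, slot, align);
    // ... uses of `slot` go here ...
    bx.lifetime_end(slot, size); // StorageDead
}
```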

fn instrprof_increment( &mut self, fn_name: Self::Value, hash: Self::Value, num_counters: Self::Value, index: Self::Value )

fn call( &mut self, llty: Self::Type, fn_attrs: Option<&CodegenFnAttrs>, fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>, llfn: Self::Value, args: &[Self::Value], funclet: Option<&Self::Funclet>, instance: Option<Instance<'tcx>> ) -> Self::Value

fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value

fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value)

Provided Methods§

fn to_immediate( &mut self, val: Self::Value, layout: TyAndLayout<'_> ) -> Self::Value

fn ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value

fn inbounds_ptradd( &mut self, ptr: Self::Value, offset: Self::Value ) -> Self::Value

fn cast_float_to_int( &mut self, signed: bool, x: Self::Value, dest_ty: Self::Type ) -> Self::Value

fn typed_place_copy( &mut self, dst: PlaceRef<'tcx, Self::Value>, src: PlaceRef<'tcx, Self::Value> )

Typed copy for non-overlapping places.

Has a default implementation in terms of memcpy, but specific backends can override to do something smarter if possible.

(For example, typed load-stores with alias metadata.)

fn typed_place_swap( &mut self, left: PlaceRef<'tcx, Self::Value>, right: PlaceRef<'tcx, Self::Value> )

Typed swap for non-overlapping places.

Avoids allocas for Immediates and ScalarPairs.

FIXME: Maybe do something smarter for Ref types too? For now, the typed_swap intrinsic just doesn’t call this for those cases (in non-debug), preferring the fallback body instead.

Object Safety§

This trait is not object safe.

Implementors§