diff --git a/Cargo.lock b/Cargo.lock index 2ee2c52b32ade..5146ed6c3bf6b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4216,6 +4216,7 @@ dependencies = [ name = "rustc_mir_dataflow" version = "0.0.0" dependencies = [ + "itertools", "polonius-engine", "regex", "rustc_abi", diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 53cf4f34ae794..d7a5da3661097 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -685,7 +685,14 @@ impl<'a, 'tcx> ResultsVisitor<'a, 'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt< TerminatorKind::SwitchInt { discr, targets: _ } => { self.consume_operand(loc, (discr, span), state); } - TerminatorKind::Drop { place, target: _, unwind: _, replace } => { + TerminatorKind::Drop { + place, + target: _, + unwind: _, + replace, + drop: _, + async_fut: _, + } => { debug!( "visit_terminator_drop \ loc: {:?} term: {:?} place: {:?} span: {:?}", diff --git a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs index 0d1d8642bcacc..99dd0b2dd4664 100644 --- a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs @@ -101,7 +101,14 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'a, 'tcx> { TerminatorKind::SwitchInt { discr, targets: _ } => { self.consume_operand(location, discr); } - TerminatorKind::Drop { place: drop_place, target: _, unwind: _, replace } => { + TerminatorKind::Drop { + place: drop_place, + target: _, + unwind: _, + replace, + drop: _, + async_fut: _, + } => { let write_kind = if *replace { WriteKind::Replace } else { WriteKind::StorageDeadOrDrop }; self.access_place( diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index 26a5d438edb98..2879192593462 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -1396,8 +1396,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } } TerminatorKind::Unreachable => {} - TerminatorKind::Drop { target, unwind, .. } - | TerminatorKind::Assert { target, unwind, .. } => { + TerminatorKind::Drop { target, unwind, drop, .. } => { + self.assert_iscleanup(body, block_data, target, is_cleanup); + self.assert_iscleanup_unwind(body, block_data, unwind, is_cleanup); + if let Some(drop) = drop { + self.assert_iscleanup(body, block_data, drop, is_cleanup); + } + } + TerminatorKind::Assert { target, unwind, .. } => { self.assert_iscleanup(body, block_data, target, is_cleanup); self.assert_iscleanup_unwind(body, block_data, unwind, is_cleanup); } diff --git a/compiler/rustc_codegen_cranelift/src/abi/mod.rs b/compiler/rustc_codegen_cranelift/src/abi/mod.rs index fdcd9caf4ac87..d5594be2495d9 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/mod.rs @@ -441,7 +441,9 @@ pub(crate) fn codegen_terminator_call<'tcx>( Err(instance) => Some(instance), } } - InstanceKind::DropGlue(_, None) | ty::InstanceKind::AsyncDropGlueCtorShim(_, None) => { + // We don't need AsyncDropGlueCtorShim here because it is not `noop func`, + // it is `func returning noop future` + InstanceKind::DropGlue(_, None) => { // empty drop glue - a nop. 
let dest = target.expect("Non terminating drop_in_place_real???"); let ret_block = fx.get_block(dest); @@ -707,9 +709,8 @@ pub(crate) fn codegen_drop<'tcx>( let ty = drop_place.layout().ty; let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty); - if let ty::InstanceKind::DropGlue(_, None) | ty::InstanceKind::AsyncDropGlueCtorShim(_, None) = - drop_instance.def - { + // AsyncDropGlueCtorShim can't be here + if let ty::InstanceKind::DropGlue(_, None) = drop_instance.def { // we don't actually need to drop anything } else { match ty.kind() { diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index a2b9e5712e50b..ddecd2ff91d71 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -565,7 +565,11 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { | TerminatorKind::CoroutineDrop => { bug!("shouldn't exist at codegen {:?}", bb_data.terminator()); } - TerminatorKind::Drop { place, target, unwind: _, replace: _ } => { + TerminatorKind::Drop { place, target, unwind: _, replace: _, drop, async_fut } => { + assert!( + async_fut.is_none() && drop.is_none(), + "Async Drop must be expanded or reset to sync before codegen" + ); let drop_place = codegen_place(fx, *place); crate::abi::codegen_drop(fx, source_info, drop_place, *target); } diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs index a72e205c9b249..b7c1197e0fe66 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs @@ -721,8 +721,7 @@ fn build_union_fields_for_direct_tag_coroutine<'ll, 'tcx>( _ => unreachable!(), }; - let coroutine_layout = - cx.tcx.coroutine_layout(coroutine_def_id, coroutine_args.kind_ty()).unwrap(); + let coroutine_layout = cx.tcx.coroutine_layout(coroutine_def_id, coroutine_args.args).unwrap(); let common_upvar_names = cx.tcx.closure_saved_names_of_captured_variables(coroutine_def_id); let variant_range = coroutine_args.variant_range(coroutine_def_id, cx.tcx); diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs index 11824398f243e..8ac5c19983607 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs @@ -174,10 +174,8 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>( DIFlags::FlagZero, ), |cx, coroutine_type_di_node| { - let coroutine_layout = cx - .tcx - .coroutine_layout(coroutine_def_id, coroutine_args.as_coroutine().kind_ty()) - .unwrap(); + let coroutine_layout = + cx.tcx.coroutine_layout(coroutine_def_id, coroutine_args).unwrap(); let Variants::Multiple { tag_encoding: TagEncoding::Direct, ref variants, .. 
} = coroutine_type_and_layout.variants diff --git a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs index 1dbaffaa57745..99399c0491ae1 100644 --- a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs +++ b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs @@ -372,7 +372,7 @@ fn exported_symbols_provider_local( )); } MonoItem::Fn(Instance { - def: InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)), + def: InstanceKind::AsyncDropGlueCtorShim(_, ty), args, }) => { // A little sanity-check @@ -386,6 +386,16 @@ fn exported_symbols_provider_local( }, )); } + MonoItem::Fn(Instance { def: InstanceKind::AsyncDropGlue(_, ty), args: _ }) => { + symbols.push(( + ExportedSymbol::AsyncDropGlue(ty), + SymbolExportInfo { + level: SymbolExportLevel::Rust, + kind: SymbolExportKind::Text, + used: false, + }, + )); + } _ => { // Any other symbols don't qualify for sharing } @@ -409,6 +419,7 @@ fn upstream_monomorphizations_provider( let drop_in_place_fn_def_id = tcx.lang_items().drop_in_place_fn(); let async_drop_in_place_fn_def_id = tcx.lang_items().async_drop_in_place_fn(); + let async_drop_in_place_poll_fn_def_id = tcx.lang_items().async_drop_in_place_poll_fn(); for &cnum in cnums.iter() { for (exported_symbol, _) in tcx.exported_symbols(cnum).iter() { @@ -427,8 +438,13 @@ fn upstream_monomorphizations_provider( if let Some(async_drop_in_place_fn_def_id) = async_drop_in_place_fn_def_id { (async_drop_in_place_fn_def_id, tcx.mk_args(&[ty.into()])) } else { - // `drop_in_place` in place does not exist, don't try - // to use it. + continue; + } + } + ExportedSymbol::AsyncDropGlue(ty) => { + if let Some(poll_fn_def_id) = async_drop_in_place_poll_fn_def_id { + (poll_fn_def_id, tcx.mk_args(&[ty.into()])) + } else { continue; } } @@ -580,6 +596,13 @@ pub(crate) fn symbol_name_for_instance_in_crate<'tcx>( instantiating_crate, ) } + ExportedSymbol::AsyncDropGlue(ty) => { + rustc_symbol_mangling::symbol_name_for_instance_in_crate( + tcx, + Instance::resolve_async_drop_in_place_poll(tcx, ty), + instantiating_crate, + ) + } ExportedSymbol::NoDefId(symbol_name) => symbol_name.to_string(), } } @@ -631,6 +654,7 @@ pub(crate) fn linking_symbol_name_for_instance_in_crate<'tcx>( // AsyncDropGlueCtorShim always use the Rust calling convention and thus follow the // target's default symbol decoration scheme. ExportedSymbol::AsyncDropGlueCtorShim(..) => None, + ExportedSymbol::AsyncDropGlue(..) => None, // NoDefId always follow the target's default symbol decoration scheme. ExportedSymbol::NoDefId(..) => None, // ThreadLocalShim always follow the target's default symbol decoration scheme. diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 8c571558717ea..0525ec396ff96 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -867,10 +867,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let def = instance.map(|i| i.def); - if let Some( - ty::InstanceKind::DropGlue(_, None) | ty::InstanceKind::AsyncDropGlueCtorShim(_, None), - ) = def - { + // We don't need AsyncDropGlueCtorShim here because it is not `noop func`, + // it is `func returning noop future` + if let Some(ty::InstanceKind::DropGlue(_, None)) = def { // Empty drop glue; a no-op. 
let target = target.unwrap(); return helper.funclet_br(self, bx, target, mergeable_succ); @@ -1334,8 +1333,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { MergingSucc::False } - mir::TerminatorKind::Drop { place, target, unwind, replace: _ } => self - .codegen_drop_terminator( + mir::TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut } => { + assert!( + async_fut.is_none() && drop.is_none(), + "Async Drop must be expanded or reset to sync before codegen" + ); + self.codegen_drop_terminator( helper, bx, &terminator.source_info, @@ -1343,7 +1346,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { target, unwind, mergeable_succ(), - ), + ) + } mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, unwind } => self .codegen_assert_terminator( diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs index 82e0a6e6666f3..528e4b5a68719 100644 --- a/compiler/rustc_const_eval/src/const_eval/machine.rs +++ b/compiler/rustc_const_eval/src/const_eval/machine.rs @@ -502,6 +502,7 @@ impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> { RemainderByZero(op) => RemainderByZero(eval_to_int(op)?), ResumedAfterReturn(coroutine_kind) => ResumedAfterReturn(*coroutine_kind), ResumedAfterPanic(coroutine_kind) => ResumedAfterPanic(*coroutine_kind), + ResumedAfterDrop(coroutine_kind) => ResumedAfterDrop(*coroutine_kind), MisalignedPointerDereference { ref required, ref found } => { MisalignedPointerDereference { required: eval_to_int(required)?, diff --git a/compiler/rustc_const_eval/src/interpret/call.rs b/compiler/rustc_const_eval/src/interpret/call.rs index cdf706f3752a4..8b4b730289722 100644 --- a/compiler/rustc_const_eval/src/interpret/call.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -569,6 +569,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { | ty::InstanceKind::FnPtrAddrShim(..) | ty::InstanceKind::ThreadLocalShim(..) | ty::InstanceKind::AsyncDropGlueCtorShim(..) + | ty::InstanceKind::AsyncDropGlue(..) + | ty::InstanceKind::FutureDropPollShim(..) | ty::InstanceKind::Item(_) => { // We need MIR for this fn. // Note that this can be an intrinsic, if we are executing its fallback body. diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index abe73c43d8a94..3f52e2384bff9 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -539,7 +539,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } } - Drop { place, target, unwind, replace: _ } => { + Drop { place, target, unwind, replace: _, drop, async_fut } => { + assert!( + async_fut.is_none() && drop.is_none(), + "Async Drop must be expanded or reset to sync in runtime MIR" + ); let place = self.eval_place(place)?; let instance = Instance::resolve_drop_in_place(*self.tcx, place.layout.ty); if let ty::InstanceKind::DropGlue(_, None) = instance.def { diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 3a2e810dc6af7..306ae24264757 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -391,6 +391,8 @@ declare_features! ( (unstable, associated_const_equality, "1.58.0", Some(92827)), /// Allows associated type defaults. (unstable, associated_type_defaults, "1.2.0", Some(29661)), + /// Allows implementing `AsyncDrop`. 
+ (incomplete, async_drop, "CURRENT_RUSTC_VERSION", Some(126482)), /// Allows async functions to be called from `dyn Trait`. (incomplete, async_fn_in_dyn_trait, "1.85.0", Some(133119)), /// Allows `#[track_caller]` on async functions. diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index b6689c95c4bb2..18f9cb0335fbd 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -187,19 +187,10 @@ language_item_table! { Drop, sym::drop, drop_trait, Target::Trait, GenericRequirement::None; Destruct, sym::destruct, destruct_trait, Target::Trait, GenericRequirement::None; - - AsyncDrop, sym::async_drop, async_drop_trait, Target::Trait, GenericRequirement::Exact(0); - AsyncDestruct, sym::async_destruct, async_destruct_trait, Target::Trait, GenericRequirement::Exact(0); + AsyncDrop, sym::async_drop, async_drop_trait, Target::Trait, GenericRequirement::None; AsyncDropInPlace, sym::async_drop_in_place, async_drop_in_place_fn, Target::Fn, GenericRequirement::Exact(1); - SurfaceAsyncDropInPlace, sym::surface_async_drop_in_place, surface_async_drop_in_place_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropSurfaceDropInPlace, sym::async_drop_surface_drop_in_place, async_drop_surface_drop_in_place_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropSlice, sym::async_drop_slice, async_drop_slice_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropChain, sym::async_drop_chain, async_drop_chain_fn, Target::Fn, GenericRequirement::Exact(2); - AsyncDropNoop, sym::async_drop_noop, async_drop_noop_fn, Target::Fn, GenericRequirement::Exact(0); - AsyncDropDeferredDropInPlace, sym::async_drop_deferred_drop_in_place, async_drop_deferred_drop_in_place_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropFuse, sym::async_drop_fuse, async_drop_fuse_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropDefer, sym::async_drop_defer, async_drop_defer_fn, Target::Fn, GenericRequirement::Exact(1); - AsyncDropEither, sym::async_drop_either, async_drop_either_fn, Target::Fn, GenericRequirement::Exact(3); + AsyncDropInPlacePoll, sym::async_drop_in_place_poll, async_drop_in_place_poll_fn, Target::Closure, GenericRequirement::Exact(1); + FutureDropPoll, sym::future_drop_poll, future_drop_poll_fn, Target::Fn, GenericRequirement::Exact(1); CoerceUnsized, sym::coerce_unsized, coerce_unsized_trait, Target::Trait, GenericRequirement::Minimum(1); DispatchFromDyn, sym::dispatch_from_dyn, dispatch_from_dyn_trait, Target::Trait, GenericRequirement::Minimum(1); @@ -317,6 +308,10 @@ language_item_table! 
{ PanicAsyncGenFnResumedPanic, sym::panic_const_async_gen_fn_resumed_panic, panic_const_async_gen_fn_resumed_panic, Target::Fn, GenericRequirement::None; PanicGenFnNonePanic, sym::panic_const_gen_fn_none_panic, panic_const_gen_fn_none_panic, Target::Fn, GenericRequirement::None; PanicNullPointerDereference, sym::panic_null_pointer_dereference, panic_null_pointer_dereference, Target::Fn, GenericRequirement::None; + PanicCoroutineResumedDrop, sym::panic_const_coroutine_resumed_drop, panic_const_coroutine_resumed_drop, Target::Fn, GenericRequirement::None; + PanicAsyncFnResumedDrop, sym::panic_const_async_fn_resumed_drop, panic_const_async_fn_resumed_drop, Target::Fn, GenericRequirement::None; + PanicAsyncGenFnResumedDrop, sym::panic_const_async_gen_fn_resumed_drop, panic_const_async_gen_fn_resumed_drop, Target::Fn, GenericRequirement::None; + PanicGenFnNoneDrop, sym::panic_const_gen_fn_none_drop, panic_const_gen_fn_none_drop, Target::Fn, GenericRequirement::None; /// libstd panic entry point. Necessary for const eval to be able to catch it BeginPanic, sym::begin_panic, begin_panic_fn, Target::Fn, GenericRequirement::None; @@ -330,7 +325,6 @@ language_item_table! { ExchangeMalloc, sym::exchange_malloc, exchange_malloc_fn, Target::Fn, GenericRequirement::None; DropInPlace, sym::drop_in_place, drop_in_place_fn, Target::Fn, GenericRequirement::Minimum(1); - FallbackSurfaceDrop, sym::fallback_surface_drop, fallback_surface_drop_fn, Target::Fn, GenericRequirement::None; AllocLayout, sym::alloc_layout, alloc_layout, Target::Struct, GenericRequirement::None; /// For all binary crates without `#![no_main]`, Rust will generate a "main" function. diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index f2d0b9117314c..617400ed74099 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -34,11 +34,9 @@ pub(crate) fn check_legal_trait_for_method_call( receiver: Option, expr_span: Span, trait_id: DefId, - body_id: DefId, + _body_id: DefId, ) -> Result<(), ErrorGuaranteed> { - if tcx.is_lang_item(trait_id, LangItem::Drop) - && tcx.lang_items().fallback_surface_drop_fn() != Some(body_id) - { + if tcx.is_lang_item(trait_id, LangItem::Drop) { let sugg = if let Some(receiver) = receiver.filter(|s| !s.is_empty()) { errors::ExplicitDestructorCallSugg::Snippet { lo: expr_span.shrink_to_lo(), diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index e5adcdb244f6d..cb8624fe91dff 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -911,11 +911,13 @@ fn run_required_analyses(tcx: TyCtxt<'_>) { tcx.ensure_ok().check_coroutine_obligations( tcx.typeck_root_def_id(def_id.to_def_id()).expect_local(), ); - // Eagerly check the unsubstituted layout for cycles. - tcx.ensure_ok().layout_of( - ty::TypingEnv::post_analysis(tcx, def_id.to_def_id()) - .as_query_input(tcx.type_of(def_id).instantiate_identity()), - ); + if !tcx.is_templated_coroutine(def_id.to_def_id()) { + // Eagerly check the unsubstituted layout for cycles. 
+ tcx.ensure_ok().layout_of( + ty::TypingEnv::post_analysis(tcx, def_id.to_def_id()) + .as_query_input(tcx.type_of(def_id).instantiate_identity()), + ); + } } }); });
diff --git a/compiler/rustc_middle/messages.ftl b/compiler/rustc_middle/messages.ftl index ded1c58057272..574371a2efdd8 100644 --- a/compiler/rustc_middle/messages.ftl +++ b/compiler/rustc_middle/messages.ftl
@@ -1,10 +1,14 @@ middle_adjust_for_foreign_abi_error = target architecture {$arch} does not support `extern {$abi}` ABI +middle_assert_async_resume_after_drop = `async fn` resumed after async drop + middle_assert_async_resume_after_panic = `async fn` resumed after panicking middle_assert_async_resume_after_return = `async fn` resumed after completion +middle_assert_coroutine_resume_after_drop = coroutine resumed after async drop + middle_assert_coroutine_resume_after_panic = coroutine resumed after panicking middle_assert_coroutine_resume_after_return = coroutine resumed after completion
@@ -12,6 +16,8 @@ middle_assert_coroutine_resume_after_return = coroutine resumed after completion middle_assert_divide_by_zero = attempt to divide `{$val}` by zero +middle_assert_gen_resume_after_drop = `gen` fn or block cannot be further iterated on after async drop + middle_assert_gen_resume_after_panic = `gen` fn or block cannot be further iterated on after it panicked middle_assert_misaligned_ptr_deref =
diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index 69b2f92f7166b..a638a60c903c5 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs
@@ -9,6 +9,7 @@ macro_rules! arena_types { ($macro:path) => ( $macro!([ [] layout: rustc_abi::LayoutData, + [] proxy_coroutine_layout: rustc_middle::mir::CoroutineLayout<'tcx>, [] fn_abi: rustc_target::callconv::FnAbi<'tcx, rustc_middle::ty::Ty<'tcx>>, // AdtDef are interned and compared by address [decode] adt_def: rustc_middle::ty::AdtDefData,
diff --git a/compiler/rustc_middle/src/middle/exported_symbols.rs b/compiler/rustc_middle/src/middle/exported_symbols.rs index 0bfbd39879747..0a4c028f5918e 100644 --- a/compiler/rustc_middle/src/middle/exported_symbols.rs +++ b/compiler/rustc_middle/src/middle/exported_symbols.rs
@@ -44,6 +44,7 @@ pub enum ExportedSymbol<'tcx> { Generic(DefId, GenericArgsRef<'tcx>), DropGlue(Ty<'tcx>), AsyncDropGlueCtorShim(Ty<'tcx>), + AsyncDropGlue(Ty<'tcx>), ThreadLocalShim(DefId), NoDefId(ty::SymbolName<'tcx>), }
@@ -63,6 +64,9 @@ impl<'tcx> ExportedSymbol<'tcx> { ExportedSymbol::AsyncDropGlueCtorShim(ty) => { tcx.symbol_name(ty::Instance::resolve_async_drop_in_place(tcx, ty)) } + ExportedSymbol::AsyncDropGlue(ty) => { + tcx.symbol_name(ty::Instance::resolve_async_drop_in_place_poll(tcx, ty)) + } ExportedSymbol::ThreadLocalShim(def_id) => tcx.symbol_name(ty::Instance { def: ty::InstanceKind::ThreadLocalShim(def_id), args: ty::GenericArgs::empty(),
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 63e20fb0d64e5..f4a9c0fb79712 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -202,7 +202,13 @@ pub struct CoroutineInfo<'tcx> { /// Coroutine drop glue. This field is populated after the state transform pass. pub coroutine_drop: Option<Body<'tcx>>, - /// The layout of a coroutine. This field is populated after the state transform pass. + /// Coroutine async drop glue. + pub coroutine_drop_async: Option<Body<'tcx>>, + + /// When the coroutine has a sync drop, this is the async proxy that calls the sync `coroutine_drop` impl.
+ pub coroutine_drop_proxy_async: Option>, + + /// The layout of a coroutine. Produced by the state transformation. pub coroutine_layout: Option>, /// If this is a coroutine then record the type of source expression that caused this coroutine @@ -222,6 +228,8 @@ impl<'tcx> CoroutineInfo<'tcx> { yield_ty: Some(yield_ty), resume_ty: Some(resume_ty), coroutine_drop: None, + coroutine_drop_async: None, + coroutine_drop_proxy_async: None, coroutine_layout: None, } } @@ -602,6 +610,26 @@ impl<'tcx> Body<'tcx> { self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_drop.as_ref()) } + #[inline] + pub fn coroutine_drop_async(&self) -> Option<&Body<'tcx>> { + self.coroutine.as_ref().and_then(|coroutine| coroutine.coroutine_drop_async.as_ref()) + } + + #[inline] + pub fn coroutine_requires_async_drop(&self) -> bool { + self.coroutine_drop_async().is_some() + } + + #[inline] + pub fn future_drop_poll(&self) -> Option<&Body<'tcx>> { + self.coroutine.as_ref().and_then(|coroutine| { + coroutine + .coroutine_drop_async + .as_ref() + .or(coroutine.coroutine_drop_proxy_async.as_ref()) + }) + } + #[inline] pub fn coroutine_kind(&self) -> Option { self.coroutine.as_ref().map(|coroutine| coroutine.coroutine_kind) diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index d4a9aac3733ff..f92ef393b793d 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -475,6 +475,8 @@ impl<'tcx> CodegenUnit<'tcx> { | InstanceKind::CloneShim(..) | InstanceKind::ThreadLocalShim(..) | InstanceKind::FnPtrAddrShim(..) + | InstanceKind::AsyncDropGlue(..) + | InstanceKind::FutureDropPollShim(..) | InstanceKind::AsyncDropGlueCtorShim(..) => None, } } diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index 11ebbbe807db7..3bb80f28d8276 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -253,9 +253,7 @@ fn dump_path<'tcx>( })); s } - ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)) => { - // Unfortunately, pretty-printed typed are not very filename-friendly. - // We dome some filtering. + ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) => { let mut s = ".".to_owned(); s.extend(ty.to_string().chars().filter_map(|c| match c { ' ' => None, @@ -264,6 +262,34 @@ fn dump_path<'tcx>( })); s } + ty::InstanceKind::AsyncDropGlue(_, ty) => { + let ty::Coroutine(_, args) = ty.kind() else { + bug!(); + }; + let ty = args.first().unwrap().expect_ty(); + let mut s = ".".to_owned(); + s.extend(ty.to_string().chars().filter_map(|c| match c { + ' ' => None, + ':' | '<' | '>' => Some('_'), + c => Some(c), + })); + s + } + ty::InstanceKind::FutureDropPollShim(_, proxy_cor, impl_cor) => { + let mut s = ".".to_owned(); + s.extend(proxy_cor.to_string().chars().filter_map(|c| match c { + ' ' => None, + ':' | '<' | '>' => Some('_'), + c => Some(c), + })); + s.push('.'); + s.extend(impl_cor.to_string().chars().filter_map(|c| match c { + ' ' => None, + ':' | '<' | '>' => Some('_'), + c => Some(c), + })); + s + } _ => String::new(), }; @@ -1047,7 +1073,13 @@ impl<'tcx> TerminatorKind<'tcx> { Call { target: None, unwind: _, .. } => vec![], Yield { drop: Some(_), .. } => vec!["resume".into(), "drop".into()], Yield { drop: None, .. } => vec!["resume".into()], - Drop { unwind: UnwindAction::Cleanup(_), .. } => vec!["return".into(), "unwind".into()], + Drop { unwind: UnwindAction::Cleanup(_), drop: Some(_), .. 
} => { vec!["return".into(), "unwind".into(), "drop".into()] } + Drop { unwind: UnwindAction::Cleanup(_), drop: None, .. } => { + vec!["return".into(), "unwind".into()] + } + Drop { unwind: _, drop: Some(_), .. } => vec!["return".into(), "drop".into()], Drop { unwind: _, .. } => vec!["return".into()], Assert { unwind: UnwindAction::Cleanup(_), .. } => { vec!["success".into(), "unwind".into()] }
diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 9cec8d832dd14..c91db422dc45b 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs
@@ -71,6 +71,8 @@ pub enum MirPhase { /// is fields of packed structs. In analysis MIR, `Drop(P)` for a `P` that might be misaligned /// for this reason implicitly moves `P` to a temporary before dropping. Runtime MIR has no such /// rules, and dropping a misaligned place is simply UB. + /// - Async drops: after drop elaboration some drops may become async (`drop`, `async_fut` fields). + /// The StateTransform pass will either expand those async drops or reset them to sync drops. /// - Unwinding: in analysis MIR, unwinding from a function which may not unwind aborts. In runtime /// MIR, this is UB. /// - Retags: If `-Zmir-emit-retag` is enabled, analysis MIR has "implicit" retags in the same way
@@ -788,7 +790,34 @@ pub enum TerminatorKind<'tcx> { /// The `replace` flag indicates whether this terminator was created as part of an assignment. /// This should only be used for diagnostic purposes, and does not have any operational /// meaning. - Drop { place: Place<'tcx>, target: BasicBlock, unwind: UnwindAction, replace: bool }, + /// + /// Async drop processing: + /// In compiler/rustc_mir_build/src/build/scope.rs we detect a possible async drop: + /// a drop of an object whose type `needs_async_drop`. + /// Such a drop may later be expanded, in the StateTransform pass, into an additional yield point + /// for the poll loop of the async drop future. + /// So we need a prepared 'drop' target block, in the same way as for the `Yield` terminator + /// (see `drops.build_mir::` in scopes.rs). + /// In compiler/rustc_mir_transform/src/elaborate_drops.rs, for an object implementing the `AsyncDrop` trait, + /// we prepare the async drop future: resolve `AsyncDrop::drop` and codegen the call; + /// `async_fut` is set to the corresponding local. + /// For a coroutine drop we don't need this logic, because the coroutine drop works with the same + /// layout object as the coroutine itself, so `async_fut` will be `None` for a coroutine drop. + /// Both the `drop` and `async_fut` fields are only used in compiler/rustc_mir_transform/src/coroutine.rs, + /// the StateTransform pass. In `expand_async_drops`, async drops are expanded + /// into one or two yield points with a poll ready/pending switch. + /// When a coroutine has any internal async drop, its drop function will itself be async + /// (generated by `create_coroutine_drop_shim_async` instead of `create_coroutine_drop_shim`). + Drop { + place: Place<'tcx>, + target: BasicBlock, + unwind: UnwindAction, + replace: bool, + /// Cleanup to be done if the coroutine is dropped at this suspend point (for async drop). + drop: Option<BasicBlock>, + /// Prepared async future local (for async drop). + async_fut: Option<Local>, + }, /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of /// the referred to function. The operand types must match the argument types of the function.
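
[editor's note — illustrative sketch, not part of the patch] The `Drop` terminator above gains two optional pieces of async-drop state: a `drop` target block (the coroutine-drop "dropline") next to the usual `target`/`unwind` edges, and an `async_fut` local holding the prepared drop future. The successor changes further down in terminator.rs keep every arm of `successors()` at a single iterator type by always chaining two `Option`s. Below is a minimal standalone model of that control-flow shape, with made-up names (`Block`, `MiniDrop`) and no rustc dependencies.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Block(u32);

/// Simplified stand-in for a `Drop` terminator with the new optional edges.
struct MiniDrop {
    target: Block,           // normal continuation, like `target`
    unwind: Option<Block>,   // like `UnwindAction::Cleanup(_)`
    dropline: Option<Block>, // like the new `drop: Option<BasicBlock>` field
}

impl MiniDrop {
    /// Always chain both options so every caller sees the same iterator type,
    /// mirroring how the real `successors()` grows from two to up to three targets.
    fn successors(&self) -> impl Iterator<Item = Block> {
        std::iter::once(self.target)
            .chain(self.unwind)
            .chain(self.dropline)
    }
}

fn main() {
    let term = MiniDrop { target: Block(1), unwind: Some(Block(2)), dropline: Some(Block(3)) };
    // An async-drop suspend point can therefore have up to three successors:
    // the return edge, the unwind edge, and the coroutine-drop ("dropline") edge.
    assert_eq!(term.successors().collect::<Vec<_>>(), [Block(1), Block(2), Block(3)]);
}
```

The sketch only shows why a drop suspend point can now have up to three successors; in the patch itself, drop elaboration and the coroutine StateTransform pass are the ones that populate and consume these fields on `TerminatorKind::Drop`.
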
@@ -1075,6 +1104,7 @@ pub enum AssertKind { RemainderByZero(O), ResumedAfterReturn(CoroutineKind), ResumedAfterPanic(CoroutineKind), + ResumedAfterDrop(CoroutineKind), MisalignedPointerDereference { required: O, found: O }, NullPointerDereference, } diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs index 9357e19f7c579..6ccd173900905 100644 --- a/compiler/rustc_middle/src/mir/terminator.rs +++ b/compiler/rustc_middle/src/mir/terminator.rs @@ -207,6 +207,16 @@ impl AssertKind { LangItem::PanicGenFnNonePanic } NullPointerDereference => LangItem::PanicNullPointerDereference, + ResumedAfterDrop(CoroutineKind::Coroutine(_)) => LangItem::PanicCoroutineResumedDrop, + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { + LangItem::PanicAsyncFnResumedDrop + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => { + LangItem::PanicAsyncGenFnResumedDrop + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { + LangItem::PanicGenFnNoneDrop + } BoundsCheck { .. } | MisalignedPointerDereference { .. } => { bug!("Unexpected AssertKind") @@ -297,6 +307,18 @@ impl AssertKind { ResumedAfterPanic(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { write!(f, "\"`gen fn` should just keep returning `None` after panicking\"") } + ResumedAfterDrop(CoroutineKind::Coroutine(_)) => { + write!(f, "\"coroutine resumed after async drop\"") + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { + write!(f, "\"`async fn` resumed after async drop\"") + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => { + write!(f, "\"`async gen fn` resumed after async drop\"") + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { + write!(f, "\"`gen fn` resumed after drop\"") + } } } @@ -344,6 +366,19 @@ impl AssertKind { middle_assert_coroutine_resume_after_panic } NullPointerDereference => middle_assert_null_ptr_deref, + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) => { + middle_assert_async_resume_after_drop + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _)) => { + todo!() + } + ResumedAfterDrop(CoroutineKind::Desugared(CoroutineDesugaring::Gen, _)) => { + middle_assert_gen_resume_after_drop + } + ResumedAfterDrop(CoroutineKind::Coroutine(_)) => { + middle_assert_coroutine_resume_after_drop + } + MisalignedPointerDereference { .. 
} => middle_assert_misaligned_ptr_deref, } } @@ -376,7 +411,10 @@ impl AssertKind { add!("left", format!("{left:#?}")); add!("right", format!("{right:#?}")); } - ResumedAfterReturn(_) | ResumedAfterPanic(_) | NullPointerDereference => {} + ResumedAfterReturn(_) + | ResumedAfterPanic(_) + | NullPointerDereference + | ResumedAfterDrop(_) => {} MisalignedPointerDereference { required, found } => { add!("required", format!("{required:#?}")); add!("found", format!("{found:#?}")); @@ -433,7 +471,7 @@ mod helper { #[inline] pub fn successors_for_value(&self, value: u128) -> Successors<'_> { let target = self.target_for_value(value); - (&[]).into_iter().copied().chain(Some(target)) + (&[]).into_iter().copied().chain(Some(target).into_iter().chain(None)) } } @@ -442,13 +480,23 @@ mod helper { pub fn successors(&self) -> Successors<'_> { use self::TerminatorKind::*; match *self { + // 3-successors for async drop: target, unwind, dropline (parent coroutine drop) + Drop { target: ref t, unwind: UnwindAction::Cleanup(u), drop: Some(d), .. } => { + slice::from_ref(t) + .into_iter() + .copied() + .chain(Some(u).into_iter().chain(Some(d))) + } + // 2-successors Call { target: Some(ref t), unwind: UnwindAction::Cleanup(u), .. } | Yield { resume: ref t, drop: Some(u), .. } - | Drop { target: ref t, unwind: UnwindAction::Cleanup(u), .. } + | Drop { target: ref t, unwind: UnwindAction::Cleanup(u), drop: None, .. } + | Drop { target: ref t, unwind: _, drop: Some(u), .. } | Assert { target: ref t, unwind: UnwindAction::Cleanup(u), .. } | FalseUnwind { real_target: ref t, unwind: UnwindAction::Cleanup(u) } => { - slice::from_ref(t).into_iter().copied().chain(Some(u)) + slice::from_ref(t).into_iter().copied().chain(Some(u).into_iter().chain(None)) } + // single successor Goto { target: ref t } | Call { target: None, unwind: UnwindAction::Cleanup(ref t), .. } | Call { target: Some(ref t), unwind: _, .. } @@ -456,23 +504,33 @@ mod helper { | Drop { target: ref t, unwind: _, .. } | Assert { target: ref t, unwind: _, .. } | FalseUnwind { real_target: ref t, unwind: _ } => { - slice::from_ref(t).into_iter().copied().chain(None) + slice::from_ref(t).into_iter().copied().chain(None.into_iter().chain(None)) } + // No successors UnwindResume | UnwindTerminate(_) | CoroutineDrop | Return | Unreachable | TailCall { .. } - | Call { target: None, unwind: _, .. } => (&[]).into_iter().copied().chain(None), + | Call { target: None, unwind: _, .. } => { + (&[]).into_iter().copied().chain(None.into_iter().chain(None)) + } + // Multiple successors InlineAsm { ref targets, unwind: UnwindAction::Cleanup(u), .. } => { - targets.iter().copied().chain(Some(u)) + targets.iter().copied().chain(Some(u).into_iter().chain(None)) + } + InlineAsm { ref targets, unwind: _, .. } => { + targets.iter().copied().chain(None.into_iter().chain(None)) } - InlineAsm { ref targets, unwind: _, .. } => targets.iter().copied().chain(None), - SwitchInt { ref targets, .. } => targets.targets.iter().copied().chain(None), - FalseEdge { ref real_target, imaginary_target } => { - slice::from_ref(real_target).into_iter().copied().chain(Some(imaginary_target)) + SwitchInt { ref targets, .. 
} => { + targets.targets.iter().copied().chain(None.into_iter().chain(None)) } + // FalseEdge + FalseEdge { ref real_target, imaginary_target } => slice::from_ref(real_target) + .into_iter() + .copied() + .chain(Some(imaginary_target).into_iter().chain(None)), } } @@ -480,16 +538,31 @@ mod helper { pub fn successors_mut(&mut self) -> SuccessorsMut<'_> { use self::TerminatorKind::*; match *self { + // 3-successors for async drop: target, unwind, dropline (parent coroutine drop) + Drop { + target: ref mut t, + unwind: UnwindAction::Cleanup(ref mut u), + drop: Some(ref mut d), + .. + } => slice::from_mut(t).into_iter().chain(Some(u).into_iter().chain(Some(d))), + // 2-successors Call { target: Some(ref mut t), unwind: UnwindAction::Cleanup(ref mut u), .. } | Yield { resume: ref mut t, drop: Some(ref mut u), .. } - | Drop { target: ref mut t, unwind: UnwindAction::Cleanup(ref mut u), .. } + | Drop { + target: ref mut t, + unwind: UnwindAction::Cleanup(ref mut u), + drop: None, + .. + } + | Drop { target: ref mut t, unwind: _, drop: Some(ref mut u), .. } | Assert { target: ref mut t, unwind: UnwindAction::Cleanup(ref mut u), .. } | FalseUnwind { real_target: ref mut t, unwind: UnwindAction::Cleanup(ref mut u), - } => slice::from_mut(t).into_iter().chain(Some(u)), + } => slice::from_mut(t).into_iter().chain(Some(u).into_iter().chain(None)), + // single successor Goto { target: ref mut t } | Call { target: None, unwind: UnwindAction::Cleanup(ref mut t), .. } | Call { target: Some(ref mut t), unwind: _, .. } @@ -497,22 +570,33 @@ mod helper { | Drop { target: ref mut t, unwind: _, .. } | Assert { target: ref mut t, unwind: _, .. } | FalseUnwind { real_target: ref mut t, unwind: _ } => { - slice::from_mut(t).into_iter().chain(None) + slice::from_mut(t).into_iter().chain(None.into_iter().chain(None)) } + // No successors UnwindResume | UnwindTerminate(_) | CoroutineDrop | Return | Unreachable | TailCall { .. } - | Call { target: None, unwind: _, .. } => (&mut []).into_iter().chain(None), + | Call { target: None, unwind: _, .. } => { + (&mut []).into_iter().chain(None.into_iter().chain(None)) + } + // Multiple successors InlineAsm { ref mut targets, unwind: UnwindAction::Cleanup(ref mut u), .. } => { - targets.iter_mut().chain(Some(u)) + targets.iter_mut().chain(Some(u).into_iter().chain(None)) + } + InlineAsm { ref mut targets, unwind: _, .. } => { + targets.iter_mut().chain(None.into_iter().chain(None)) + } + SwitchInt { ref mut targets, .. } => { + targets.targets.iter_mut().chain(None.into_iter().chain(None)) } - InlineAsm { ref mut targets, unwind: _, .. } => targets.iter_mut().chain(None), - SwitchInt { ref mut targets, .. 
} => targets.targets.iter_mut().chain(None), + // FalseEdge FalseEdge { ref mut real_target, ref mut imaginary_target } => { - slice::from_mut(real_target).into_iter().chain(Some(imaginary_target)) + slice::from_mut(real_target) + .into_iter() + .chain(Some(imaginary_target).into_iter().chain(None)) } } } @@ -645,8 +729,10 @@ impl<'tcx> TerminatorKind<'tcx> { Goto { target } => TerminatorEdges::Single(target), + // FIXME: Maybe we need also TerminatorEdges::Trio for async drop + // (target + unwind + dropline) Assert { target, unwind, expected: _, msg: _, cond: _ } - | Drop { target, unwind, place: _, replace: _ } + | Drop { target, unwind, place: _, replace: _, drop: _, async_fut: _ } | FalseUnwind { real_target: target, unwind } => match unwind { UnwindAction::Cleanup(unwind) => TerminatorEdges::Double(target, unwind), UnwindAction::Continue | UnwindAction::Terminate(_) | UnwindAction::Unreachable => { diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 8ad88fbda7c89..cbb3e34d7e57a 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -349,17 +349,21 @@ macro_rules! make_mir_visitor { coroutine_closure_def_id: _def_id, receiver_by_ref: _, } | - ty::InstanceKind::AsyncDropGlueCtorShim(_def_id, None) | ty::InstanceKind::DropGlue(_def_id, None) => {} ty::InstanceKind::FnPtrShim(_def_id, ty) | ty::InstanceKind::DropGlue(_def_id, Some(ty)) | ty::InstanceKind::CloneShim(_def_id, ty) | ty::InstanceKind::FnPtrAddrShim(_def_id, ty) | - ty::InstanceKind::AsyncDropGlueCtorShim(_def_id, Some(ty)) => { + ty::InstanceKind::AsyncDropGlue(_def_id, ty) | + ty::InstanceKind::AsyncDropGlueCtorShim(_def_id, ty) => { // FIXME(eddyb) use a better `TyContext` here. self.visit_ty($(& $mutability)? *ty, TyContext::Location(location)); } + ty::InstanceKind::FutureDropPollShim(_def_id, proxy_ty, impl_ty) => { + self.visit_ty($(& $mutability)? *proxy_ty, TyContext::Location(location)); + self.visit_ty($(& $mutability)? *impl_ty, TyContext::Location(location)); + } } self.visit_args(callee_args, location); } @@ -512,6 +516,8 @@ macro_rules! make_mir_visitor { target: _, unwind: _, replace: _, + drop: _, + async_fut: _, } => { self.visit_place( place, @@ -638,7 +644,7 @@ macro_rules! make_mir_visitor { OverflowNeg(op) | DivisionByZero(op) | RemainderByZero(op) => { self.visit_operand(op, location); } - ResumedAfterReturn(_) | ResumedAfterPanic(_) | NullPointerDereference => { + ResumedAfterReturn(_) | ResumedAfterPanic(_) | NullPointerDereference | ResumedAfterDrop(_) => { // Nothing to visit } MisalignedPointerDereference { required, found } => { diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 478ac19d199bd..3167bc0730bb1 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -549,6 +549,13 @@ rustc_queries! { desc { |tcx| "elaborating drops for `{}`", tcx.def_path_str(key) } } + query templated_mir_drops_elaborated_and_const_checked(ty: Ty<'tcx>) + -> &'tcx Steal> + { + no_hash + desc { |tcx| "elaborating drops for templated mir `{}`", ty } + } + query mir_for_ctfe( key: DefId ) -> &'tcx mir::Body<'tcx> { @@ -614,6 +621,11 @@ rustc_queries! { feedable } + /// MIR for templated coroutine after our optimization passes have run. 
+ query templated_optimized_mir(ty: Ty<'tcx>) -> &'tcx mir::Body<'tcx> { + desc { |tcx| "optimizing templated MIR for `{}`", ty } + } + /// Scans through a function's MIR after MIR optimizations, to prepare the /// information needed by codegen when `-Cinstrument-coverage` is active. /// @@ -1310,7 +1322,11 @@ rustc_queries! { /// Generates a MIR body for the shim. query mir_shims(key: ty::InstanceKind<'tcx>) -> &'tcx mir::Body<'tcx> { arena_cache - desc { |tcx| "generating MIR shim for `{}`", tcx.def_path_str(key.def_id()) } + desc { + |tcx| "generating MIR shim for `{}`, instance={:?}", + tcx.def_path_str(key.def_id()), + key + } } /// The `symbol_name` query provides the symbol name for calling a @@ -1531,6 +1547,10 @@ rustc_queries! { query is_unpin_raw(env: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) -> bool { desc { "computing whether `{}` is `Unpin`", env.value } } + /// Query backing `Ty::is_async_drop`. + query is_async_drop_raw(env: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) -> bool { + desc { "computing whether `{}` is `AsyncDrop`", env.value } + } /// Query backing `Ty::needs_drop`. query needs_drop_raw(env: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) -> bool { desc { "computing whether `{}` needs drop", env.value } @@ -1563,6 +1583,14 @@ rustc_queries! { cache_on_disk_if { true } } + /// A list of types where the ADT requires async drop if and only if any of + /// those types require async drop. If the ADT is known to always need async drop + /// then `Err(AlwaysRequiresDrop)` is returned. + query adt_async_drop_tys(def_id: DefId) -> Result<&'tcx ty::List>, AlwaysRequiresDrop> { + desc { |tcx| "computing when `{}` needs async drop", tcx.def_path_str(def_id) } + cache_on_disk_if { true } + } + /// A list of types where the ADT requires drop if and only if any of those types /// has significant drop. A type marked with the attribute `rustc_insignificant_dtor` /// is considered to not be significant. A drop is significant if it is implemented diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 2566081cf5dba..e2680ab8c0a64 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -682,7 +682,6 @@ macro_rules! bidirectional_lang_item_map { bidirectional_lang_item_map! { // tidy-alphabetical-start - AsyncDestruct, AsyncFn, AsyncFnKindHelper, AsyncFnKindUpvars, @@ -1634,6 +1633,10 @@ impl<'tcx> TyCtxt<'tcx> { self.coroutine_kind(def_id).is_some() } + pub fn is_templated_coroutine(self, def_id: DefId) -> bool { + Some(def_id) == self.lang_items().async_drop_in_place_poll_fn() + } + /// Returns the movability of the coroutine of `def_id`, or panics /// if given a `def_id` that is not a coroutine. pub fn coroutine_movability(self, def_id: DefId) -> hir::Movability { diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index b7a648aae3f19..be49f67b5b320 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -146,6 +146,9 @@ pub enum InstanceKind<'tcx> { /// native support. ThreadLocalShim(DefId), + /// Proxy shim for async drop of future (def_id, proxy_cor_ty, impl_cor_ty) + FutureDropPollShim(DefId, Ty<'tcx>, Ty<'tcx>), + /// `core::ptr::drop_in_place::`. /// /// The `DefId` is for `core::ptr::drop_in_place`. @@ -172,7 +175,13 @@ pub enum InstanceKind<'tcx> { /// /// The `DefId` is for `core::future::async_drop::async_drop_in_place`, the `Ty` /// is the type `T`. 
- AsyncDropGlueCtorShim(DefId, Option>), + AsyncDropGlueCtorShim(DefId, Ty<'tcx>), + + /// `core::future::async_drop::async_drop_in_place::<'_, T>::{closure}`. + /// + /// async_drop_in_place poll function implementation (for generated coroutine). + /// `Ty` here is `async_drop_in_place::{closure}` coroutine type, not just `T` + AsyncDropGlue(DefId, Ty<'tcx>), } impl<'tcx> Instance<'tcx> { @@ -220,7 +229,9 @@ impl<'tcx> Instance<'tcx> { .upstream_monomorphizations_for(def) .and_then(|monos| monos.get(&self.args).cloned()), InstanceKind::DropGlue(_, Some(_)) => tcx.upstream_drop_glue_for(self.args), - InstanceKind::AsyncDropGlueCtorShim(_, Some(_)) => { + InstanceKind::AsyncDropGlue(_, _) => None, + InstanceKind::FutureDropPollShim(_, _, _) => None, + InstanceKind::AsyncDropGlueCtorShim(_, _) => { tcx.upstream_async_drop_glue_for(self.args) } _ => None, @@ -247,6 +258,8 @@ impl<'tcx> InstanceKind<'tcx> { | InstanceKind::DropGlue(def_id, _) | InstanceKind::CloneShim(def_id, _) | InstanceKind::FnPtrAddrShim(def_id, _) + | InstanceKind::FutureDropPollShim(def_id, _, _) + | InstanceKind::AsyncDropGlue(def_id, _) | InstanceKind::AsyncDropGlueCtorShim(def_id, _) => def_id, } } @@ -256,7 +269,9 @@ impl<'tcx> InstanceKind<'tcx> { match self { ty::InstanceKind::Item(def) => Some(def), ty::InstanceKind::DropGlue(def_id, Some(_)) - | InstanceKind::AsyncDropGlueCtorShim(def_id, Some(_)) + | InstanceKind::AsyncDropGlueCtorShim(def_id, _) + | InstanceKind::AsyncDropGlue(def_id, _) + | InstanceKind::FutureDropPollShim(def_id, ..) | InstanceKind::ThreadLocalShim(def_id) => Some(def_id), InstanceKind::VTableShim(..) | InstanceKind::ReifyShim(..) @@ -266,7 +281,6 @@ impl<'tcx> InstanceKind<'tcx> { | InstanceKind::ClosureOnceShim { .. } | ty::InstanceKind::ConstructCoroutineInClosureShim { .. } | InstanceKind::DropGlue(..) - | InstanceKind::AsyncDropGlueCtorShim(..) | InstanceKind::CloneShim(..) | InstanceKind::FnPtrAddrShim(..) => None, } @@ -291,7 +305,9 @@ impl<'tcx> InstanceKind<'tcx> { let def_id = match *self { ty::InstanceKind::Item(def) => def, ty::InstanceKind::DropGlue(_, Some(_)) => return false, - ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(_)) => return false, + ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) => return ty.is_coroutine(), + ty::InstanceKind::FutureDropPollShim(_, _, _) => return false, + ty::InstanceKind::AsyncDropGlue(_, _) => return false, ty::InstanceKind::ThreadLocalShim(_) => return false, _ => return true, }; @@ -313,7 +329,7 @@ impl<'tcx> InstanceKind<'tcx> { return true; } if let ty::InstanceKind::DropGlue(.., Some(ty)) - | ty::InstanceKind::AsyncDropGlueCtorShim(.., Some(ty)) = *self + | ty::InstanceKind::AsyncDropGlueCtorShim(.., ty) = *self { // Drop glue generally wants to be instantiated at every codegen // unit, but without an #[inline] hint. We should make this @@ -368,11 +384,12 @@ impl<'tcx> InstanceKind<'tcx> { | InstanceKind::FnPtrAddrShim(..) | InstanceKind::FnPtrShim(..) | InstanceKind::DropGlue(_, Some(_)) - | InstanceKind::AsyncDropGlueCtorShim(_, Some(_)) => false, + | InstanceKind::FutureDropPollShim(..) + | InstanceKind::AsyncDropGlue(_, _) => false, + InstanceKind::AsyncDropGlueCtorShim(_, _) => false, InstanceKind::ClosureOnceShim { .. } | InstanceKind::ConstructCoroutineInClosureShim { .. } | InstanceKind::DropGlue(..) - | InstanceKind::AsyncDropGlueCtorShim(..) | InstanceKind::Item(_) | InstanceKind::Intrinsic(..) | InstanceKind::ReifyShim(..) 
@@ -449,8 +466,11 @@ pub fn fmt_instance( InstanceKind::DropGlue(_, Some(ty)) => write!(f, " - shim(Some({ty}))"), InstanceKind::CloneShim(_, ty) => write!(f, " - shim({ty})"), InstanceKind::FnPtrAddrShim(_, ty) => write!(f, " - shim({ty})"), - InstanceKind::AsyncDropGlueCtorShim(_, None) => write!(f, " - shim(None)"), - InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)) => write!(f, " - shim(Some({ty}))"), + InstanceKind::FutureDropPollShim(_, proxy_ty, impl_ty) => { + write!(f, " - dropshim({proxy_ty}-{impl_ty})") + } + InstanceKind::AsyncDropGlue(_, ty) => write!(f, " - shim({ty})"), + InstanceKind::AsyncDropGlueCtorShim(_, ty) => write!(f, " - shim(Some({ty}))"), } }
@@ -468,6 +488,59 @@ impl<'tcx> fmt::Display for Instance<'tcx> { } } +// async_drop_in_place<T>::coroutine.poll, when T is a standard coroutine, +// should be resolved to this coroutine's future_drop_poll (through FutureDropPollShim proxy). +// async_drop_in_place<async_drop_in_place<T>::coroutine>::coroutine.poll, +// when T is a standard coroutine, should be resolved to this coroutine's future_drop_poll. +// async_drop_in_place<async_drop_in_place<T>::coroutine>::coroutine.poll, +// when T is not a coroutine, should be resolved to the innermost +// async_drop_in_place<T>::coroutine's poll function (through FutureDropPollShim proxy) +fn resolve_async_drop_poll<'tcx>(tcx: TyCtxt<'tcx>, mut cor_ty: Ty<'tcx>) -> Instance<'tcx> { + let first_cor = cor_ty; + let ty::Coroutine(_, proxy_args) = first_cor.kind() else { + bug!(); + }; + let async_drop_in_place_poll = tcx.lang_items().async_drop_in_place_poll_fn().unwrap(); + let mut child_ty = cor_ty; + loop { + if let ty::Coroutine(child_def, child_args) = child_ty.kind() { + cor_ty = child_ty; + if *child_def == async_drop_in_place_poll { + child_ty = child_args.first().unwrap().expect_ty(); + continue; + } else { + return Instance { + def: ty::InstanceKind::FutureDropPollShim( + async_drop_in_place_poll, + first_cor, + cor_ty, + ), + args: proxy_args, + }; + } + } else { + let ty::Coroutine(_, child_args) = cor_ty.kind() else { + bug!(); + }; + if first_cor != cor_ty { + return Instance { + def: ty::InstanceKind::FutureDropPollShim( + async_drop_in_place_poll, + first_cor, + cor_ty, + ), + args: proxy_args, + }; + } else { + return Instance { + def: ty::InstanceKind::AsyncDropGlue(async_drop_in_place_poll, cor_ty), + args: child_args, + }; + } + } + } +} + impl<'tcx> Instance<'tcx> { pub fn new(def_id: DefId, args: GenericArgsRef<'tcx>) -> Instance<'tcx> { assert!(
@@ -782,6 +855,12 @@ impl<'tcx> Instance<'tcx> { ) } + pub fn resolve_async_drop_in_place_poll(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ty::Instance<'tcx> { + let def_id = tcx.require_lang_item(LangItem::AsyncDropInPlacePoll, None); + let args = tcx.mk_args(&[ty.into()]); + Instance::expect_resolve(tcx, ty::TypingEnv::fully_monomorphized(), def_id, args, DUMMY_SP) + } + #[instrument(level = "debug", skip(tcx), ret)] pub fn fn_once_adapter_instance( tcx: TyCtxt<'tcx>,
@@ -846,6 +925,9 @@ impl<'tcx> Instance<'tcx> { }; if tcx.is_lang_item(trait_item_id, coroutine_callable_item) { + if Some(coroutine_def_id) == tcx.lang_items().async_drop_in_place_poll_fn() { + return Some(resolve_async_drop_poll(tcx, rcvr_args.type_at(0))); + } let ty::Coroutine(_, id_args) = *tcx.type_of(coroutine_def_id).skip_binder().kind() else { bug!()
diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index e5015ea3c3d1f..784bbd207fc31 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs
@@ -922,23 +922,58 @@ where i,
), - ty::Coroutine(def_id, args) => match this.variants { - Variants::Empty => unreachable!(), - Variants::Single { index } => TyMaybeWithLayout::Ty( - args.as_coroutine() - .state_tys(def_id, tcx) - .nth(index.as_usize()) - .unwrap() - .nth(i) - .unwrap(), - ), - Variants::Multiple { tag, tag_field, .. } => { - if i == tag_field { - return TyMaybeWithLayout::TyAndLayout(tag_layout(tag)); + ty::Coroutine(def_id, args) => { + // layout of `async_drop_in_place::{closure}` in case, + // when T is a coroutine, contains this internal coroutine's ref + if tcx.is_templated_coroutine(def_id) { + fn find_impl_coroutine<'tcx>( + tcx: TyCtxt<'tcx>, + mut cor_ty: Ty<'tcx>, + ) -> Ty<'tcx> { + let mut ty = cor_ty; + loop { + if let ty::Coroutine(def_id, args) = ty.kind() { + cor_ty = ty; + if tcx.is_templated_coroutine(*def_id) { + ty = args.first().unwrap().expect_ty(); + continue; + } else { + return cor_ty; + } + } else { + return cor_ty; + } + } + } + let arg_cor_ty = args.first().unwrap().expect_ty(); + if arg_cor_ty.is_coroutine() { + assert!(i == 0); + let impl_cor_ty = find_impl_coroutine(tcx, arg_cor_ty); + return TyMaybeWithLayout::Ty(Ty::new_mut_ref( + tcx, + tcx.lifetimes.re_static, + impl_cor_ty, + )); } - TyMaybeWithLayout::Ty(args.as_coroutine().prefix_tys()[i]) } - }, + match this.variants { + Variants::Empty => unreachable!(), + Variants::Single { index } => TyMaybeWithLayout::Ty( + args.as_coroutine() + .state_tys(def_id, tcx) + .nth(index.as_usize()) + .unwrap() + .nth(i) + .unwrap(), + ), + Variants::Multiple { tag, tag_field, .. } => { + if i == tag_field { + return TyMaybeWithLayout::TyAndLayout(tag_layout(tag)); + } + TyMaybeWithLayout::Ty(args.as_coroutine().prefix_tys()[i]) + } + } + } ty::Tuple(tys) => TyMaybeWithLayout::Ty(tys[i]), diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 6fe1502c66dd6..ebf5150faf0ae 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -37,6 +37,7 @@ use rustc_hir::LangItem; use rustc_hir::def::{CtorKind, CtorOf, DefKind, DocLinkResMap, LifetimeRes, Res}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LocalDefIdMap}; use rustc_index::IndexVec; +use rustc_index::bit_set::BitMatrix; use rustc_macros::{ Decodable, Encodable, HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable, extension, @@ -100,7 +101,7 @@ pub use self::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeV use crate::error::{OpaqueHiddenTypeMismatch, TypeMismatchReason}; use crate::metadata::ModChild; use crate::middle::privacy::EffectiveVisibilities; -use crate::mir::{Body, CoroutineLayout}; +use crate::mir::{Body, CoroutineLayout, CoroutineSavedLocal, CoroutineSavedTy, SourceInfo}; use crate::query::{IntoQueryParam, Providers}; use crate::ty; pub use crate::ty::diagnostics::*; @@ -1735,11 +1736,14 @@ impl<'tcx> TyCtxt<'tcx> { | ty::InstanceKind::Virtual(..) | ty::InstanceKind::ClosureOnceShim { .. } | ty::InstanceKind::ConstructCoroutineInClosureShim { .. } + | ty::InstanceKind::FutureDropPollShim(..) | ty::InstanceKind::DropGlue(..) | ty::InstanceKind::CloneShim(..) | ty::InstanceKind::ThreadLocalShim(..) | ty::InstanceKind::FnPtrAddrShim(..) | ty::InstanceKind::AsyncDropGlueCtorShim(..) 
=> self.mir_shims(instance), + // async drop glue should be processed specifically, as a templated coroutine + ty::InstanceKind::AsyncDropGlue(_, ty) => self.templated_optimized_mir(ty), } } @@ -1850,16 +1854,17 @@ impl<'tcx> TyCtxt<'tcx> { self.def_kind(trait_def_id) == DefKind::TraitAlias } - /// Returns layout of a coroutine. Layout might be unavailable if the + /// Returns layout of a non-templated coroutine. Layout might be unavailable if the /// coroutine is tainted by errors. /// /// Takes `coroutine_kind` which can be acquired from the `CoroutineArgs::kind_ty`, /// e.g. `args.as_coroutine().kind_ty()`. - pub fn coroutine_layout( + pub fn ordinary_coroutine_layout( self, def_id: DefId, coroutine_kind_ty: Ty<'tcx>, ) -> Option<&'tcx CoroutineLayout<'tcx>> { + debug_assert_ne!(Some(def_id), self.lang_items().async_drop_in_place_poll_fn()); let mir = self.optimized_mir(def_id); // Regular coroutine if coroutine_kind_ty.is_unit() { @@ -1889,6 +1894,66 @@ impl<'tcx> TyCtxt<'tcx> { } } + /// Returns layout of a templated coroutine. Layout might be unavailable if the + /// coroutine is tainted by errors. Atm, the only templated coroutine is + /// `async_drop_in_place::{closure}` returned from `async fn async_drop_in_place(..)`. + pub fn templated_coroutine_layout(self, ty: Ty<'tcx>) -> Option<&'tcx CoroutineLayout<'tcx>> { + self.templated_optimized_mir(ty).coroutine_layout_raw() + } + + /// Returns layout of a templated (or not) coroutine. Layout might be unavailable if the + /// coroutine is tainted by errors. + pub fn coroutine_layout( + self, + def_id: DefId, + args: GenericArgsRef<'tcx>, + ) -> Option<&'tcx CoroutineLayout<'tcx>> { + if Some(def_id) == self.lang_items().async_drop_in_place_poll_fn() { + fn find_impl_coroutine<'tcx>(tcx: TyCtxt<'tcx>, mut cor_ty: Ty<'tcx>) -> Ty<'tcx> { + let mut ty = cor_ty; + loop { + if let ty::Coroutine(def_id, args) = ty.kind() { + cor_ty = ty; + if tcx.is_templated_coroutine(*def_id) { + ty = args.first().unwrap().expect_ty(); + continue; + } else { + return cor_ty; + } + } else { + return cor_ty; + } + } + } + // layout of `async_drop_in_place::{closure}` in case, + // when T is a coroutine, contains this internal coroutine's ref + let arg_cor_ty = args.first().unwrap().expect_ty(); + if arg_cor_ty.is_coroutine() { + let impl_cor_ty = find_impl_coroutine(self, arg_cor_ty); + let impl_ref = Ty::new_mut_ref(self, self.lifetimes.re_static, impl_cor_ty); + let span = self.def_span(def_id); + let source_info = SourceInfo::outermost(span); + let proxy_layout = CoroutineLayout { + field_tys: [CoroutineSavedTy { + ty: impl_ref, + source_info, + ignore_for_traits: true, + }] + .into(), + field_names: [None].into(), + variant_fields: [IndexVec::from([CoroutineSavedLocal::ZERO])].into(), + variant_source_info: [source_info].into(), + storage_conflicts: BitMatrix::new(1, 1), + }; + return Some(self.arena.alloc(proxy_layout)); + } else { + self.templated_coroutine_layout(Ty::new_coroutine(self, def_id, args)) + } + } else { + self.ordinary_coroutine_layout(def_id, args.as_coroutine().kind_ty()) + } + } + /// Given the `DefId` of an impl, returns the `DefId` of the trait it implements. /// If it implements no trait, returns `None`. 
pub fn trait_id_of_impl(self, def_id: DefId) -> Option { diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index a9a47c87a3882..d72ad888bf634 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -4,7 +4,6 @@ use std::assert_matches::debug_assert_matches; use std::borrow::Cow; -use std::iter; use std::ops::{ControlFlow, Range}; use hir::def::{CtorKind, DefKind}; @@ -20,7 +19,7 @@ use rustc_type_ir::TyKind::*; use rustc_type_ir::visit::TypeVisitableExt; use rustc_type_ir::{self as ir, BoundVar, CollectAndApply, DynKind}; use tracing::instrument; -use ty::util::{AsyncDropGlueMorphology, IntTypeExt}; +use ty::util::IntTypeExt; use super::GenericParamDefKind; use crate::infer::canonical::Canonical; @@ -79,8 +78,7 @@ impl<'tcx> ty::CoroutineArgs> { #[inline] fn variant_range(&self, def_id: DefId, tcx: TyCtxt<'tcx>) -> Range { // FIXME requires optimized MIR - FIRST_VARIANT - ..tcx.coroutine_layout(def_id, tcx.types.unit).unwrap().variant_fields.next_index() + FIRST_VARIANT..tcx.coroutine_layout(def_id, self.args).unwrap().variant_fields.next_index() } /// The discriminant for the given variant. Panics if the `variant_index` is @@ -140,10 +138,14 @@ impl<'tcx> ty::CoroutineArgs> { def_id: DefId, tcx: TyCtxt<'tcx>, ) -> impl Iterator> + Captures<'tcx>> { - let layout = tcx.coroutine_layout(def_id, self.kind_ty()).unwrap(); + let layout = tcx.coroutine_layout(def_id, self.args).unwrap(); layout.variant_fields.iter().map(move |variant| { variant.iter().map(move |field| { - ty::EarlyBinder::bind(layout.field_tys[*field].ty).instantiate(tcx, self.args) + if tcx.is_templated_coroutine(def_id) { + layout.field_tys[*field].ty + } else { + ty::EarlyBinder::bind(layout.field_tys[*field].ty).instantiate(tcx, self.args) + } }) }) } @@ -1018,10 +1020,6 @@ impl<'tcx> rustc_type_ir::inherent::Ty> for Ty<'tcx> { self.discriminant_ty(interner) } - fn async_destructor_ty(self, interner: TyCtxt<'tcx>) -> Ty<'tcx> { - self.async_destructor_ty(interner) - } - fn has_unsafe_fields(self) -> bool { Ty::has_unsafe_fields(self) } @@ -1548,125 +1546,6 @@ impl<'tcx> Ty<'tcx> { } } - /// Returns the type of the async destructor of this type. - pub fn async_destructor_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { - match self.async_drop_glue_morphology(tcx) { - AsyncDropGlueMorphology::Noop => { - return Ty::async_destructor_combinator(tcx, LangItem::AsyncDropNoop) - .instantiate_identity(); - } - AsyncDropGlueMorphology::DeferredDropInPlace => { - let drop_in_place = - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropDeferredDropInPlace) - .instantiate(tcx, &[self.into()]); - return Ty::async_destructor_combinator(tcx, LangItem::AsyncDropFuse) - .instantiate(tcx, &[drop_in_place.into()]); - } - AsyncDropGlueMorphology::Custom => (), - } - - match *self.kind() { - ty::Param(_) | ty::Alias(..) 
| ty::Infer(ty::TyVar(_)) => { - let assoc_items = tcx - .associated_item_def_ids(tcx.require_lang_item(LangItem::AsyncDestruct, None)); - Ty::new_projection(tcx, assoc_items[0], [self]) - } - - ty::Array(elem_ty, _) | ty::Slice(elem_ty) => { - let dtor = Ty::async_destructor_combinator(tcx, LangItem::AsyncDropSlice) - .instantiate(tcx, &[elem_ty.into()]); - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropFuse) - .instantiate(tcx, &[dtor.into()]) - } - - ty::Adt(adt_def, args) if adt_def.is_enum() || adt_def.is_struct() => self - .adt_async_destructor_ty( - tcx, - adt_def.variants().iter().map(|v| v.fields.iter().map(|f| f.ty(tcx, args))), - ), - ty::Tuple(tys) => self.adt_async_destructor_ty(tcx, iter::once(tys)), - ty::Closure(_, args) => { - self.adt_async_destructor_ty(tcx, iter::once(args.as_closure().upvar_tys())) - } - ty::CoroutineClosure(_, args) => self - .adt_async_destructor_ty(tcx, iter::once(args.as_coroutine_closure().upvar_tys())), - - ty::Adt(adt_def, _) => { - assert!(adt_def.is_union()); - - let surface_drop = self.surface_async_dropper_ty(tcx).unwrap(); - - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropFuse) - .instantiate(tcx, &[surface_drop.into()]) - } - - ty::Bound(..) - | ty::Foreign(_) - | ty::Placeholder(_) - | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { - bug!("`async_destructor_ty` applied to unexpected type: {self:?}") - } - - _ => bug!("`async_destructor_ty` is not yet implemented for type: {self:?}"), - } - } - - fn adt_async_destructor_ty(self, tcx: TyCtxt<'tcx>, variants: I) -> Ty<'tcx> - where - I: Iterator + ExactSizeIterator, - I::Item: IntoIterator>, - { - debug_assert_eq!(self.async_drop_glue_morphology(tcx), AsyncDropGlueMorphology::Custom); - - let defer = Ty::async_destructor_combinator(tcx, LangItem::AsyncDropDefer); - let chain = Ty::async_destructor_combinator(tcx, LangItem::AsyncDropChain); - - let noop = - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropNoop).instantiate_identity(); - let either = Ty::async_destructor_combinator(tcx, LangItem::AsyncDropEither); - - let variants_dtor = variants - .into_iter() - .map(|variant| { - variant - .into_iter() - .map(|ty| defer.instantiate(tcx, &[ty.into()])) - .reduce(|acc, next| chain.instantiate(tcx, &[acc.into(), next.into()])) - .unwrap_or(noop) - }) - .reduce(|other, matched| { - either.instantiate(tcx, &[other.into(), matched.into(), self.into()]) - }) - .unwrap(); - - let dtor = if let Some(dropper_ty) = self.surface_async_dropper_ty(tcx) { - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropChain) - .instantiate(tcx, &[dropper_ty.into(), variants_dtor.into()]) - } else { - variants_dtor - }; - - Ty::async_destructor_combinator(tcx, LangItem::AsyncDropFuse) - .instantiate(tcx, &[dtor.into()]) - } - - fn surface_async_dropper_ty(self, tcx: TyCtxt<'tcx>) -> Option> { - let adt_def = self.ty_adt_def()?; - let dropper = adt_def - .async_destructor(tcx) - .map(|_| LangItem::SurfaceAsyncDropInPlace) - .or_else(|| adt_def.destructor(tcx).map(|_| LangItem::AsyncDropSurfaceDropInPlace))?; - Some(Ty::async_destructor_combinator(tcx, dropper).instantiate(tcx, &[self.into()])) - } - - fn async_destructor_combinator( - tcx: TyCtxt<'tcx>, - lang_item: LangItem, - ) -> ty::EarlyBinder<'tcx, Ty<'tcx>> { - tcx.fn_sig(tcx.require_lang_item(lang_item, None)) - .map_bound(|fn_sig| fn_sig.output().no_bound_vars().unwrap()) - } - /// Returns the type of metadata for (potentially wide) pointers to this type, /// or the struct tail if the metadata type 
cannot be determined. pub fn ptr_metadata_ty_or_tail( @@ -2010,6 +1889,13 @@ impl<'tcx> Ty<'tcx> { } } + pub fn is_templated_coroutine(self, tcx: TyCtxt<'_>) -> bool { + match self.kind() { + ty::Coroutine(def, ..) => tcx.is_templated_coroutine(*def), + _ => false, + } + } + /// Returns `true` when the outermost type cannot be further normalized, /// resolved, or instantiated. This includes all primitive types, but also /// things like ADTs and trait objects, since even if their arguments or diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index 94bd359f6eb0f..bdc07122b3471 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -436,25 +436,6 @@ impl<'tcx> TyCtxt<'tcx> { Some(ty::AsyncDestructor { future, ctor }) } - /// Returns async drop glue morphology for a definition. To get async drop - /// glue morphology for a type see [`Ty::async_drop_glue_morphology`]. - // - // FIXME: consider making this a query - pub fn async_drop_glue_morphology(self, did: DefId) -> AsyncDropGlueMorphology { - let ty: Ty<'tcx> = self.type_of(did).instantiate_identity(); - - // Async drop glue morphology is an internal detail, so - // using `TypingMode::PostAnalysis` probably should be fine. - let typing_env = ty::TypingEnv::fully_monomorphized(); - if ty.needs_async_drop(self, typing_env) { - AsyncDropGlueMorphology::Custom - } else if ty.needs_drop(self, typing_env) { - AsyncDropGlueMorphology::DeferredDropInPlace - } else { - AsyncDropGlueMorphology::Noop - } - } - /// Returns the set of types that are required to be alive in /// order to run the destructor of `def` (see RFCs 769 and /// 1238). @@ -1096,18 +1077,6 @@ impl<'tcx> TypeFolder> for WeakAliasTypeExpander<'tcx> { } } -/// Indicates the form of `AsyncDestruct::Destructor`. Used to simplify async -/// drop glue for types not using async drop. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub enum AsyncDropGlueMorphology { - /// Async destructor simply does nothing - Noop, - /// Async destructor simply runs `drop_in_place` - DeferredDropInPlace, - /// Async destructor has custom logic - Custom, -} - impl<'tcx> Ty<'tcx> { /// Returns the `Size` for primitive types (bool, uint, int, char, float). pub fn primitive_size(self, tcx: TyCtxt<'tcx>) -> Size { @@ -1277,16 +1246,17 @@ impl<'tcx> Ty<'tcx> { } } - /// Get morphology of the async drop glue, needed for types which do not - /// use async drop. To get async drop glue morphology for a definition see - /// [`TyCtxt::async_drop_glue_morphology`]. Used for `AsyncDestruct::Destructor` - /// type construction. - // - // FIXME: implement optimization to not instantiate a certain morphology of - // async drop glue too soon to allow per type optimizations, see array case - // for more info. Perhaps then remove this method and use `needs_(async_)drop` - // instead. - pub fn async_drop_glue_morphology(self, tcx: TyCtxt<'tcx>) -> AsyncDropGlueMorphology { + /// Checks whether values of this type `T` implement the `AsyncDrop` trait. + pub fn is_async_drop(self, tcx: TyCtxt<'tcx>, typing_env: ty::TypingEnv<'tcx>) -> bool { + !self.is_trivially_not_async_drop() + && tcx.is_async_drop_raw(typing_env.as_query_input(self)) + } + + /// Fast path helper for testing if a type is `AsyncDrop`. + /// + /// Returning true means the type is known to be `!AsyncDrop`. Returning + /// `false` means nothing -- could be `AsyncDrop`, might not be. 
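A comment-only restatement of the check order introduced in this hunk (no new API; the names are the ones in the code):

// Ty::is_async_drop(tcx, typing_env):
//   1. is_trivially_not_async_drop()  -- syntactic fast path, no trait solving;
//      `true` means "definitely not AsyncDrop", `false` means "don't know yet".
//   2. otherwise defer to tcx.is_async_drop_raw(typing_env.as_query_input(self)),
//      i.e. real trait selection.
// In the fast path, tuples/arrays/slices/pattern types recurse into their element
// types, while ADTs, closures, coroutines, trait objects, generics and aliases
// always fall through to step 2.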
+ fn is_trivially_not_async_drop(self) -> bool { match self.kind() { ty::Int(_) | ty::Uint(_) @@ -1298,46 +1268,26 @@ impl<'tcx> Ty<'tcx> { | ty::Ref(..) | ty::RawPtr(..) | ty::FnDef(..) - | ty::FnPtr(..) - | ty::Infer(ty::FreshIntTy(_)) - | ty::Infer(ty::FreshFloatTy(_)) => AsyncDropGlueMorphology::Noop, - + | ty::Error(_) + | ty::FnPtr(..) => true, // FIXME(unsafe_binders): ty::UnsafeBinder(_) => todo!(), - - ty::Tuple(tys) if tys.is_empty() => AsyncDropGlueMorphology::Noop, - ty::Adt(adt_def, _) if adt_def.is_manually_drop() => AsyncDropGlueMorphology::Noop, - - // Foreign types can never have destructors. - ty::Foreign(_) => AsyncDropGlueMorphology::Noop, - - // FIXME: implement dynamic types async drops - ty::Error(_) | ty::Dynamic(..) => AsyncDropGlueMorphology::DeferredDropInPlace, - - ty::Tuple(_) | ty::Array(_, _) | ty::Slice(_) => { - // Assume worst-case scenario, because we can instantiate async - // destructors in different orders: - // - // 1. Instantiate [T; N] with T = String and N = 0 - // 2. Instantiate <[String; 0] as AsyncDestruct>::Destructor - // - // And viceversa, thus we cannot rely on String not using async - // drop or array having zero (0) elements - AsyncDropGlueMorphology::Custom - } - ty::Pat(ty, _) => ty.async_drop_glue_morphology(tcx), - - ty::Adt(adt_def, _) => tcx.async_drop_glue_morphology(adt_def.did()), - - ty::Closure(did, _) - | ty::CoroutineClosure(did, _) - | ty::Coroutine(did, _) - | ty::CoroutineWitness(did, _) => tcx.async_drop_glue_morphology(*did), - - ty::Alias(..) | ty::Param(_) | ty::Bound(..) | ty::Placeholder(..) | ty::Infer(_) => { - // No specifics, but would usually mean forwarding async drop glue - AsyncDropGlueMorphology::Custom + ty::Tuple(fields) => fields.iter().all(Self::is_trivially_not_async_drop), + ty::Pat(elem_ty, _) | ty::Slice(elem_ty) | ty::Array(elem_ty, _) => { + elem_ty.is_trivially_not_async_drop() } + ty::Adt(..) + | ty::Bound(..) + | ty::Closure(..) + | ty::CoroutineClosure(..) + | ty::Dynamic(..) + | ty::Foreign(_) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) + | ty::Infer(_) + | ty::Alias(..) + | ty::Param(_) + | ty::Placeholder(_) => false, } } @@ -1383,9 +1333,6 @@ impl<'tcx> Ty<'tcx> { /// (Note that this implies that if `ty` has an async destructor attached, /// then `needs_async_drop` will definitely return `true` for `ty`.) /// - /// When constructing `AsyncDestruct::Destructor` type, use - /// [`Ty::async_drop_glue_morphology`] instead. - // // FIXME(zetanumbers): Note that this method is used to check eligible types // in unions. #[inline] @@ -1669,6 +1616,46 @@ pub fn needs_drop_components_with_async<'tcx>( } } +pub fn is_trivially_const_drop(ty: Ty<'_>) -> bool { + match *ty.kind() { + ty::Bool + | ty::Char + | ty::Int(_) + | ty::Uint(_) + | ty::Float(_) + | ty::Infer(ty::IntVar(_)) + | ty::Infer(ty::FloatVar(_)) + | ty::Str + | ty::RawPtr(_, _) + | ty::Ref(..) + | ty::FnDef(..) + | ty::FnPtr(..) + | ty::Never + | ty::Foreign(_) => true, + + ty::Alias(..) + | ty::Dynamic(..) + | ty::Error(_) + | ty::Bound(..) + | ty::Param(_) + | ty::Placeholder(_) + | ty::Infer(_) + | ty::UnsafeBinder(_) => false, + + // Not trivial because they have components, and instead of looking inside, + // we'll just perform trait selection. + ty::Closure(..) + | ty::CoroutineClosure(..) + | ty::Coroutine(..) + | ty::CoroutineWitness(..) + | ty::Adt(..) 
=> false, + + ty::Array(ty, _) | ty::Slice(ty) | ty::Pat(ty, _) => is_trivially_const_drop(ty), + + ty::Tuple(tys) => tys.iter().all(|ty| is_trivially_const_drop(ty)), + } +} + /// Does the equivalent of /// ```ignore (illustrative) /// let v = self.iter().map(|p| p.fold_with(folder)).collect::>(); diff --git a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs index eab414e150fa3..12d07eb223b1e 100644 --- a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs @@ -70,6 +70,8 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { target: self.parse_return_to(args[1])?, unwind: self.parse_unwind_action(args[2])?, replace: false, + drop: None, + async_fut: None, }) }, @call(mir_call, args) => { diff --git a/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs index 2c9a1de7f9978..aa7139c2b702e 100644 --- a/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs @@ -754,6 +754,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { target: success, unwind: UnwindAction::Continue, replace: false, + drop: None, + async_fut: None, }, ); this.diverge_from(block); diff --git a/compiler/rustc_mir_build/src/builder/scope.rs b/compiler/rustc_mir_build/src/builder/scope.rs index d3551ea3a97b4..1a21b4722993a 100644 --- a/compiler/rustc_mir_build/src/builder/scope.rs +++ b/compiler/rustc_mir_build/src/builder/scope.rs @@ -89,6 +89,7 @@ use rustc_index::{IndexSlice, IndexVec}; use rustc_middle::middle::region; use rustc_middle::mir::*; use rustc_middle::thir::{ExprId, LintLevel}; +use rustc_middle::ty::{self, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_session::lint::Level; use rustc_span::source_map::Spanned; @@ -402,6 +403,8 @@ impl DropTree { unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup), place: drop_node.data.local.into(), replace: false, + drop: None, + async_fut: None, }; cfg.terminate(block, drop_node.data.source_info, terminator); } @@ -845,6 +848,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { target: next, unwind: UnwindAction::Continue, replace: false, + drop: None, + async_fut: None, }, ); block = next; @@ -876,6 +881,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { block.unit() } + fn is_async_drop_impl( + tcx: TyCtxt<'tcx>, + local_decls: &IndexVec>, + typing_env: ty::TypingEnv<'tcx>, + local: Local, + ) -> bool { + let ty = local_decls[local].ty; + if ty.is_async_drop(tcx, typing_env) || ty.is_coroutine() { + return true; + } + ty.needs_async_drop(tcx, typing_env) + } + fn is_async_drop(&self, local: Local) -> bool { + Self::is_async_drop_impl(self.tcx, &self.local_decls, self.typing_env(), local) + } + fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock { // If we are emitting a `drop` statement, we need to have the cached // diverge cleanup pads ready in case that drop panics. 
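As a small source-level illustration of why the builder now needs a coroutine-drop path at all (a minimal sketch; `Guard` and `step` are placeholder names, not items from this patch):

async fn holds_guard<Guard>(guard: Guard) {
    // If the enclosing future is dropped while suspended here, `guard` must be
    // dropped on the coroutine-drop path ("dropline") built by this code; when
    // `Guard` needs async drop, that edge uses the new `drop`/`async_fut`
    // fields of `TerminatorKind::Drop` instead of a plain synchronous drop.
    step().await;
    drop(guard);
}

async fn step() {}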
@@ -884,14 +905,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX }; let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes"); + let has_async_drops = is_coroutine + && scope.drops.iter().any(|v| v.kind == DropKind::Value && self.is_async_drop(v.local)); + let dropline_to = if has_async_drops { Some(self.diverge_dropline()) } else { None }; + let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes"); + let typing_env = self.typing_env(); build_scope_drops( &mut self.cfg, &mut self.scopes.unwind_drops, + &mut self.scopes.coroutine_drops, scope, block, unwind_to, + dropline_to, is_coroutine && needs_cleanup, self.arg_count, + |v: Local| Self::is_async_drop_impl(self.tcx, &self.local_decls, typing_env, v), ) .into_block() } @@ -1308,22 +1337,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.scopes.unwind_drops.add_entry_point(start, next_drop); } - /// Sets up a path that performs all required cleanup for dropping a - /// coroutine, starting from the given block that ends in - /// [TerminatorKind::Yield]. - /// - /// This path terminates in CoroutineDrop. - pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) { + /// Returns the [DropIdx] for the innermost drop for dropline (coroutine drop path). + /// The `DropIdx` will be created if it doesn't already exist. + fn diverge_dropline(&mut self) -> DropIdx { + // It is okay to use dummy span because the getting scope index on the topmost scope + // must always succeed. + self.diverge_dropline_target(self.scopes.topmost(), DUMMY_SP) + } + + /// Similar to diverge_cleanup_target, but for dropline (coroutine drop path) + fn diverge_dropline_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx { debug_assert!( - matches!( - self.cfg.block_data(yield_block).terminator().kind, - TerminatorKind::Yield { .. } - ), - "coroutine_drop_cleanup called on block with non-yield terminator." + self.coroutine.is_some(), + "diverge_dropline_target is valid only for coroutine" ); - let (uncached_scope, mut cached_drop) = self - .scopes - .scopes + let target = self.scopes.scope_index(target_scope, span); + let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target] .iter() .enumerate() .rev() @@ -1332,13 +1361,34 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }) .unwrap_or((0, ROOT_NODE)); - for scope in &mut self.scopes.scopes[uncached_scope..] { + if uncached_scope > target { + return cached_drop; + } + + for scope in &mut self.scopes.scopes[uncached_scope..=target] { for drop in &scope.drops { cached_drop = self.scopes.coroutine_drops.add_drop(*drop, cached_drop); } scope.cached_coroutine_drop_block = Some(cached_drop); } + cached_drop + } + + /// Sets up a path that performs all required cleanup for dropping a + /// coroutine, starting from the given block that ends in + /// [TerminatorKind::Yield]. + /// + /// This path terminates in CoroutineDrop. + pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) { + debug_assert!( + matches!( + self.cfg.block_data(yield_block).terminator().kind, + TerminatorKind::Yield { .. } + ), + "coroutine_drop_cleanup called on block with non-yield terminator." 
+        );
+        let cached_drop = self.diverge_dropline();
         self.scopes.coroutine_drops.add_entry_point(yield_block, cached_drop);
     }
@@ -1369,6 +1419,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 target: assign,
                 unwind: UnwindAction::Cleanup(assign_unwind),
                 replace: true,
+                drop: None,
+                async_fut: None,
             },
         );
         self.diverge_from(block);
@@ -1430,18 +1482,26 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 /// * `unwind_to`, describes the drops that would occur at this point in the code if a
 ///   panic occurred (a subset of the drops in `scope`, since we sometimes elide StorageDead and other
 ///   instructions on unwinding)
+/// * `dropline_to`, describes the drops that would occur at this point in the code if a
+///   coroutine drop occurred.
 /// * `storage_dead_on_unwind`, if true, then we should emit `StorageDead` even when unwinding
 /// * `arg_count`, number of MIR local variables corresponding to fn arguments (used to assert that we don't drop those)
-fn build_scope_drops<'tcx>(
+fn build_scope_drops<'tcx, F>(
     cfg: &mut CFG<'tcx>,
     unwind_drops: &mut DropTree,
+    coroutine_drops: &mut DropTree,
     scope: &Scope,
     block: BasicBlock,
     unwind_to: DropIdx,
+    dropline_to: Option<DropIdx>,
     storage_dead_on_unwind: bool,
     arg_count: usize,
-) -> BlockAnd<()> {
-    debug!("build_scope_drops({:?} -> {:?})", block, scope);
+    is_async_drop: F,
+) -> BlockAnd<()>
+where
+    F: Fn(Local) -> bool,
+{
+    debug!("build_scope_drops({:?} -> {:?}), dropline_to={:?}", block, scope, dropline_to);
 
     // Build up the drops in evaluation order. The end result will
     // look like:
@@ -1474,6 +1534,9 @@ fn build_scope_drops<'tcx>(
     // will branch to `drops[n]`.
     let mut block = block;
 
+    // `dropline_to` indicates what needs to be dropped should coroutine drop occur.
+    let mut dropline_to = dropline_to;
+
     for drop_data in scope.drops.iter().rev() {
         let source_info = drop_data.source_info;
         let local = drop_data.local;
@@ -1490,6 +1553,12 @@ fn build_scope_drops<'tcx>(
                 debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
                 unwind_to = unwind_drops.drops[unwind_to].next;
 
+                if let Some(idx) = dropline_to {
+                    debug_assert_eq!(coroutine_drops.drops[idx].data.local, drop_data.local);
+                    debug_assert_eq!(coroutine_drops.drops[idx].data.kind, drop_data.kind);
+                    dropline_to = Some(coroutine_drops.drops[idx].next);
+                }
+
                 // If the operand has been moved, and we are not on an unwind
                 // path, then don't generate the drop. (We only take this into
                 // account for non-unwind paths so as not to disturb the
@@ -1499,6 +1568,12 @@ fn build_scope_drops<'tcx>(
                 }
 
                 unwind_drops.add_entry_point(block, unwind_to);
+                if let Some(to) = dropline_to
+                    && is_async_drop(local)
+                {
+                    coroutine_drops.add_entry_point(block, to);
+                }
+
                 let next = cfg.start_new_block();
                 cfg.terminate(
                     block,
@@ -1508,6 +1583,8 @@ fn build_scope_drops<'tcx>(
                         target: next,
                         unwind: UnwindAction::Continue,
                         replace: false,
+                        drop: None,
+                        async_fut: None,
                     },
                 );
                 block = next;
@@ -1554,6 +1631,11 @@ fn build_scope_drops<'tcx>(
                     debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
                     unwind_to = unwind_drops.drops[unwind_to].next;
                 }
+                if let Some(idx) = dropline_to {
+                    debug_assert_eq!(coroutine_drops.drops[idx].data.local, drop_data.local);
+                    debug_assert_eq!(coroutine_drops.drops[idx].data.kind, drop_data.kind);
+                    dropline_to = Some(coroutine_drops.drops[idx].next);
+                }
 
                 // Only temps and vars need their storage dead.
assert!(local.index() > arg_count); cfg.push(block, Statement { source_info, kind: StatementKind::StorageDead(local) }); @@ -1612,6 +1694,39 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { } } } + // Link the exit drop tree to dropline drop tree (coroutine drop path) for async drops + if is_coroutine + && drops.drops.iter().any(|DropNode { data, next: _ }| { + data.kind == DropKind::Value && self.is_async_drop(data.local) + }) + { + let dropline_target = self.diverge_dropline_target(else_scope, span); + let mut dropline_indices = IndexVec::from_elem_n(dropline_target, 1); + for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) { + match drop_data.data.kind { + DropKind::Storage | DropKind::ForLint => { + let coroutine_drop = self + .scopes + .coroutine_drops + .add_drop(drop_data.data, dropline_indices[drop_data.next]); + dropline_indices.push(coroutine_drop); + } + DropKind::Value => { + let coroutine_drop = self + .scopes + .coroutine_drops + .add_drop(drop_data.data, dropline_indices[drop_data.next]); + if self.is_async_drop(drop_data.data.local) { + self.scopes.coroutine_drops.add_entry_point( + blocks[drop_idx].unwrap(), + dropline_indices[drop_data.next], + ); + } + dropline_indices.push(coroutine_drop); + } + } + } + } blocks[ROOT_NODE].map(BasicBlock::unit) } @@ -1657,9 +1772,11 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> { // to be captured by the coroutine. I'm not sure how important this // optimization is, but it is here. for (drop_idx, drop_node) in drops.drops.iter_enumerated() { - if let DropKind::Value = drop_node.data.kind { + if let DropKind::Value = drop_node.data.kind + && let Some(bb) = blocks[drop_idx] + { debug_assert!(drop_node.next < drops.drops.next_index()); - drops.entry_points.push((drop_node.next, blocks[drop_idx].unwrap())); + drops.entry_points.push((drop_node.next, bb)); } } Self::build_unwind_tree(cfg, drops, fn_span, resume_block); @@ -1713,6 +1830,8 @@ impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop { let term = cfg.block_data_mut(from).terminator_mut(); if let TerminatorKind::Yield { ref mut drop, .. } = term.kind { *drop = Some(to); + } else if let TerminatorKind::Drop { ref mut drop, .. } = term.kind { + *drop = Some(to); } else { span_bug!( term.source_info.span, diff --git a/compiler/rustc_mir_dataflow/Cargo.toml b/compiler/rustc_mir_dataflow/Cargo.toml index f84a06638976e..96cdab1fac01d 100644 --- a/compiler/rustc_mir_dataflow/Cargo.toml +++ b/compiler/rustc_mir_dataflow/Cargo.toml @@ -5,6 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start +itertools = "0.12" polonius-engine = "0.13.0" regex = "1" rustc_abi = { path = "../rustc_abi" } diff --git a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs index c1c47ecccf302..884bce9becf79 100644 --- a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs +++ b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs @@ -5,12 +5,12 @@ use rustc_hir::lang_items::LangItem; use rustc_index::Idx; use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::*; -use rustc_middle::span_bug; use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::util::IntTypeExt; -use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt}; +use rustc_middle::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt}; +use rustc_middle::{bug, span_bug, traits}; use rustc_span::DUMMY_SP; -use rustc_span::source_map::Spanned; +use rustc_span::source_map::{Spanned, dummy_spanned}; use tracing::{debug, instrument}; /// The value of an inserted drop flag. 
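Before the elaborator changes below, a comment-only note on how the coroutine-drop path ("dropline") threads through the two layers touched so far:

// * MIR building (scope.rs above): every Drop of a possibly-async local inside a
//   coroutine is registered as an entry point of `scopes.coroutine_drops`, and the
//   `DropTreeBuilder` impl for `CoroutineDrop` now patches the `drop` edge of both
//   Yield and Drop terminators when the tree is wired up.
// * Drop elaboration (elaborate_drops.rs below): the same edge reaches the
//   elaborator as the new `dropline: Option<BasicBlock>` threaded through
//   `DropCtxt` / `elaborate_drop`.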
@@ -112,6 +112,9 @@ pub trait DropElaborator<'a, 'tcx>: fmt::Debug {
     fn body(&self) -> &'a Body<'tcx>;
     fn tcx(&self) -> TyCtxt<'tcx>;
     fn typing_env(&self) -> ty::TypingEnv<'tcx>;
+    fn allow_async_drops(&self) -> bool;
+
+    fn terminator_loc(&self, bb: BasicBlock) -> Location;
 
     // Drop logic
 
@@ -167,6 +170,7 @@ where
     path: D::Path,
     succ: BasicBlock,
     unwind: Unwind,
+    dropline: Option<BasicBlock>,
 }
 
 /// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
@@ -185,11 +189,12 @@ pub fn elaborate_drop<'b, 'tcx, D>(
     succ: BasicBlock,
     unwind: Unwind,
     bb: BasicBlock,
+    dropline: Option<BasicBlock>,
 ) where
     D: DropElaborator<'b, 'tcx>,
     'tcx: 'b,
 {
-    DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb)
+    DropCtxt { elaborator, source_info, place, path, succ, unwind, dropline }.elaborate_drop(bb)
 }
 
 impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
@@ -206,6 +211,209 @@ where
         self.elaborator.tcx()
     }
 
+    // Generates three blocks:
+    // * #1:pin_obj_bb:   call Pin::new_unchecked(&mut obj)
+    // * #2:call_drop_bb: fut = call obj.<AsyncDrop::drop>() OR call async_drop_in_place::<T>(obj)
+    // * #3:drop_term_bb: drop (obj, fut, ...)
+    // We keep the async drop unexpanded here (not yet lowered to a poll loop); it is
+    // expanded later, in the coroutine StateTransform pass, when the suspension states
+    // are created.
+    // `call_destructor_only`: call only `AsyncDrop::drop`, not the full
+    // `async_drop_in_place` glue.
+    fn build_async_drop(
+        &mut self,
+        place: Place<'tcx>,
+        drop_ty: Ty<'tcx>,
+        bb: Option<BasicBlock>,
+        succ: BasicBlock,
+        unwind: Unwind,
+        dropline: Option<BasicBlock>,
+        call_destructor_only: bool,
+    ) -> BasicBlock {
+        let tcx = self.tcx();
+        let span = self.source_info.span;
+
+        let pin_obj_bb = bb.unwrap_or_else(|| {
+            self.elaborator.patch().new_block(BasicBlockData {
+                statements: vec![],
+                terminator: Some(Terminator {
+                    // Temporary terminator, will be replaced by patch
+                    source_info: self.source_info,
+                    kind: TerminatorKind::Return,
+                }),
+                is_cleanup: false,
+            })
+        });
+
+        let (fut_ty, drop_fn_def_id, trait_args) = if call_destructor_only {
+            // Resolving obj.<AsyncDrop::drop>()
+            let trait_ref = ty::TraitRef::new(
+                tcx,
+                tcx.require_lang_item(LangItem::AsyncDrop, Some(span)),
+                [drop_ty],
+            );
+            let (drop_trait, trait_args) = match tcx.codegen_select_candidate(
+                ty::TypingEnv::fully_monomorphized().as_query_input(trait_ref),
+            ) {
+                Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
+                    impl_def_id,
+                    args,
+                    ..
+ })) => (*impl_def_id, *args), + impl_source => { + span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source); + } + }; + let drop_fn_def_id = tcx.associated_item_def_ids(drop_trait)[0]; + let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args); + let sig = drop_fn.fn_sig(tcx); + let sig = tcx.instantiate_bound_regions_with_erased(sig); + (sig.output(), drop_fn_def_id, trait_args) + } else { + // Resolving async_drop_in_place function for drop_ty + let drop_fn_def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, Some(span)); + let trait_args = tcx.mk_args(&[drop_ty.into()]); + let sig = tcx.fn_sig(drop_fn_def_id).instantiate(tcx, trait_args); + let sig = tcx.instantiate_bound_regions_with_erased(sig); + (sig.output(), drop_fn_def_id, trait_args) + }; + + let fut = Place::from(self.new_temp(fut_ty)); + + // #1:pin_obj_bb >>> obj_ref = &mut obj + let obj_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, drop_ty); + let obj_ref_place = Place::from(self.new_temp(obj_ref_ty)); + + let term_loc = self.elaborator.terminator_loc(pin_obj_bb); + self.elaborator.patch().add_assign( + term_loc, + obj_ref_place, + Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + place, + ), + ); + + // pin_obj_place preparation + let pin_obj_new_unchecked_fn = Ty::new_fn_def( + tcx, + tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)), + [GenericArg::from(obj_ref_ty)], + ); + let pin_obj_ty = pin_obj_new_unchecked_fn.fn_sig(tcx).output().no_bound_vars().unwrap(); + let pin_obj_place = Place::from(self.new_temp(pin_obj_ty)); + let pin_obj_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand { + span, + user_ty: None, + const_: Const::zero_sized(pin_obj_new_unchecked_fn), + })); + + // #3:drop_term_bb + let drop_term_bb = self.new_block( + unwind, + TerminatorKind::Drop { + place, + target: succ, + unwind: unwind.into_action(), + replace: false, + drop: dropline, + async_fut: Some(fut.local), + }, + ); + + // #2:call_drop_bb + let mut call_statements = Vec::new(); + let drop_arg = if call_destructor_only { + pin_obj_place + } else { + let ty::Adt(adt_def, adt_args) = pin_obj_ty.kind() else { + bug!(); + }; + let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty); + let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty)); + let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args); + let addr = Rvalue::RawPtr( + RawPtrKind::Mut, + pin_obj_place.project_deeper( + &[ProjectionElem::Field(FieldIdx::ZERO, unwrap_ty), ProjectionElem::Deref], + tcx, + ), + ); + call_statements.push(self.assign(obj_ptr_place, addr)); + obj_ptr_place + }; + call_statements.push(Statement { + source_info: self.source_info, + kind: StatementKind::StorageLive(fut.local), + }); + + let call_drop_bb = self.new_block_with_statements( + unwind, + call_statements, + TerminatorKind::Call { + func: Operand::function_handle(tcx, drop_fn_def_id, trait_args, span), + args: [Spanned { node: Operand::Move(drop_arg), span: DUMMY_SP }].into(), + destination: fut, + target: Some(drop_term_bb), + unwind: unwind.into_action(), + call_source: CallSource::Misc, + fn_span: self.source_info.span, + }, + ); + + // StorageDead(fut) in self.succ block (at the begin) + self.elaborator.patch().add_statement( + Location { block: self.succ, statement_index: 0 }, + StatementKind::StorageDead(fut.local), + ); + + // #1:pin_obj_bb >>> call Pin::new_unchecked(&mut obj) + self.elaborator.patch().patch_terminator( + pin_obj_bb, + TerminatorKind::Call { + func: 
pin_obj_new_unchecked_fn, + args: [dummy_spanned(Operand::Move(obj_ref_place))].into(), + destination: pin_obj_place, + target: Some(call_drop_bb), + unwind: unwind.into_action(), + call_source: CallSource::Misc, + fn_span: span, + }, + ); + pin_obj_bb + } + + fn build_drop(&mut self, bb: BasicBlock) { + let drop_ty = self.place_ty(self.place); + if self.tcx().features().async_drop() + && self.elaborator.body().coroutine.is_some() + && self.elaborator.allow_async_drops() + && !self.elaborator.body()[bb].is_cleanup + && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env()) + { + self.build_async_drop( + self.place, + drop_ty, + Some(bb), + self.succ, + self.unwind, + self.dropline, + false, + ); + } else { + self.elaborator.patch().patch_terminator( + bb, + TerminatorKind::Drop { + place: self.place, + target: self.succ, + unwind: self.unwind.into_action(), + replace: false, + drop: None, + async_fut: None, + }, + ); + } + } + /// This elaborates a single drop instruction, located at `bb`, and /// patches over it. /// @@ -233,15 +441,7 @@ where .patch_terminator(bb, TerminatorKind::Goto { target: self.succ }); } DropStyle::Static => { - self.elaborator.patch().patch_terminator( - bb, - TerminatorKind::Drop { - place: self.place, - target: self.succ, - unwind: self.unwind.into_action(), - replace: false, - }, - ); + self.build_drop(bb); } DropStyle::Conditional => { let drop_bb = self.complete_drop(self.succ, self.unwind); @@ -291,6 +491,7 @@ where path: Option, succ: BasicBlock, unwind: Unwind, + dropline: Option, ) -> BasicBlock { if let Some(path) = path { debug!("drop_subpath: for std field {:?}", place); @@ -302,6 +503,7 @@ where place, succ, unwind, + dropline, } .elaborated_drop_block() } else { @@ -313,6 +515,7 @@ where place, succ, unwind, + dropline, // Using `self.path` here to condition the drop on // our own drop flag. path: self.path, @@ -327,25 +530,36 @@ where /// /// `unwind_ladder` is such a list of steps in reverse order, /// which is called if the matching step of the drop glue panics. + /// + /// `dropline_ladder` is a similar list of steps in reverse order, + /// which is called if the matching step of the drop glue will contain async drop + /// (expanded later to Yield) and the containing coroutine will be dropped at this point. fn drop_halfladder( &mut self, unwind_ladder: &[Unwind], + dropline_ladder: &[Option], mut succ: BasicBlock, fields: &[(Place<'tcx>, Option)], ) -> Vec { iter::once(succ) - .chain(fields.iter().rev().zip(unwind_ladder).map(|(&(place, path), &unwind_succ)| { - succ = self.drop_subpath(place, path, succ, unwind_succ); - succ - })) + .chain(itertools::izip!(fields.iter().rev(), unwind_ladder, dropline_ladder).map( + |(&(place, path), &unwind_succ, &dropline_to)| { + succ = self.drop_subpath(place, path, succ, unwind_succ, dropline_to); + succ + }, + )) .collect() } - fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) { + fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind, Option) { // Clear the "master" drop flag at the end. This is needed // because the "master" drop protects the ADT's discriminant, // which is invalidated after the ADT is dropped. 
- (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind) + ( + self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), + self.unwind, + self.dropline, + ) } /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders @@ -363,6 +577,22 @@ where /// .c2: /// ELAB(drop location.2 [target=`self.unwind`]) /// + /// For possible-async drops in coroutines we also need dropline ladder + /// .d0 (mainline): + /// ELAB(drop location.0 [target=.d1, unwind=.c1, drop=.e1]) + /// .d1 (mainline): + /// ELAB(drop location.1 [target=.d2, unwind=.c2, drop=.e2]) + /// .d2 (mainline): + /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`, drop=`self.drop`]) + /// .c1 (unwind): + /// ELAB(drop location.1 [target=.c2]) + /// .c2 (unwind): + /// ELAB(drop location.2 [target=`self.unwind`]) + /// .e1 (dropline): + /// ELAB(drop location.1 [target=.e2, unwind=.c2]) + /// .e2 (dropline): + /// ELAB(drop location.2 [target=`self.drop`, unwind=`self.unwind`]) + /// /// NOTE: this does not clear the master drop flag, so you need /// to point succ/unwind on a `drop_ladder_bottom`. fn drop_ladder( @@ -370,8 +600,13 @@ where fields: Vec<(Place<'tcx>, Option)>, succ: BasicBlock, unwind: Unwind, - ) -> (BasicBlock, Unwind) { + dropline: Option, + ) -> (BasicBlock, Unwind, Option) { debug!("drop_ladder({:?}, {:?})", self, fields); + assert!( + if unwind.is_cleanup() { dropline.is_none() } else { true }, + "Dropline is set for cleanup drop ladder" + ); let mut fields = fields; fields.retain(|&(place, _)| { @@ -380,17 +615,28 @@ where debug!("drop_ladder - fields needing drop: {:?}", fields); + let dropline_ladder: Vec> = vec![None; fields.len() + 1]; let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1]; - let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind { - let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields); + let unwind_ladder: Vec<_> = if let Unwind::To(succ) = unwind { + let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields); halfladder.into_iter().map(Unwind::To).collect() } else { unwind_ladder }; + let dropline_ladder: Vec<_> = if let Some(succ) = dropline { + let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields); + halfladder.into_iter().map(Some).collect() + } else { + dropline_ladder + }; - let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields); + let normal_ladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields); - (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap()) + ( + *normal_ladder.last().unwrap(), + *unwind_ladder.last().unwrap(), + *dropline_ladder.last().unwrap(), + ) } fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock { @@ -407,8 +653,8 @@ where }) .collect(); - let (succ, unwind) = self.drop_ladder_bottom(); - self.drop_ladder(fields, succ, unwind).0 + let (succ, unwind, dropline) = self.drop_ladder_bottom(); + self.drop_ladder(fields, succ, unwind, dropline).0 } /// Drops the T contained in a `Box` if it has not been moved out of @@ -419,6 +665,7 @@ where args: GenericArgsRef<'tcx>, succ: BasicBlock, unwind: Unwind, + dropline: Option, ) -> BasicBlock { // drop glue is sent straight to codegen // box cannot be directly dereferenced @@ -434,7 +681,7 @@ where let interior_path = self.elaborator.deref_subpath(self.path); - self.drop_subpath(interior, interior_path, succ, unwind) + self.drop_subpath(interior, interior_path, succ, unwind, dropline) } 
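Recapping `build_async_drop` (defined earlier in this file) as a comment-only MIR shape, since every async-capable drop site below expands to this pattern; the local names are illustrative, the block names are the ones used in its comments:

//   pin_obj_bb:   _ref = &mut place;
//                 _pin = Pin::new_unchecked(move _ref)           -> call_drop_bb
//   call_drop_bb: StorageLive(_fut);
//                 _fut = AsyncDrop::drop(move _pin)               (call_destructor_only)
//                      | async_drop_in_place::<T>(move _obj_ptr)  (full glue otherwise)
//                                                                 -> drop_term_bb
//   drop_term_bb: drop(place) [target: succ, unwind, drop: dropline, async_fut: _fut]
// The Drop terminator is left unexpanded here; the coroutine StateTransform pass
// later rewrites it into a pin/poll/switch loop with its own suspension point.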
#[instrument(level = "debug", ret)] @@ -456,19 +703,22 @@ where let skip_contents = adt.is_union() || adt.is_manually_drop(); let contents_drop = if skip_contents { - (self.succ, self.unwind) + (self.succ, self.unwind, self.dropline) } else { self.open_drop_for_adt_contents(adt, args) }; if adt.is_box() { // we need to drop the inside of the box before running the destructor - let succ = self.destructor_call_block(contents_drop); + let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1)); let unwind = contents_drop .1 - .map(|unwind| self.destructor_call_block((unwind, Unwind::InCleanup))); + .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup))); + let dropline = contents_drop + .2 + .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1))); - self.open_drop_for_box_contents(adt, args, succ, unwind) + self.open_drop_for_box_contents(adt, args, succ, unwind, dropline) } else if adt.has_dtor(self.tcx()) { self.destructor_call_block(contents_drop) } else { @@ -480,14 +730,14 @@ where &mut self, adt: ty::AdtDef<'tcx>, args: GenericArgsRef<'tcx>, - ) -> (BasicBlock, Unwind) { - let (succ, unwind) = self.drop_ladder_bottom(); + ) -> (BasicBlock, Unwind, Option) { + let (succ, unwind, dropline) = self.drop_ladder_bottom(); if !adt.is_enum() { let fields = self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args); - self.drop_ladder(fields, succ, unwind) + self.drop_ladder(fields, succ, unwind, dropline) } else { - self.open_drop_for_multivariant(adt, args, succ, unwind) + self.open_drop_for_multivariant(adt, args, succ, unwind, dropline) } } @@ -497,11 +747,14 @@ where args: GenericArgsRef<'tcx>, succ: BasicBlock, unwind: Unwind, - ) -> (BasicBlock, Unwind) { + dropline: Option, + ) -> (BasicBlock, Unwind, Option) { let mut values = Vec::with_capacity(adt.variants().len()); let mut normal_blocks = Vec::with_capacity(adt.variants().len()); let mut unwind_blocks = if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) }; + let mut dropline_blocks = + if dropline.is_none() { None } else { Some(Vec::with_capacity(adt.variants().len())) }; let mut have_otherwise_with_drop_glue = false; let mut have_otherwise = false; @@ -539,11 +792,16 @@ where let unwind_blocks = unwind_blocks.as_mut().unwrap(); let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1]; - let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields); + let dropline_ladder: Vec> = vec![None; fields.len() + 1]; + let halfladder = + self.drop_halfladder(&unwind_ladder, &dropline_ladder, unwind, &fields); unwind_blocks.push(halfladder.last().cloned().unwrap()); } - let (normal, _) = self.drop_ladder(fields, succ, unwind); + let (normal, _, drop_bb) = self.drop_ladder(fields, succ, unwind, dropline); normal_blocks.push(normal); + if dropline.is_some() { + dropline_blocks.as_mut().unwrap().push(drop_bb.unwrap()); + } } else { have_otherwise = true; @@ -583,6 +841,9 @@ where Unwind::InCleanup, ) }), + dropline.map(|dropline| { + self.adt_switch_block(adt, dropline_blocks.unwrap(), &values, dropline, unwind) + }), ) } @@ -622,8 +883,8 @@ where self.drop_flag_test_block(switch_block, succ, unwind) } - fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock { - debug!("destructor_call_block({:?}, {:?})", self, succ); + fn destructor_call_block_sync(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock { + debug!("destructor_call_block_sync({:?}, {:?})", self, succ); let 
tcx = self.tcx(); let drop_trait = tcx.require_lang_item(LangItem::Drop, None); let drop_fn = tcx.associated_item_def_ids(drop_trait)[0]; @@ -671,6 +932,30 @@ where self.drop_flag_test_block(destructor_block, succ, unwind) } + fn destructor_call_block( + &mut self, + (succ, unwind, dropline): (BasicBlock, Unwind, Option), + ) -> BasicBlock { + debug!("destructor_call_block({:?}, {:?})", self, succ); + let ty = self.place_ty(self.place); + if self.tcx().features().async_drop() + && self.elaborator.body().coroutine.is_some() + && self.elaborator.allow_async_drops() + && !unwind.is_cleanup() + && ty.is_async_drop(self.tcx(), self.elaborator.typing_env()) + { + let destructor_block = + self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true); + + let block_start = Location { block: destructor_block, statement_index: 0 }; + self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow); + + self.drop_flag_test_block(destructor_block, succ, unwind) + } else { + self.destructor_call_block_sync((succ, unwind)) + } + } + /// Create a loop that drops an array: /// /// ```text @@ -689,6 +974,7 @@ where len: Local, ety: Ty<'tcx>, unwind: Unwind, + dropline: Option, ) -> BasicBlock { let copy = |place: Place<'tcx>| Operand::Copy(place); let move_ = |place: Place<'tcx>| Operand::Move(place); @@ -732,16 +1018,35 @@ where }; let loop_block = self.elaborator.patch().new_block(loop_block); - self.elaborator.patch().patch_terminator( - drop_block, - TerminatorKind::Drop { - place: tcx.mk_place_deref(ptr), - target: loop_block, - unwind: unwind.into_action(), - replace: false, - }, - ); - + let place = tcx.mk_place_deref(ptr); + if self.tcx().features().async_drop() + && self.elaborator.body().coroutine.is_some() + && self.elaborator.allow_async_drops() + && !unwind.is_cleanup() + && ety.needs_async_drop(self.tcx(), self.elaborator.typing_env()) + { + self.build_async_drop( + place, + ety, + Some(drop_block), + loop_block, + unwind, + dropline, + false, + ); + } else { + self.elaborator.patch().patch_terminator( + drop_block, + TerminatorKind::Drop { + place, + target: loop_block, + unwind: unwind.into_action(), + replace: false, + drop: None, + async_fut: None, + }, + ); + } loop_block } @@ -808,8 +1113,8 @@ where (tcx.mk_place_elem(self.place, project), path) }) .collect::>(); - let (succ, unwind) = self.drop_ladder_bottom(); - return self.drop_ladder(fields, succ, unwind).0; + let (succ, unwind, dropline) = self.drop_ladder_bottom(); + return self.drop_ladder(fields, succ, unwind, dropline).0; } } @@ -843,7 +1148,7 @@ where &mut self.place, Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx), ); - let slice_block = self.drop_loop_pair_for_slice(ety); + let slice_block = self.drop_loop_trio_for_slice(ety); self.place = array_place; delegate_block.terminator = Some(Terminator { @@ -853,18 +1158,22 @@ where self.elaborator.patch().new_block(delegate_block) } - /// Creates a pair of drop-loops of `place`, which drops its contents, even - /// in the case of 1 panic. 
- fn drop_loop_pair_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock { - debug!("drop_loop_pair_for_slice({:?})", ety); + /// Creates a trio of drop-loops of `place`, which drops its contents, even + /// in the case of 1 panic or in the case of coroutine drop + fn drop_loop_trio_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock { + debug!("drop_loop_trio_for_slice({:?})", ety); let tcx = self.tcx(); let len = self.new_temp(tcx.types.usize); let cur = self.new_temp(tcx.types.usize); - let unwind = - self.unwind.map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup)); + let unwind = self + .unwind + .map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup, None)); - let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind); + let dropline = + self.dropline.map(|dropline| self.drop_loop(dropline, cur, len, ety, unwind, None)); + + let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind, dropline); let [PlaceElem::Deref] = self.place.projection.as_slice() else { span_bug!( @@ -928,7 +1237,7 @@ where let size = size.try_to_target_usize(self.tcx()); self.open_drop_for_array(ty, *ety, size) } - ty::Slice(ety) => self.drop_loop_pair_for_slice(*ety), + ty::Slice(ety) => self.drop_loop_trio_for_slice(*ety), _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty), } @@ -965,21 +1274,53 @@ where fn elaborated_drop_block(&mut self) -> BasicBlock { debug!("elaborated_drop_block({:?})", self); - let blk = self.drop_block(self.succ, self.unwind); + let blk = self.drop_block_simple(self.succ, self.unwind); self.elaborate_drop(blk); blk } - fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { + fn drop_block_simple(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { let block = TerminatorKind::Drop { place: self.place, target, unwind: unwind.into_action(), replace: false, + drop: self.dropline, + async_fut: None, }; self.new_block(unwind, block) } + fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { + let drop_ty = self.place_ty(self.place); + if self.tcx().features().async_drop() + && self.elaborator.body().coroutine.is_some() + && self.elaborator.allow_async_drops() + && !unwind.is_cleanup() + && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env()) + { + self.build_async_drop( + self.place, + drop_ty, + None, + self.succ, + unwind, + self.dropline, + false, + ) + } else { + let block = TerminatorKind::Drop { + place: self.place, + target, + unwind: unwind.into_action(), + replace: false, + drop: None, + async_fut: None, + }; + self.new_block(unwind, block) + } + } + fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock { let block = TerminatorKind::Goto { target }; self.new_block(unwind, block) @@ -1021,6 +1362,19 @@ where }) } + fn new_block_with_statements( + &mut self, + unwind: Unwind, + statements: Vec>, + k: TerminatorKind<'tcx>, + ) -> BasicBlock { + self.elaborator.patch().new_block(BasicBlockData { + statements, + terminator: Some(Terminator { source_info: self.source_info, kind: k }), + is_cleanup: unwind.is_cleanup(), + }) + } + fn new_temp(&mut self, ty: Ty<'tcx>) -> Local { self.elaborator.patch().new_temp(ty, self.source_info.span) } diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs index 760f94af52dd3..09fe3ce70930e 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs @@ -375,7 +375,14 @@ 
impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> { // the result of `is_unwind_dead`. let mut edges = terminator.edges(); if self.skip_unreachable_unwind - && let mir::TerminatorKind::Drop { target, unwind, place, replace: _ } = terminator.kind + && let mir::TerminatorKind::Drop { + target, + unwind, + place, + replace: _, + drop: _, + async_fut: _, + } = terminator.kind && matches!(unwind, mir::UnwindAction::Cleanup(_)) && self.is_unwind_dead(place, state) { diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index a51af8c40fd43..af4ccb305579c 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -407,6 +407,9 @@ impl<'tcx> Map<'tcx> { if exclude.contains(local) { continue; } + if decl.ty.is_templated_coroutine(tcx) { + continue; + } // Create a place for the local. debug_assert!(self.locals[local].is_none()); diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs index 8716fd1c098e3..99710dbd2c7e4 100644 --- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs +++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs @@ -83,7 +83,9 @@ fn add_move_for_packed_drop<'tcx>( is_cleanup: bool, ) { debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc); - let TerminatorKind::Drop { ref place, target, unwind, replace } = terminator.kind else { + let TerminatorKind::Drop { ref place, target, unwind, replace, drop, async_fut } = + terminator.kind + else { unreachable!(); }; @@ -106,6 +108,8 @@ fn add_move_for_packed_drop<'tcx>( target: storage_dead_block, unwind, replace, + drop, + async_fut, }, ); } diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs index a9bdbeb9cb806..17bee40398a91 100644 --- a/compiler/rustc_mir_transform/src/coroutine.rs +++ b/compiler/rustc_mir_transform/src/coroutine.rs @@ -64,6 +64,7 @@ use rustc_index::bit_set::{BitMatrix, DenseBitSet, GrowableBitSet}; use rustc_index::{Idx, IndexVec}; use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; use rustc_middle::mir::*; +use rustc_middle::ty::util::Discr; use rustc_middle::ty::{ self, CoroutineArgs, CoroutineArgsExt, GenericArgsRef, InstanceKind, Ty, TyCtxt, TypingMode, }; @@ -74,7 +75,9 @@ use rustc_mir_dataflow::impls::{ }; use rustc_mir_dataflow::{Analysis, Results, ResultsVisitor}; use rustc_span::def_id::{DefId, LocalDefId}; -use rustc_span::{Span, sym}; +use rustc_span::source_map::dummy_spanned; +use rustc_span::symbol::sym; +use rustc_span::{DUMMY_SP, Span}; use rustc_target::spec::PanicStrategy; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; use rustc_trait_selection::infer::TyCtxtInferExt as _; @@ -159,6 +162,7 @@ fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtx } const SELF_ARG: Local = Local::from_u32(1); +const CTX_ARG: Local = Local::from_u32(2); /// A `yield` point in the coroutine. 
struct SuspensionPoint<'tcx> { @@ -468,6 +472,35 @@ fn make_aggregate_adt<'tcx>( Rvalue::Aggregate(Box::new(AggregateKind::Adt(def_id, variant_idx, args, None, None)), operands) } +// Fix return Poll::Pending statement into Poll<()>::Pending for async drop function +struct FixReturnPendingVisitor<'tcx> { + tcx: TyCtxt<'tcx>, +} + +impl<'tcx> MutVisitor<'tcx> for FixReturnPendingVisitor<'tcx> { + fn tcx(&self) -> TyCtxt<'tcx> { + self.tcx + } + + fn visit_assign( + &mut self, + place: &mut Place<'tcx>, + rvalue: &mut Rvalue<'tcx>, + _location: Location, + ) { + if place.local != RETURN_PLACE { + return; + } + + // Converting `_0 = Poll::::Pending` to `_0 = Poll::<()>::Pending` + if let Rvalue::Aggregate(ref mut kind, _) = rvalue { + if let AggregateKind::Adt(_, _, ref mut args, _, _) = **kind { + *args = self.tcx.mk_args(&[self.tcx.types.unit.into()]); + } + } + } +} + fn make_coroutine_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let coroutine_ty = body.local_decls.raw[1].ty; @@ -538,11 +571,11 @@ fn replace_local<'tcx>( /// The async lowering step and the type / lifetime inference / checking are /// still using the `ResumeTy` indirection for the time being, and that indirection /// is removed here. After this transform, the coroutine body only knows about `&mut Context<'_>`. -fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { +fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> Ty<'tcx> { let context_mut_ref = Ty::new_task_context(tcx); // replace the type of the `resume` argument - replace_resume_ty_local(tcx, body, Local::new(2), context_mut_ref); + replace_resume_ty_local(tcx, body, CTX_ARG, context_mut_ref); let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None); @@ -568,6 +601,408 @@ fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { _ => {} } } + context_mut_ref +} + +// rv = call fut.poll() +fn build_poll_call<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + poll_unit_place: &Place<'tcx>, + switch_block: BasicBlock, + fut_pin_place: &Place<'tcx>, + fut_ty: Ty<'tcx>, + context_ref_place: &Place<'tcx>, + unwind: UnwindAction, +) -> BasicBlock { + let poll_fn = tcx.require_lang_item(LangItem::FuturePoll, None); + let poll_fn = Ty::new_fn_def(tcx, poll_fn, [fut_ty]); + let poll_fn = Operand::Constant(Box::new(ConstOperand { + span: DUMMY_SP, + user_ty: None, + const_: Const::zero_sized(poll_fn), + })); + let call = TerminatorKind::Call { + func: poll_fn.clone(), + args: [ + dummy_spanned(Operand::Move(*fut_pin_place)), + dummy_spanned(Operand::Move(*context_ref_place)), + ] + .into(), + destination: *poll_unit_place, + target: Some(switch_block), + unwind, + call_source: CallSource::Misc, + fn_span: DUMMY_SP, + }; + insert_term_block(body, call) +} + +// pin_fut = Pin::new_unchecked(&mut fut) +fn build_pin_fut<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + fut_place: Place<'tcx>, + unwind: UnwindAction, +) -> (BasicBlock, Place<'tcx>) { + let span = body.span; + let source_info = SourceInfo::outermost(span); + let fut_ty = fut_place.ty(&body.local_decls, tcx).ty; + let fut_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, fut_ty); + let fut_ref_place = Place::from(body.local_decls.push(LocalDecl::new(fut_ref_ty, span))); + let pin_fut_new_unchecked_fn = Ty::new_fn_def( + tcx, + tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)), + [fut_ref_ty], + ); + let fut_pin_ty = pin_fut_new_unchecked_fn.fn_sig(tcx).output().skip_binder(); + 
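// (Overview of the expansion that `build_pin_fut`, `build_poll_call` and
//  `build_poll_switch` produce per async-drop site; block and local names are
//  illustrative, taken from the helper names, not literal MIR output:)
//   pin_bb:    _fut_pin = Pin::new_unchecked(&mut _fut)            -> call_bb
//   call_bb:   _poll = Future::poll(move _fut_pin, move _ctx)      -> switch_bb
//   switch_bb: switchInt(discriminant(_poll))
//                Ready   -> target (or the dropline target)
//                Pending -> yield_bb (a Yield that resumes back into pin_bb
//                           to poll again)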
let fut_pin_place = Place::from(body.local_decls.push(LocalDecl::new(fut_pin_ty, span))); + let pin_fut_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand { + span, + user_ty: None, + const_: Const::zero_sized(pin_fut_new_unchecked_fn), + })); + + let storage_live = + Statement { source_info, kind: StatementKind::StorageLive(fut_pin_place.local) }; + + let fut_ref_assign = Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + fut_ref_place, + Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + fut_place, + ), + ))), + }; + + // call Pin::new_unchecked(&mut fut) + let pin_fut_bb = body.basic_blocks_mut().push(BasicBlockData { + statements: [storage_live, fut_ref_assign].to_vec(), + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::Call { + func: pin_fut_new_unchecked_fn, + args: [dummy_spanned(Operand::Move(fut_ref_place))].into(), + destination: fut_pin_place, + target: None, // will be fixed later + unwind, + call_source: CallSource::Misc, + fn_span: span, + }, + }), + is_cleanup: false, + }); + (pin_fut_bb, fut_pin_place) +} + +// Build Poll switch for async drop +// match rv { +// Ready() => ready_block +// Pending => yield_block +//} +fn build_poll_switch<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + poll_enum: Ty<'tcx>, + poll_unit_place: &Place<'tcx>, + ready_block: BasicBlock, + yield_block: BasicBlock, +) -> BasicBlock { + let poll_enum_adt = poll_enum.ty_adt_def().unwrap(); + + let Discr { val: poll_ready_discr, ty: poll_discr_ty } = poll_enum + .discriminant_for_variant( + tcx, + poll_enum_adt.variant_index_with_id(tcx.require_lang_item(LangItem::PollReady, None)), + ) + .unwrap(); + let poll_pending_discr = poll_enum + .discriminant_for_variant( + tcx, + poll_enum_adt.variant_index_with_id(tcx.require_lang_item(LangItem::PollPending, None)), + ) + .unwrap() + .val; + let source_info = SourceInfo::outermost(body.span); + let poll_discr_place = + Place::from(body.local_decls.push(LocalDecl::new(poll_discr_ty, source_info.span))); + let discr_assign = Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + poll_discr_place, + Rvalue::Discriminant(*poll_unit_place), + ))), + }; + let unreachable_block = insert_term_block(body, TerminatorKind::Unreachable); + body.basic_blocks_mut().push(BasicBlockData { + statements: [discr_assign].to_vec(), + terminator: Some(Terminator { + source_info, + kind: TerminatorKind::SwitchInt { + discr: Operand::Move(poll_discr_place), + targets: SwitchTargets::new( + [(poll_ready_discr, ready_block), (poll_pending_discr, yield_block)] + .into_iter(), + unreachable_block, + ), + }, + }), + is_cleanup: false, + }) +} + +// Gather blocks, reachable through 'drop' targets of Yield and Drop terminators (chained) +fn gather_dropline_blocks<'tcx>(body: &mut Body<'tcx>) -> DenseBitSet { + let mut dropline: DenseBitSet = DenseBitSet::new_empty(body.basic_blocks.len()); + for (bb, data) in traversal::reverse_postorder(body) { + if dropline.contains(bb) { + data.terminator().successors().for_each(|v| { + dropline.insert(v); + }); + } else { + match data.terminator().kind { + TerminatorKind::Yield { drop: Some(v), .. } => { + dropline.insert(v); + } + TerminatorKind::Drop { drop: Some(v), .. 
} => { + dropline.insert(v); + } + _ => (), + } + } + } + dropline +} + +/// Cleanup all async drops (reset to sync) +fn cleanup_async_drops<'tcx>(body: &mut Body<'tcx>) { + for block in body.basic_blocks_mut() { + if let TerminatorKind::Drop { + place: _, + target: _, + unwind: _, + replace: _, + ref mut drop, + ref mut async_fut, + } = block.terminator_mut().kind + { + if drop.is_some() || async_fut.is_some() { + *drop = None; + *async_fut = None; + } + } + } +} + +fn has_expandable_async_drops<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + coroutine_ty: Ty<'tcx>, +) -> bool { + for bb in START_BLOCK..body.basic_blocks.next_index() { + // Drops in unwind path (cleanup blocks) are not expanded to async drops, only sync drops in unwind path + if body[bb].is_cleanup { + continue; + } + let TerminatorKind::Drop { place, target: _, unwind: _, replace: _, drop: _, async_fut } = + body[bb].terminator().kind + else { + continue; + }; + let place_ty = place.ty(&body.local_decls, tcx).ty; + if place_ty == coroutine_ty { + continue; + } + if async_fut.is_none() { + continue; + } + return true; + } + return false; +} + +/// Expand Drop terminator for async drops into mainline poll-switch and dropline poll-switch +fn expand_async_drops<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + context_mut_ref: Ty<'tcx>, + coroutine_kind: hir::CoroutineKind, + coroutine_ty: Ty<'tcx>, +) { + let dropline = gather_dropline_blocks(body); + // Clean drop and async_fut fields if potentially async drop is not expanded (stays sync) + let remove_asyncness = |block: &mut BasicBlockData<'tcx>| { + if let TerminatorKind::Drop { + place: _, + target: _, + unwind: _, + replace: _, + ref mut drop, + ref mut async_fut, + } = block.terminator_mut().kind + { + *drop = None; + *async_fut = None; + } + }; + for bb in START_BLOCK..body.basic_blocks.next_index() { + // Drops in unwind path (cleanup blocks) are not expanded to async drops, only sync drops in unwind path + if body[bb].is_cleanup { + remove_asyncness(&mut body[bb]); + continue; + } + let TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut } = + body[bb].terminator().kind + else { + continue; + }; + + let place_ty = place.ty(&body.local_decls, tcx).ty; + if place_ty == coroutine_ty { + remove_asyncness(&mut body[bb]); + continue; + } + + let Some(fut_local) = async_fut else { + remove_asyncness(&mut body[bb]); + continue; + }; + + let is_dropline_bb = dropline.contains(bb); + + if !is_dropline_bb && drop.is_none() { + remove_asyncness(&mut body[bb]); + continue; + } + + let fut_place = Place::from(fut_local); + let fut_ty = fut_place.ty(&body.local_decls, tcx).ty; + + // poll-code: + // state_call_drop: + // #bb_pin: fut_pin = Pin::new_unchecked(&mut fut) + // #bb_call: rv = call fut.poll() (or future_drop_poll(fut) for internal future drops) + // #bb_check: match (rv) + // pending => return rv (yield) + // ready => *continue_bb|drop_bb* + + // Compute Poll<> (aka Poll with void return) + let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None)); + let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()])); + let poll_decl = LocalDecl::new(poll_enum, body.span); + let poll_unit_place = Place::from(body.local_decls.push(poll_decl)); + + // First state-loop yield for mainline + let context_ref_place = + Place::from(body.local_decls.push(LocalDecl::new(context_mut_ref, body.span))); + let source_info = body[bb].terminator.as_ref().unwrap().source_info; + let arg = 
Rvalue::Use(Operand::Move(Place::from(CTX_ARG)));
+        body[bb].statements.push(Statement {
+            source_info,
+            kind: StatementKind::Assign(Box::new((context_ref_place, arg))),
+        });
+        let yield_block = insert_term_block(body, TerminatorKind::Unreachable); // `kind` replaced later to yield
+        let switch_block =
+            build_poll_switch(tcx, body, poll_enum, &poll_unit_place, target, yield_block);
+        let (pin_bb, fut_pin_place) =
+            build_pin_fut(tcx, body, fut_place.clone(), UnwindAction::Continue);
+        let call_bb = build_poll_call(
+            tcx,
+            body,
+            &poll_unit_place,
+            switch_block,
+            &fut_pin_place,
+            fut_ty,
+            &context_ref_place,
+            unwind,
+        );
+
+        // Second state-loop yield for transition to dropline (when coroutine async drop started)
+        let mut dropline_transition_bb: Option<BasicBlock> = None;
+        let mut dropline_yield_bb: Option<BasicBlock> = None;
+        let mut dropline_context_ref: Option<Place<'tcx>> = None;
+        let mut dropline_call_bb: Option<BasicBlock> = None;
+        if !is_dropline_bb {
+            let context_ref_place2: Place<'_> =
+                Place::from(body.local_decls.push(LocalDecl::new(context_mut_ref, body.span)));
+            let drop_yield_block = insert_term_block(body, TerminatorKind::Unreachable); // `kind` replaced later to yield
+            let drop_switch_block = build_poll_switch(
+                tcx,
+                body,
+                poll_enum,
+                &poll_unit_place,
+                drop.unwrap(),
+                drop_yield_block,
+            );
+            let (pin_bb2, fut_pin_place2) =
+                build_pin_fut(tcx, body, fut_place, UnwindAction::Continue);
+            let drop_call_bb = build_poll_call(
+                tcx,
+                body,
+                &poll_unit_place,
+                drop_switch_block,
+                &fut_pin_place2,
+                fut_ty,
+                &context_ref_place2,
+                unwind,
+            );
+            dropline_transition_bb = Some(pin_bb2);
+            dropline_yield_bb = Some(drop_yield_block);
+            dropline_context_ref = Some(context_ref_place2);
+            dropline_call_bb = Some(drop_call_bb);
+        }
+
+        // value needed only for return-yields or gen-coroutines, so just const here
+        let value = Operand::Constant(Box::new(ConstOperand {
+            span: body.span,
+            user_ty: None,
+            const_: Const::from_bool(tcx, false),
+        }));
+        use rustc_middle::mir::AssertKind::ResumedAfterDrop;
+        let panic_bb = insert_panic_block(tcx, body, ResumedAfterDrop(coroutine_kind));
+
+        if is_dropline_bb {
+            body[yield_block].terminator_mut().kind = TerminatorKind::Yield {
+                value: value.clone(),
+                resume: panic_bb,
+                resume_arg: context_ref_place,
+                drop: Some(pin_bb),
+            };
+        } else {
+            body[yield_block].terminator_mut().kind = TerminatorKind::Yield {
+                value: value.clone(),
+                resume: pin_bb,
+                resume_arg: context_ref_place,
+                drop: dropline_transition_bb,
+            };
+            body[dropline_yield_bb.unwrap()].terminator_mut().kind = TerminatorKind::Yield {
+                value,
+                resume: panic_bb,
+                resume_arg: dropline_context_ref.unwrap(),
+                drop: dropline_transition_bb,
+            };
+        }
+
+        if let TerminatorKind::Call { ref mut target, .. } = body[pin_bb].terminator_mut().kind {
+            *target = Some(call_bb);
+        } else {
+            bug!()
+        }
+        if !is_dropline_bb {
+            if let TerminatorKind::Call { ref mut target, ..
} = + body[dropline_transition_bb.unwrap()].terminator_mut().kind + { + *target = dropline_call_bb; + } else { + bug!() + } + } + + body[bb].terminator_mut().kind = TerminatorKind::Goto { target: pin_bb }; + } } fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local { @@ -1035,9 +1470,8 @@ fn insert_switch<'tcx>( body: &mut Body<'tcx>, cases: Vec<(usize, BasicBlock)>, transform: &TransformVisitor<'tcx>, - default: TerminatorKind<'tcx>, + default_block: BasicBlock, ) { - let default_block = insert_term_block(body, default); let (assign, discr) = transform.get_discr(body); let switch_targets = SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block); @@ -1071,18 +1505,24 @@ fn elaborate_coroutine_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { // coroutine's resume function. let typing_env = body.typing_env(tcx); - let mut elaborator = DropShimElaborator { body, patch: MirPatch::new(body), tcx, typing_env }; + let mut elaborator = DropShimElaborator { + body, + patch: MirPatch::new(body), + tcx, + typing_env, + produce_async_drops: false, + }; for (block, block_data) in body.basic_blocks.iter_enumerated() { - let (target, unwind, source_info) = match block_data.terminator() { + let (target, unwind, source_info, dropline) = match block_data.terminator() { Terminator { source_info, - kind: TerminatorKind::Drop { place, target, unwind, replace: _ }, + kind: TerminatorKind::Drop { place, target, unwind, replace: _, drop, async_fut: _ }, } => { if let Some(local) = place.as_local() && local == SELF_ARG { - (target, unwind, source_info) + (target, unwind, source_info, *drop) } else { continue; } @@ -1107,6 +1547,7 @@ fn elaborate_coroutine_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { *target, unwind, block, + dropline, ); } elaborator.patch.apply(body); @@ -1136,7 +1577,8 @@ fn create_coroutine_drop_shim<'tcx>( // The returned state and the poisoned state fall through to the default // case which is just to return - insert_switch(&mut body, cases, transform, TerminatorKind::Return); + let default_block = insert_term_block(&mut body, TerminatorKind::Return); + insert_switch(&mut body, cases, transform, default_block); for block in body.basic_blocks_mut() { let kind = &mut block.terminator_mut().kind; @@ -1172,6 +1614,163 @@ fn create_coroutine_drop_shim<'tcx>( body } +// Create async drop shim function to drop coroutine itself +fn create_coroutine_drop_shim_async<'tcx>( + tcx: TyCtxt<'tcx>, + transform: &TransformVisitor<'tcx>, + coroutine_ty: Ty<'tcx>, + body: &Body<'tcx>, + drop_clean: BasicBlock, + can_unwind: bool, +) -> Body<'tcx> { + let mut body = body.clone(); + // Take the coroutine info out of the body, since the drop shim is + // not a coroutine body itself; it just has its drop built out of it. 
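+    // The drop shim itself is polled as a future: below we rewrite its return place to
+    // Poll<()> and route finished (RETURNED) coroutines to a `Poll::Ready(())` block.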
+ let _ = body.coroutine.take(); + + FixReturnPendingVisitor { tcx }.visit_body(&mut body); + + // Poison the coroutine when it unwinds + if can_unwind { + generate_poison_block_and_redirect_unwinds_there(transform, &mut body); + } + + let source_info = SourceInfo::outermost(body.span); + + let mut cases = create_cases(&mut body, transform, Operation::Drop); + + cases.insert(0, (CoroutineArgs::UNRESUMED, drop_clean)); + + use rustc_middle::mir::AssertKind::ResumedAfterPanic; + // Panic when resumed on the returned or poisoned state + if can_unwind { + cases.insert( + 1, + ( + CoroutineArgs::POISONED, + insert_panic_block(tcx, &mut body, ResumedAfterPanic(transform.coroutine_kind)), + ), + ); + } + + // RETURNED state also goes to default_block with `return Ready<()>`. + // For fully-polled coroutine, async drop has nothing to do. + let default_block = insert_poll_ready_block(tcx, &mut body); + insert_switch(&mut body, cases, transform, default_block); + + for block in body.basic_blocks_mut() { + let kind = &mut block.terminator_mut().kind; + if let TerminatorKind::CoroutineDrop = *kind { + *kind = TerminatorKind::Return; + block.statements.push(return_poll_ready_assign(tcx, source_info)); + } + } + + // Replace the return variable: Poll to Poll<()> + let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None)); + let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()])); + body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(poll_enum, source_info); + + make_coroutine_state_argument_indirect(tcx, &mut body); + + match transform.coroutine_kind { + // Iterator::next doesn't accept a pinned argument, + // unlike for all other coroutine kinds. + CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => {} + _ => { + make_coroutine_state_argument_pinned(tcx, &mut body); + } + } + + // Make sure we remove dead blocks to remove + // unrelated code from the resume part of the function + simplify::remove_dead_blocks(&mut body); + + pm::run_passes_no_validate( + tcx, + &mut body, + &[&abort_unwinding_calls::AbortUnwindingCalls], + None, + ); + + // Update the body's def to become the drop glue. + let coroutine_instance = body.source.instance; + let drop_instance = InstanceKind::FutureDropPollShim( + tcx.lang_items().async_drop_in_place_poll_fn().unwrap(), + coroutine_ty, + coroutine_ty, + ); + + // Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible + // filename. + body.source.instance = coroutine_instance; + dump_mir(tcx, false, "coroutine_drop_async", &0, &body, |_, _| Ok(())); + body.source.instance = drop_instance; + + body +} + +// Create async drop shim proxy function for future_drop_poll +// It is just { call coroutine_drop(); return Poll::Ready(); } +fn create_coroutine_drop_shim_proxy_async<'tcx>( + tcx: TyCtxt<'tcx>, + body: &Body<'tcx>, + coroutine_ty: Ty<'tcx>, +) -> Body<'tcx> { + let mut body = body.clone(); + // Take the coroutine info out of the body, since the drop shim is + // not a coroutine body itself; it just has its drop built out of it. 
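+    // The proxy future only forwards to the sync drop shim: its body is rebuilt below as a
+    // single `Drop(self)` terminator followed by a `Poll::Ready(())` return block.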
+    let _ = body.coroutine.take();
+    let basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>> = IndexVec::new();
+    body.basic_blocks = BasicBlocks::new(basic_blocks);
+    body.var_debug_info.clear();
+
+    // Keeping return value and args
+    body.local_decls.truncate(1 + body.arg_count);
+
+    let source_info = SourceInfo::outermost(body.span);
+
+    // Replace the return variable: Poll to Poll<()>
+    let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None));
+    let poll_enum = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()]));
+    body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(poll_enum, source_info);
+
+    // call coroutine_drop()
+    let call_bb = body.basic_blocks_mut().push(BasicBlockData {
+        statements: Vec::new(),
+        terminator: None,
+        is_cleanup: false,
+    });
+
+    // return Poll::Ready()
+    let ret_bb = insert_poll_ready_block(tcx, &mut body);
+
+    let kind = TerminatorKind::Drop {
+        place: Place::from(SELF_ARG),
+        target: ret_bb,
+        unwind: UnwindAction::Continue,
+        replace: false,
+        drop: None,
+        async_fut: None,
+    };
+    body.basic_blocks_mut()[call_bb].terminator = Some(Terminator { source_info, kind });
+
+    let coroutine_instance = body.source.instance;
+    let drop_instance = InstanceKind::FutureDropPollShim(
+        tcx.lang_items().async_drop_in_place_poll_fn().unwrap(),
+        coroutine_ty,
+        coroutine_ty,
+    );
+
+    // Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible
+    // filename.
+    body.source.instance = coroutine_instance;
+    dump_mir(tcx, false, "coroutine_drop_proxy_async", &0, &body, |_, _| Ok(()));
+    body.source.instance = drop_instance;
+
+    body
+}
+
 fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
     let source_info = SourceInfo::outermost(body.span);
     body.basic_blocks_mut().push(BasicBlockData {
@@ -1181,6 +1780,34 @@ fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) ->
     })
 }
+fn return_poll_ready_assign<'tcx>(tcx: TyCtxt<'tcx>, source_info: SourceInfo) -> Statement<'tcx> {
+    // Poll::Ready(())
+    let poll_def_id = tcx.require_lang_item(LangItem::Poll, None);
+    let args = tcx.mk_args(&[tcx.types.unit.into()]);
+    let val = Operand::Constant(Box::new(ConstOperand {
+        span: source_info.span,
+        user_ty: None,
+        const_: Const::zero_sized(tcx.types.unit),
+    }));
+    let ready_val = Rvalue::Aggregate(
+        Box::new(AggregateKind::Adt(poll_def_id, VariantIdx::from_usize(0), args, None, None)),
+        IndexVec::from_raw(vec![val]),
+    );
+    Statement {
+        kind: StatementKind::Assign(Box::new((Place::return_place(), ready_val))),
+        source_info,
+    }
+}
+
+fn insert_poll_ready_block<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> BasicBlock {
+    let source_info = SourceInfo::outermost(body.span);
+    body.basic_blocks_mut().push(BasicBlockData {
+        statements: [return_poll_ready_assign(tcx, source_info)].to_vec(),
+        terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }),
+        is_cleanup: false,
+    })
+}
+
 fn insert_panic_block<'tcx>(
     tcx: TyCtxt<'tcx>,
     body: &mut Body<'tcx>,
@@ -1262,45 +1889,50 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
     false
 }
+// Poison the coroutine when it unwinds
+fn generate_poison_block_and_redirect_unwinds_there<'tcx>(
+    transform: &TransformVisitor<'tcx>,
+    body: &mut Body<'tcx>,
+) {
+    let source_info = SourceInfo::outermost(body.span);
+    let poison_block = body.basic_blocks_mut().push(BasicBlockData {
+        statements: vec![
+            transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info),
+        ],
+        terminator: 
Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }), + is_cleanup: true, + }); + + for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() { + let source_info = block.terminator().source_info; + + if let TerminatorKind::UnwindResume = block.terminator().kind { + // An existing `Resume` terminator is redirected to jump to our dedicated + // "poisoning block" above. + if idx != poison_block { + *block.terminator_mut() = + Terminator { source_info, kind: TerminatorKind::Goto { target: poison_block } }; + } + } else if !block.is_cleanup { + // Any terminators that *can* unwind but don't have an unwind target set are also + // pointed at our poisoning block (unless they're part of the cleanup path). + if let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() { + *unwind = UnwindAction::Cleanup(poison_block); + } + } + } +} + fn create_coroutine_resume_function<'tcx>( tcx: TyCtxt<'tcx>, transform: TransformVisitor<'tcx>, body: &mut Body<'tcx>, can_return: bool, + can_unwind: bool, ) { - let can_unwind = can_unwind(tcx, body); - // Poison the coroutine when it unwinds if can_unwind { - let source_info = SourceInfo::outermost(body.span); - let poison_block = body.basic_blocks_mut().push(BasicBlockData { - statements: vec![ - transform.set_discr(VariantIdx::new(CoroutineArgs::POISONED), source_info), - ], - terminator: Some(Terminator { source_info, kind: TerminatorKind::UnwindResume }), - is_cleanup: true, - }); - - for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() { - let source_info = block.terminator().source_info; - - if let TerminatorKind::UnwindResume = block.terminator().kind { - // An existing `Resume` terminator is redirected to jump to our dedicated - // "poisoning block" above. - if idx != poison_block { - *block.terminator_mut() = Terminator { - source_info, - kind: TerminatorKind::Goto { target: poison_block }, - }; - } - } else if !block.is_cleanup { - // Any terminators that *can* unwind but don't have an unwind target set are also - // pointed at our poisoning block (unless they're part of the cleanup path). 
- if let Some(unwind @ UnwindAction::Continue) = block.terminator_mut().unwind_mut() { - *unwind = UnwindAction::Cleanup(poison_block); - } - } - } + generate_poison_block_and_redirect_unwinds_there(&transform, body); } let mut cases = create_cases(body, &transform, Operation::Resume); @@ -1325,7 +1957,13 @@ fn create_coroutine_resume_function<'tcx>( let block = match transform.coroutine_kind { CoroutineKind::Desugared(CoroutineDesugaring::Async, _) | CoroutineKind::Coroutine(_) => { - insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind)) + // For async_drop_in_place::{closure} we just keep return Poll::Ready, + // because async drop of such coroutine keeps polling original coroutine + if tcx.is_templated_coroutine(body.source.def_id()) { + insert_poll_ready_block(tcx, body) + } else { + insert_panic_block(tcx, body, ResumedAfterReturn(transform.coroutine_kind)) + } } CoroutineKind::Desugared(CoroutineDesugaring::AsyncGen, _) | CoroutineKind::Desugared(CoroutineDesugaring::Gen, _) => { @@ -1335,7 +1973,8 @@ fn create_coroutine_resume_function<'tcx>( cases.insert(1, (CoroutineArgs::RETURNED, block)); } - insert_switch(body, cases, &transform, TerminatorKind::Unreachable); + let default_block = insert_term_block(body, TerminatorKind::Unreachable); + insert_switch(body, cases, &transform, default_block); make_coroutine_state_argument_indirect(tcx, body); @@ -1357,16 +1996,31 @@ fn create_coroutine_resume_function<'tcx>( dump_mir(tcx, false, "coroutine_resume", &0, body, |_, _| Ok(())); } -fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock { - let return_block = insert_term_block(body, TerminatorKind::Return); +fn insert_clean_drop<'tcx>( + tcx: TyCtxt<'tcx>, + body: &mut Body<'tcx>, + has_async_drops: bool, +) -> BasicBlock { + let source_info = SourceInfo::outermost(body.span); + let return_block = if has_async_drops { + insert_poll_ready_block(tcx, body) + } else { + insert_term_block(body, TerminatorKind::Return) + }; + + // FIXME: When move insert_clean_drop + elaborate_coroutine_drops before async drops expand, + // also set dropline here: + // let dropline = if has_async_drops { Some(return_block) } else { None }; + let dropline = None; let term = TerminatorKind::Drop { place: Place::from(SELF_ARG), target: return_block, unwind: UnwindAction::Continue, replace: false, + drop: dropline, + async_fut: None, }; - let source_info = SourceInfo::outermost(body.span); // Create a block to destroy an unresumed coroutines. This can only destroy upvars. body.basic_blocks_mut().push(BasicBlockData { @@ -1421,7 +2075,7 @@ fn create_cases<'tcx>( if operation == Operation::Resume { // Move the resume argument to the destination place of the `Yield` terminator - let resume_arg = Local::new(2); // 0 = return, 1 = self + let resume_arg = CTX_ARG; statements.push(Statement { source_info, kind: StatementKind::Assign(Box::new(( @@ -1528,7 +2182,9 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { }; let old_ret_ty = body.return_ty(); - assert!(body.coroutine_drop().is_none()); + assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none()); + + dump_mir(tcx, false, "coroutine_before", &0, body, |_, _| Ok(())); // The first argument is the coroutine type passed by value let coroutine_ty = body.local_decls.raw[1].ty; @@ -1577,19 +2233,32 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { // RETURN_PLACE then is a fresh unused local with type ret_ty. 
let old_ret_local = replace_local(RETURN_PLACE, new_ret_ty, body, tcx); + // We need to insert clean drop for unresumed state and perform drop elaboration + // (finally in open_drop_for_tuple) before async drop expansion. + // Async drops, produced by this drop elaboration, will be expanded, + // and corresponding futures kept in layout. + let has_async_drops = matches!( + coroutine_kind, + CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) + ) && has_expandable_async_drops(tcx, body, coroutine_ty); + // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies. if matches!( coroutine_kind, CoroutineKind::Desugared(CoroutineDesugaring::Async | CoroutineDesugaring::AsyncGen, _) ) { - transform_async_context(tcx, body); + let context_mut_ref = transform_async_context(tcx, body); + expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty); + dump_mir(tcx, false, "coroutine_async_drop_expand", &0, body, |_, _| Ok(())); + } else { + cleanup_async_drops(body); } // We also replace the resume argument and insert an `Assign`. // This is needed because the resume argument `_2` might be live across a `yield`, in which // case there is no `Assign` to it that the transform can turn into a store to the coroutine // state. After the yield the slot in the coroutine state would then be uninitialized. - let resume_local = Local::new(2); + let resume_local = CTX_ARG; let resume_ty = body.local_decls[resume_local].ty; let old_resume_local = replace_local(resume_local, resume_ty, body, tcx); @@ -1669,10 +2338,14 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { body.coroutine.as_mut().unwrap().resume_ty = None; body.coroutine.as_mut().unwrap().coroutine_layout = Some(layout); + // FIXME: Drops, produced by insert_clean_drop + elaborate_coroutine_drops, + // are currently sync only. To allow async for them, we need to move those calls + // before expand_async_drops, and fix the related problems. + // // Insert `drop(coroutine_struct)` which is used to drop upvars for coroutines in // the unresumed state. // This is expanded to a drop ladder in `elaborate_coroutine_drops`. 
- let drop_clean = insert_clean_drop(body); + let drop_clean = insert_clean_drop(tcx, body, has_async_drops); dump_mir(tcx, false, "coroutine_pre-elab", &0, body, |_, _| Ok(())); @@ -1683,13 +2356,38 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform { dump_mir(tcx, false, "coroutine_post-transform", &0, body, |_, _| Ok(())); - // Create a copy of our MIR and use it to create the drop shim for the coroutine - let drop_shim = create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean); + let can_unwind = can_unwind(tcx, body); - body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim); + // Create a copy of our MIR and use it to create the drop shim for the coroutine + if has_async_drops { + // If coroutine has async drops, generating async drop shim + let mut drop_shim = create_coroutine_drop_shim_async( + tcx, + &transform, + coroutine_ty, + body, + drop_clean, + can_unwind, + ); + // Run derefer to fix Derefs that are not in the first place + deref_finder(tcx, &mut drop_shim); + body.coroutine.as_mut().unwrap().coroutine_drop_async = Some(drop_shim); + } else { + // If coroutine has no async drops, generating sync drop shim + let mut drop_shim = + create_coroutine_drop_shim(tcx, &transform, coroutine_ty, body, drop_clean); + // Run derefer to fix Derefs that are not in the first place + deref_finder(tcx, &mut drop_shim); + body.coroutine.as_mut().unwrap().coroutine_drop = Some(drop_shim); + + // For coroutine with sync drop, generating async proxy for `future_drop_poll` call + let mut proxy_shim = create_coroutine_drop_shim_proxy_async(tcx, body, coroutine_ty); + deref_finder(tcx, &mut proxy_shim); + body.coroutine.as_mut().unwrap().coroutine_drop_proxy_async = Some(proxy_shim); + } // Create the Coroutine::resume / Future::poll function - create_coroutine_resume_function(tcx, transform, body, can_return); + create_coroutine_resume_function(tcx, transform, body, can_return, can_unwind); // Run derefer to fix Derefs that are not in the first place deref_finder(tcx, body); diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs index cb869160c8054..95809607bdbec 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drops.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs @@ -155,6 +155,14 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for ElaborateDropsCtxt<'a, 'tcx> { self.env.typing_env } + fn allow_async_drops(&self) -> bool { + true + } + + fn terminator_loc(&self, bb: BasicBlock) -> Location { + self.patch.terminator_loc(self.body, bb) + } + #[instrument(level = "debug", skip(self), ret)] fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle { let ((maybe_init, maybe_uninit), multipart) = match mode { @@ -325,7 +333,9 @@ impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> { // This function should mirror what `collect_drop_flags` does. 
for (bb, data) in self.body.basic_blocks.iter_enumerated() { let terminator = data.terminator(); - let TerminatorKind::Drop { place, target, unwind, replace } = terminator.kind else { + let TerminatorKind::Drop { place, target, unwind, replace, drop, async_fut: _ } = + terminator.kind + else { continue; }; @@ -361,7 +371,16 @@ impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> { } }; self.init_data.seek_before(self.body.terminator_loc(bb)); - elaborate_drop(self, terminator.source_info, place, path, target, unwind, bb) + elaborate_drop( + self, + terminator.source_info, + place, + path, + target, + unwind, + bb, + drop, + ) } LookupResult::Parent(None) => {} LookupResult::Parent(Some(_)) => { diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index ab617e2ce6fd6..4ed61024f4f7a 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -388,7 +388,15 @@ impl<'tcx> Inliner<'tcx> for NormalInliner<'tcx> { let term = blk.terminator(); let caller_attrs = tcx.codegen_fn_attrs(self.caller_def_id()); - if let TerminatorKind::Drop { ref place, target, unwind, replace: _ } = term.kind { + if let TerminatorKind::Drop { + ref place, + target, + unwind, + replace: _, + drop: _, + async_fut: _, + } = term.kind + { work_list.push(target); // If the place doesn't actually need dropping, treat it like a regular goto. @@ -712,6 +720,20 @@ fn check_mir_is_available<'tcx, I: Inliner<'tcx>>( debug!("still needs substitution"); return Err("implementation limitation"); } + InstanceKind::AsyncDropGlue(_, ty) | InstanceKind::AsyncDropGlueCtorShim(_, ty) => { + return if ty.still_further_specializable() { + Err("still needs substitution") + } else { + Ok(()) + }; + } + InstanceKind::FutureDropPollShim(_, ty, ty2) => { + return if ty.still_further_specializable() || ty2.still_further_specializable() { + Err("still needs substitution") + } else { + Ok(()) + }; + } // This cannot result in an immediate cycle since the callee MIR is a shim, which does // not get any optimizations run on it. Any subsequent inlining may cause cycles, but we @@ -725,8 +747,7 @@ fn check_mir_is_available<'tcx, I: Inliner<'tcx>>( | InstanceKind::DropGlue(..) | InstanceKind::CloneShim(..) | InstanceKind::ThreadLocalShim(..) - | InstanceKind::FnPtrAddrShim(..) - | InstanceKind::AsyncDropGlueCtorShim(..) => return Ok(()), + | InstanceKind::FnPtrAddrShim(..) => return Ok(()), } if inliner.tcx().is_constructor(callee_def_id) { @@ -1330,8 +1351,8 @@ fn try_instance_mir<'tcx>( tcx: TyCtxt<'tcx>, instance: InstanceKind<'tcx>, ) -> Result<&'tcx Body<'tcx>, &'static str> { - if let ty::InstanceKind::DropGlue(_, Some(ty)) - | ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)) = instance + if let ty::InstanceKind::DropGlue(_, Some(ty)) | ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) = + instance && let ty::Adt(def, args) = ty.kind() { let fields = def.all_fields(); diff --git a/compiler/rustc_mir_transform/src/inline/cycle.rs b/compiler/rustc_mir_transform/src/inline/cycle.rs index a40768300f5d0..292278800f8a2 100644 --- a/compiler/rustc_mir_transform/src/inline/cycle.rs +++ b/compiler/rustc_mir_transform/src/inline/cycle.rs @@ -95,7 +95,10 @@ pub(crate) fn mir_callgraph_reachable<'tcx>( InstanceKind::FnPtrAddrShim(..) => { continue; } - InstanceKind::DropGlue(..) | InstanceKind::AsyncDropGlueCtorShim(..) => { + InstanceKind::DropGlue(..) + | InstanceKind::FutureDropPollShim(..) + | InstanceKind::AsyncDropGlue(..) + | InstanceKind::AsyncDropGlueCtorShim(..) 
=> { // FIXME: A not fully instantiated drop shim can cause ICEs if one attempts to // have its MIR built. Likely oli-obk just screwed up the `ParamEnv`s, so this // needs some more analysis. diff --git a/compiler/rustc_mir_transform/src/known_panics_lint.rs b/compiler/rustc_mir_transform/src/known_panics_lint.rs index 59de6ca84a71b..8d15b3d6e252f 100644 --- a/compiler/rustc_mir_transform/src/known_panics_lint.rs +++ b/compiler/rustc_mir_transform/src/known_panics_lint.rs @@ -888,7 +888,11 @@ impl CanConstProp { }; for (local, val) in cpv.can_const_prop.iter_enumerated_mut() { let ty = body.local_decls[local].ty; - if ty.is_union() { + if ty.is_templated_coroutine(tcx) { + // No const propagation for templated coroutine (AsyncDropGlue) + *val = ConstPropMode::NoPropagation; + continue; + } else if ty.is_union() { // Unions are incompatible with the current implementation of // const prop because Rust has no concept of an active // variant of a union diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index b572f6ca0b36a..7f2a218df4265 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -30,7 +30,7 @@ use rustc_middle::mir::{ MirPhase, Operand, Place, ProjectionElem, Promoted, RuntimePhase, Rvalue, START_BLOCK, SourceInfo, Statement, StatementKind, TerminatorKind, }; -use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt}; +use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::util::Providers; use rustc_middle::{bug, query, span_bug}; use rustc_mir_build::builder::build_mir; @@ -214,9 +214,11 @@ pub fn provide(providers: &mut Providers) { mir_const_qualif, mir_promoted, mir_drops_elaborated_and_const_checked, + templated_mir_drops_elaborated_and_const_checked, mir_for_ctfe, mir_coroutine_witnesses: coroutine::mir_coroutine_witnesses, optimized_mir, + templated_optimized_mir, is_mir_available, is_ctfe_mir_available: is_mir_available, mir_callgraph_reachable: inline::cycle::mir_callgraph_reachable, @@ -517,6 +519,21 @@ fn mir_drops_elaborated_and_const_checked(tcx: TyCtxt<'_>, def: LocalDefId) -> & tcx.alloc_steal_mir(body) } +/// mir_drops_elaborated_and_const_checked simplified analog for templated coroutine +fn templated_mir_drops_elaborated_and_const_checked<'tcx>( + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, +) -> &'tcx Steal> { + let ty::Coroutine(def_id, _) = ty.kind() else { + bug!(); + }; + assert!(ty.is_templated_coroutine(tcx)); + + let instance = ty::InstanceKind::AsyncDropGlue(*def_id, ty); + let body = tcx.mir_shims(instance).clone(); + tcx.alloc_steal_mir(body) +} + // Made public so that `mir_drops_elaborated_and_const_checked` can be overridden // by custom rustc drivers, running all the steps by themselves. See #114628. pub fn run_analysis_to_runtime_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { @@ -720,6 +737,11 @@ fn optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> &Body<'_> { tcx.arena.alloc(inner_optimized_mir(tcx, did)) } +/// Optimize the templated MIR and prepare it for codegen. 
+fn templated_optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx Body<'tcx> { + tcx.arena.alloc(inner_templated_optimized_mir(tcx, ty)) +} + fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> { if tcx.is_constructor(did.to_def_id()) { // There's no reason to run all of the MIR passes on constructors when @@ -764,6 +786,29 @@ fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> { body } +fn inner_templated_optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Body<'tcx> { + debug!("about to call templated_mir_drops_elaborated..."); + let body = tcx.templated_mir_drops_elaborated_and_const_checked(ty).steal(); + let mut body = remap_mir_for_const_eval_select(tcx, body, hir::Constness::NotConst); + + if body.tainted_by_errors.is_some() { + return body; + } + + // If `mir_drops_elaborated_and_const_checked` found that the current body has unsatisfiable + // predicates, it will shrink the MIR to a single `unreachable` terminator. + // More generally, if MIR is a lone `unreachable`, there is nothing to optimize. + if let TerminatorKind::Unreachable = body.basic_blocks[START_BLOCK].terminator().kind + && body.basic_blocks[START_BLOCK].statements.is_empty() + { + return body; + } + + run_optimization_passes(tcx, &mut body); + + body +} + /// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for /// constant evaluation once all generic parameters become known. fn promoted_mir(tcx: TyCtxt<'_>, def: LocalDefId) -> &IndexVec> { diff --git a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs index 50d10883d2cad..04946b97606eb 100644 --- a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs +++ b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs @@ -133,6 +133,8 @@ impl<'a, 'mir, 'tcx> DropsReachable<'a, 'mir, 'tcx> { target: _, unwind: _, replace: _, + drop: _, + async_fut: _, } = &terminator.kind && place_has_common_prefix(dropped_place, self.place) { diff --git a/compiler/rustc_mir_transform/src/remove_zsts.rs b/compiler/rustc_mir_transform/src/remove_zsts.rs index 78d94a038671d..f4e8b1ed1caa0 100644 --- a/compiler/rustc_mir_transform/src/remove_zsts.rs +++ b/compiler/rustc_mir_transform/src/remove_zsts.rs @@ -59,6 +59,13 @@ fn trivially_zst<'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'tcx>) -> Option { | ty::RawPtr(..) | ty::Ref(..) | ty::FnPtr(..) 
=> Some(false),
+        ty::Coroutine(def_id, _) => {
+            if tcx.is_templated_coroutine(*def_id) {
+                Some(false)
+            } else {
+                None
+            }
+        }
         // check `layout_of` to see (including unreachable things we won't actually see)
         _ => None,
     }
diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs
index f7ec0f740d300..9fca1886a2f5a 100644
--- a/compiler/rustc_mir_transform/src/shim.rs
+++ b/compiler/rustc_mir_transform/src/shim.rs
@@ -7,6 +7,7 @@ use rustc_hir::def_id::DefId;
 use rustc_hir::lang_items::LangItem;
 use rustc_index::{Idx, IndexVec};
 use rustc_middle::mir::patch::MirPatch;
+use rustc_middle::mir::visit::{MutVisitor, PlaceContext};
 use rustc_middle::mir::*;
 use rustc_middle::query::Providers;
 use rustc_middle::ty::adjustment::PointerCoercion;
@@ -15,7 +16,7 @@ use rustc_middle::ty::{
 };
 use rustc_middle::{bug, span_bug};
 use rustc_mir_dataflow::elaborate_drops::{self, DropElaborator, DropFlagMode, DropStyle};
-use rustc_span::source_map::Spanned;
+use rustc_span::source_map::{Spanned, dummy_spanned};
 use rustc_span::{DUMMY_SP, Span};
 use tracing::{debug, instrument};
@@ -30,6 +31,40 @@ pub(super) fn provide(providers: &mut Providers) {
     providers.mir_shims = make_shim;
 }
+// Replace Pin<&mut ImplCoroutine> accesses (_1.0) with Pin<&mut ProxyCoroutine> accesses
+struct FixProxyFutureDropVisitor<'tcx> {
+    tcx: TyCtxt<'tcx>,
+    replace_to: Local,
+}
+
+impl<'tcx> MutVisitor<'tcx> for FixProxyFutureDropVisitor<'tcx> {
+    fn tcx(&self) -> TyCtxt<'tcx> {
+        self.tcx
+    }
+
+    fn visit_place(
+        &mut self,
+        place: &mut Place<'tcx>,
+        _context: PlaceContext,
+        _location: Location,
+    ) {
+        if place.local == Local::from_u32(1) {
+            if place.projection.len() == 1 {
+                assert!(matches!(
+                    place.projection.first(),
+                    Some(ProjectionElem::Field(FieldIdx::ZERO, _))
+                ));
+                *place = Place::from(self.replace_to);
+            } else if place.projection.len() == 2 {
+                assert!(matches!(place.projection[0], ProjectionElem::Field(FieldIdx::ZERO, _)));
+                assert!(matches!(place.projection[1], ProjectionElem::Deref));
+                *place =
+                    Place::from(self.replace_to).project_deeper(&[ProjectionElem::Deref], self.tcx);
+            }
+        }
+    }
+}
+
 fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body<'tcx> {
     debug!("make_shim({:?})", instance);
@@ -130,8 +165,47 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceKind<'tcx>) -> Body<
         ty::InstanceKind::ThreadLocalShim(..)
=> build_thread_local_shim(tcx, instance), ty::InstanceKind::CloneShim(def_id, ty) => build_clone_shim(tcx, def_id, ty), ty::InstanceKind::FnPtrAddrShim(def_id, ty) => build_fn_ptr_addr_shim(tcx, def_id, ty), + ty::InstanceKind::FutureDropPollShim(def_id, proxy_ty, impl_ty) => { + let mut body = + async_destructor_ctor::build_future_drop_poll_shim(tcx, def_id, proxy_ty, impl_ty); + + pm::run_passes( + tcx, + &mut body, + &[ + &mentioned_items::MentionedItems, + &abort_unwinding_calls::AbortUnwindingCalls, + &add_call_guards::CriticalCallEdges, + ], + Some(MirPhase::Runtime(RuntimePhase::Optimized)), + pm::Optimizations::Allowed, + ); + debug!("make_shim({:?}) = {:?}", instance, body); + return body; + } + ty::InstanceKind::AsyncDropGlue(def_id, ty) => { + let mut body = async_destructor_ctor::build_async_drop_shim(tcx, def_id, ty); + + pm::run_passes( + tcx, + &mut body, + &[ + &mentioned_items::MentionedItems, + &simplify::SimplifyCfg::MakeShim, + //&crate::reveal_all::RevealAll, + &crate::coroutine::StateTransform, + ], + Some(MirPhase::Runtime(RuntimePhase::PostCleanup)), + pm::Optimizations::Allowed, + ); + debug!("make_shim({:?}) = {:?}", instance, body); + return body; + } + ty::InstanceKind::AsyncDropGlueCtorShim(def_id, ty) => { - async_destructor_ctor::build_async_destructor_ctor_shim(tcx, def_id, ty) + let body = async_destructor_ctor::build_async_destructor_ctor_shim(tcx, def_id, ty); + debug!("make_shim({:?}) = {:?}", instance, body); + return body; } ty::InstanceKind::Virtual(..) => { bug!("InstanceKind::Virtual ({:?}) is for direct calls only", instance) @@ -279,8 +353,13 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option>) if ty.is_some() { let patch = { let typing_env = ty::TypingEnv::post_analysis(tcx, def_id); - let mut elaborator = - DropShimElaborator { body: &body, patch: MirPatch::new(&body), tcx, typing_env }; + let mut elaborator = DropShimElaborator { + body: &body, + patch: MirPatch::new(&body), + tcx, + typing_env, + produce_async_drops: false, + }; let dropee = tcx.mk_place_deref(dropee_ptr); let resume_block = elaborator.patch.resume_block(); elaborate_drops::elaborate_drop( @@ -291,6 +370,7 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option>) return_block, elaborate_drops::Unwind::To(resume_block), START_BLOCK, + None, ); elaborator.patch }; @@ -339,6 +419,7 @@ pub(super) struct DropShimElaborator<'a, 'tcx> { pub patch: MirPatch<'tcx>, pub tcx: TyCtxt<'tcx>, pub typing_env: ty::TypingEnv<'tcx>, + pub produce_async_drops: bool, } impl fmt::Debug for DropShimElaborator<'_, '_> { @@ -363,6 +444,13 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> { self.typing_env } + fn terminator_loc(&self, bb: BasicBlock) -> Location { + self.patch.terminator_loc(self.body, bb) + } + fn allow_async_drops(&self) -> bool { + self.produce_async_drops + } + fn drop_style(&self, _path: Self::Path, mode: DropFlagMode) -> DropStyle { match mode { DropFlagMode::Shallow => { @@ -619,6 +707,8 @@ impl<'tcx> CloneShimBuilder<'tcx> { target: unwind, unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup), replace: false, + drop: None, + async_fut: None, }, /* is_cleanup */ true, ); @@ -913,6 +1003,8 @@ fn build_call_shim<'tcx>( target: BasicBlock::new(2), unwind: UnwindAction::Continue, replace: false, + drop: None, + async_fut: None, }, false, ); @@ -946,6 +1038,8 @@ fn build_call_shim<'tcx>( target: BasicBlock::new(4), unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup), replace: false, + drop: 
None,
+                async_fut: None,
+            },
             /* is_cleanup */ true,
         );
     }
diff --git a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
index 94b1b4b1855b5..b7ecd0b4c5fc2 100644
--- a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
+++ b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
@@ -1,639 +1,492 @@
-use std::iter;
-
-use itertools::Itertools;
-use rustc_abi::{FieldIdx, VariantIdx};
-use rustc_const_eval::interpret;
 use rustc_hir::def_id::DefId;
 use rustc_hir::lang_items::LangItem;
+use rustc_hir::{CoroutineDesugaring, CoroutineKind, CoroutineSource, Safety};
 use rustc_index::{Idx, IndexVec};
-use rustc_middle::mir::*;
-use rustc_middle::ty::adjustment::PointerCoercion;
-use rustc_middle::ty::util::{AsyncDropGlueMorphology, Discr};
-use rustc_middle::ty::{self, Ty, TyCtxt};
-use rustc_middle::{bug, span_bug};
-use rustc_span::source_map::respan;
-use rustc_span::{Span, Symbol};
-use rustc_target::spec::PanicStrategy;
-use tracing::debug;
-
-use super::{local_decls_for_sig, new_body};
-
-pub(super) fn build_async_destructor_ctor_shim<'tcx>(
+use rustc_middle::mir::patch::MirPatch;
+use rustc_middle::mir::{
+    BasicBlock, BasicBlockData, Body, Local, LocalDecl, MirSource, Operand, Place, Rvalue,
+    SourceInfo, Statement, StatementKind, Terminator, TerminatorKind,
+};
+use rustc_middle::ty::{self, EarlyBinder, Ty, TyCtxt};
+use rustc_mir_dataflow::elaborate_drops;
+
+use super::*;
+
+// For a coroutine, its async drop function layout is a proxy holding a ref to the impl coroutine
+// layout, so in the async destructor ctor function we create
+// `async_drop_in_place(*ImplCoroutine) { return ProxyCoroutine { &*ImplCoroutine } }`.
+// In the case of nested `async_drop_in_place` closure proxies (and further nesting),
+// we need to take the impl coroutine ref from the arg proxy layout and copy it to the new proxy layout.
+fn build_async_destructor_ctor_shim_for_coroutine<'tcx>(
     tcx: TyCtxt<'tcx>,
     def_id: DefId,
-    ty: Option<Ty<'tcx>>,
+    ty: Ty<'tcx>,
 ) -> Body<'tcx> {
-    debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty);
-
-    AsyncDestructorCtorShimBuilder::new(tcx, def_id, ty).build()
-}
-
-/// Builder for async_drop_in_place shim. Functions as a stack machine
-/// to build up an expression using combinators. Stack contains pairs
-/// of locals and types. Combinator is a not yet instantiated pair of a
-/// function and a type, is considered to be an operator which consumes
-/// operands from the stack by instantiating its function and its type
-/// with operand types and moving locals into the function call. Top
-/// pair is considered to be the last operand.
-// FIXME: add mir-opt tests -struct AsyncDestructorCtorShimBuilder<'tcx> { - tcx: TyCtxt<'tcx>, - def_id: DefId, - self_ty: Option>, - span: Span, - source_info: SourceInfo, - typing_env: ty::TypingEnv<'tcx>, - - stack: Vec>, - last_bb: BasicBlock, - top_cleanup_bb: Option, - - locals: IndexVec>, - bbs: IndexVec>, -} - -#[derive(Clone, Copy)] -enum SurfaceDropKind { - Async, - Sync, -} - -impl<'tcx> AsyncDestructorCtorShimBuilder<'tcx> { - const SELF_PTR: Local = Local::from_u32(1); const INPUT_COUNT: usize = 1; - const MAX_STACK_LEN: usize = 2; - - fn new(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Option>) -> Self { - let args = if let Some(ty) = self_ty { - tcx.mk_args(&[ty.into()]) - } else { - ty::GenericArgs::identity_for_item(tcx, def_id) - }; - let sig = tcx.fn_sig(def_id).instantiate(tcx, args); - let sig = tcx.instantiate_bound_regions_with_erased(sig); - let span = tcx.def_span(def_id); - - let source_info = SourceInfo::outermost(span); - - debug_assert_eq!(sig.inputs().len(), Self::INPUT_COUNT); - let locals = local_decls_for_sig(&sig, span); - - // Usual case: noop() + unwind resume + return - let mut bbs = IndexVec::with_capacity(3); - let typing_env = ty::TypingEnv::post_analysis(tcx, def_id); - AsyncDestructorCtorShimBuilder { - tcx, - def_id, - self_ty, - span, - source_info, - typing_env, + let first_arg = Local::new(1); - stack: Vec::with_capacity(Self::MAX_STACK_LEN), - last_bb: bbs.push(BasicBlockData::new(None, false)), - top_cleanup_bb: match tcx.sess.panic_strategy() { - PanicStrategy::Unwind => { - // Don't drop input arg because it's just a pointer - Some(bbs.push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { - source_info, - kind: TerminatorKind::UnwindResume, - }), - is_cleanup: true, - })) - } - PanicStrategy::Abort => None, - }, + debug!("build_async_destructor_ctor_shim_for_coroutine(def_id={:?}, ty={:?})", def_id, ty); - locals, - bbs, - } - } + assert!(ty.is_coroutine()); + let args = tcx.mk_args(&[ty.into()]); - fn build(self) -> Body<'tcx> { - let (tcx, Some(self_ty)) = (self.tcx, self.self_ty) else { - return self.build_zst_output(); - }; - match self_ty.async_drop_glue_morphology(tcx) { - AsyncDropGlueMorphology::Noop => span_bug!( - self.span, - "async drop glue shim generator encountered type with noop async drop glue morphology" - ), - AsyncDropGlueMorphology::DeferredDropInPlace => { - return self.build_deferred_drop_in_place(); - } - AsyncDropGlueMorphology::Custom => (), - } - - let surface_drop_kind = || { - let adt_def = self_ty.ty_adt_def()?; - if adt_def.async_destructor(tcx).is_some() { - Some(SurfaceDropKind::Async) - } else if adt_def.destructor(tcx).is_some() { - Some(SurfaceDropKind::Sync) - } else { - None - } - }; + let sig = tcx.fn_sig(def_id).instantiate(tcx, args); + let sig = tcx.instantiate_bound_regions_with_erased(sig); + let span = tcx.def_span(def_id); + let source_info = SourceInfo::outermost(span); - match self_ty.kind() { - ty::Array(elem_ty, _) => self.build_slice(true, *elem_ty), - ty::Slice(elem_ty) => self.build_slice(false, *elem_ty), + debug_assert_eq!(sig.inputs().len(), INPUT_COUNT); + let locals = local_decls_for_sig(&sig, span); - ty::Tuple(elem_tys) => self.build_chain(None, elem_tys.iter()), - ty::Adt(adt_def, args) if adt_def.is_struct() => { - let field_tys = adt_def.non_enum_variant().fields.iter().map(|f| f.ty(tcx, args)); - self.build_chain(surface_drop_kind(), field_tys) - } - ty::Closure(_, args) => self.build_chain(None, args.as_closure().upvar_tys().iter()), - 
ty::CoroutineClosure(_, args) => { - self.build_chain(None, args.as_coroutine_closure().upvar_tys().iter()) - } + let cor_def_id = tcx.lang_items().async_drop_in_place_poll_fn().unwrap(); - ty::Adt(adt_def, args) if adt_def.is_enum() => { - self.build_enum(*adt_def, *args, surface_drop_kind()) - } + let mut statements = Vec::new(); + let mut dropee_ptr = Place::from(first_arg); - ty::Adt(adt_def, _) => { - assert!(adt_def.is_union()); - match surface_drop_kind().unwrap() { - SurfaceDropKind::Async => self.build_fused_async_surface(), - SurfaceDropKind::Sync => self.build_fused_sync_surface(), + // if arg layout is a proxy layout, we need to take its ref field for final impl coroutine + fn find_impl_coroutine<'tcx>(tcx: TyCtxt<'tcx>, mut cor_ty: Ty<'tcx>) -> Ty<'tcx> { + let mut ty = cor_ty; + loop { + if let ty::Coroutine(def_id, args) = ty.kind() { + cor_ty = ty; + if tcx.is_templated_coroutine(*def_id) { + ty = args.first().unwrap().expect_ty(); + continue; + } else { + return cor_ty; } + } else { + return cor_ty; } - - ty::Bound(..) - | ty::Foreign(_) - | ty::Placeholder(_) - | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_) | ty::TyVar(_)) - | ty::Param(_) - | ty::Alias(..) => { - bug!("Building async destructor for unexpected type: {self_ty:?}") - } - - _ => { - bug!( - "Building async destructor constructor shim is not yet implemented for type: {self_ty:?}" - ) - } - } - } - - fn build_enum( - mut self, - adt_def: ty::AdtDef<'tcx>, - args: ty::GenericArgsRef<'tcx>, - surface_drop: Option, - ) -> Body<'tcx> { - let tcx = self.tcx; - - let surface = match surface_drop { - None => None, - Some(kind) => { - self.put_self(); - Some(match kind { - SurfaceDropKind::Async => self.combine_async_surface(), - SurfaceDropKind::Sync => self.combine_sync_surface(), - }) - } - }; - - let mut other = None; - for (variant_idx, discr) in adt_def.discriminants(tcx) { - let variant = adt_def.variant(variant_idx); - - let mut chain = None; - for (field_idx, field) in variant.fields.iter_enumerated() { - let field_ty = field.ty(tcx, args); - self.put_variant_field(variant.name, variant_idx, field_idx, field_ty); - let defer = self.combine_defer(field_ty); - chain = Some(match chain { - None => defer, - Some(chain) => self.combine_chain(chain, defer), - }) - } - let variant_dtor = chain.unwrap_or_else(|| self.put_noop()); - - other = Some(match other { - None => variant_dtor, - Some(other) => { - self.put_self(); - self.put_discr(discr); - self.combine_either(other, variant_dtor) - } - }); } - let variants_dtor = other.unwrap_or_else(|| self.put_noop()); - - let dtor = match surface { - None => variants_dtor, - Some(surface) => self.combine_chain(surface, variants_dtor), - }; - self.combine_fuse(dtor); - self.return_() } - - fn build_chain(mut self, surface_drop: Option, elem_tys: I) -> Body<'tcx> - where - I: Iterator> + ExactSizeIterator, - { - let surface = match surface_drop { - None => None, - Some(kind) => { - self.put_self(); - Some(match kind { - SurfaceDropKind::Async => self.combine_async_surface(), - SurfaceDropKind::Sync => self.combine_sync_surface(), - }) - } - }; - - let mut chain = None; - for (field_idx, field_ty) in elem_tys.enumerate().map(|(i, ty)| (FieldIdx::new(i), ty)) { - self.put_field(field_idx, field_ty); - let defer = self.combine_defer(field_ty); - chain = Some(match chain { - None => defer, - Some(chain) => self.combine_chain(chain, defer), - }) - } - let chain = chain.unwrap_or_else(|| self.put_noop()); - - let dtor = match surface { - None => chain, - 
Some(surface) => self.combine_chain(surface, chain), - }; - self.combine_fuse(dtor); - self.return_() + let impl_cor_ty = find_impl_coroutine(tcx, ty); + let impl_ptr_ty = Ty::new_mut_ptr(tcx, impl_cor_ty); + if impl_cor_ty != ty { + dropee_ptr = dropee_ptr.project_deeper( + &[PlaceElem::Deref, PlaceElem::Field(FieldIdx::ZERO, impl_ptr_ty)], + tcx, + ); } - fn build_zst_output(mut self) -> Body<'tcx> { - self.put_zst_output(); - self.return_() - } + let resume_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None)); + let resume_ty = Ty::new_adt(tcx, resume_adt, ty::List::empty()); + + let cor_args = ty::CoroutineArgs::new( + tcx, + ty::CoroutineArgsParts { + parent_args: args, + kind_ty: tcx.types.unit, + resume_ty, + yield_ty: tcx.types.unit, + return_ty: tcx.types.unit, + witness: Ty::new_coroutine_witness(tcx, cor_def_id, args), + tupled_upvars_ty: Ty::new_tup(tcx, &[impl_ptr_ty]), + }, + ) + .args; + + let mut blocks = IndexVec::with_capacity(1); + statements.push(Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + Place::return_place(), + Rvalue::Aggregate( + Box::new(AggregateKind::Coroutine(cor_def_id, cor_args)), + [Operand::Move(dropee_ptr)].into(), + ), + ))), + }); + blocks.push(BasicBlockData { + statements, + terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }), + is_cleanup: false, + }); + + let source = MirSource::from_instance(ty::InstanceKind::AsyncDropGlueCtorShim(def_id, ty)); + let mut body = new_body(source, blocks, locals, sig.inputs().len(), span); + body.phase = MirPhase::Runtime(RuntimePhase::Initial); + pm::run_passes( + tcx, + &mut body, + &[ + &mentioned_items::MentionedItems, + &simplify::SimplifyCfg::MakeShim, + //&crate::reveal_all::RevealAll, + &abort_unwinding_calls::AbortUnwindingCalls, + &add_call_guards::CriticalCallEdges, + ], + None, + pm::Optimizations::Allowed, + ); + body +} - fn build_deferred_drop_in_place(mut self) -> Body<'tcx> { - self.put_self(); - let deferred = self.combine_deferred_drop_in_place(); - self.combine_fuse(deferred); - self.return_() - } +pub(super) fn build_async_destructor_ctor_shim<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, + ty: Ty<'tcx>, +) -> Body<'tcx> { + debug!("build_async_destructor_ctor_shim(def_id={:?}, ty={:?})", def_id, ty); - fn build_fused_async_surface(mut self) -> Body<'tcx> { - self.put_self(); - let surface = self.combine_async_surface(); - self.combine_fuse(surface); - self.return_() + if ty.is_coroutine() { + build_async_destructor_ctor_shim_for_coroutine(tcx, def_id, ty) + } else { + build_async_destructor_ctor_shim_not_coroutine(tcx, def_id, ty) } +} - fn build_fused_sync_surface(mut self) -> Body<'tcx> { - self.put_self(); - let surface = self.combine_sync_surface(); - self.combine_fuse(surface); - self.return_() - } +fn build_async_destructor_ctor_shim_not_coroutine<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, + ty: Ty<'tcx>, +) -> Body<'tcx> { + debug_assert_eq!(Some(def_id), tcx.lang_items().async_drop_in_place_fn()); + let generic_body = tcx.optimized_mir(def_id); + let args = tcx.mk_args(&[ty.into()]); + let mut body = EarlyBinder::bind(generic_body.clone()).instantiate(tcx, args); + + pm::run_passes( + tcx, + &mut body, + &[ + &simplify::SimplifyCfg::MakeShim, + //&crate::reveal_all::RevealAll, + &abort_unwinding_calls::AbortUnwindingCalls, + &add_call_guards::CriticalCallEdges, + ], + None, + pm::Optimizations::Allowed, + ); + body +} - fn build_slice(mut self, is_array: bool, elem_ty: Ty<'tcx>) -> Body<'tcx> { - if is_array { - 
self.put_array_as_slice(elem_ty) +// build_drop_shim analog for async drop glue (for generated coroutine poll function) +pub(super) fn build_async_drop_shim<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, + ty: Ty<'tcx>, +) -> Body<'tcx> { + debug!("build_async_drop_shim(def_id={:?}, ty={:?})", def_id, ty); + let ty::Coroutine(_, parent_args) = ty.kind() else { + bug!(); + }; + let typing_env = ty::TypingEnv::fully_monomorphized(); + //let param_env = tcx.param_env_reveal_all_normalized(def_id); + + let drop_ty = parent_args.first().unwrap().expect_ty(); + let drop_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty); + + assert!(tcx.is_coroutine(def_id)); + let coroutine_kind = tcx.coroutine_kind(def_id).unwrap(); + + assert!(matches!( + coroutine_kind, + CoroutineKind::Desugared(CoroutineDesugaring::Async, CoroutineSource::Fn) + )); + + let needs_async_drop = drop_ty.needs_async_drop(tcx, typing_env); + let needs_sync_drop = !needs_async_drop && drop_ty.needs_drop(tcx, typing_env); + + let resume_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None)); + let resume_ty = Ty::new_adt(tcx, resume_adt, ty::List::empty()); + + let fn_sig = ty::Binder::dummy(tcx.mk_fn_sig( + [ty, resume_ty], + tcx.types.unit, + false, + Safety::Safe, + rustc_target::spec::abi::Abi::Rust, + )); + let sig = tcx.instantiate_bound_regions_with_erased(fn_sig); + + assert!(!drop_ty.is_coroutine()); + let span = tcx.def_span(def_id); + let source_info = SourceInfo::outermost(span); + + // The first argument (index 0), but add 1 for the return value. + let coroutine_layout = Place::from(Local::new(1 + 0)); + let coroutine_layout_dropee = + tcx.mk_place_field(coroutine_layout, FieldIdx::new(0), drop_ptr_ty); + + let return_block = BasicBlock::new(1); + let mut blocks = IndexVec::with_capacity(2); + let block = |blocks: &mut IndexVec<_, _>, kind| { + blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { source_info, kind }), + is_cleanup: false, + }) + }; + block( + &mut blocks, + if needs_sync_drop { + TerminatorKind::Drop { + place: tcx.mk_place_deref(coroutine_layout_dropee), + target: return_block, + unwind: UnwindAction::Continue, + replace: false, + drop: None, + async_fut: None, + } } else { - self.put_self() - } - let dtor = self.combine_slice(elem_ty); - self.combine_fuse(dtor); - self.return_() - } - - fn put_zst_output(&mut self) { - let return_ty = self.locals[RETURN_PLACE].ty; - self.put_operand(Operand::Constant(Box::new(ConstOperand { - span: self.span, - user_ty: None, - const_: Const::zero_sized(return_ty), - }))); - } - - /// Puts `to_drop: *mut Self` on top of the stack. - fn put_self(&mut self) { - self.put_operand(Operand::Copy(Self::SELF_PTR.into())) + TerminatorKind::Goto { target: return_block } + }, + ); + block(&mut blocks, TerminatorKind::Return); + + let source = MirSource::from_instance(ty::InstanceKind::AsyncDropGlue(def_id, ty)); + let mut body = + new_body(source, blocks, local_decls_for_sig(&sig, span), sig.inputs().len(), span); + + body.coroutine = Some(Box::new(CoroutineInfo::initial( + coroutine_kind, + parent_args.as_coroutine().yield_ty(), + parent_args.as_coroutine().resume_ty(), + ))); + body.phase = MirPhase::Runtime(RuntimePhase::Initial); + if !needs_async_drop { + // Returning noop body for types without `need async drop` + // (or sync Drop in case of !`need async drop` && `need drop`) + return body; } - /// Given that `Self is [ElemTy; N]` puts `to_drop: *mut [ElemTy]` - /// on top of the stack. 
- fn put_array_as_slice(&mut self, elem_ty: Ty<'tcx>) { - let slice_ptr_ty = Ty::new_mut_ptr(self.tcx, Ty::new_slice(self.tcx, elem_ty)); - self.put_temp_rvalue(Rvalue::Cast( - CastKind::PointerCoercion(PointerCoercion::Unsize, CoercionSource::Implicit), - Operand::Copy(Self::SELF_PTR.into()), - slice_ptr_ty, - )) - } - - /// If given Self is a struct puts `to_drop: *mut FieldTy` on top - /// of the stack. - fn put_field(&mut self, field: FieldIdx, field_ty: Ty<'tcx>) { - let place = Place { - local: Self::SELF_PTR, - projection: self - .tcx - .mk_place_elems(&[PlaceElem::Deref, PlaceElem::Field(field, field_ty)]), - }; - self.put_temp_rvalue(Rvalue::RawPtr(RawPtrKind::Mut, place)) - } - - /// If given Self is an enum puts `to_drop: *mut FieldTy` on top of - /// the stack. - fn put_variant_field( - &mut self, - variant_sym: Symbol, - variant: VariantIdx, - field: FieldIdx, - field_ty: Ty<'tcx>, - ) { - let place = Place { - local: Self::SELF_PTR, - projection: self.tcx.mk_place_elems(&[ - PlaceElem::Deref, - PlaceElem::Downcast(Some(variant_sym), variant), - PlaceElem::Field(field, field_ty), - ]), - }; - self.put_temp_rvalue(Rvalue::RawPtr(RawPtrKind::Mut, place)) - } - - /// If given Self is an enum puts `to_drop: *mut FieldTy` on top of - /// the stack. - fn put_discr(&mut self, discr: Discr<'tcx>) { - let (size, _) = discr.ty.int_size_and_signed(self.tcx); - self.put_operand(Operand::const_from_scalar( - self.tcx, - discr.ty, - interpret::Scalar::from_uint(discr.val, size), - self.span, - )); - } - - /// Puts `x: RvalueType` on top of the stack. - fn put_temp_rvalue(&mut self, rvalue: Rvalue<'tcx>) { - let last_bb = &mut self.bbs[self.last_bb]; - debug_assert!(last_bb.terminator.is_none()); - let source_info = self.source_info; - - let local_ty = rvalue.ty(&self.locals, self.tcx); - // We need to create a new local to be able to "consume" it with - // a combinator - let local = self.locals.push(LocalDecl::with_source_info(local_ty, source_info)); - last_bb.statements.extend_from_slice(&[ - Statement { source_info, kind: StatementKind::StorageLive(local) }, - Statement { - source_info, - kind: StatementKind::Assign(Box::new((local.into(), rvalue))), - }, - ]); - - self.put_operand(Operand::Move(local.into())); - } - - /// Puts operand on top of the stack. 
- fn put_operand(&mut self, operand: Operand<'tcx>) { - if let Some(top_cleanup_bb) = &mut self.top_cleanup_bb { - let source_info = self.source_info; - match &operand { - Operand::Copy(_) | Operand::Constant(_) => { - *top_cleanup_bb = self.bbs.push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { - source_info, - kind: TerminatorKind::Goto { target: *top_cleanup_bb }, - }), - is_cleanup: true, - }); - } - Operand::Move(place) => { - let local = place.as_local().unwrap(); - *top_cleanup_bb = self.bbs.push(BasicBlockData { - statements: Vec::new(), - terminator: Some(Terminator { - source_info, - kind: if self.locals[local].ty.needs_drop(self.tcx, self.typing_env) { - TerminatorKind::Drop { - place: local.into(), - target: *top_cleanup_bb, - unwind: UnwindAction::Terminate( - UnwindTerminateReason::InCleanup, - ), - replace: false, - } - } else { - TerminatorKind::Goto { target: *top_cleanup_bb } - }, - }), - is_cleanup: true, - }); - } - }; + let mut dropee_ptr = Place::from(body.local_decls.push(LocalDecl::new(drop_ptr_ty, span))); + let st_kind = StatementKind::Assign(Box::new(( + dropee_ptr, + Rvalue::Use(Operand::Move(coroutine_layout_dropee)), + ))); + body.basic_blocks_mut()[START_BLOCK].statements.push(Statement { source_info, kind: st_kind }); + + if tcx.sess.opts.unstable_opts.mir_emit_retag { + // We want to treat the function argument as if it was passed by `&mut`. As such, we + // generate + // ``` + // temp = &mut *arg; + // Retag(temp, FnEntry) + // ``` + // It's important that we do this first, before anything that depends on `dropee_ptr` + // has been put into the body. + let reborrow = Rvalue::Ref( + tcx.lifetimes.re_erased, + BorrowKind::Mut { kind: MutBorrowKind::Default }, + tcx.mk_place_deref(dropee_ptr), + ); + let ref_ty = reborrow.ty(&body.local_decls, tcx); + dropee_ptr = body.local_decls.push(LocalDecl::new(ref_ty, span)).into(); + let new_statements = [ + StatementKind::Assign(Box::new((dropee_ptr, reborrow))), + StatementKind::Retag(RetagKind::FnEntry, Box::new(dropee_ptr)), + ]; + for s in new_statements { + body.basic_blocks_mut()[START_BLOCK] + .statements + .push(Statement { source_info, kind: s }); } - self.stack.push(operand); } - /// Puts `noop: async_drop::Noop` on top of the stack - fn put_noop(&mut self) -> Ty<'tcx> { - self.apply_combinator(0, LangItem::AsyncDropNoop, &[]) - } + let dropline = body.basic_blocks.last_index(); - fn combine_async_surface(&mut self) -> Ty<'tcx> { - self.apply_combinator(1, LangItem::SurfaceAsyncDropInPlace, &[self.self_ty.unwrap().into()]) - } + let patch = { + let mut elaborator = DropShimElaborator { + body: &body, + patch: MirPatch::new(&body), + tcx, + typing_env, + produce_async_drops: true, + }; + let dropee = tcx.mk_place_deref(dropee_ptr); + let resume_block = elaborator.patch.resume_block(); + elaborate_drops::elaborate_drop( + &mut elaborator, + source_info, + dropee, + (), + return_block, + elaborate_drops::Unwind::To(resume_block), + START_BLOCK, + dropline, + ); + elaborator.patch + }; + patch.apply(&mut body); - fn combine_sync_surface(&mut self) -> Ty<'tcx> { - self.apply_combinator( - 1, - LangItem::AsyncDropSurfaceDropInPlace, - &[self.self_ty.unwrap().into()], - ) - } + body +} - fn combine_deferred_drop_in_place(&mut self) -> Ty<'tcx> { - self.apply_combinator( - 1, - LangItem::AsyncDropDeferredDropInPlace, - &[self.self_ty.unwrap().into()], +pub(super) fn build_future_drop_poll_shim<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, + proxy_ty: Ty<'tcx>, + impl_ty: Ty<'tcx>, +) -> 
Body<'tcx> { + let instance = ty::InstanceKind::FutureDropPollShim(def_id, proxy_ty, impl_ty); + let ty::Coroutine(coroutine_def_id, impl_args) = impl_ty.kind() else { + bug!("FutureDropPollShim not for coroutine impl type: ({:?})", instance); + }; + + let span = tcx.def_span(def_id); + let source_info = SourceInfo::outermost(span); + + let pin_proxy_layout_local = Local::new(1); + let cor_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, impl_ty); + let proxy_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, proxy_ty); + // taking _1.0.0 (impl from Pin, impl from proxy) + let proxy_ref_place = Place::from(pin_proxy_layout_local) + .project_deeper(&[PlaceElem::Field(FieldIdx::ZERO, proxy_ref)], tcx); + let impl_ref_place = |proxy_ref_local: Local| { + Place::from(proxy_ref_local).project_deeper( + &[ + PlaceElem::Deref, + PlaceElem::Downcast(None, VariantIdx::ZERO), + PlaceElem::Field(FieldIdx::ZERO, cor_ref), + ], + tcx, ) - } - - fn combine_fuse(&mut self, inner_future_ty: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator(1, LangItem::AsyncDropFuse, &[inner_future_ty.into()]) - } - - fn combine_slice(&mut self, elem_ty: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator(1, LangItem::AsyncDropSlice, &[elem_ty.into()]) - } - - fn combine_defer(&mut self, to_drop_ty: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator(1, LangItem::AsyncDropDefer, &[to_drop_ty.into()]) - } + }; + + if tcx.is_templated_coroutine(*coroutine_def_id) { + // ret_ty = `Poll<()>` + let poll_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Poll, None)); + let ret_ty = Ty::new_adt(tcx, poll_adt_ref, tcx.mk_args(&[tcx.types.unit.into()])); + // env_ty = `Pin<&mut proxy_ty>` + let pin_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Pin, None)); + let env_ty = Ty::new_adt(tcx, pin_adt_ref, tcx.mk_args(&[proxy_ref.into()])); + // sig = `fn (Pin<&mut proxy_ty>, &mut Context) -> Poll<()>` + let sig = tcx.mk_fn_sig( + [env_ty, Ty::new_task_context(tcx)], + ret_ty, + false, + hir::Safety::Safe, + rustc_target::spec::abi::Abi::Rust, + ); + let mut locals = local_decls_for_sig(&sig, span); + let mut blocks = IndexVec::with_capacity(3); - fn combine_chain(&mut self, first: Ty<'tcx>, second: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator(2, LangItem::AsyncDropChain, &[first.into(), second.into()]) - } + let proxy_ref_local = locals.push(LocalDecl::new(proxy_ref, span)); + let cor_ref_local = locals.push(LocalDecl::new(cor_ref, span)); + let cor_ref_place = Place::from(cor_ref_local); - fn combine_either(&mut self, other: Ty<'tcx>, matched: Ty<'tcx>) -> Ty<'tcx> { - self.apply_combinator( - 4, - LangItem::AsyncDropEither, - &[other.into(), matched.into(), self.self_ty.unwrap().into()], - ) - } + let call_bb = BasicBlock::new(1); + let return_bb = BasicBlock::new(2); - fn return_(mut self) -> Body<'tcx> { - let last_bb = &mut self.bbs[self.last_bb]; - debug_assert!(last_bb.terminator.is_none()); - let source_info = self.source_info; - - let (1, Some(output)) = (self.stack.len(), self.stack.pop()) else { - span_bug!( - self.span, - "async destructor ctor shim builder finished with invalid number of stack items: expected 1 found {}", - self.stack.len(), - ) + let assign1 = Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + Place::from(proxy_ref_local), + Rvalue::CopyForDeref(proxy_ref_place), + ))), }; - #[cfg(debug_assertions)] - if let Some(ty) = self.self_ty { - debug_assert_eq!( - output.ty(&self.locals, self.tcx), - ty.async_destructor_ty(self.tcx), - "output async destructor types did not match for 
type: {ty:?}", - ); - } + let assign2 = Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + cor_ref_place, + Rvalue::CopyForDeref(impl_ref_place(proxy_ref_local)), + ))), + }; + + // cor_pin_ty = `Pin<&mut cor_ref>` + let cor_pin_ty = Ty::new_adt(tcx, pin_adt_ref, tcx.mk_args(&[cor_ref.into()])); + let cor_pin_place = Place::from(locals.push(LocalDecl::new(cor_pin_ty, span))); - let dead_storage = match &output { - Operand::Move(place) => Some(Statement { + let pin_fn = tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)); + // call Pin::new_unchecked(&mut impl_cor) + blocks.push(BasicBlockData { + statements: vec![assign1, assign2], + terminator: Some(Terminator { source_info, - kind: StatementKind::StorageDead(place.as_local().unwrap()), + kind: TerminatorKind::Call { + func: Operand::function_handle(tcx, pin_fn, [cor_ref.into()], span), + args: [dummy_spanned(Operand::Move(cor_ref_place))].into(), + destination: cor_pin_place, + target: Some(call_bb), + unwind: UnwindAction::Continue, + call_source: CallSource::Misc, + fn_span: span, + }, }), - _ => None, - }; - - last_bb.statements.extend( - iter::once(Statement { + is_cleanup: false, + }); + // When dropping async drop coroutine, we continue its execution: + // we call impl::poll (impl_layout, ctx) + let poll_fn = tcx.require_lang_item(LangItem::FuturePoll, None); + let resume_ctx = Place::from(Local::new(2)); + blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { source_info, - kind: StatementKind::Assign(Box::new((RETURN_PLACE.into(), Rvalue::Use(output)))), - }) - .chain(dead_storage), - ); - - last_bb.terminator = Some(Terminator { source_info, kind: TerminatorKind::Return }); - - let source = MirSource::from_instance(ty::InstanceKind::AsyncDropGlueCtorShim( - self.def_id, - self.self_ty, - )); - new_body(source, self.bbs, self.locals, Self::INPUT_COUNT, self.span) - } - - fn apply_combinator( - &mut self, - arity: usize, - function: LangItem, - args: &[ty::GenericArg<'tcx>], - ) -> Ty<'tcx> { - let function = self.tcx.require_lang_item(function, Some(self.span)); - let operands_split = self - .stack - .len() - .checked_sub(arity) - .expect("async destructor ctor shim combinator tried to consume too many items"); - let operands = &self.stack[operands_split..]; - - let func_ty = Ty::new_fn_def(self.tcx, function, args.iter().copied()); - let func_sig = func_ty.fn_sig(self.tcx).no_bound_vars().unwrap(); - #[cfg(debug_assertions)] - operands.iter().zip(func_sig.inputs()).for_each(|(operand, expected_ty)| { - let operand_ty = operand.ty(&self.locals, self.tcx); - if operand_ty == *expected_ty { - return; - } - - // If projection of Discriminant then compare with `Ty::discriminant_ty` - if let ty::Alias(ty::Projection, ty::AliasTy { args, def_id, .. }) = expected_ty.kind() - && self.tcx.is_lang_item(*def_id, LangItem::Discriminant) - && args.first().unwrap().as_type().unwrap().discriminant_ty(self.tcx) == operand_ty - { - return; - } - - span_bug!( - self.span, - "Operand type and combinator argument type are not equal. 
- operand_ty: {:?} - argument_ty: {:?} -", - operand_ty, - expected_ty - ); + kind: TerminatorKind::Call { + func: Operand::function_handle(tcx, poll_fn, [impl_ty.into()], span), + args: [ + dummy_spanned(Operand::Move(cor_pin_place)), + dummy_spanned(Operand::Move(resume_ctx)), + ] + .into(), + destination: Place::return_place(), + target: Some(return_bb), + unwind: UnwindAction::Continue, + call_source: CallSource::Misc, + fn_span: span, + }, + }), + is_cleanup: false, }); - - let target = self.bbs.push(BasicBlockData { - statements: operands - .iter() - .rev() - .filter_map(|o| { - if let Operand::Move(Place { local, projection }) = o { - assert!(projection.is_empty()); - Some(Statement { - source_info: self.source_info, - kind: StatementKind::StorageDead(*local), - }) - } else { - None - } - }) - .collect(), - terminator: None, + blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { source_info, kind: TerminatorKind::Return }), is_cleanup: false, }); - let dest_ty = func_sig.output(); - let dest = - self.locals.push(LocalDecl::with_source_info(dest_ty, self.source_info).immutable()); - - let unwind = if let Some(top_cleanup_bb) = &mut self.top_cleanup_bb { - for _ in 0..arity { - *top_cleanup_bb = - self.bbs[*top_cleanup_bb].terminator().successors().exactly_one().ok().unwrap(); - } - UnwindAction::Cleanup(*top_cleanup_bb) - } else { - UnwindAction::Unreachable - }; + let source = MirSource::from_instance(instance); + let mut body = new_body(source, blocks, locals, sig.inputs().len(), span); + body.phase = MirPhase::Runtime(RuntimePhase::Initial); + return body; + } + // future drop poll for async drop must be resolved to standart poll (AsyncDropGlue) + assert!(!tcx.is_templated_coroutine(*coroutine_def_id)); + + // converting `(_1: Pin<&mut CorLayout>, _2: &mut Context<'_>) -> Poll<()>` + // into `(_1: Pin<&mut ProxyLayout>, _2: &mut Context<'_>) -> Poll<()>` + // let mut _x: &mut CorLayout = &*_1.0.0; + // Replace old _1.0 accesses into _x accesses; + let body = tcx.optimized_mir(*coroutine_def_id).future_drop_poll().unwrap(); + let mut body: Body<'tcx> = EarlyBinder::bind(body.clone()).instantiate(tcx, impl_args); + body.source.instance = instance; + body.phase = MirPhase::Runtime(RuntimePhase::Initial); + body.var_debug_info.clear(); + let pin_adt_ref = tcx.adt_def(tcx.require_lang_item(LangItem::Pin, Some(span))); + let args = tcx.mk_args(&[proxy_ref.into()]); + let pin_proxy_ref = Ty::new_adt(tcx, pin_adt_ref, args); + + let proxy_ref_local = body.local_decls.push(LocalDecl::new(proxy_ref, span)); + let cor_ref_local = body.local_decls.push(LocalDecl::new(cor_ref, span)); + FixProxyFutureDropVisitor { tcx, replace_to: cor_ref_local }.visit_body(&mut body); + // Now changing first arg from Pin<&mut ImplCoroutine> to Pin<&mut ProxyCoroutine> + body.local_decls[pin_proxy_layout_local] = LocalDecl::new(pin_proxy_ref, span); - let last_bb = &mut self.bbs[self.last_bb]; - debug_assert!(last_bb.terminator.is_none()); - last_bb.statements.push(Statement { - source_info: self.source_info, - kind: StatementKind::StorageLive(dest), - }); - last_bb.terminator = Some(Terminator { - source_info: self.source_info, - kind: TerminatorKind::Call { - func: Operand::Constant(Box::new(ConstOperand { - span: self.span, - user_ty: None, - const_: Const::Val(ConstValue::ZeroSized, func_ty), - })), - destination: dest.into(), - target: Some(target), - unwind, - call_source: CallSource::Misc, - fn_span: self.span, - args: self.stack.drain(operands_split..).map(|o| 
respan(self.span, o)).collect(), + { + let bb: &mut BasicBlockData<'tcx> = &mut body.basic_blocks_mut()[START_BLOCK]; + // _tmp = _1.0 : Pin<&ProxyLayout> ==> &ProxyLayout + bb.statements.insert( + 0, + Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + Place::from(proxy_ref_local), + Rvalue::CopyForDeref(proxy_ref_place), + ))), }, - }); - - self.put_operand(Operand::Move(dest.into())); - self.last_bb = target; - - dest_ty + ); + bb.statements.insert( + 1, + Statement { + source_info, + kind: StatementKind::Assign(Box::new(( + Place::from(cor_ref_local), + Rvalue::CopyForDeref(impl_ref_place(proxy_ref_local)), + ))), + }, + ); } + body } diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs index 4ac3a268c9c33..89d2ce5a81b81 100644 --- a/compiler/rustc_mir_transform/src/validate.rs +++ b/compiler/rustc_mir_transform/src/validate.rs @@ -373,9 +373,12 @@ impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> { ); } } - TerminatorKind::Drop { target, unwind, .. } => { + TerminatorKind::Drop { target, unwind, drop, .. } => { self.check_edge(location, *target, EdgeKind::Normal); self.check_unwind_edge(location, *unwind); + if let Some(drop) = drop { + self.check_edge(location, *drop, EdgeKind::Normal); + } } TerminatorKind::Call { func, args, .. } | TerminatorKind::TailCall { func, args, .. } => { @@ -745,7 +748,9 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { // since we may be in the process of computing this MIR in the // first place. let layout = if def_id == self.caller_body.source.def_id() { - self.caller_body.coroutine_layout_raw() + self.caller_body + .coroutine_layout_raw() + .or_else(|| self.tcx.coroutine_layout(def_id, args)) } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id) && let ty::ClosureKind::FnOnce = args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap() @@ -755,7 +760,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { // Same if this is the by-move body of a coroutine-closure. self.caller_body.coroutine_layout_raw() } else { - self.tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty()) + self.tcx.coroutine_layout(def_id, args) }; let Some(layout) = layout else { diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index ae31ed5939174..69ecf452fbb49 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -930,14 +930,16 @@ fn visit_instance_use<'tcx>( ty::InstanceKind::ThreadLocalShim(..) => { bug!("{:?} being reified", instance); } - ty::InstanceKind::DropGlue(_, None) | ty::InstanceKind::AsyncDropGlueCtorShim(_, None) => { + ty::InstanceKind::DropGlue(_, None) => { // Don't need to emit noop drop glue if we are calling directly. if !is_direct_call { output.push(create_fn_mono_item(tcx, instance, source)); } } ty::InstanceKind::DropGlue(_, Some(_)) - | ty::InstanceKind::AsyncDropGlueCtorShim(_, Some(_)) + | ty::InstanceKind::FutureDropPollShim(..) + | ty::InstanceKind::AsyncDropGlue(_, _) + | ty::InstanceKind::AsyncDropGlueCtorShim(_, _) | ty::InstanceKind::VTableShim(..) | ty::InstanceKind::ReifyShim(..) | ty::InstanceKind::ClosureOnceShim { .. 
} diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index d826d03918e08..21c54c914a62c 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -643,6 +643,8 @@ fn characteristic_def_id_of_mono_item<'tcx>( | ty::InstanceKind::CloneShim(..) | ty::InstanceKind::ThreadLocalShim(..) | ty::InstanceKind::FnPtrAddrShim(..) + | ty::InstanceKind::FutureDropPollShim(..) + | ty::InstanceKind::AsyncDropGlue(..) | ty::InstanceKind::AsyncDropGlueCtorShim(..) => return None, }; @@ -795,7 +797,9 @@ fn mono_item_visibility<'tcx>( let def_id = match instance.def { InstanceKind::Item(def_id) | InstanceKind::DropGlue(def_id, Some(_)) - | InstanceKind::AsyncDropGlueCtorShim(def_id, Some(_)) => def_id, + | InstanceKind::FutureDropPollShim(def_id, _, _) + | InstanceKind::AsyncDropGlue(def_id, _) + | InstanceKind::AsyncDropGlueCtorShim(def_id, _) => def_id, // We match the visibility of statics here InstanceKind::ThreadLocalShim(def_id) => { @@ -811,7 +815,6 @@ fn mono_item_visibility<'tcx>( | InstanceKind::ClosureOnceShim { .. } | InstanceKind::ConstructCoroutineInClosureShim { .. } | InstanceKind::DropGlue(..) - | InstanceKind::AsyncDropGlueCtorShim(..) | InstanceKind::CloneShim(..) | InstanceKind::FnPtrAddrShim(..) => return Visibility::Hidden, }; diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs index d0b01b14d6358..0f3961870cd8d 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs @@ -252,11 +252,6 @@ where goal: Goal, ) -> Result, NoSolution>; - fn consider_builtin_async_destruct_candidate( - ecx: &mut EvalCtxt<'_, D>, - goal: Goal, - ) -> Result, NoSolution>; - fn consider_builtin_destruct_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, @@ -469,9 +464,6 @@ where Some(TraitSolverLangItem::DiscriminantKind) => { G::consider_builtin_discriminant_kind_candidate(self, goal) } - Some(TraitSolverLangItem::AsyncDestruct) => { - G::consider_builtin_async_destruct_candidate(self, goal) - } Some(TraitSolverLangItem::Destruct) => { G::consider_builtin_destruct_candidate(self, goal) } diff --git a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs index fb53e778e6030..f83ac302b186b 100644 --- a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs @@ -336,13 +336,6 @@ where unreachable!("DiscriminantKind is not const") } - fn consider_builtin_async_destruct_candidate( - _ecx: &mut EvalCtxt<'_, D>, - _goal: Goal, - ) -> Result, NoSolution> { - unreachable!("AsyncDestruct is not const") - } - fn consider_builtin_destruct_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs index c8ea0b119ffbd..ec7b5f6e1a8c7 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs @@ -871,64 +871,6 @@ where }) } - fn consider_builtin_async_destruct_candidate( - ecx: &mut EvalCtxt<'_, D>, - goal: Goal, - ) -> Result, NoSolution> { - let self_ty = goal.predicate.self_ty(); - let async_destructor_ty = match self_ty.kind() { - ty::Bool - | ty::Char - | ty::Int(..) - | ty::Uint(..) 
- | ty::Float(..) - | ty::Array(..) - | ty::RawPtr(..) - | ty::Ref(..) - | ty::FnDef(..) - | ty::FnPtr(..) - | ty::Closure(..) - | ty::CoroutineClosure(..) - | ty::Infer(ty::IntVar(..) | ty::FloatVar(..)) - | ty::Never - | ty::Adt(_, _) - | ty::Str - | ty::Slice(_) - | ty::Tuple(_) - | ty::Error(_) => self_ty.async_destructor_ty(ecx.cx()), - - ty::UnsafeBinder(_) => { - // FIXME(unsafe_binders): Instantiate the binder with placeholders I guess. - todo!() - } - - // We do not call `Ty::async_destructor_ty` on alias, param, or placeholder - // types, which return `::AsyncDestructor` - // (or ICE in the case of placeholders). Projecting a type to itself - // is never really productive. - ty::Alias(_, _) | ty::Param(_) | ty::Placeholder(..) => { - return Err(NoSolution); - } - - ty::Infer(ty::TyVar(_) | ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) - | ty::Foreign(..) - | ty::Bound(..) => panic!( - "unexpected self ty `{:?}` when normalizing `::AsyncDestructor`", - goal.predicate.self_ty() - ), - - ty::Pat(..) | ty::Dynamic(..) | ty::Coroutine(..) | ty::CoroutineWitness(..) => panic!( - "`consider_builtin_async_destruct_candidate` is not yet implemented for type: {self_ty:?}" - ), - }; - - ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| { - ecx.eq(goal.param_env, goal.predicate.term, async_destructor_ty.into()) - .expect("expected goal term to be fully unconstrained"); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) - }) - } - fn consider_builtin_destruct_candidate( _ecx: &mut EvalCtxt<'_, D>, goal: Goal, diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index 46fae5c6fa465..99a7e23ecac89 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -569,19 +569,6 @@ where .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) } - fn consider_builtin_async_destruct_candidate( - ecx: &mut EvalCtxt<'_, D>, - goal: Goal, - ) -> Result, NoSolution> { - if goal.predicate.polarity != ty::PredicatePolarity::Positive { - return Err(NoSolution); - } - - // `AsyncDestruct` is automatically implemented for every type. - ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc) - .enter(|ecx| ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)) - } - fn consider_builtin_destruct_candidate( ecx: &mut EvalCtxt<'_, D>, goal: Goal, diff --git a/compiler/rustc_passes/src/lang_items.rs b/compiler/rustc_passes/src/lang_items.rs index fe1140d893e37..5cf4761768c48 100644 --- a/compiler/rustc_passes/src/lang_items.rs +++ b/compiler/rustc_passes/src/lang_items.rs @@ -57,6 +57,7 @@ impl<'ast, 'tcx> LanguageItemCollector<'ast, 'tcx> { &mut self, actual_target: Target, def_id: LocalDefId, + cor_def_id: Option, attrs: &'ast [ast::Attribute], item_span: Span, generics: Option<&'ast ast::Generics>, @@ -73,6 +74,18 @@ impl<'ast, 'tcx> LanguageItemCollector<'ast, 'tcx> { generics, actual_target, ); + // We need to register LangItem::AsyncDropInPlacePoll + // for async_drop_in_place::{closure} + if cor_def_id.is_some() && lang_item == LangItem::AsyncDropInPlace { + self.collect_item_extended( + LangItem::AsyncDropInPlacePoll, + cor_def_id.unwrap(), + item_span, + attr_span, + generics, + actual_target, + ); + } } // Known lang item with attribute on incorrect target. 
Some(lang_item) => { @@ -288,10 +301,18 @@ impl<'ast, 'tcx> visit::Visitor<'ast> for LanguageItemCollector<'ast, 'tcx> { unreachable!("macros should have been expanded") } }; + let cor_def_id = if let ast::ItemKind::Fn(box ast::Fn { sig, .. }) = &i.kind + && let Some(kind) = sig.header.coroutine_kind + { + self.resolver.node_id_to_def_id.get(&kind.closure_id()).copied() + } else { + None + }; self.check_for_lang( target, self.resolver.node_id_to_def_id[&i.id], + cor_def_id, &i.attrs, i.span, i.opt_generics(), @@ -307,6 +328,7 @@ impl<'ast, 'tcx> visit::Visitor<'ast> for LanguageItemCollector<'ast, 'tcx> { self.check_for_lang( Target::Variant, self.resolver.node_id_to_def_id[&variant.id], + None, &variant.attrs, variant.span, None, @@ -349,6 +371,7 @@ impl<'ast, 'tcx> visit::Visitor<'ast> for LanguageItemCollector<'ast, 'tcx> { self.check_for_lang( target, self.resolver.node_id_to_def_id[&i.id], + None, &i.attrs, i.span, generics, diff --git a/compiler/rustc_passes/src/lib.rs b/compiler/rustc_passes/src/lib.rs index 1aa077ad2bb57..a15fe82fe9ae0 100644 --- a/compiler/rustc_passes/src/lib.rs +++ b/compiler/rustc_passes/src/lib.rs @@ -8,6 +8,7 @@ #![allow(internal_features)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(box_patterns)] #![feature(let_chains)] #![feature(map_try_insert)] #![feature(rustdoc_internals)] diff --git a/compiler/rustc_smir/src/rustc_smir/context.rs b/compiler/rustc_smir/src/rustc_smir/context.rs index cdc56782a265f..ee2480f045afc 100644 --- a/compiler/rustc_smir/src/rustc_smir/context.rs +++ b/compiler/rustc_smir/src/rustc_smir/context.rs @@ -619,12 +619,6 @@ impl<'tcx> Context for TablesWrapper<'tcx> { matches!(instance.def, ty::InstanceKind::DropGlue(_, None)) } - fn is_empty_async_drop_ctor_shim(&self, def: InstanceDef) -> bool { - let tables = self.0.borrow_mut(); - let instance = tables.instances[def]; - matches!(instance.def, ty::InstanceKind::AsyncDropGlueCtorShim(_, None)) - } - fn mono_instance(&self, def_id: stable_mir::DefId) -> stable_mir::mir::mono::Instance { let mut tables = self.0.borrow_mut(); let def_id = tables[def_id]; diff --git a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs index bdd6e16a7c171..f0c0e16a918eb 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/mir.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/mir.rs @@ -493,6 +493,9 @@ impl<'tcx> Stable<'tcx> for mir::AssertMessage<'tcx> { AssertKind::ResumedAfterPanic(coroutine) => { stable_mir::mir::AssertMessage::ResumedAfterPanic(coroutine.stable(tables)) } + AssertKind::ResumedAfterDrop(coroutine) => { + stable_mir::mir::AssertMessage::ResumedAfterDrop(coroutine.stable(tables)) + } AssertKind::MisalignedPointerDereference { required, found } => { stable_mir::mir::AssertMessage::MisalignedPointerDereference { required: required.stable(tables), @@ -647,13 +650,18 @@ impl<'tcx> Stable<'tcx> for mir::TerminatorKind<'tcx> { mir::TerminatorKind::UnwindTerminate(_) => TerminatorKind::Abort, mir::TerminatorKind::Return => TerminatorKind::Return, mir::TerminatorKind::Unreachable => TerminatorKind::Unreachable, - mir::TerminatorKind::Drop { place, target, unwind, replace: _ } => { - TerminatorKind::Drop { - place: place.stable(tables), - target: target.as_usize(), - unwind: unwind.stable(tables), - } - } + mir::TerminatorKind::Drop { + place, + target, + unwind, + replace: _, + drop: _, + async_fut: _, + } => TerminatorKind::Drop { + place: place.stable(tables), + target: 
target.as_usize(), + unwind: unwind.stable(tables), + }, mir::TerminatorKind::Call { func, args, diff --git a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs index a0faf20c79a67..2391e0d7e7727 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs @@ -811,6 +811,8 @@ impl<'tcx> Stable<'tcx> for ty::Instance<'tcx> { | ty::InstanceKind::DropGlue(..) | ty::InstanceKind::CloneShim(..) | ty::InstanceKind::FnPtrShim(..) + | ty::InstanceKind::FutureDropPollShim(..) + | ty::InstanceKind::AsyncDropGlue(..) | ty::InstanceKind::AsyncDropGlueCtorShim(..) => { stable_mir::mir::mono::InstanceKind::Shim } diff --git a/compiler/rustc_span/src/def_id.rs b/compiler/rustc_span/src/def_id.rs index f61ce37131eb4..029a384799569 100644 --- a/compiler/rustc_span/src/def_id.rs +++ b/compiler/rustc_span/src/def_id.rs @@ -298,6 +298,20 @@ impl DefId { } } + // For templated coroutine we have non-local async_drop_in_place::coroutine def_id + #[inline] + #[track_caller] + pub fn expect_local_or_templated(self, is_templated: F) -> DefId + where + F: FnOnce(DefId) -> bool, + { + assert!( + self.is_local() || is_templated(self), + "DefId::expect_local_or_templated: `{self:?}` isn't local or templated" + ); + self + } + #[inline] pub fn is_crate_root(self) -> bool { self.index == CRATE_DEF_INDEX diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index c819d43323583..9a8d837a08dc0 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -469,17 +469,9 @@ symbols! { async_call_mut, async_call_once, async_closure, - async_destruct, async_drop, - async_drop_chain, - async_drop_defer, - async_drop_deferred_drop_in_place, - async_drop_either, - async_drop_fuse, async_drop_in_place, - async_drop_noop, - async_drop_slice, - async_drop_surface_drop_in_place, + async_drop_in_place_poll, async_fn, async_fn_in_dyn_trait, async_fn_in_trait, @@ -928,7 +920,6 @@ symbols! { fadd_fast, fake_variadic, fallback, - fallback_surface_drop, fdiv_algebraic, fdiv_fast, feature, @@ -1008,6 +999,7 @@ symbols! { fundamental, fused_iterator, future, + future_drop_poll, future_output, future_trait, gdb_script_file, @@ -1464,14 +1456,18 @@ symbols! { panic_cannot_unwind, panic_const_add_overflow, panic_const_async_fn_resumed, + panic_const_async_fn_resumed_drop, panic_const_async_fn_resumed_panic, panic_const_async_gen_fn_resumed, + panic_const_async_gen_fn_resumed_drop, panic_const_async_gen_fn_resumed_panic, panic_const_coroutine_resumed, + panic_const_coroutine_resumed_drop, panic_const_coroutine_resumed_panic, panic_const_div_by_zero, panic_const_div_overflow, panic_const_gen_fn_none, + panic_const_gen_fn_none_drop, panic_const_gen_fn_none_panic, panic_const_mul_overflow, panic_const_neg_overflow, @@ -1991,7 +1987,6 @@ symbols! 
{ sub_assign, sub_with_overflow, suggestion, - surface_async_drop_in_place, sym, sync, synthetic, diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index 8ae35572d012d..5d040a5e4ccd9 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -59,10 +59,17 @@ pub(super) fn mangle<'tcx>( .print_def_path( def_id, if let ty::InstanceKind::DropGlue(_, _) - | ty::InstanceKind::AsyncDropGlueCtorShim(_, _) = instance.def + | ty::InstanceKind::AsyncDropGlueCtorShim(_, _) + | ty::InstanceKind::FutureDropPollShim(_, _, _) = instance.def { // Add the name of the dropped type to the symbol name &*instance.args + } else if let ty::InstanceKind::AsyncDropGlue(_, ty) = instance.def { + let ty::Coroutine(_, cor_args) = ty.kind() else { + bug!(); + }; + let drop_ty = cor_args.first().unwrap().expect_ty(); + tcx.mk_args(&[GenericArg::from(drop_ty)]) } else { &[] }, @@ -95,6 +102,10 @@ pub(super) fn mangle<'tcx>( _ => {} } + if let ty::InstanceKind::FutureDropPollShim(..) = instance.def { + let _ = printer.write_str("{{drop-shim}}"); + } + printer.path.finish(hash) } diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index 0ac6f17b97bd0..c66ceccf8bcb1 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -55,11 +55,17 @@ pub(super) fn mangle<'tcx>( ty::InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref: false, .. } => { Some("by_ref") } - + ty::InstanceKind::FutureDropPollShim(_, _, _) => Some("drop"), _ => None, }; - if let Some(shim_kind) = shim_kind { + if let ty::InstanceKind::AsyncDropGlue(_, ty) = instance.def { + let ty::Coroutine(_, cor_args) = ty.kind() else { + bug!(); + }; + let drop_ty = cor_args.first().unwrap().expect_ty(); + cx.print_def_path(def_id, tcx.mk_args(&[GenericArg::from(drop_ty)])).unwrap() + } else if let Some(shim_kind) = shim_kind { cx.path_append_ns(|cx| cx.print_def_path(def_id, args), 'S', 0, shim_kind).unwrap() } else { cx.print_def_path(def_id, args).unwrap() diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index de5aaa5bd9729..e0f64bda623b5 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -1065,43 +1065,6 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( | ty::Infer(..) | ty::Error(_) => false, } - } else if tcx.is_lang_item(trait_ref.def_id, LangItem::AsyncDestruct) { - match self_ty.kind() { - ty::Bool - | ty::Char - | ty::Int(_) - | ty::Uint(_) - | ty::Float(_) - | ty::Adt(..) - | ty::Str - | ty::Array(..) - | ty::Slice(_) - | ty::RawPtr(..) - | ty::Ref(..) - | ty::FnDef(..) - | ty::FnPtr(..) - | ty::UnsafeBinder(_) - | ty::Dynamic(..) - | ty::Closure(..) - | ty::CoroutineClosure(..) - | ty::Coroutine(..) - | ty::CoroutineWitness(..) - | ty::Pat(..) - | ty::Never - | ty::Tuple(..) - | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true, - - // type parameters, opaques, and unnormalized projections don't have - // a known async destructor and may need to be normalized further or rely - // on param env for async destructor projections - ty::Param(_) - | ty::Foreign(_) - | ty::Alias(..) - | ty::Bound(..) - | ty::Placeholder(..) 
- | ty::Infer(_) - | ty::Error(_) => false, - } } else if tcx.is_lang_item(trait_ref.def_id, LangItem::PointeeTrait) { let tail = selcx.tcx().struct_tail_raw( self_ty, @@ -1559,11 +1522,6 @@ fn confirm_builtin_candidate<'cx, 'tcx>( assert_eq!(discriminant_def_id, item_def_id); (self_ty.discriminant_ty(tcx).into(), PredicateObligations::new()) - } else if tcx.is_lang_item(trait_def_id, LangItem::AsyncDestruct) { - let destructor_def_id = tcx.associated_item_def_ids(trait_def_id)[0]; - assert_eq!(destructor_def_id, item_def_id); - - (self_ty.async_destructor_ty(tcx).into(), PredicateObligations::new()) } else if tcx.is_lang_item(trait_def_id, LangItem::PointeeTrait) { let metadata_def_id = tcx.require_lang_item(LangItem::Metadata, None); assert_eq!(metadata_def_id, item_def_id); diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index e495bdbf7825c..62e248d0613c5 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -79,9 +79,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } else if tcx.is_lang_item(def_id, LangItem::DiscriminantKind) { // `DiscriminantKind` is automatically implemented for every type. candidates.vec.push(BuiltinCandidate { has_nested: false }); - } else if tcx.is_lang_item(def_id, LangItem::AsyncDestruct) { - // `AsyncDestruct` is automatically implemented for every type. - candidates.vec.push(BuiltinCandidate { has_nested: false }); } else if tcx.is_lang_item(def_id, LangItem::PointeeTrait) { // `Pointee` is automatically implemented for every type. candidates.vec.push(BuiltinCandidate { has_nested: false }); diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs index 169f3a78c26a7..0bca16d5a5ef2 100644 --- a/compiler/rustc_ty_utils/src/abi.rs +++ b/compiler/rustc_ty_utils/src/abi.rs @@ -591,8 +591,10 @@ fn fn_abi_new_uncached<'tcx>( extra_args }; - let is_drop_in_place = - fn_def_id.is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::DropInPlace)); + let is_drop_in_place = fn_def_id.is_some_and(|def_id| { + tcx.is_lang_item(def_id, LangItem::DropInPlace) + || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace) + }); let arg_of = |ty: Ty<'tcx>, arg_idx: Option| -> Result<_, &'tcx FnAbiError<'tcx>> { let span = tracing::debug_span!("arg_of"); diff --git a/compiler/rustc_ty_utils/src/common_traits.rs b/compiler/rustc_ty_utils/src/common_traits.rs index 2157ab3c4022c..9e0217c6ad479 100644 --- a/compiler/rustc_ty_utils/src/common_traits.rs +++ b/compiler/rustc_ty_utils/src/common_traits.rs @@ -22,6 +22,13 @@ fn is_unpin_raw<'tcx>(tcx: TyCtxt<'tcx>, query: ty::PseudoCanonicalInput<'tcx, T is_item_raw(tcx, query, LangItem::Unpin) } +fn is_async_drop_raw<'tcx>( + tcx: TyCtxt<'tcx>, + query: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>, +) -> bool { + is_item_raw(tcx, query, LangItem::AsyncDrop) +} + fn is_item_raw<'tcx>( tcx: TyCtxt<'tcx>, query: ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>, @@ -33,5 +40,12 @@ fn is_item_raw<'tcx>( } pub(crate) fn provide(providers: &mut Providers) { - *providers = Providers { is_copy_raw, is_sized_raw, is_freeze_raw, is_unpin_raw, ..*providers }; + *providers = Providers { + is_copy_raw, + is_sized_raw, + is_freeze_raw, + is_unpin_raw, + is_async_drop_raw, + ..*providers + }; } diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs index 
8c6e3c86bf5c8..e2e5b1ac7dcd8 100644 --- a/compiler/rustc_ty_utils/src/instance.rs +++ b/compiler/rustc_ty_utils/src/instance.rs @@ -5,7 +5,6 @@ use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::bug; use rustc_middle::query::Providers; use rustc_middle::traits::{BuiltinImplSource, CodegenObligationError}; -use rustc_middle::ty::util::AsyncDropGlueMorphology; use rustc_middle::ty::{ self, GenericArgsRef, Instance, PseudoCanonicalInput, TyCtxt, TypeVisitableExt, }; @@ -42,20 +41,26 @@ fn resolve_instance_raw<'tcx>( if ty.needs_drop(tcx, typing_env) { debug!(" => nontrivial drop glue"); match *ty.kind() { + ty::Coroutine(coroutine_def_id, ..) => { + // FIXME: sync drop of coroutine with async drop (generate both versions?) + // Currently just ignored + if tcx.optimized_mir(coroutine_def_id).coroutine_drop_async().is_some() { + ty::InstanceKind::DropGlue(def_id, None) + } else { + ty::InstanceKind::DropGlue(def_id, Some(ty)) + } + } ty::Closure(..) | ty::CoroutineClosure(..) - | ty::Coroutine(..) | ty::Tuple(..) | ty::Adt(..) | ty::Dynamic(..) | ty::Array(..) | ty::Slice(..) - | ty::UnsafeBinder(..) => {} + | ty::UnsafeBinder(..) => ty::InstanceKind::DropGlue(def_id, Some(ty)), // Drop shims can only be built from ADTs. _ => return Ok(None), } - - ty::InstanceKind::DropGlue(def_id, Some(ty)) } else { debug!(" => trivial drop glue"); ty::InstanceKind::DropGlue(def_id, None) @@ -63,7 +68,7 @@ fn resolve_instance_raw<'tcx>( } else if tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace) { let ty = args.type_at(0); - if ty.async_drop_glue_morphology(tcx) != AsyncDropGlueMorphology::Noop { + if ty.needs_async_drop(tcx, typing_env) { match *ty.kind() { ty::Closure(..) | ty::CoroutineClosure(..) @@ -77,11 +82,14 @@ fn resolve_instance_raw<'tcx>( _ => return Ok(None), } debug!(" => nontrivial async drop glue ctor"); - ty::InstanceKind::AsyncDropGlueCtorShim(def_id, Some(ty)) + ty::InstanceKind::AsyncDropGlueCtorShim(def_id, ty) } else { debug!(" => trivial async drop glue ctor"); - ty::InstanceKind::AsyncDropGlueCtorShim(def_id, None) + ty::InstanceKind::AsyncDropGlueCtorShim(def_id, ty) } + } else if tcx.is_lang_item(def_id, LangItem::AsyncDropInPlacePoll) { + let ty = args.type_at(0); + ty::InstanceKind::AsyncDropGlue(def_id, ty) } else { debug!(" => free item"); ty::InstanceKind::Item(def_id) diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index ce8dc3a2515d4..2052f41cb2062 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -1,13 +1,14 @@ use std::fmt::Debug; use std::iter; +use std::ops::Bound; use hir::def_id::DefId; -use rustc_abi::Integer::{I8, I32}; +use rustc_abi::Integer::{I8, I32, I128}; use rustc_abi::Primitive::{self, Float, Int, Pointer}; use rustc_abi::{ AbiAndPrefAlign, AddressSpace, Align, BackendRepr, FIRST_VARIANT, FieldIdx, FieldsShape, - HasDataLayout, Layout, LayoutCalculatorError, LayoutData, Niche, ReprOptions, Scalar, Size, - StructKind, TagEncoding, VariantIdx, Variants, WrappingRange, + HasDataLayout, Layout, LayoutCalculatorError, LayoutData, Niche, ReprFlags, ReprOptions, + Scalar, Size, StructKind, TagEncoding, VariantIdx, Variants, WrappingRange, }; use rustc_index::bit_set::DenseBitSet; use rustc_index::{IndexSlice, IndexVec}; @@ -185,6 +186,59 @@ fn layout_of_uncached<'tcx>( } let tcx = cx.tcx(); + + // layout of `async_drop_in_place::{closure}` in case, + // when T is a coroutine, contains this internal coroutine's ref + if let ty::Coroutine(cor_def, cor_args) = ty.kind() + 
&& tcx.is_templated_coroutine(*cor_def) + { + let arg_cor_ty = cor_args.first().unwrap().expect_ty(); + if arg_cor_ty.is_coroutine() { + fn find_impl_coroutine<'tcx>(tcx: TyCtxt<'tcx>, mut cor_ty: Ty<'tcx>) -> Ty<'tcx> { + let mut ty = cor_ty; + loop { + if let ty::Coroutine(def_id, args) = ty.kind() { + cor_ty = ty; + if tcx.is_templated_coroutine(*def_id) { + ty = args.first().unwrap().expect_ty(); + continue; + } else { + return cor_ty; + } + } else { + return cor_ty; + } + } + } + let repr = ReprOptions { + int: None, + align: None, + pack: None, + flags: ReprFlags::empty(), + field_shuffle_seed: 0, + }; + let impl_cor = find_impl_coroutine(tcx, arg_cor_ty); + let impl_ref = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, impl_cor); + let ref_layout = cx.layout_of(impl_ref)?.layout; + let variants: IndexVec>> = + [IndexVec::from([ref_layout])].into(); + let Ok(layout) = cx.calc.layout_of_struct_or_enum( + &repr, + &variants, + false, + false, + (Bound::Unbounded, Bound::Unbounded), + |_, _| (I128, false), + None::<(VariantIdx, i128)>.into_iter(), + false, + true, + ) else { + return Err(error(cx, LayoutError::SizeOverflow(ty))); + }; + return Ok(tcx.mk_layout(layout)); + } + } + let dl = cx.data_layout(); let scalar_unit = |value: Primitive| { let size = value.size(dl); @@ -871,9 +925,18 @@ fn coroutine_layout<'tcx>( ) -> Result, &'tcx LayoutError<'tcx>> { use SavedLocalEligibility::*; let tcx = cx.tcx(); + let layout = if tcx.is_templated_coroutine(def_id) { + // layout of `async_drop_in_place::{closure}` in case, + // when T is a coroutine, contains this internal coroutine's ref + // and must be proceed above, in layout_of_uncached + assert!(!args.first().unwrap().expect_ty().is_coroutine()); + tcx.templated_coroutine_layout(ty) + } else { + tcx.ordinary_coroutine_layout(def_id, args.as_coroutine().kind_ty()) + }; let instantiate_field = |ty: Ty<'tcx>| EarlyBinder::bind(ty).instantiate(tcx, args); - let Some(info) = tcx.coroutine_layout(def_id, args.as_coroutine().kind_ty()) else { + let Some(info) = layout else { return Err(error(cx, LayoutError::Unknown(ty))); }; let (ineligible_locals, assignments) = coroutine_saved_local_eligibility(info); @@ -1211,7 +1274,7 @@ fn variant_info_for_coroutine<'tcx>( return (vec![], None); }; - let coroutine = cx.tcx().coroutine_layout(def_id, args.as_coroutine().kind_ty()).unwrap(); + let coroutine = cx.tcx().coroutine_layout(def_id, args).unwrap(); let upvar_names = cx.tcx().closure_saved_names_of_captured_variables(def_id); let mut upvars_size = Size::ZERO; diff --git a/compiler/rustc_ty_utils/src/needs_drop.rs b/compiler/rustc_ty_utils/src/needs_drop.rs index 80de7e20951a7..4deeb4d01fb4a 100644 --- a/compiler/rustc_ty_utils/src/needs_drop.rs +++ b/compiler/rustc_ty_utils/src/needs_drop.rs @@ -42,11 +42,11 @@ fn needs_async_drop_raw<'tcx>( let adt_has_async_dtor = |adt_def: ty::AdtDef<'tcx>| adt_def.async_destructor(tcx).map(|_| DtorType::Significant); let res = drop_tys_helper(tcx, query.value, query.typing_env, adt_has_async_dtor, false, false) - .filter(filter_array_elements(tcx, query.typing_env)) + .filter(filter_array_elements_async(tcx, query.typing_env)) .next() .is_some(); - debug!("needs_drop_raw({:?}) = {:?}", query, res); + debug!("needs_async_drop_raw({:?}) = {:?}", query, res); res } @@ -66,6 +66,18 @@ fn filter_array_elements<'tcx>( Err(AlwaysRequiresDrop) => true, } } +fn filter_array_elements_async<'tcx>( + tcx: TyCtxt<'tcx>, + typing_env: ty::TypingEnv<'tcx>, +) -> impl Fn(&Result, AlwaysRequiresDrop>) -> bool { + move |ty| match 
ty { + Ok(ty) => match *ty.kind() { + ty::Array(elem, _) => tcx.needs_async_drop_raw(typing_env.as_query_input(elem)), + _ => true, + }, + Err(AlwaysRequiresDrop) => true, + } +} fn has_significant_drop_raw<'tcx>( tcx: TyCtxt<'tcx>, @@ -414,6 +426,27 @@ fn adt_drop_tys<'tcx>( .collect::, _>>() .map(|components| tcx.mk_type_list(&components)) } + +fn adt_async_drop_tys<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, +) -> Result<&'tcx ty::List>, AlwaysRequiresDrop> { + // This is for the "adt_async_drop_tys" query, that considers all `AsyncDrop` impls. + let adt_has_dtor = + |adt_def: ty::AdtDef<'tcx>| adt_def.async_destructor(tcx).map(|_| DtorType::Significant); + // `tcx.type_of(def_id)` identical to `tcx.make_adt(def, identity_args)` + drop_tys_helper( + tcx, + tcx.type_of(def_id).instantiate_identity(), + ty::TypingEnv::non_body_analysis(tcx, def_id), + adt_has_dtor, + false, + false, + ) + .collect::, _>>() + .map(|components| tcx.mk_type_list(&components)) +} + // If `def_id` refers to a generic ADT, the queries above and below act as if they had been handed // a `tcx.make_ty(def, identity_args)` and as such it is legal to instantiate the generic parameters // of the ADT into the outputted `ty`s. @@ -458,6 +491,7 @@ pub(crate) fn provide(providers: &mut Providers) { needs_async_drop_raw, has_significant_drop_raw, adt_drop_tys, + adt_async_drop_tys, adt_significant_drop_tys, list_significant_drop_tys, ..*providers diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index 4e6d645e6fae2..78b17851ae393 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -163,8 +163,6 @@ pub trait Ty>: fn discriminant_ty(self, interner: I) -> I::Ty; - fn async_destructor_ty(self, interner: I) -> I::Ty; - /// Returns `true` when the outermost type cannot be further normalized, /// resolved, or instantiated. This includes all primitive types, but also /// things like ADTs and trait objects, since even if their arguments or diff --git a/compiler/rustc_type_ir/src/lang_items.rs b/compiler/rustc_type_ir/src/lang_items.rs index eeb80bc3ab420..0b2bb73cc9275 100644 --- a/compiler/rustc_type_ir/src/lang_items.rs +++ b/compiler/rustc_type_ir/src/lang_items.rs @@ -2,7 +2,6 @@ /// representation of `LangItem`s used in the underlying compiler implementation. pub enum TraitSolverLangItem { // tidy-alphabetical-start - AsyncDestruct, AsyncFn, AsyncFnKindHelper, AsyncFnKindUpvars, diff --git a/compiler/stable_mir/src/compiler_interface.rs b/compiler/stable_mir/src/compiler_interface.rs index a6f7c254583e2..d3717768213a1 100644 --- a/compiler/stable_mir/src/compiler_interface.rs +++ b/compiler/stable_mir/src/compiler_interface.rs @@ -184,9 +184,6 @@ pub trait Context { /// Check if this is an empty DropGlue shim. fn is_empty_drop_shim(&self, def: InstanceDef) -> bool; - /// Check if this is an empty AsyncDropGlueCtor shim. - fn is_empty_async_drop_ctor_shim(&self, def: InstanceDef) -> bool; - /// Convert a non-generic crate item into an instance. /// This function will panic if the item is generic. 
fn mono_instance(&self, def_id: DefId) -> Instance; diff --git a/compiler/stable_mir/src/mir/body.rs b/compiler/stable_mir/src/mir/body.rs index bc2e427f50cb0..627be43b268aa 100644 --- a/compiler/stable_mir/src/mir/body.rs +++ b/compiler/stable_mir/src/mir/body.rs @@ -250,6 +250,7 @@ pub enum AssertMessage { RemainderByZero(Operand), ResumedAfterReturn(CoroutineKind), ResumedAfterPanic(CoroutineKind), + ResumedAfterDrop(CoroutineKind), MisalignedPointerDereference { required: Operand, found: Operand }, NullPointerDereference, } @@ -303,6 +304,22 @@ impl AssertMessage { _, )) => Ok("`gen fn` should just keep returning `AssertMessage::None` after panicking"), + AssertMessage::ResumedAfterDrop(CoroutineKind::Coroutine(_)) => { + Ok("coroutine resumed after async drop") + } + AssertMessage::ResumedAfterDrop(CoroutineKind::Desugared( + CoroutineDesugaring::Async, + _, + )) => Ok("`async fn` resumed after async drop"), + AssertMessage::ResumedAfterDrop(CoroutineKind::Desugared( + CoroutineDesugaring::Gen, + _, + )) => Ok("`async gen fn` resumed after async drop"), + AssertMessage::ResumedAfterDrop(CoroutineKind::Desugared( + CoroutineDesugaring::AsyncGen, + _, + )) => Ok("`gen fn` should just keep returning `AssertMessage::None` after async drop"), + AssertMessage::BoundsCheck { .. } => Ok("index out of bounds"), AssertMessage::MisalignedPointerDereference { .. } => { Ok("misaligned pointer dereference") diff --git a/compiler/stable_mir/src/mir/mono.rs b/compiler/stable_mir/src/mir/mono.rs index 22507a49411f9..b34a35c5a9b58 100644 --- a/compiler/stable_mir/src/mir/mono.rs +++ b/compiler/stable_mir/src/mir/mono.rs @@ -162,10 +162,7 @@ impl Instance { /// When generating code for a Drop terminator, users can ignore an empty drop glue. /// These shims are only needed to generate a valid Drop call done via VTable. pub fn is_empty_shim(&self) -> bool { - self.kind == InstanceKind::Shim - && with(|cx| { - cx.is_empty_drop_shim(self.def) || cx.is_empty_async_drop_ctor_shim(self.def) - }) + self.kind == InstanceKind::Shim && with(|cx| cx.is_empty_drop_shim(self.def)) } /// Try to constant evaluate the instance into a constant with the given type. 
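The stable_mir hunks around this point (the `AssertMessage::ResumedAfterDrop` descriptions above, plus the pretty-printer and `MirVisitor` arms below) mean that any downstream stable_mir consumer with an exhaustive match over `AssertMessage` needs a new arm. A minimal sketch of that pattern, assuming `AssertMessage`, `CoroutineKind`, and `CoroutineDesugaring` remain re-exported from `stable_mir::mir` (the exact import paths are not confirmed by this diff):

```rust
use stable_mir::mir::{AssertMessage, CoroutineDesugaring, CoroutineKind};

// Hypothetical downstream helper: renders an assert message, covering the
// ResumedAfterDrop variant introduced by this change.
fn assert_message_text(msg: &AssertMessage) -> String {
    match msg {
        AssertMessage::ResumedAfterDrop(CoroutineKind::Desugared(
            CoroutineDesugaring::Async,
            _,
        )) => "`async fn` resumed after async drop".to_string(),
        // Simplified: other coroutine flavors get the generic wording here.
        AssertMessage::ResumedAfterDrop(_) => "coroutine resumed after async drop".to_string(),
        // Pre-existing variants keep their built-in descriptions.
        other => other.description().unwrap_or("assertion failed").to_string(),
    }
}
```

The same addition shows up mechanically in `pretty.rs` and `visit.rs` below: `ResumedAfterDrop` is grouped with `ResumedAfterReturn` and `ResumedAfterPanic`, since it carries only a `CoroutineKind` and has nothing extra to visit.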
diff --git a/compiler/stable_mir/src/mir/pretty.rs b/compiler/stable_mir/src/mir/pretty.rs index 8278afb7a2f17..44ac51b11ed27 100644 --- a/compiler/stable_mir/src/mir/pretty.rs +++ b/compiler/stable_mir/src/mir/pretty.rs @@ -301,7 +301,9 @@ fn pretty_assert_message(writer: &mut W, msg: &AssertMessage) -> io::R AssertMessage::NullPointerDereference => { write!(writer, "\"null pointer dereference occurred\"") } - AssertMessage::ResumedAfterReturn(_) | AssertMessage::ResumedAfterPanic(_) => { + AssertMessage::ResumedAfterReturn(_) + | AssertMessage::ResumedAfterPanic(_) + | AssertMessage::ResumedAfterDrop(_) => { write!(writer, "{}", msg.description().unwrap()) } } diff --git a/compiler/stable_mir/src/mir/visit.rs b/compiler/stable_mir/src/mir/visit.rs index d985a98fcbf01..021a950297e95 100644 --- a/compiler/stable_mir/src/mir/visit.rs +++ b/compiler/stable_mir/src/mir/visit.rs @@ -428,8 +428,8 @@ pub trait MirVisitor { } AssertMessage::ResumedAfterReturn(_) | AssertMessage::ResumedAfterPanic(_) - | AssertMessage::NullPointerDereference => { - //nothing to visit + | AssertMessage::NullPointerDereference + | AssertMessage::ResumedAfterDrop(_) => { //nothing to visit } AssertMessage::MisalignedPointerDereference { required, found } => { self.visit_operand(required, location); diff --git a/library/core/src/future/async_drop.rs b/library/core/src/future/async_drop.rs index f1778a4d782af..fc4f95a98b42d 100644 --- a/library/core/src/future/async_drop.rs +++ b/library/core/src/future/async_drop.rs @@ -1,284 +1,51 @@ #![unstable(feature = "async_drop", issue = "126482")] -use crate::fmt; -use crate::future::{Future, IntoFuture}; -use crate::intrinsics::discriminant_value; -use crate::marker::{DiscriminantKind, PhantomPinned}; -use crate::mem::MaybeUninit; -use crate::pin::Pin; -use crate::task::{Context, Poll, ready}; - -/// Asynchronously drops a value by running `AsyncDrop::async_drop` -/// on a value and its fields recursively. -#[unstable(feature = "async_drop", issue = "126482")] -pub fn async_drop(value: T) -> AsyncDropOwning { - AsyncDropOwning { value: MaybeUninit::new(value), dtor: None, _pinned: PhantomPinned } -} - -/// A future returned by the [`async_drop`]. -#[unstable(feature = "async_drop", issue = "126482")] -pub struct AsyncDropOwning { - value: MaybeUninit, - dtor: Option>, - _pinned: PhantomPinned, -} - -#[unstable(feature = "async_drop", issue = "126482")] -impl fmt::Debug for AsyncDropOwning { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("AsyncDropOwning").finish_non_exhaustive() - } -} - -#[unstable(feature = "async_drop", issue = "126482")] -impl Future for AsyncDropOwning { - type Output = (); - - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - // SAFETY: Self is pinned thus it is ok to store references to self - unsafe { - let this = self.get_unchecked_mut(); - let dtor = Pin::new_unchecked( - this.dtor.get_or_insert_with(|| async_drop_in_place(this.value.as_mut_ptr())), - ); - // AsyncDestuctors are idempotent so Self gets idempotency as well - dtor.poll(cx) - } - } -} +#[allow(unused_imports)] +use core::future::Future; -#[lang = "async_drop_in_place"] -#[allow(unconditional_recursion)] -// FIXME: Consider if `#[rustc_diagnostic_item = "ptr_drop_in_place"]` is needed? -unsafe fn async_drop_in_place_raw( - to_drop: *mut T, -) -> ::AsyncDestructor { - // Code here does not matter - this is replaced by the - // real async drop glue constructor by the compiler. 
- - // SAFETY: see comment above - unsafe { async_drop_in_place_raw(to_drop) } -} +#[allow(unused_imports)] +use crate::pin::Pin; +#[allow(unused_imports)] +use crate::task::{Context, Poll}; -/// Creates the asynchronous destructor of the pointed-to value. -/// -/// # Safety -/// -/// Behavior is undefined if any of the following conditions are violated: -/// -/// * `to_drop` must be [valid](crate::ptr#safety) for both reads and writes. -/// -/// * `to_drop` must be properly aligned, even if `T` has size 0. +/// Async version of Drop trait. /// -/// * `to_drop` must be nonnull, even if `T` has size 0. +/// When a value is no longer needed, Rust will run a "destructor" on that value. +/// The most common way that a value is no longer needed is when it goes out of +/// scope. Destructors may still run in other circumstances, but we're going to +/// focus on scope for the examples here. To learn about some of those other cases, +/// please see [the reference] section on destructors. /// -/// * The value `to_drop` points to must be valid for async dropping, -/// which may mean it must uphold additional invariants. These -/// invariants depend on the type of the value being dropped. For -/// instance, when dropping a Box, the box's pointer to the heap must -/// be valid. +/// [the reference]: https://doc.rust-lang.org/reference/destructors.html /// -/// * While `async_drop_in_place` is executing or the returned async -/// destructor is alive, the only way to access parts of `to_drop` -/// is through the `self: Pin<&mut Self>` references supplied to -/// the `AsyncDrop::async_drop` methods that `async_drop_in_place` -/// or `AsyncDropInPlace::poll` invokes. This usually means the -/// returned future stores the `to_drop` pointer and user is required -/// to guarantee that dropped value doesn't move. +/// ## `Copy` and ([`Drop`]|`AsyncDrop`) are exclusive /// -#[unstable(feature = "async_drop", issue = "126482")] -pub unsafe fn async_drop_in_place(to_drop: *mut T) -> AsyncDropInPlace { - // SAFETY: `async_drop_in_place_raw` has the same safety requirements - unsafe { AsyncDropInPlace(async_drop_in_place_raw(to_drop)) } -} - -/// A future returned by the [`async_drop_in_place`]. -#[unstable(feature = "async_drop", issue = "126482")] -pub struct AsyncDropInPlace(::AsyncDestructor); - -#[unstable(feature = "async_drop", issue = "126482")] -impl fmt::Debug for AsyncDropInPlace { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("AsyncDropInPlace").finish_non_exhaustive() - } -} - -#[unstable(feature = "async_drop", issue = "126482")] -impl Future for AsyncDropInPlace { - type Output = (); - - #[inline(always)] - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - // SAFETY: This code simply forwards poll call to the inner future - unsafe { Pin::new_unchecked(&mut self.get_unchecked_mut().0) }.poll(cx) - } -} - -// FIXME(zetanumbers): Add same restrictions on AsyncDrop impls as -// with Drop impls -/// Custom code within the asynchronous destructor. +/// You cannot implement both [`Copy`] and ([`Drop`]|`AsyncDrop`) on the same type. Types that +/// are `Copy` get implicitly duplicated by the compiler, making it very +/// hard to predict when, and how often destructors will be executed. As such, +/// these types cannot have destructors. +#[cfg(not(bootstrap))] #[unstable(feature = "async_drop", issue = "126482")] #[lang = "async_drop"] pub trait AsyncDrop { - /// A future returned by the [`AsyncDrop::async_drop`] to be part - /// of the async destructor. 
- #[unstable(feature = "async_drop", issue = "126482")] - type Dropper<'a>: Future - where - Self: 'a; - - /// Constructs the asynchronous destructor for this type. - #[unstable(feature = "async_drop", issue = "126482")] - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_>; -} - -#[lang = "async_destruct"] -#[rustc_deny_explicit_impl] -#[rustc_do_not_implement_via_object] -trait AsyncDestruct { - type AsyncDestructor: Future; -} - -/// Basically calls `AsyncDrop::async_drop` with pointer. Used to simplify -/// generation of the code for `async_drop_in_place_raw` -#[lang = "surface_async_drop_in_place"] -async unsafe fn surface_async_drop_in_place(ptr: *mut T) { - // SAFETY: We call this from async drop `async_drop_in_place_raw` - // which has the same safety requirements - unsafe { ::async_drop(Pin::new_unchecked(&mut *ptr)).await } -} - -/// Basically calls `Drop::drop` with pointer. Used to simplify generation -/// of the code for `async_drop_in_place_raw` -#[allow(drop_bounds)] -#[lang = "async_drop_surface_drop_in_place"] -async unsafe fn surface_drop_in_place(ptr: *mut T) { - // SAFETY: We call this from async drop `async_drop_in_place_raw` - // which has the same safety requirements - unsafe { crate::ops::fallback_surface_drop(&mut *ptr) } -} - -/// Wraps a future to continue outputting `Poll::Ready(())` once after -/// wrapped future completes by returning `Poll::Ready(())` on poll. This -/// is useful for constructing async destructors to guarantee this -/// "fuse" property -// -// FIXME: Consider optimizing combinators to not have to use fuse in majority -// of cases, perhaps by adding `#[(rustc_)idempotent(_future)]` attribute for -// async functions and blocks with the unit return type. However current layout -// optimizations currently encode `None` case into the async block's discriminant. -struct Fuse { - inner: Option, -} - -#[lang = "async_drop_fuse"] -fn fuse(inner: T) -> Fuse { - Fuse { inner: Some(inner) } -} - -impl Future for Fuse -where - T: Future, -{ - type Output = (); - - fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - // SAFETY: pin projection into `self.inner` - unsafe { - let this = self.get_unchecked_mut(); - if let Some(inner) = &mut this.inner { - ready!(Pin::new_unchecked(inner).poll(cx)); - this.inner = None; - } - } - Poll::Ready(()) - } -} - -/// Async destructor for arrays and slices. -#[lang = "async_drop_slice"] -async unsafe fn slice(s: *mut [T]) { - let len = s.len(); - let ptr = s.as_mut_ptr(); - for i in 0..len { - // SAFETY: we iterate over elements of `s` slice - unsafe { async_drop_in_place_raw(ptr.add(i)).await } - } -} - -/// Constructs a chain of two futures, which awaits them sequentially as -/// a future. -#[lang = "async_drop_chain"] -async fn chain(first: F, last: G) -where - F: IntoFuture, - G: IntoFuture, -{ - first.await; - last.await; -} - -/// Basically a lazy version of `async_drop_in_place`. Returns a future -/// that would call `AsyncDrop::async_drop` on a first poll. -/// -/// # Safety -/// -/// Same as `async_drop_in_place` except is lazy to avoid creating -/// multiple mutable references. -#[lang = "async_drop_defer"] -async unsafe fn defer(to_drop: *mut T) { - // SAFETY: same safety requirements as `async_drop_in_place` - unsafe { async_drop_in_place(to_drop) }.await -} - -/// If `T`'s discriminant is equal to the stored one then awaits `M` -/// otherwise awaits the `O`. 
-/// -/// # Safety -/// -/// Users should carefully manage the returned future, since it would -/// try creating an immutable reference from `this` and get pointee's -/// discriminant. -// FIXME(zetanumbers): Send and Sync impls -#[lang = "async_drop_either"] -async unsafe fn either, M: IntoFuture, T>( - other: O, - matched: M, - this: *mut T, - discr: ::Discriminant, -) { - // SAFETY: Guaranteed by the safety section of this funtion's documentation - if unsafe { discriminant_value(&*this) } == discr { - drop(other); - matched.await - } else { - drop(matched); - other.await - } -} - -#[lang = "async_drop_deferred_drop_in_place"] -async unsafe fn deferred_drop_in_place(to_drop: *mut T) { - // SAFETY: same safety requirements as with drop_in_place (implied by - // function's name) - unsafe { crate::ptr::drop_in_place(to_drop) } -} - -/// Used for noop async destructors. We don't use [`core::future::Ready`] -/// because it panics after its second poll, which could be potentially -/// bad if that would happen during the cleanup. -#[derive(Clone, Copy)] -struct Noop; - -#[lang = "async_drop_noop"] -fn noop() -> Noop { - Noop -} - -impl Future for Noop { - type Output = (); - - fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll { - Poll::Ready(()) - } + /// Executes the async destructor for this type. + /// + /// This method is called implicitly when the value goes out of scope, + /// and cannot be called explicitly. + /// + /// When this method has been called, `self` has not yet been deallocated. + /// That only happens after the method is over. + /// + /// # Panics + #[allow(async_fn_in_trait)] + async fn drop(self: Pin<&mut Self>); +} + +/// Async drop. +#[cfg(not(bootstrap))] +#[unstable(feature = "async_drop", issue = "126482")] +#[lang = "async_drop_in_place"] +pub async unsafe fn async_drop_in_place(_to_drop: *mut T) { + // Code here does not matter - this is replaced by the + // real implementation by the compiler. 
} diff --git a/library/core/src/future/mod.rs b/library/core/src/future/mod.rs index e5a368796ec93..5416754382a3c 100644 --- a/library/core/src/future/mod.rs +++ b/library/core/src/future/mod.rs @@ -20,8 +20,9 @@ mod pending; mod poll_fn; mod ready; +#[cfg(not(bootstrap))] #[unstable(feature = "async_drop", issue = "126482")] -pub use async_drop::{AsyncDrop, AsyncDropInPlace, async_drop, async_drop_in_place}; +pub use async_drop::{AsyncDrop, async_drop_in_place}; #[stable(feature = "into_future", since = "1.64.0")] pub use into_future::IntoFuture; #[stable(feature = "future_readiness_fns", since = "1.48.0")] diff --git a/library/core/src/ops/drop.rs b/library/core/src/ops/drop.rs index e024b7fb4d301..5d040804a8d1c 100644 --- a/library/core/src/ops/drop.rs +++ b/library/core/src/ops/drop.rs @@ -240,10 +240,3 @@ pub trait Drop { #[stable(feature = "rust1", since = "1.0.0")] fn drop(&mut self); } - -/// Fallback function to call surface level `Drop::drop` function -#[allow(drop_bounds)] -#[lang = "fallback_surface_drop"] -pub(crate) fn fallback_surface_drop(x: &mut T) { - ::drop(x) -} diff --git a/library/core/src/ops/mod.rs b/library/core/src/ops/mod.rs index 7b2ced2cc4bdc..62d9689a84a08 100644 --- a/library/core/src/ops/mod.rs +++ b/library/core/src/ops/mod.rs @@ -176,7 +176,6 @@ pub use self::deref::Receiver; pub use self::deref::{Deref, DerefMut}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::drop::Drop; -pub(crate) use self::drop::fallback_surface_drop; #[stable(feature = "rust1", since = "1.0.0")] pub use self::function::{Fn, FnMut, FnOnce}; #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/core/src/panicking.rs b/library/core/src/panicking.rs index d36e677d21a18..98dda86742008 100644 --- a/library/core/src/panicking.rs +++ b/library/core/src/panicking.rs @@ -155,30 +155,26 @@ pub const fn panic(expr: &'static str) -> ! { // reducing binary size impact. macro_rules! panic_const { ($($lang:ident = $message:expr,)+) => { - pub mod panic_const { - use super::*; - - $( - /// This is a panic called with a message that's a result of a MIR-produced Assert. - // - // never inline unless panic_immediate_abort to avoid code - // bloat at the call sites as much as possible - #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)] - #[cfg_attr(feature = "panic_immediate_abort", inline)] - #[track_caller] - #[rustc_const_stable_indirect] // must follow stable const rules since it is exposed to stable - #[lang = stringify!($lang)] - pub const fn $lang() -> ! { - // Use Arguments::new_const instead of format_args!("{expr}") to potentially - // reduce size overhead. The format_args! macro uses str's Display trait to - // write expr, which calls Formatter::pad, which must accommodate string - // truncation and padding (even though none is used here). Using - // Arguments::new_const may allow the compiler to omit Formatter::pad from the - // output binary, saving up to a few kilobytes. - panic_fmt(fmt::Arguments::new_const(&[$message])); - } - )+ - } + $( + /// This is a panic called with a message that's a result of a MIR-produced Assert. + // + // never inline unless panic_immediate_abort to avoid code + // bloat at the call sites as much as possible + #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)] + #[cfg_attr(feature = "panic_immediate_abort", inline)] + #[track_caller] + #[rustc_const_stable_indirect] // must follow stable const rules since it is exposed to stable + #[lang = stringify!($lang)] + pub const fn $lang() -> ! 
{ + // Use Arguments::new_const instead of format_args!("{expr}") to potentially + // reduce size overhead. The format_args! macro uses str's Display trait to + // write expr, which calls Formatter::pad, which must accommodate string + // truncation and padding (even though none is used here). Using + // Arguments::new_const may allow the compiler to omit Formatter::pad from the + // output binary, saving up to a few kilobytes. + panic_fmt(fmt::Arguments::new_const(&[$message])); + } + )+ } } @@ -186,25 +182,37 @@ macro_rules! panic_const { // slightly different forms. It's not clear if there's a good way to deduplicate without adding // special cases to the compiler (e.g., a const generic function wouldn't have a single definition // shared across crates, which is exactly what we want here). -panic_const! { - panic_const_add_overflow = "attempt to add with overflow", - panic_const_sub_overflow = "attempt to subtract with overflow", - panic_const_mul_overflow = "attempt to multiply with overflow", - panic_const_div_overflow = "attempt to divide with overflow", - panic_const_rem_overflow = "attempt to calculate the remainder with overflow", - panic_const_neg_overflow = "attempt to negate with overflow", - panic_const_shr_overflow = "attempt to shift right with overflow", - panic_const_shl_overflow = "attempt to shift left with overflow", - panic_const_div_by_zero = "attempt to divide by zero", - panic_const_rem_by_zero = "attempt to calculate the remainder with a divisor of zero", - panic_const_coroutine_resumed = "coroutine resumed after completion", - panic_const_async_fn_resumed = "`async fn` resumed after completion", - panic_const_async_gen_fn_resumed = "`async gen fn` resumed after completion", - panic_const_gen_fn_none = "`gen fn` should just keep returning `None` after completion", - panic_const_coroutine_resumed_panic = "coroutine resumed after panicking", - panic_const_async_fn_resumed_panic = "`async fn` resumed after panicking", - panic_const_async_gen_fn_resumed_panic = "`async gen fn` resumed after panicking", - panic_const_gen_fn_none_panic = "`gen fn` should just keep returning `None` after panicking", +pub mod panic_const { + use super::*; + panic_const! 
{ + panic_const_add_overflow = "attempt to add with overflow", + panic_const_sub_overflow = "attempt to subtract with overflow", + panic_const_mul_overflow = "attempt to multiply with overflow", + panic_const_div_overflow = "attempt to divide with overflow", + panic_const_rem_overflow = "attempt to calculate the remainder with overflow", + panic_const_neg_overflow = "attempt to negate with overflow", + panic_const_shr_overflow = "attempt to shift right with overflow", + panic_const_shl_overflow = "attempt to shift left with overflow", + panic_const_div_by_zero = "attempt to divide by zero", + panic_const_rem_by_zero = "attempt to calculate the remainder with a divisor of zero", + panic_const_coroutine_resumed = "coroutine resumed after completion", + panic_const_async_fn_resumed = "`async fn` resumed after completion", + panic_const_async_gen_fn_resumed = "`async gen fn` resumed after completion", + panic_const_gen_fn_none = "`gen fn` should just keep returning `None` after completion", + panic_const_coroutine_resumed_panic = "coroutine resumed after panicking", + panic_const_async_fn_resumed_panic = "`async fn` resumed after panicking", + panic_const_async_gen_fn_resumed_panic = "`async gen fn` resumed after panicking", + panic_const_gen_fn_none_panic = "`gen fn` should just keep returning `None` after panicking", + } + // Separated panic constants list for async drop feature + // (May be joined when the corresponding lang items will be in the bootstrap) + #[cfg(not(bootstrap))] + panic_const! { + panic_const_coroutine_resumed_drop = "coroutine resumed after async drop", + panic_const_async_fn_resumed_drop = "`async fn` resumed after async drop", + panic_const_async_gen_fn_resumed_drop = "`async gen fn` resumed after async drop", + panic_const_gen_fn_none_drop = "`gen fn` resumed after async drop", + } } /// Like `panic`, but without unwinding and track_caller to reduce the impact on codesize on the caller. diff --git a/src/tools/miri/tests/pass/async-drop.rs b/src/tools/miri/tests/pass/async-drop.rs index 6d556b77795d9..e7bc8531293c3 100644 --- a/src/tools/miri/tests/pass/async-drop.rs +++ b/src/tools/miri/tests/pass/async-drop.rs @@ -4,16 +4,16 @@ // WARNING: If you would ever want to modify this test, // please consider modifying rustc's async drop test at -// `tests/ui/async-await/async-drop.rs`. +// `tests/ui/async-await/async-drop/async-drop-initial.rs`. 
-#![feature(async_drop, impl_trait_in_assoc_type)] +#![feature(async_drop, impl_trait_in_assoc_type, noop_waker, async_closure)] #![allow(incomplete_features, dead_code)] // FIXME(zetanumbers): consider AsyncDestruct::async_drop cleanup tests -use core::future::{AsyncDrop, Future, async_drop_in_place}; +use core::future::{async_drop_in_place, AsyncDrop, Future}; use core::hint::black_box; use core::mem::{self, ManuallyDrop}; -use core::pin::{Pin, pin}; +use core::pin::{pin, Pin}; use core::task::{Context, Poll, Waker}; async fn test_async_drop(x: T) { @@ -68,7 +68,8 @@ fn main() { test_async_drop(SyncThenAsync { i: 15, a: AsyncInt(16), b: SyncInt(17), c: AsyncInt(18) }) .await; - let async_drop_fut = pin!(core::future::async_drop(AsyncInt(19))); + let mut ptr19 = mem::MaybeUninit::new(AsyncInt(19)); + let async_drop_fut = pin!(unsafe { async_drop_in_place(ptr19.as_mut_ptr()) }); test_idempotency(async_drop_fut).await; let foo = AsyncInt(20); @@ -89,13 +90,14 @@ fn main() { struct AsyncInt(i32); +impl Drop for AsyncInt { + fn drop(&mut self) { + println!("AsyncInt::drop: {}", self.0); + } +} impl AsyncDrop for AsyncInt { - type Dropper<'a> = impl Future; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncInt::Dropper::poll: {}", self.0); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncInt::async_drop: {}", self.0); } } @@ -124,16 +126,14 @@ struct AsyncReference<'a> { foo: &'a AsyncInt, } +impl Drop for AsyncReference<'_> { + fn drop(&mut self) { + println!("AsyncReference::drop: {}", self.foo.0); + } +} impl AsyncDrop for AsyncReference<'_> { - type Dropper<'a> - = impl Future - where - Self: 'a; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncReference::Dropper::poll: {}", self.foo.0); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncReference::async_drop: {}", self.foo.0); } } @@ -145,13 +145,14 @@ struct AsyncStruct { b: AsyncInt, } +impl Drop for AsyncStruct { + fn drop(&mut self) { + println!("AsyncStruct::drop: {}", self.i); + } +} impl AsyncDrop for AsyncStruct { - type Dropper<'a> = impl Future; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncStruct::Dropper::poll: {}", self.i); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncStruct::async_drop: {}", self.i); } } @@ -160,23 +161,34 @@ enum AsyncEnum { B(SyncInt), } +impl Drop for AsyncEnum { + fn drop(&mut self) { + let new_self = match self { + AsyncEnum::A(foo) => { + println!("AsyncEnum(A)::drop: {}", foo.0); + AsyncEnum::B(SyncInt(foo.0)) + } + AsyncEnum::B(foo) => { + println!("AsyncEnum(B)::drop: {}", foo.0); + AsyncEnum::A(AsyncInt(foo.0)) + } + }; + mem::forget(mem::replace(&mut *self, new_self)); + } +} impl AsyncDrop for AsyncEnum { - type Dropper<'a> = impl Future; - - fn async_drop(mut self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - let new_self = match &*self { - AsyncEnum::A(foo) => { - println!("AsyncEnum(A)::Dropper::poll: {}", foo.0); - AsyncEnum::B(SyncInt(foo.0)) - } - AsyncEnum::B(foo) => { - println!("AsyncEnum(B)::Dropper::poll: {}", foo.0); - AsyncEnum::A(AsyncInt(foo.0)) - } - }; - mem::forget(mem::replace(&mut *self, new_self)); - } + async fn drop(mut self: Pin<&mut Self>) { + let new_self = match &*self { + AsyncEnum::A(foo) => { + println!("AsyncEnum(A)::async_drop: {}", foo.0); + AsyncEnum::B(SyncInt(foo.0)) + } + AsyncEnum::B(foo) => { + println!("AsyncEnum(B)::async_drop: {}", foo.0); + 
AsyncEnum::A(AsyncInt(foo.0)) + } + }; + mem::forget(mem::replace(&mut *self, new_self)); } } @@ -186,14 +198,19 @@ union AsyncUnion { unsigned: u32, } +impl Drop for AsyncUnion { + fn drop(&mut self) { + println!( + "AsyncUnion::drop: {}, {}", + unsafe { self.signed }, + unsafe { self.unsigned }, + ); + } +} impl AsyncDrop for AsyncUnion { - type Dropper<'a> = impl Future; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncUnion::Dropper::poll: {}, {}", unsafe { self.signed }, unsafe { - self.unsigned - }); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncUnion::async_drop: {}, {}", unsafe { self.signed }, unsafe { + self.unsigned + }); } } diff --git a/src/tools/miri/tests/pass/async-drop.stack.stdout b/src/tools/miri/tests/pass/async-drop.stack.stdout index 9cae4331caf92..fc53df2f1b485 100644 --- a/src/tools/miri/tests/pass/async-drop.stack.stdout +++ b/src/tools/miri/tests/pass/async-drop.stack.stdout @@ -1,22 +1,23 @@ -AsyncInt::Dropper::poll: 0 -AsyncInt::Dropper::poll: 1 -AsyncInt::Dropper::poll: 2 -AsyncInt::Dropper::poll: 3 -AsyncInt::Dropper::poll: 4 -AsyncStruct::Dropper::poll: 6 -AsyncInt::Dropper::poll: 7 -AsyncInt::Dropper::poll: 8 -AsyncReference::Dropper::poll: 10 -AsyncInt::Dropper::poll: 11 -AsyncEnum(A)::Dropper::poll: 12 +AsyncInt::async_drop: 0 +AsyncInt::async_drop: 1 +AsyncInt::async_drop: 2 +AsyncInt::async_drop: 3 +AsyncInt::async_drop: 4 +AsyncStruct::async_drop: 6 +AsyncInt::async_drop: 7 +AsyncInt::async_drop: 8 +AsyncReference::async_drop: 10 +AsyncInt::async_drop: 11 +AsyncEnum(A)::async_drop: 12 SyncInt::drop: 12 -AsyncEnum(B)::Dropper::poll: 13 -AsyncInt::Dropper::poll: 13 +AsyncEnum(B)::async_drop: 13 +AsyncInt::async_drop: 13 SyncInt::drop: 14 SyncThenAsync::drop: 15 -AsyncInt::Dropper::poll: 16 +AsyncInt::async_drop: 16 SyncInt::drop: 17 -AsyncInt::Dropper::poll: 18 -AsyncInt::Dropper::poll: 19 -AsyncInt::Dropper::poll: 20 -AsyncUnion::Dropper::poll: 21, 21 +AsyncInt::async_drop: 18 +AsyncInt::async_drop: 19 +AsyncInt::async_drop: 20 +AsyncUnion::async_drop: 21, 21 +AsyncInt::async_drop: 10 diff --git a/src/tools/miri/tests/pass/async-drop.tree.stdout b/src/tools/miri/tests/pass/async-drop.tree.stdout index 9cae4331caf92..fc53df2f1b485 100644 --- a/src/tools/miri/tests/pass/async-drop.tree.stdout +++ b/src/tools/miri/tests/pass/async-drop.tree.stdout @@ -1,22 +1,23 @@ -AsyncInt::Dropper::poll: 0 -AsyncInt::Dropper::poll: 1 -AsyncInt::Dropper::poll: 2 -AsyncInt::Dropper::poll: 3 -AsyncInt::Dropper::poll: 4 -AsyncStruct::Dropper::poll: 6 -AsyncInt::Dropper::poll: 7 -AsyncInt::Dropper::poll: 8 -AsyncReference::Dropper::poll: 10 -AsyncInt::Dropper::poll: 11 -AsyncEnum(A)::Dropper::poll: 12 +AsyncInt::async_drop: 0 +AsyncInt::async_drop: 1 +AsyncInt::async_drop: 2 +AsyncInt::async_drop: 3 +AsyncInt::async_drop: 4 +AsyncStruct::async_drop: 6 +AsyncInt::async_drop: 7 +AsyncInt::async_drop: 8 +AsyncReference::async_drop: 10 +AsyncInt::async_drop: 11 +AsyncEnum(A)::async_drop: 12 SyncInt::drop: 12 -AsyncEnum(B)::Dropper::poll: 13 -AsyncInt::Dropper::poll: 13 +AsyncEnum(B)::async_drop: 13 +AsyncInt::async_drop: 13 SyncInt::drop: 14 SyncThenAsync::drop: 15 -AsyncInt::Dropper::poll: 16 +AsyncInt::async_drop: 16 SyncInt::drop: 17 -AsyncInt::Dropper::poll: 18 -AsyncInt::Dropper::poll: 19 -AsyncInt::Dropper::poll: 20 -AsyncUnion::Dropper::poll: 21, 21 +AsyncInt::async_drop: 18 +AsyncInt::async_drop: 19 +AsyncInt::async_drop: 20 +AsyncUnion::async_drop: 21, 21 +AsyncInt::async_drop: 10 diff --git 
a/tests/crashes/128695.rs b/tests/crashes/128695.rs deleted file mode 100644 index 661f427dc0e99..0000000000000 --- a/tests/crashes/128695.rs +++ /dev/null @@ -1,11 +0,0 @@ -//@ known-bug: rust-lang/rust#128695 -//@ edition: 2021 - -use core::pin::{pin, Pin}; - -fn main() { - let fut = pin!(async { - let async_drop_fut = pin!(core::future::async_drop(async {})); - (async_drop_fut).await; - }); -} diff --git a/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#0}-{closure#0}.built.after.mir b/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#0}-{closure#0}.built.after.mir index 8a584853e0009..4d484b16b5072 100644 --- a/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#0}-{closure#0}.built.after.mir +++ b/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#0}-{closure#0}.built.after.mir @@ -34,14 +34,18 @@ yields () StorageDead(_5); StorageDead(_4); StorageDead(_3); - drop(_1) -> [return: bb1, unwind: bb2]; + drop(_1) -> [return: bb1, unwind: bb3, drop: bb2]; } bb1: { return; } - bb2 (cleanup): { + bb2: { + coroutine_drop; + } + + bb3 (cleanup): { resume; } } diff --git a/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#0}-{closure#1}.built.after.mir b/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#0}-{closure#1}.built.after.mir index a9e08d2e8f60f..a99d8a4057a8d 100644 --- a/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#0}-{closure#1}.built.after.mir +++ b/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#0}-{closure#1}.built.after.mir @@ -34,14 +34,18 @@ yields () StorageDead(_5); StorageDead(_4); StorageDead(_3); - drop(_1) -> [return: bb1, unwind: bb2]; + drop(_1) -> [return: bb1, unwind: bb3, drop: bb2]; } bb1: { return; } - bb2 (cleanup): { + bb2: { + coroutine_drop; + } + + bb3 (cleanup): { resume; } } diff --git a/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#1}-{closure#0}.built.after.mir b/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#1}-{closure#0}.built.after.mir index c6721085eb2e1..f50ad689f447e 100644 --- a/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#1}-{closure#0}.built.after.mir +++ b/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#1}-{closure#0}.built.after.mir @@ -34,14 +34,18 @@ yields () StorageDead(_5); StorageDead(_4); StorageDead(_3); - drop(_1) -> [return: bb1, unwind: bb2]; + drop(_1) -> [return: bb1, unwind: bb3, drop: bb2]; } bb1: { return; } - bb2 (cleanup): { + bb2: { + coroutine_drop; + } + + bb3 (cleanup): { resume; } } diff --git a/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#1}-{closure#1}.built.after.mir b/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#1}-{closure#1}.built.after.mir index 4452ae7812e36..45f67f527d0e6 100644 --- a/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#1}-{closure#1}.built.after.mir +++ b/tests/mir-opt/async_closure_shims.main-{closure#0}-{closure#1}-{closure#1}.built.after.mir @@ -34,14 +34,18 @@ yields () StorageDead(_5); StorageDead(_4); StorageDead(_3); - drop(_1) -> [return: bb1, unwind: bb2]; + drop(_1) -> [return: bb1, unwind: bb3, drop: bb2]; } bb1: { return; } - bb2 (cleanup): { + bb2: { + coroutine_drop; + } + + bb3 (cleanup): { resume; } } diff --git a/tests/ui/async-await/async-drop/async-drop-future-from-future.rs b/tests/ui/async-await/async-drop/async-drop-future-from-future.rs new file mode 100644 index 0000000000000..44dcbd14f4886 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-future-from-future.rs @@ -0,0 +1,101 @@ +//@ 
run-pass +//@ check-run-results +// Future `bar` with internal async drop `Foo` will have async drop itself. +// And we trying to drop this future in sync context (`block_on` func) + +#![feature(async_drop)] +#![allow(incomplete_features)] + +use std::mem::ManuallyDrop; + +//@ edition: 2021 + +use std::{ + future::{Future, async_drop_in_place, AsyncDrop}, + pin::{pin, Pin}, + sync::{mpsc, Arc}, + task::{Context, Poll, Wake, Waker}, +}; + +struct Foo { + my_resource_handle: usize, +} + +impl Foo { + fn new(my_resource_handle: usize) -> Self { + let out = Foo { + my_resource_handle, + }; + println!("Foo::new({})", my_resource_handle); + out + } +} + +impl Drop for Foo { + fn drop(&mut self) { + println!("Foo::drop({})", self.my_resource_handle); + } +} + +impl AsyncDrop for Foo { + async fn drop(self: Pin<&mut Self>) { + println!("Foo::async drop({})", self.my_resource_handle); + } +} + +fn main() { + block_on(bar(10)); + println!("done") +} + +async fn baz(ident_base: usize) { + let mut _first = Foo::new(ident_base); +} + +async fn bar(ident_base: usize) { + let mut _first = Foo::new(ident_base); + baz(ident_base + 1).await; +} + +fn block_on(fut_unpin: F) -> F::Output +where + F: Future, +{ + let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin)); + let mut fut: Pin<&mut F> = unsafe { + Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x) + }; + let (waker, rx) = simple_waker(); + let mut context = Context::from_waker(&waker); + let rv = loop { + match fut.as_mut().poll(&mut context) { + Poll::Ready(out) => break out, + // expect wake in polls + Poll::Pending => rx.try_recv().unwrap(), + } + }; + let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) }; + let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin); + loop { + match drop_fut.as_mut().poll(&mut context) { + Poll::Ready(()) => break, + Poll::Pending => rx.try_recv().unwrap(), + } + } + rv +} + +fn simple_waker() -> (Waker, mpsc::Receiver<()>) { + struct SimpleWaker { + tx: std::sync::mpsc::Sender<()>, + } + + impl Wake for SimpleWaker { + fn wake(self: Arc) { + self.tx.send(()).unwrap(); + } + } + + let (tx, rx) = mpsc::channel(); + (Waker::from(Arc::new(SimpleWaker { tx })), rx) +} diff --git a/tests/ui/async-await/async-drop/async-drop-future-from-future.run.stdout b/tests/ui/async-await/async-drop/async-drop-future-from-future.run.stdout new file mode 100644 index 0000000000000..c2663b3f23806 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-future-from-future.run.stdout @@ -0,0 +1,5 @@ +Foo::new(10) +Foo::new(11) +Foo::async drop(11) +Foo::async drop(10) +done diff --git a/tests/ui/async-await/async-drop/async-drop-future-in-sync-context.rs b/tests/ui/async-await/async-drop/async-drop-future-in-sync-context.rs new file mode 100644 index 0000000000000..417dce62dba90 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-future-in-sync-context.rs @@ -0,0 +1,82 @@ +//@ run-pass +//@ check-run-results +// Future `bar` with internal async drop `Foo` will have async drop itself. 
+// And we trying to drop this future in sync context (`block_on` func) + +#![feature(async_drop)] +#![allow(incomplete_features)] + +//@ edition: 2021 + +use std::{ + future::{Future, AsyncDrop}, + pin::{pin, Pin}, + sync::{mpsc, Arc}, + task::{Context, Poll, Wake, Waker}, +}; + +struct Foo { + my_resource_handle: usize, +} + +impl Foo { + fn new(my_resource_handle: usize) -> Self { + let out = Foo { + my_resource_handle, + }; + println!("Foo::new({})", my_resource_handle); + out + } +} + +impl Drop for Foo { + fn drop(&mut self) { + println!("Foo::drop({})", self.my_resource_handle); + } +} + +impl AsyncDrop for Foo { + async fn drop(self: Pin<&mut Self>) { + println!("Foo::async drop({})", self.my_resource_handle); + } +} + +fn main() { + block_on(bar(10)); + println!("done") +} + +async fn bar(ident_base: usize) { + let mut _first = Foo::new(ident_base); +} + +fn block_on(fut: F) -> F::Output +where + F: Future, +{ + let mut fut = pin!(fut); + let (waker, rx) = simple_waker(); + let mut context = Context::from_waker(&waker); + loop { + match fut.as_mut().poll(&mut context) { + Poll::Ready(out) => break out, + // expect wake in polls + Poll::Pending => rx.try_recv().unwrap(), + } + } +} + +fn simple_waker() -> (Waker, mpsc::Receiver<()>) { + struct SimpleWaker { + tx: std::sync::mpsc::Sender<()>, + } + + impl Wake for SimpleWaker { + fn wake(self: Arc) { + self.tx.send(()).unwrap(); + } + } + + let (tx, rx) = mpsc::channel(); + (Waker::from(Arc::new(SimpleWaker { tx })), rx) +} diff --git a/tests/ui/async-await/async-drop/async-drop-future-in-sync-context.run.stdout b/tests/ui/async-await/async-drop/async-drop-future-in-sync-context.run.stdout new file mode 100644 index 0000000000000..ad0b083ca0910 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-future-in-sync-context.run.stdout @@ -0,0 +1,3 @@ +Foo::new(10) +Foo::async drop(10) +done diff --git a/tests/ui/async-await/async-drop/async-drop-glue-array.rs b/tests/ui/async-await/async-drop/async-drop-glue-array.rs new file mode 100644 index 0000000000000..21c08da2337ea --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-glue-array.rs @@ -0,0 +1,112 @@ +//@ run-pass +//@ check-run-results +// struct `Foo` has both sync and async drop. +// Struct `Complex` contains three `Foo` fields and has complex async drop glue. 
+ +#![feature(async_drop)] +#![allow(incomplete_features)] + +use std::mem::ManuallyDrop; + +//@ edition: 2021 + +#[inline(never)] +fn myprintln(msg: &str, my_resource_handle: usize) { + println!("{} : {}", msg, my_resource_handle); +} + +use std::{ + future::{Future, async_drop_in_place, AsyncDrop}, + pin::{pin, Pin}, + sync::{mpsc, Arc}, + task::{Context, Poll, Wake, Waker}, +}; + +struct Foo { + my_resource_handle: usize, +} + +impl Foo { + fn new(my_resource_handle: usize) -> Self { + let out = Foo { + my_resource_handle, + }; + myprintln("Foo::new()", my_resource_handle); + out + } +} + +impl Drop for Foo { + fn drop(&mut self) { + myprintln("Foo::drop()", self.my_resource_handle); + } +} + +impl AsyncDrop for Foo { + async fn drop(self: Pin<&mut Self>) { + myprintln("Foo::async drop()", self.my_resource_handle); + } +} + +fn main() { + { + let _ = Foo::new(7); + } + println!("Middle"); + { + block_on(bar(10)); + } + println!("Done") +} + +async fn bar(ident_base: usize) { + let _vec: [Foo; 4] = [ + Foo::new(ident_base), + Foo::new(ident_base + 1), + Foo::new(ident_base + 2), + Foo::new(ident_base + 3) + ]; +} + +fn block_on(fut_unpin: F) -> F::Output +where + F: Future, +{ + let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin)); + let mut fut: Pin<&mut F> = unsafe { + Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x) + }; + let (waker, rx) = simple_waker(); + let mut context = Context::from_waker(&waker); + let rv = loop { + match fut.as_mut().poll(&mut context) { + Poll::Ready(out) => break out, + // expect wake in polls + Poll::Pending => rx.try_recv().unwrap(), + } + }; + let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) }; + let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin); + loop { + match drop_fut.as_mut().poll(&mut context) { + Poll::Ready(()) => break, + Poll::Pending => rx.try_recv().unwrap(), + } + } + rv +} + +fn simple_waker() -> (Waker, mpsc::Receiver<()>) { + struct SimpleWaker { + tx: std::sync::mpsc::Sender<()>, + } + + impl Wake for SimpleWaker { + fn wake(self: Arc) { + self.tx.send(()).unwrap(); + } + } + + let (tx, rx) = mpsc::channel(); + (Waker::from(Arc::new(SimpleWaker { tx })), rx) +} diff --git a/tests/ui/async-await/async-drop/async-drop-glue-array.run.stdout b/tests/ui/async-await/async-drop/async-drop-glue-array.run.stdout new file mode 100644 index 0000000000000..56b61103cc4e0 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-glue-array.run.stdout @@ -0,0 +1,12 @@ +Foo::new() : 7 +Foo::drop() : 7 +Middle +Foo::new() : 10 +Foo::new() : 11 +Foo::new() : 12 +Foo::new() : 13 +Foo::async drop() : 10 +Foo::async drop() : 11 +Foo::async drop() : 12 +Foo::async drop() : 13 +Done diff --git a/tests/ui/async-await/async-drop/async-drop-glue-generic.rs b/tests/ui/async-await/async-drop/async-drop-glue-generic.rs new file mode 100644 index 0000000000000..9e8ae13032423 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-glue-generic.rs @@ -0,0 +1,111 @@ +//@ run-pass +//@ check-run-results +// struct `Foo` has both sync and async drop. 
+ +#![feature(async_drop)] +#![allow(incomplete_features)] + +use std::mem::ManuallyDrop; + +//@ edition: 2021 + +#[inline(never)] +fn myprintln(msg: &str, my_resource_handle: usize) { + println!("{} : {}", msg, my_resource_handle); +} + +use std::{ + future::{Future, async_drop_in_place, AsyncDrop}, + pin::{pin, Pin}, + sync::{mpsc, Arc}, + task::{Context, Poll, Wake, Waker}, +}; + +struct Foo { + my_resource_handle: usize, +} + +impl Foo { + fn new(my_resource_handle: usize) -> Self { + let out = Foo { + my_resource_handle, + }; + myprintln("Foo::new()", my_resource_handle); + out + } +} + +impl Drop for Foo { + fn drop(&mut self) { + myprintln("Foo::drop()", self.my_resource_handle); + } +} + +impl AsyncDrop for Foo { + async fn drop(self: Pin<&mut Self>) { + myprintln("Foo::async drop()", self.my_resource_handle); + } +} + +fn main() { + { + let _ = Foo::new(7); + } + println!("Middle"); + { + block_on(bar(6, 10)); + } + println!("Done") +} + +async fn bar(_arg: T, ident_base: usize) { + let _vec: [Foo; 4] = [ + Foo::new(ident_base), + Foo::new(ident_base + 1), + Foo::new(ident_base + 2), + Foo::new(ident_base + 3) + ]; +} + +fn block_on(fut_unpin: F) -> F::Output +where + F: Future, +{ + let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin)); + let mut fut: Pin<&mut F> = unsafe { + Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x) + }; + let (waker, rx) = simple_waker(); + let mut context = Context::from_waker(&waker); + let rv = loop { + match fut.as_mut().poll(&mut context) { + Poll::Ready(out) => break out, + // expect wake in polls + Poll::Pending => rx.try_recv().unwrap(), + } + }; + let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) }; + let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin); + loop { + match drop_fut.as_mut().poll(&mut context) { + Poll::Ready(()) => break, + Poll::Pending => rx.try_recv().unwrap(), + } + } + rv +} + +fn simple_waker() -> (Waker, mpsc::Receiver<()>) { + struct SimpleWaker { + tx: std::sync::mpsc::Sender<()>, + } + + impl Wake for SimpleWaker { + fn wake(self: Arc) { + self.tx.send(()).unwrap(); + } + } + + let (tx, rx) = mpsc::channel(); + (Waker::from(Arc::new(SimpleWaker { tx })), rx) +} diff --git a/tests/ui/async-await/async-drop/async-drop-glue-generic.run.stdout b/tests/ui/async-await/async-drop/async-drop-glue-generic.run.stdout new file mode 100644 index 0000000000000..56b61103cc4e0 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-glue-generic.run.stdout @@ -0,0 +1,12 @@ +Foo::new() : 7 +Foo::drop() : 7 +Middle +Foo::new() : 10 +Foo::new() : 11 +Foo::new() : 12 +Foo::new() : 13 +Foo::async drop() : 10 +Foo::async drop() : 11 +Foo::async drop() : 12 +Foo::async drop() : 13 +Done diff --git a/tests/ui/async-await/async-drop/async-drop-glue.rs b/tests/ui/async-await/async-drop/async-drop-glue.rs new file mode 100644 index 0000000000000..dc4bb527a51ca --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-glue.rs @@ -0,0 +1,124 @@ +//@ run-pass +//@ check-run-results +// struct `Foo` has both sync and async drop. +// Struct `Complex` contains three `Foo` fields and has complex async drop glue. 
+ +#![feature(async_drop)] +#![allow(incomplete_features)] + +use std::mem::ManuallyDrop; + +//@ edition: 2021 + +#[inline(never)] +fn myprintln(msg: &str, my_resource_handle: usize) { + println!("{} : {}", msg, my_resource_handle); +} + +use std::{ + future::{Future, async_drop_in_place, AsyncDrop}, + pin::{pin, Pin}, + sync::{mpsc, Arc}, + task::{Context, Poll, Wake, Waker}, +}; + +struct Foo { + my_resource_handle: usize, +} + +#[allow(dead_code)] +struct Complex { + field1: Foo, + field2: Foo, + field3: Foo, +} + +impl Complex { + fn new(my_resource_handle: usize) -> Self { + myprintln("Complex::new()", my_resource_handle); + let field1 = Foo::new(my_resource_handle); + let field2 = Foo::new(my_resource_handle + 1); + let field3 = Foo::new(my_resource_handle + 2); + Complex { field1, field2, field3 } + } +} + +impl Foo { + fn new(my_resource_handle: usize) -> Self { + let out = Foo { + my_resource_handle, + }; + myprintln("Foo::new()", my_resource_handle); + out + } +} + +impl Drop for Foo { + fn drop(&mut self) { + myprintln("Foo::drop()", self.my_resource_handle); + } +} + +impl AsyncDrop for Foo { + async fn drop(self: Pin<&mut Self>) { + myprintln("Foo::async drop()", self.my_resource_handle); + } +} + +fn main() { + { + let _ = Foo::new(7); + } + println!("Middle"); + { + block_on(bar(10)); + } + println!("Done") +} + +async fn bar(ident_base: usize) { + let _complex = Complex::new(ident_base); +} + +fn block_on(fut_unpin: F) -> F::Output +where + F: Future, +{ + let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin)); + let mut fut: Pin<&mut F> = unsafe { + Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x) + }; + let (waker, rx) = simple_waker(); + let mut context = Context::from_waker(&waker); + let rv = loop { + match fut.as_mut().poll(&mut context) { + Poll::Ready(out) => break out, + // expect wake in polls + Poll::Pending => rx.try_recv().unwrap(), + } + }; + let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) }; + let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin); + loop { + match drop_fut.as_mut().poll(&mut context) { + Poll::Ready(()) => break, + Poll::Pending => rx.try_recv().unwrap(), + } + } + rv +} + +fn simple_waker() -> (Waker, mpsc::Receiver<()>) { + struct SimpleWaker { + tx: std::sync::mpsc::Sender<()>, + } + + impl Wake for SimpleWaker { + fn wake(self: Arc) { + self.tx.send(()).unwrap(); + } + } + + let (tx, rx) = mpsc::channel(); + (Waker::from(Arc::new(SimpleWaker { tx })), rx) +} diff --git a/tests/ui/async-await/async-drop/async-drop-glue.run.stdout b/tests/ui/async-await/async-drop/async-drop-glue.run.stdout new file mode 100644 index 0000000000000..e21a9dd34fa9b --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-glue.run.stdout @@ -0,0 +1,11 @@ +Foo::new() : 7 +Foo::drop() : 7 +Middle +Complex::new() : 10 +Foo::new() : 10 +Foo::new() : 11 +Foo::new() : 12 +Foo::async drop() : 10 +Foo::async drop() : 11 +Foo::async drop() : 12 +Done diff --git a/tests/ui/async-await/async-drop.rs b/tests/ui/async-await/async-drop/async-drop-initial.rs similarity index 53% rename from tests/ui/async-await/async-drop.rs rename to tests/ui/async-await/async-drop/async-drop-initial.rs index b1af81423cea5..80b34840c8bef 100644 --- a/tests/ui/async-await/async-drop.rs +++ b/tests/ui/async-await/async-drop/async-drop-initial.rs @@ -52,21 +52,20 @@ fn main() { let i = 13; let fut = pin!(async { - test_async_drop(Int(0), 0).await; - // FIXME(#63818): niches in coroutines are disabled. 
- // Some of these sizes should be smaller, as indicated in comments. - test_async_drop(AsyncInt(0), /*104*/ 112).await; - test_async_drop([AsyncInt(1), AsyncInt(2)], /*152*/ 168).await; - test_async_drop((AsyncInt(3), AsyncInt(4)), /*488*/ 528).await; - test_async_drop(5, 0).await; + test_async_drop(Int(0), 16).await; + test_async_drop(AsyncInt(0), 32).await; + test_async_drop([AsyncInt(1), AsyncInt(2)], 104).await; + test_async_drop((AsyncInt(3), AsyncInt(4)), 120).await; + test_async_drop(5, 16).await; let j = 42; - test_async_drop(&i, 0).await; - test_async_drop(&j, 0).await; - test_async_drop(AsyncStruct { b: AsyncInt(8), a: AsyncInt(7), i: 6 }, /*1688*/ 1792).await; - test_async_drop(ManuallyDrop::new(AsyncInt(9)), 0).await; + test_async_drop(&i, 16).await; + test_async_drop(&j, 16).await; + test_async_drop(AsyncStruct { b: AsyncInt(8), a: AsyncInt(7), i: 6 }, 168).await; + test_async_drop(ManuallyDrop::new(AsyncInt(9)), 16).await; let foo = AsyncInt(10); - test_async_drop(AsyncReference { foo: &foo }, /*104*/ 112).await; + test_async_drop(AsyncReference { foo: &foo }, 32).await; + let _ = ManuallyDrop::new(foo); let foo = AsyncInt(11); test_async_drop( @@ -75,21 +74,22 @@ fn main() { let foo = AsyncInt(10); foo }, - /*120*/ 136, + 48, ) .await; - test_async_drop(AsyncEnum::A(AsyncInt(12)), /*680*/ 736).await; - test_async_drop(AsyncEnum::B(SyncInt(13)), /*680*/ 736).await; + test_async_drop(AsyncEnum::A(AsyncInt(12)), 104).await; + test_async_drop(AsyncEnum::B(SyncInt(13)), 104).await; - test_async_drop(SyncInt(14), /*16*/ 24).await; + test_async_drop(SyncInt(14), 16).await; test_async_drop( SyncThenAsync { i: 15, a: AsyncInt(16), b: SyncInt(17), c: AsyncInt(18) }, - /*3064*/ 3296, + 120, ) .await; - let async_drop_fut = pin!(core::future::async_drop(AsyncInt(19))); + let mut ptr19 = mem::MaybeUninit::new(AsyncInt(19)); + let async_drop_fut = pin!(unsafe { async_drop_in_place(ptr19.as_mut_ptr()) }); test_idempotency(async_drop_fut).await; let foo = AsyncInt(20); @@ -101,11 +101,11 @@ fn main() { black_box(core::future::ready(())).await; foo }, - /*120*/ 136, + 48, ) .await; - test_async_drop(AsyncUnion { signed: 21 }, /*32*/ 40).await; + test_async_drop(AsyncUnion { signed: 21 }, 32).await; }); let res = fut.poll(&mut cx); assert_eq!(res, Poll::Ready(())); @@ -113,13 +113,14 @@ fn main() { struct AsyncInt(i32); +impl Drop for AsyncInt { + fn drop(&mut self) { + println!("AsyncInt::drop: {}", self.0); + } +} impl AsyncDrop for AsyncInt { - type Dropper<'a> = impl Future; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncInt::Dropper::poll: {}", self.0); - } + async fn drop(self: Pin<&mut Self>) { + println!("AsyncInt::Dropper::poll: {}", self.0); } } @@ -148,13 +149,15 @@ struct AsyncReference<'a> { foo: &'a AsyncInt, } -impl AsyncDrop for AsyncReference<'_> { - type Dropper<'a> = impl Future where Self: 'a; +impl Drop for AsyncReference<'_> { + fn drop(&mut self) { + println!("AsyncReference::drop: {}", self.foo.0); + } +} - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncReference::Dropper::poll: {}", self.foo.0); - } +impl AsyncDrop for AsyncReference<'_> { + async fn drop(self: Pin<&mut Self>) { + println!("AsyncReference::Dropper::poll: {}", self.foo.0); } } @@ -166,13 +169,15 @@ struct AsyncStruct { b: AsyncInt, } -impl AsyncDrop for AsyncStruct { - type Dropper<'a> = impl Future; +impl Drop for AsyncStruct { + fn drop(&mut self) { + println!("AsyncStruct::drop: {}", self.i); + } +} - fn 
async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!("AsyncStruct::Dropper::poll: {}", self.i); - } +impl AsyncDrop for AsyncStruct { + async fn drop(self: Pin<&mut Self>) { + println!("AsyncStruct::Dropper::poll: {}", self.i); } } @@ -181,23 +186,34 @@ enum AsyncEnum { B(SyncInt), } +impl Drop for AsyncEnum { + fn drop(&mut self) { + let new_self = match self { + AsyncEnum::A(foo) => { + println!("AsyncEnum(A)::drop: {}", foo.0); + AsyncEnum::B(SyncInt(foo.0)) + } + AsyncEnum::B(foo) => { + println!("AsyncEnum(B)::drop: {}", foo.0); + AsyncEnum::A(AsyncInt(foo.0)) + } + }; + mem::forget(mem::replace(&mut *self, new_self)); + } +} impl AsyncDrop for AsyncEnum { - type Dropper<'a> = impl Future; - - fn async_drop(mut self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - let new_self = match &*self { - AsyncEnum::A(foo) => { - println!("AsyncEnum(A)::Dropper::poll: {}", foo.0); - AsyncEnum::B(SyncInt(foo.0)) - } - AsyncEnum::B(foo) => { - println!("AsyncEnum(B)::Dropper::poll: {}", foo.0); - AsyncEnum::A(AsyncInt(foo.0)) - } - }; - mem::forget(mem::replace(&mut *self, new_self)); - } + async fn drop(mut self: Pin<&mut Self>) { + let new_self = match &*self { + AsyncEnum::A(foo) => { + println!("AsyncEnum(A)::Dropper::poll: {}", foo.0); + AsyncEnum::B(SyncInt(foo.0)) + } + AsyncEnum::B(foo) => { + println!("AsyncEnum(B)::Dropper::poll: {}", foo.0); + AsyncEnum::A(AsyncInt(foo.0)) + } + }; + mem::forget(mem::replace(&mut *self, new_self)); } } @@ -207,16 +223,21 @@ union AsyncUnion { unsigned: u32, } +impl Drop for AsyncUnion { + fn drop(&mut self) { + println!( + "AsyncUnion::drop: {}, {}", + unsafe { self.signed }, + unsafe { self.unsigned }, + ); + } +} impl AsyncDrop for AsyncUnion { - type Dropper<'a> = impl Future; - - fn async_drop(self: Pin<&mut Self>) -> Self::Dropper<'_> { - async move { - println!( - "AsyncUnion::Dropper::poll: {}, {}", - unsafe { self.signed }, - unsafe { self.unsigned }, - ); - } + async fn drop(self: Pin<&mut Self>) { + println!( + "AsyncUnion::Dropper::poll: {}, {}", + unsafe { self.signed }, + unsafe { self.unsigned }, + ); } } diff --git a/tests/ui/async-await/async-drop.run.stdout b/tests/ui/async-await/async-drop/async-drop-initial.run.stdout similarity index 100% rename from tests/ui/async-await/async-drop.run.stdout rename to tests/ui/async-await/async-drop/async-drop-initial.run.stdout diff --git a/tests/ui/async-await/async-drop/async-drop-middle-drop.rs b/tests/ui/async-await/async-drop/async-drop-middle-drop.rs new file mode 100644 index 0000000000000..772a853fe1eff --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-middle-drop.rs @@ -0,0 +1,110 @@ +//@ run-pass +//@ check-run-results +// Test async drop of coroutine `bar` (with internal async drop), +// stopped at the middle of execution, with AsyncDrop object Foo active. 
+ +#![feature(async_drop)] +#![allow(incomplete_features)] + +//@ edition: 2021 + +use std::mem::ManuallyDrop; + +use std::{ + future::{Future, async_drop_in_place, AsyncDrop}, + pin::{pin, Pin}, + sync::{mpsc, Arc}, + task::{Context, Poll, Wake, Waker}, +}; + +struct Foo { + my_resource_handle: usize, +} + +impl Foo { + fn new(my_resource_handle: usize) -> Self { + let out = Foo { + my_resource_handle, + }; + println!("Foo::new({})", my_resource_handle); + out + } +} + +impl Drop for Foo { + fn drop(&mut self) { + println!("Foo::drop({})", self.my_resource_handle); + } +} + +impl AsyncDrop for Foo { + async fn drop(self: Pin<&mut Self>) { + println!("Foo::async drop({})", self.my_resource_handle); + } +} + +fn main() { + block_on_and_drop_in_the_middle(bar(10)); + println!("done") +} + +pub struct MiddleFuture { + first_call: bool, +} +impl Future for MiddleFuture { + type Output = (); + fn poll(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll { + if self.first_call { + println!("MiddleFuture first poll"); + self.first_call = false; + Poll::Pending + } else { + println!("MiddleFuture Ready"); + Poll::Ready(()) + } + } +} + +async fn bar(ident_base: usize) { + let middle = MiddleFuture { first_call: true }; + let mut _first = Foo::new(ident_base); + middle.await; // Hanging `bar` future before Foo drop +} + +fn block_on_and_drop_in_the_middle(fut_unpin: F) -> F::Output +where + F: Future, +{ + let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin)); + let mut fut: Pin<&mut F> = unsafe { + Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x) + }; + let (waker, rx) = simple_waker(); + let mut context = Context::from_waker(&waker); + let poll1 = fut.as_mut().poll(&mut context); + assert!(poll1.is_pending()); + + let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) }; + let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin); + loop { + match drop_fut.as_mut().poll(&mut context) { + Poll::Ready(()) => break, + Poll::Pending => rx.try_recv().unwrap(), + } + } +} + +fn simple_waker() -> (Waker, mpsc::Receiver<()>) { + struct SimpleWaker { + tx: std::sync::mpsc::Sender<()>, + } + + impl Wake for SimpleWaker { + fn wake(self: Arc) { + self.tx.send(()).unwrap(); + } + } + + let (tx, rx) = mpsc::channel(); + (Waker::from(Arc::new(SimpleWaker { tx })), rx) +} diff --git a/tests/ui/async-await/async-drop/async-drop-middle-drop.run.stdout b/tests/ui/async-await/async-drop/async-drop-middle-drop.run.stdout new file mode 100644 index 0000000000000..60df7674d0a9d --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-middle-drop.run.stdout @@ -0,0 +1,4 @@ +Foo::new(10) +MiddleFuture first poll +Foo::async drop(10) +done diff --git a/tests/ui/async-await/async-drop/async-drop-open.rs b/tests/ui/async-await/async-drop/async-drop-open.rs new file mode 100644 index 0000000000000..d14eff874aeac --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-open.rs @@ -0,0 +1,127 @@ +//@ run-pass +//@ check-run-results +// struct `Foo` has both sync and async drop. +// Struct `Complex` contains three `Foo` fields and one of them is moved out. 
+ +#![feature(async_drop)] +#![allow(incomplete_features)] + +use std::mem::ManuallyDrop; + +//@ edition: 2021 + +#[inline(never)] +fn myprintln(msg: &str, my_resource_handle: usize) { + println!("{} : {}", msg, my_resource_handle); +} + +use std::{ + future::{Future, async_drop_in_place, AsyncDrop}, + pin::{pin, Pin}, + sync::{mpsc, Arc}, + task::{Context, Poll, Wake, Waker}, +}; + +struct Foo { + my_resource_handle: usize, +} + +#[allow(dead_code)] +struct Complex { + field1: Foo, + field2: Foo, + field3: Foo, +} + +impl Complex { + fn new(my_resource_handle: usize) -> Self { + myprintln("Complex::new()", my_resource_handle); + let field1 = Foo::new(my_resource_handle); + let field2 = Foo::new(my_resource_handle + 1); + let field3 = Foo::new(my_resource_handle + 2); + Complex { field1, field2, field3 } + } +} + +impl Foo { + fn new(my_resource_handle: usize) -> Self { + let out = Foo { + my_resource_handle, + }; + myprintln("Foo::new()", my_resource_handle); + out + } +} + +impl Drop for Foo { + fn drop(&mut self) { + myprintln("Foo::drop()", self.my_resource_handle); + } +} + +impl AsyncDrop for Foo { + async fn drop(self: Pin<&mut Self>) { + myprintln("Foo::async drop()", self.my_resource_handle); + } +} + +fn main() { + { + let _ = Foo::new(7); + } + println!("Middle"); + // Inside field1 and field3 of Complex must be dropped (as async drop) + // field2 must be dropped here (as sync drop) + { + let _field2 = block_on(bar(10)); + } + println!("Done") +} + +async fn bar(ident_base: usize) -> Foo { + let complex = Complex::new(ident_base); + complex.field2 +} + +fn block_on(fut_unpin: F) -> F::Output +where + F: Future, +{ + let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin)); + let mut fut: Pin<&mut F> = unsafe { + Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x) + }; + let (waker, rx) = simple_waker(); + let mut context = Context::from_waker(&waker); + let rv = loop { + match fut.as_mut().poll(&mut context) { + Poll::Ready(out) => break out, + // expect wake in polls + Poll::Pending => rx.try_recv().unwrap(), + } + }; + let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) }; + let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin); + loop { + match drop_fut.as_mut().poll(&mut context) { + Poll::Ready(()) => break, + Poll::Pending => rx.try_recv().unwrap(), + } + } + rv +} + +fn simple_waker() -> (Waker, mpsc::Receiver<()>) { + struct SimpleWaker { + tx: std::sync::mpsc::Sender<()>, + } + + impl Wake for SimpleWaker { + fn wake(self: Arc) { + self.tx.send(()).unwrap(); + } + } + + let (tx, rx) = mpsc::channel(); + (Waker::from(Arc::new(SimpleWaker { tx })), rx) +} diff --git a/tests/ui/async-await/async-drop/async-drop-open.run.stdout b/tests/ui/async-await/async-drop/async-drop-open.run.stdout new file mode 100644 index 0000000000000..e72dfd8a74390 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop-open.run.stdout @@ -0,0 +1,11 @@ +Foo::new() : 7 +Foo::drop() : 7 +Middle +Complex::new() : 10 +Foo::new() : 10 +Foo::new() : 11 +Foo::new() : 12 +Foo::async drop() : 10 +Foo::async drop() : 12 +Foo::drop() : 11 +Done diff --git a/tests/ui/async-await/async-drop/async-drop.rs b/tests/ui/async-await/async-drop/async-drop.rs new file mode 100644 index 0000000000000..2d9c5934be5d7 --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop.rs @@ -0,0 +1,105 @@ +//@ run-pass +//@ check-run-results +// struct `Foo` has both sync and async drop. +// Sync version is called in sync context, async version is called in async function. 
+ +#![feature(async_drop)] +#![allow(incomplete_features)] + +use std::mem::ManuallyDrop; + +//@ edition: 2021 + +#[inline(never)] +fn myprintln(msg: &str, my_resource_handle: usize) { + println!("{} : {}", msg, my_resource_handle); +} + +use std::{ + future::{Future, async_drop_in_place, AsyncDrop}, + pin::{pin, Pin}, + sync::{mpsc, Arc}, + task::{Context, Poll, Wake, Waker}, +}; + +struct Foo { + my_resource_handle: usize, +} + +impl Foo { + fn new(my_resource_handle: usize) -> Self { + let out = Foo { + my_resource_handle, + }; + myprintln("Foo::new()", my_resource_handle); + out + } +} + +impl Drop for Foo { + fn drop(&mut self) { + myprintln("Foo::drop()", self.my_resource_handle); + } +} + +impl AsyncDrop for Foo { + async fn drop(self: Pin<&mut Self>) { + myprintln("Foo::async drop()", self.my_resource_handle); + } +} + +fn main() { + { + let _ = Foo::new(7); + } + println!("Middle"); + block_on(bar(10)); + println!("Done") +} + +async fn bar(ident_base: usize) { + let mut _first = Foo::new(ident_base); +} + +fn block_on(fut_unpin: F) -> F::Output +where + F: Future, +{ + let mut fut_pin = pin!(ManuallyDrop::new(fut_unpin)); + let mut fut: Pin<&mut F> = unsafe { + Pin::map_unchecked_mut(fut_pin.as_mut(), |x| &mut **x) + }; + let (waker, rx) = simple_waker(); + let mut context = Context::from_waker(&waker); + let rv = loop { + match fut.as_mut().poll(&mut context) { + Poll::Ready(out) => break out, + // expect wake in polls + Poll::Pending => rx.try_recv().unwrap(), + } + }; + let drop_fut_unpin = unsafe { async_drop_in_place(fut.get_unchecked_mut()) }; + let mut drop_fut: Pin<&mut _> = pin!(drop_fut_unpin); + loop { + match drop_fut.as_mut().poll(&mut context) { + Poll::Ready(()) => break, + Poll::Pending => rx.try_recv().unwrap(), + } + } + rv +} + +fn simple_waker() -> (Waker, mpsc::Receiver<()>) { + struct SimpleWaker { + tx: std::sync::mpsc::Sender<()>, + } + + impl Wake for SimpleWaker { + fn wake(self: Arc) { + self.tx.send(()).unwrap(); + } + } + + let (tx, rx) = mpsc::channel(); + (Waker::from(Arc::new(SimpleWaker { tx })), rx) +} diff --git a/tests/ui/async-await/async-drop/async-drop.run.stdout b/tests/ui/async-await/async-drop/async-drop.run.stdout new file mode 100644 index 0000000000000..cb7d0b0fea59d --- /dev/null +++ b/tests/ui/async-await/async-drop/async-drop.run.stdout @@ -0,0 +1,6 @@ +Foo::new() : 7 +Foo::drop() : 7 +Middle +Foo::new() : 10 +Foo::async drop() : 10 +Done diff --git a/tests/crashes/132103.rs b/tests/ui/async-await/async-drop/ex-ice-132103.rs similarity index 70% rename from tests/crashes/132103.rs rename to tests/ui/async-await/async-drop/ex-ice-132103.rs index 5bf4792c44c9f..25dcb4b02c60d 100644 --- a/tests/crashes/132103.rs +++ b/tests/ui/async-await/async-drop/ex-ice-132103.rs @@ -1,5 +1,10 @@ -//@ known-bug: #132103 +//@ run-pass +//! This test used to ICE: rust-lang/rust#132103 +//! Fixed when re-work async drop to templated coroutine scheme. //@compile-flags: -Zvalidate-mir --edition=2018 -Zinline-mir=yes +#![feature(async_drop)] +#![allow(incomplete_features)] + use core::future::{async_drop_in_place, Future}; use core::mem::{self}; use core::pin::pin; @@ -17,5 +22,5 @@ fn main() { let fut = pin!(async { test_async_drop(test_async_drop(0)).await; }); - fut.poll(&mut cx); + let _ = fut.poll(&mut cx); } diff --git a/tests/ui/async-await/async-drop/ex-ice1.rs b/tests/ui/async-await/async-drop/ex-ice1.rs new file mode 100644 index 0000000000000..7c0db66162171 --- /dev/null +++ b/tests/ui/async-await/async-drop/ex-ice1.rs @@ -0,0 +1,13 @@ +//! 
This test used to ICE: rust-lang/rust#128695 +//! Fixed when re-work async drop to templated coroutine scheme. +//@ edition: 2021 + +use core::pin::{pin, Pin}; + +fn main() { + let fut = pin!(async { + let async_drop_fut = pin!(core::future::async_drop(async {})); //~ ERROR: expected function, found module `core::future::async_drop` + //~^ ERROR: module `async_drop` is private + (async_drop_fut).await; + }); +} diff --git a/tests/ui/async-await/async-drop/ex-ice1.stderr b/tests/ui/async-await/async-drop/ex-ice1.stderr new file mode 100644 index 0000000000000..68533edeb086c --- /dev/null +++ b/tests/ui/async-await/async-drop/ex-ice1.stderr @@ -0,0 +1,19 @@ +error[E0423]: expected function, found module `core::future::async_drop` + --> $DIR/ex-ice1.rs:9:35 + | +LL | let async_drop_fut = pin!(core::future::async_drop(async {})); + | ^^^^^^^^^^^^^^^^^^^^^^^^ not a function + +error[E0603]: module `async_drop` is private + --> $DIR/ex-ice1.rs:9:49 + | +LL | let async_drop_fut = pin!(core::future::async_drop(async {})); + | ^^^^^^^^^^ private module + | +note: the module `async_drop` is defined here + --> $SRC_DIR/core/src/future/mod.rs:LL:COL + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0423, E0603. +For more information about an error, try `rustc --explain E0423`. diff --git a/tests/ui/feature-gates/feature-gate-async-drop.rs b/tests/ui/feature-gates/feature-gate-async-drop.rs new file mode 100644 index 0000000000000..20511b1d8ee86 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-async-drop.rs @@ -0,0 +1,16 @@ +//@ edition: 2021 + +use std::future::AsyncDrop; //~ ERROR use of unstable library feature `async_drop` +use std::pin::Pin; + +struct Foo {} + +impl Drop for Foo { + fn drop(&mut self) {} +} + +impl AsyncDrop for Foo { //~ ERROR use of unstable library feature `async_drop` + async fn drop(self: Pin<&mut Self>) {} //~ ERROR use of unstable library feature `async_drop` +} + +fn main() {} diff --git a/tests/ui/feature-gates/feature-gate-async-drop.stderr b/tests/ui/feature-gates/feature-gate-async-drop.stderr new file mode 100644 index 0000000000000..e795c3a342269 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-async-drop.stderr @@ -0,0 +1,33 @@ +error[E0658]: use of unstable library feature `async_drop` + --> $DIR/feature-gate-async-drop.rs:3:5 + | +LL | use std::future::AsyncDrop; + | ^^^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #126482 for more information + = help: add `#![feature(async_drop)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: use of unstable library feature `async_drop` + --> $DIR/feature-gate-async-drop.rs:13:5 + | +LL | async fn drop(self: Pin<&mut Self>) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: see issue #126482 for more information + = help: add `#![feature(async_drop)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: use of unstable library feature `async_drop` + --> $DIR/feature-gate-async-drop.rs:12:6 + | +LL | impl AsyncDrop for Foo { + | ^^^^^^^^^ + | + = note: see issue #126482 for more information + = help: add `#![feature(async_drop)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0658`.
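For readers skimming the patch, the following is a condensed, hedged sketch of the surface-API change that the rewritten tests above exercise: `AsyncDrop` now exposes a single `async fn drop(self: Pin<&mut Self>)`, replacing the former `type Dropper<'a>` / `fn async_drop` pair, and the updated tests pair it with a sync `Drop` impl. The `Connection` type and its `id` field are hypothetical and used only for illustration; the snippet is not part of the patch and assumes a nightly toolchain with `#![feature(async_drop)]` (the tests in this patch build with edition 2021).

// Condensed sketch (not part of this patch) of the reworked AsyncDrop surface.
// `Connection` and `id` are hypothetical names; requires nightly + feature gate.
#![feature(async_drop)]
#![allow(incomplete_features)]

use std::future::AsyncDrop;
use std::pin::Pin;

struct Connection {
    id: usize,
}

// The rewritten tests keep a sync destructor alongside the async one.
impl Drop for Connection {
    fn drop(&mut self) {
        println!("Connection::drop({})", self.id);
    }
}

// New shape introduced by this patch: a plain `async fn drop`,
// instead of the old `type Dropper<'a>` / `fn async_drop` pair.
impl AsyncDrop for Connection {
    async fn drop(self: Pin<&mut Self>) {
        println!("Connection::async drop({})", self.id);
    }
}

fn main() {}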