//===- Coroutines.cpp -----------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the common infrastructure for Coroutine Passes.
//
//===----------------------------------------------------------------------===//

#include "CoroInternal.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Transforms/Coroutines/ABI.h"
#include "llvm/Transforms/Coroutines/CoroInstr.h"
#include "llvm/Transforms/Coroutines/CoroShape.h"
#include "llvm/Transforms/Utils/Local.h"
#include <cassert>
#include <cstddef>
#include <utility>

using namespace llvm;

// Construct the lowerer base class and initialize its members.
coro::LowererBase::LowererBase(Module &M)
    : TheModule(M), Context(M.getContext()),
      Int8Ptr(PointerType::get(Context, 0)),
      ResumeFnType(FunctionType::get(Type::getVoidTy(Context), Int8Ptr,
                                     /*isVarArg=*/false)),
      NullPtr(ConstantPointerNull::get(Int8Ptr)) {}

// Creates a call to llvm.coro.subfn.addr to obtain a resume function address.
// It generates the following:
//
//    call ptr @llvm.coro.subfn.addr(ptr %Arg, i8 %index)

CallInst *coro::LowererBase::makeSubFnCall(Value *Arg, int Index,
                                           Instruction *InsertPt) {
  auto *IndexVal = ConstantInt::get(Type::getInt8Ty(Context), Index);
  auto *Fn =
      Intrinsic::getOrInsertDeclaration(&TheModule, Intrinsic::coro_subfn_addr);

  assert(Index >= CoroSubFnInst::IndexFirst &&
         Index < CoroSubFnInst::IndexLast &&
         "makeSubFnCall: Index value out of range");
  return CallInst::Create(Fn, {Arg, IndexVal}, "", InsertPt->getIterator());
}

// We can only efficiently check for non-overloaded intrinsics.
// The following intrinsics are absent for that reason:
// coro_align, coro_size, coro_suspend_async, coro_suspend_retcon
static Intrinsic::ID NonOverloadedCoroIntrinsics[] = {
    Intrinsic::coro_alloc,
    Intrinsic::coro_async_context_alloc,
    Intrinsic::coro_async_context_dealloc,
    Intrinsic::coro_async_resume,
    Intrinsic::coro_async_size_replace,
    Intrinsic::coro_await_suspend_bool,
    Intrinsic::coro_await_suspend_handle,
    Intrinsic::coro_await_suspend_void,
    Intrinsic::coro_begin,
    Intrinsic::coro_begin_custom_abi,
    Intrinsic::coro_destroy,
    Intrinsic::coro_done,
    Intrinsic::coro_end,
    Intrinsic::coro_end_async,
    Intrinsic::coro_frame,
    Intrinsic::coro_free,
    Intrinsic::coro_id,
    Intrinsic::coro_id_async,
    Intrinsic::coro_id_retcon,
    Intrinsic::coro_id_retcon_once,
    Intrinsic::coro_noop,
    Intrinsic::coro_prepare_async,
    Intrinsic::coro_prepare_retcon,
    Intrinsic::coro_promise,
    Intrinsic::coro_resume,
    Intrinsic::coro_save,
    Intrinsic::coro_subfn_addr,
    Intrinsic::coro_suspend,
    Intrinsic::coro_is_in_ramp,
};

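// A suspend block is a block whose first instruction is one of the
// llvm.coro.suspend* intrinsics.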
bool coro::isSuspendBlock(BasicBlock *BB) {
  return isa<AnyCoroSuspendInst>(BB->front());
}

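// Check whether the module declares any of the known non-overloaded
// coroutine intrinsics.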
bool coro::declaresAnyIntrinsic(const Module &M) {
  return declaresIntrinsics(M, NonOverloadedCoroIntrinsics);
}

// Checks whether the module declares any of the listed intrinsics.
bool coro::declaresIntrinsics(const Module &M, ArrayRef<Intrinsic::ID> List) {
#ifndef NDEBUG
  for (Intrinsic::ID ID : List)
    assert(!Intrinsic::isOverloaded(ID) &&
           "Only non-overloaded intrinsics supported");
#endif

  for (Intrinsic::ID ID : List)
    if (Intrinsic::getDeclarationIfExists(&M, ID))
      return true;
  return false;
}

// Replace all coro.frees associated with the provided CoroId either with
// 'null', if Elide is true, or with its frame parameter otherwise.
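// E.g. (illustrative IR), given
//   %mem = call ptr @llvm.coro.free(token %id, ptr %frame)
//   call void @free(ptr %mem)
// the free call becomes `call void @free(ptr null)` when Elide is true, and
// `call void @free(ptr %frame)` otherwise.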
void coro::replaceCoroFree(CoroIdInst *CoroId, bool Elide) {
  SmallVector<CoroFreeInst *, 4> CoroFrees;
  for (User *U : CoroId->users())
    if (auto CF = dyn_cast<CoroFreeInst>(U))
      CoroFrees.push_back(CF);

  if (CoroFrees.empty())
    return;

  Value *Replacement =
      Elide
          ? ConstantPointerNull::get(PointerType::get(CoroId->getContext(), 0))
          : CoroFrees.front()->getFrame();

  for (CoroFreeInst *CF : CoroFrees) {
    CF->replaceAllUsesWith(Replacement);
    CF->eraseFromParent();
  }
}

void coro::suppressCoroAllocs(CoroIdInst *CoroId) {
  SmallVector<CoroAllocInst *, 4> CoroAllocs;
  for (User *U : CoroId->users())
    if (auto *CA = dyn_cast<CoroAllocInst>(U))
      CoroAllocs.push_back(CA);

  if (CoroAllocs.empty())
    return;

  coro::suppressCoroAllocs(CoroId->getContext(), CoroAllocs);
}

// Replacing llvm.coro.alloc with false suppresses dynamic frame allocation,
// since the frontend is expected to generate code that looks like:
//   id = coro.id(...)
//   mem = coro.alloc(id) ? malloc(coro.size()) : 0;
//   coro.begin(id, mem)
void coro::suppressCoroAllocs(LLVMContext &Context,
                              ArrayRef<CoroAllocInst *> CoroAllocs) {
  auto *False = ConstantInt::getFalse(Context);
  for (auto *CA : CoroAllocs) {
    CA->replaceAllUsesWith(False);
    CA->eraseFromParent();
  }
}

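// Create an llvm.coro.save call just before the given llvm.coro.suspend and
// record it as that suspend's coro.save operand (which must currently be
// unset).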
static CoroSaveInst *createCoroSave(CoroBeginInst *CoroBegin,
                                    CoroSuspendInst *SuspendInst) {
  Module *M = SuspendInst->getModule();
  auto *Fn = Intrinsic::getOrInsertDeclaration(M, Intrinsic::coro_save);
  auto *SaveInst = cast<CoroSaveInst>(
      CallInst::Create(Fn, CoroBegin, "", SuspendInst->getIterator()));
  assert(!SuspendInst->getCoroSave());
  SuspendInst->setArgOperand(0, SaveInst);
  return SaveInst;
}

// Collect "interesting" coroutine intrinsics.
void coro::Shape::analyze(Function &F,
                          SmallVectorImpl<CoroFrameInst *> &CoroFrames,
                          SmallVectorImpl<CoroSaveInst *> &UnusedCoroSaves,
                          CoroPromiseInst *&CoroPromise) {
  clear();

  bool HasFinalSuspend = false;
  bool HasUnwindCoroEnd = false;
  size_t FinalSuspendIndex = 0;

  for (Instruction &I : instructions(F)) {
    // FIXME: coro_await_suspend_* are not proper `IntrinsicInst`s
    // because they might be invoked rather than called
    if (auto AWS = dyn_cast<CoroAwaitSuspendInst>(&I)) {
      CoroAwaitSuspends.push_back(AWS);
    } else if (auto II = dyn_cast<IntrinsicInst>(&I)) {
      switch (II->getIntrinsicID()) {
      default:
        continue;
      case Intrinsic::coro_size:
        CoroSizes.push_back(cast<CoroSizeInst>(II));
        break;
      case Intrinsic::coro_align:
        CoroAligns.push_back(cast<CoroAlignInst>(II));
        break;
      case Intrinsic::coro_frame:
        CoroFrames.push_back(cast<CoroFrameInst>(II));
        break;
      case Intrinsic::coro_save:
        // After optimizations, coro_suspends using this coro_save might have
        // been removed; remember orphaned coro_saves to remove them later.
        if (II->use_empty())
          UnusedCoroSaves.push_back(cast<CoroSaveInst>(II));
        break;
      case Intrinsic::coro_suspend_async: {
        auto *Suspend = cast<CoroSuspendAsyncInst>(II);
        Suspend->checkWellFormed();
        CoroSuspends.push_back(Suspend);
        break;
      }
      case Intrinsic::coro_suspend_retcon: {
        auto Suspend = cast<CoroSuspendRetconInst>(II);
        CoroSuspends.push_back(Suspend);
        break;
      }
      case Intrinsic::coro_suspend: {
        auto Suspend = cast<CoroSuspendInst>(II);
        CoroSuspends.push_back(Suspend);
        if (Suspend->isFinal()) {
          if (HasFinalSuspend)
            report_fatal_error(
                "Only one suspend point can be marked as final");
          HasFinalSuspend = true;
          FinalSuspendIndex = CoroSuspends.size() - 1;
        }
        break;
      }
      case Intrinsic::coro_begin:
      case Intrinsic::coro_begin_custom_abi: {
        auto CB = cast<CoroBeginInst>(II);

        // Ignore coro ids that aren't pre-split.
        auto Id = dyn_cast<CoroIdInst>(CB->getId());
        if (Id && !Id->getInfo().isPreSplit())
          break;

        if (CoroBegin)
          report_fatal_error(
              "coroutine should have exactly one defining @llvm.coro.begin");
        CB->addRetAttr(Attribute::NonNull);
        CB->addRetAttr(Attribute::NoAlias);
        CB->removeFnAttr(Attribute::NoDuplicate);
        CoroBegin = CB;
        break;
      }
      case Intrinsic::coro_end_async:
      case Intrinsic::coro_end:
        CoroEnds.push_back(cast<AnyCoroEndInst>(II));
        if (auto *AsyncEnd = dyn_cast<CoroAsyncEndInst>(II)) {
          AsyncEnd->checkWellFormed();
        }

        if (CoroEnds.back()->isUnwind())
          HasUnwindCoroEnd = true;

        if (CoroEnds.back()->isFallthrough() && isa<CoroEndInst>(II)) {
          // Make sure that the fallthrough coro.end is the first element in
          // the CoroEnds vector.
          // Note: I don't think this is necessary anymore.
          if (CoroEnds.size() > 1) {
            if (CoroEnds.front()->isFallthrough())
              report_fatal_error(
                  "Only one coro.end can be marked as fallthrough");
            std::swap(CoroEnds.front(), CoroEnds.back());
          }
        }
        break;
      case Intrinsic::coro_is_in_ramp:
        CoroIsInRampInsts.push_back(cast<CoroIsInRampInst>(II));
        break;
      case Intrinsic::coro_promise:
        assert(CoroPromise == nullptr &&
               "CoroEarly must ensure coro.promise unique");
        CoroPromise = cast<CoroPromiseInst>(II);
        break;
      }
    }
  }

  // If there is no CoroBegin then this is not a coroutine.
  if (!CoroBegin)
    return;

  // Determine the ABI and initialize the lowering info.
  auto Id = CoroBegin->getId();
  switch (auto IntrID = Id->getIntrinsicID()) {
  case Intrinsic::coro_id: {
    ABI = coro::ABI::Switch;
    SwitchLowering.HasFinalSuspend = HasFinalSuspend;
    SwitchLowering.HasUnwindCoroEnd = HasUnwindCoroEnd;

    auto SwitchId = getSwitchCoroId();
    SwitchLowering.ResumeSwitch = nullptr;
    SwitchLowering.PromiseAlloca = SwitchId->getPromise();
    SwitchLowering.ResumeEntryBlock = nullptr;

    // Move final suspend to the last element in the CoroSuspends vector.
    if (SwitchLowering.HasFinalSuspend &&
        FinalSuspendIndex != CoroSuspends.size() - 1)
      std::swap(CoroSuspends[FinalSuspendIndex], CoroSuspends.back());
    break;
  }
  case Intrinsic::coro_id_async: {
    ABI = coro::ABI::Async;
    auto *AsyncId = getAsyncCoroId();
    AsyncId->checkWellFormed();
    AsyncLowering.Context = AsyncId->getStorage();
    AsyncLowering.ContextArgNo = AsyncId->getStorageArgumentIndex();
    AsyncLowering.ContextHeaderSize = AsyncId->getStorageSize();
    AsyncLowering.ContextAlignment = AsyncId->getStorageAlignment().value();
    AsyncLowering.AsyncFuncPointer = AsyncId->getAsyncFunctionPointer();
    AsyncLowering.AsyncCC = F.getCallingConv();
    break;
  }
  case Intrinsic::coro_id_retcon:
  case Intrinsic::coro_id_retcon_once: {
    ABI = IntrID == Intrinsic::coro_id_retcon ? coro::ABI::Retcon
                                              : coro::ABI::RetconOnce;
    auto ContinuationId = getRetconCoroId();
    ContinuationId->checkWellFormed();
    auto Prototype = ContinuationId->getPrototype();
    RetconLowering.ResumePrototype = Prototype;
    RetconLowering.Alloc = ContinuationId->getAllocFunction();
    RetconLowering.Dealloc = ContinuationId->getDeallocFunction();
    RetconLowering.ReturnBlock = nullptr;
    RetconLowering.IsFrameInlineInStorage = false;
    break;
  }
  default:
    llvm_unreachable("coro.begin is not dependent on a coro.id call");
  }
}

// If for some reason we were not able to find a coro.begin, bail out.
void coro::Shape::invalidateCoroutine(
    Function &F, SmallVectorImpl<CoroFrameInst *> &CoroFrames) {
  assert(!CoroBegin);
  {
    // Replace coro.frames, which are supposed to be lowered to the result of
    // coro.begin, with poison.
    auto *Poison = PoisonValue::get(PointerType::get(F.getContext(), 0));
    for (CoroFrameInst *CF : CoroFrames) {
      CF->replaceAllUsesWith(Poison);
      CF->eraseFromParent();
    }
    CoroFrames.clear();

    // Replace all coro.suspends with poison and remove related coro.saves if
    // present.
    for (AnyCoroSuspendInst *CS : CoroSuspends) {
      CS->replaceAllUsesWith(PoisonValue::get(CS->getType()));
      if (auto *CoroSave = CS->getCoroSave())
        CoroSave->eraseFromParent();
      CS->eraseFromParent();
    }
    CoroSuspends.clear();

    // Replace all coro.ends with unreachable instructions.
    for (AnyCoroEndInst *CE : CoroEnds)
      changeToUnreachable(CE);
  }
}

void coro::SwitchABI::init() {
  assert(Shape.ABI == coro::ABI::Switch);
  {
    for (auto *AnySuspend : Shape.CoroSuspends) {
      auto Suspend = dyn_cast<CoroSuspendInst>(AnySuspend);
      if (!Suspend) {
#ifndef NDEBUG
        AnySuspend->dump();
#endif
        report_fatal_error("coro.id must be paired with coro.suspend");
      }

      if (!Suspend->getCoroSave())
        createCoroSave(Shape.CoroBegin, Suspend);
    }
  }
}

void coro::AsyncABI::init() { assert(Shape.ABI == coro::ABI::Async); }

void coro::AnyRetconABI::init() {
  assert(Shape.ABI == coro::ABI::Retcon || Shape.ABI == coro::ABI::RetconOnce);
  {
    // Determine the result value types, and make sure they match up with
    // the values passed to the suspends.
    auto ResultTys = Shape.getRetconResultTypes();
    auto ResumeTys = Shape.getRetconResumeTypes();

    for (auto *AnySuspend : Shape.CoroSuspends) {
      auto Suspend = dyn_cast<CoroSuspendRetconInst>(AnySuspend);
      if (!Suspend) {
#ifndef NDEBUG
        AnySuspend->dump();
#endif
        report_fatal_error("coro.id.retcon.* must be paired with "
                           "coro.suspend.retcon");
      }

      // Check that the argument types of the suspend match the results.
      auto SI = Suspend->value_begin(), SE = Suspend->value_end();
      auto RI = ResultTys.begin(), RE = ResultTys.end();
      for (; SI != SE && RI != RE; ++SI, ++RI) {
        auto SrcTy = (*SI)->getType();
        if (SrcTy != *RI) {
          // The optimizer likes to eliminate bitcasts leading into variadic
          // calls, but that messes with our invariants. Re-insert the
          // bitcast and ignore this type mismatch.
          if (CastInst::isBitCastable(SrcTy, *RI)) {
            auto BCI = new BitCastInst(*SI, *RI, "", Suspend->getIterator());
            SI->set(BCI);
            continue;
          }

#ifndef NDEBUG
          Suspend->dump();
          Shape.RetconLowering.ResumePrototype->getFunctionType()->dump();
#endif
          report_fatal_error("argument to coro.suspend.retcon does not "
                             "match corresponding prototype function result");
        }
      }
      if (SI != SE || RI != RE) {
#ifndef NDEBUG
        Suspend->dump();
        Shape.RetconLowering.ResumePrototype->getFunctionType()->dump();
#endif
        report_fatal_error("wrong number of arguments to coro.suspend.retcon");
      }

      // Check that the result type of the suspend matches the resume types.
      Type *SResultTy = Suspend->getType();
      ArrayRef<Type *> SuspendResultTys;
      if (SResultTy->isVoidTy()) {
        // leave as empty array
      } else if (auto SResultStructTy = dyn_cast<StructType>(SResultTy)) {
        SuspendResultTys = SResultStructTy->elements();
      } else {
        // forms an ArrayRef using SResultTy, be careful
        SuspendResultTys = SResultTy;
      }
      if (SuspendResultTys.size() != ResumeTys.size()) {
#ifndef NDEBUG
        Suspend->dump();
        Shape.RetconLowering.ResumePrototype->getFunctionType()->dump();
#endif
        report_fatal_error("wrong number of results from coro.suspend.retcon");
      }
      for (size_t I = 0, E = ResumeTys.size(); I != E; ++I) {
        if (SuspendResultTys[I] != ResumeTys[I]) {
#ifndef NDEBUG
          Suspend->dump();
          Shape.RetconLowering.ResumePrototype->getFunctionType()->dump();
#endif
          report_fatal_error("result from coro.suspend.retcon does not "
                             "match corresponding prototype function param");
        }
      }
    }
  }
}

void coro::Shape::cleanCoroutine(
    SmallVectorImpl<CoroFrameInst *> &CoroFrames,
    SmallVectorImpl<CoroSaveInst *> &UnusedCoroSaves, CoroPromiseInst *PI) {
  // The coro.frame intrinsic is always lowered to the result of coro.begin.
  for (CoroFrameInst *CF : CoroFrames) {
    CF->replaceAllUsesWith(CoroBegin);
    CF->eraseFromParent();
  }
  CoroFrames.clear();

  // Remove orphaned coro.saves.
  for (CoroSaveInst *CoroSave : UnusedCoroSaves)
    CoroSave->eraseFromParent();
  UnusedCoroSaves.clear();

  if (PI) {
    PI->replaceAllUsesWith(PI->isFromPromise()
                               ? cast<Value>(CoroBegin)
                               : cast<Value>(getPromiseAlloca()));
    PI->eraseFromParent();
  }
}

static void propagateCallAttrsFromCallee(CallInst *Call, Function *Callee) {
  Call->setCallingConv(Callee->getCallingConv());
  // TODO: attributes?
}

static void addCallToCallGraph(CallGraph *CG, CallInst *Call,
                               Function *Callee) {
  if (CG)
    (*CG)[Call->getFunction()]->addCalledFunction(Call, (*CG)[Callee]);
}

Value *coro::Shape::emitAlloc(IRBuilder<> &Builder, Value *Size,
                              CallGraph *CG) const {
  switch (ABI) {
  case coro::ABI::Switch:
    llvm_unreachable("can't allocate memory in coro switch-lowering");

  case coro::ABI::Retcon:
  case coro::ABI::RetconOnce: {
    auto Alloc = RetconLowering.Alloc;
    Size = Builder.CreateIntCast(Size,
                                 Alloc->getFunctionType()->getParamType(0),
                                 /*is signed*/ false);
    auto *Call = Builder.CreateCall(Alloc, Size);
    propagateCallAttrsFromCallee(Call, Alloc);
    addCallToCallGraph(CG, Call, Alloc);
    return Call;
  }
  case coro::ABI::Async:
    llvm_unreachable("can't allocate memory in coro async-lowering");
  }
  llvm_unreachable("Unknown coro::ABI enum");
}

void coro::Shape::emitDealloc(IRBuilder<> &Builder, Value *Ptr,
                              CallGraph *CG) const {
  switch (ABI) {
  case coro::ABI::Switch:
    llvm_unreachable("can't deallocate memory in coro switch-lowering");

  case coro::ABI::Retcon:
  case coro::ABI::RetconOnce: {
    auto Dealloc = RetconLowering.Dealloc;
    Ptr = Builder.CreateBitCast(Ptr,
                                Dealloc->getFunctionType()->getParamType(0));
    auto *Call = Builder.CreateCall(Dealloc, Ptr);
    propagateCallAttrsFromCallee(Call, Dealloc);
    addCallToCallGraph(CG, Call, Dealloc);
    return;
  }
  case coro::ABI::Async:
    llvm_unreachable("can't deallocate memory in coro async-lowering");
  }
  llvm_unreachable("Unknown coro::ABI enum");
}

[[noreturn]] static void fail(const Instruction *I, const char *Reason,
                              Value *V) {
#ifndef NDEBUG
  I->dump();
  if (V) {
    errs() << "  Value: ";
    V->printAsOperand(llvm::errs());
    errs() << '\n';
  }
#endif
  report_fatal_error(Reason);
}

/// Check that the given value is a well-formed prototype for the
/// llvm.coro.id.retcon.* intrinsics.
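/// For example (illustrative), a prototype such as
///   declare {ptr, i32} @f.resume.prototype(ptr %buffer, i1 zeroext %is_normal)
/// would be acceptable for llvm.coro.id.retcon, provided the enclosing
/// coroutine also returns {ptr, i32}.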
static void checkWFRetconPrototype(const AnyCoroIdRetconInst *I, Value *V) {
  auto F = dyn_cast<Function>(V->stripPointerCasts());
  if (!F)
    fail(I, "llvm.coro.id.retcon.* prototype not a Function", V);

  auto FT = F->getFunctionType();

  if (isa<CoroIdRetconInst>(I)) {
    bool ResultOkay;
    if (FT->getReturnType()->isPointerTy()) {
      ResultOkay = true;
    } else if (auto SRetTy = dyn_cast<StructType>(FT->getReturnType())) {
      ResultOkay = (!SRetTy->isOpaque() && SRetTy->getNumElements() > 0 &&
                    SRetTy->getElementType(0)->isPointerTy());
    } else {
      ResultOkay = false;
    }
    if (!ResultOkay)
      fail(I, "llvm.coro.id.retcon prototype must return pointer as first "
              "result", F);

    if (FT->getReturnType() !=
        I->getFunction()->getFunctionType()->getReturnType())
      fail(I, "llvm.coro.id.retcon prototype return type must be same as "
              "current function return type", F);
  } else {
    // No meaningful validation to do here for llvm.coro.id.retcon.once.
  }

  if (FT->getNumParams() == 0 || !FT->getParamType(0)->isPointerTy())
    fail(I, "llvm.coro.id.retcon.* prototype must take pointer as "
            "its first parameter", F);
}

/// Check that the given value is a well-formed allocator.
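/// For example (illustrative): declare ptr @my_allocate(i32 %size).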
static void checkWFAlloc(const Instruction *I, Value *V) {
  auto F = dyn_cast<Function>(V->stripPointerCasts());
  if (!F)
    fail(I, "llvm.coro.* allocator not a Function", V);

  auto FT = F->getFunctionType();
  if (!FT->getReturnType()->isPointerTy())
    fail(I, "llvm.coro.* allocator must return a pointer", F);

  if (FT->getNumParams() != 1 || !FT->getParamType(0)->isIntegerTy())
    fail(I, "llvm.coro.* allocator must take integer as only param", F);
}

/// Check that the given value is a well-formed deallocator.
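/// For example (illustrative): declare void @my_deallocate(ptr %ptr).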
static void checkWFDealloc(const Instruction *I, Value *V) {
  auto F = dyn_cast<Function>(V->stripPointerCasts());
  if (!F)
    fail(I, "llvm.coro.* deallocator not a Function", V);

  auto FT = F->getFunctionType();
  if (!FT->getReturnType()->isVoidTy())
    fail(I, "llvm.coro.* deallocator must return void", F);

  if (FT->getNumParams() != 1 || !FT->getParamType(0)->isPointerTy())
    fail(I, "llvm.coro.* deallocator must take pointer as only param", F);
}

static void checkConstantInt(const Instruction *I, Value *V,
                             const char *Reason) {
  if (!isa<ConstantInt>(V)) {
    fail(I, Reason, V);
  }
}

void AnyCoroIdRetconInst::checkWellFormed() const {
  checkConstantInt(this, getArgOperand(SizeArg),
                   "size argument to coro.id.retcon.* must be constant");
  checkConstantInt(this, getArgOperand(AlignArg),
                   "alignment argument to coro.id.retcon.* must be constant");
  checkWFRetconPrototype(this, getArgOperand(PrototypeArg));
  checkWFAlloc(this, getArgOperand(AllocArg));
  checkWFDealloc(this, getArgOperand(DeallocArg));
}

static void checkAsyncFuncPointer(const Instruction *I, Value *V) {
  auto *AsyncFuncPtrAddr = dyn_cast<GlobalVariable>(V->stripPointerCasts());
  if (!AsyncFuncPtrAddr)
    fail(I, "llvm.coro.id.async async function pointer not a global", V);
}

void CoroIdAsyncInst::checkWellFormed() const {
  checkConstantInt(this, getArgOperand(SizeArg),
                   "size argument to coro.id.async must be constant");
  checkConstantInt(this, getArgOperand(AlignArg),
                   "alignment argument to coro.id.async must be constant");
  checkConstantInt(this, getArgOperand(StorageArg),
                   "storage argument offset to coro.id.async must be constant");
  checkAsyncFuncPointer(this, getArgOperand(AsyncFuncPtrArg));
}

static void checkAsyncContextProjectFunction(const Instruction *I,
                                             Function *F) {
  auto *FunTy = F->getFunctionType();
  if (!FunTy->getReturnType()->isPointerTy())
    fail(I,
         "llvm.coro.suspend.async resume function projection function must "
         "return a ptr type",
         F);
  if (FunTy->getNumParams() != 1 || !FunTy->getParamType(0)->isPointerTy())
    fail(I,
         "llvm.coro.suspend.async resume function projection function must "
         "take one ptr type as parameter",
         F);
}

void CoroSuspendAsyncInst::checkWellFormed() const {
  checkAsyncContextProjectFunction(this, getAsyncContextProjectionFunction());
}

void CoroAsyncEndInst::checkWellFormed() const {
  auto *MustTailCallFunc = getMustTailCallFunction();
  if (!MustTailCallFunc)
    return;
  auto *FnTy = MustTailCallFunc->getFunctionType();
  if (FnTy->getNumParams() != (arg_size() - 3))
    fail(this,
         "llvm.coro.end.async must tail call function argument type must "
         "match the tail arguments",
         MustTailCallFunc);
}