//===----- CGCoroutine.cpp - Emit LLVM Code for C++ coroutines ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of coroutines.
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "llvm/ADT/ScopeExit.h"

using namespace clang;
using namespace CodeGen;

using llvm::Value;
using llvm::BasicBlock;

namespace {
enum class AwaitKind { Init, Normal, Yield, Final };
static constexpr llvm::StringLiteral AwaitKindStr[] = {"init", "await", "yield",
                                                       "final"};
}

struct clang::CodeGen::CGCoroData {
  // The current await expression kind and how many await/yield expressions
  // have been encountered so far. These are used to generate pretty labels
  // for await expressions in LLVM IR.
  AwaitKind CurrentAwaitKind = AwaitKind::Init;
  unsigned AwaitNum = 0;
  unsigned YieldNum = 0;

  // How many co_return statements are in the coroutine. Used to decide whether
  // we need to add the equivalent of an implicit co_return; at the end of the
  // user-authored body.
  unsigned CoreturnCount = 0;

  // A branch to this block is emitted when the coroutine needs to suspend.
  llvm::BasicBlock *SuspendBB = nullptr;

  // The promise type's 'unhandled_exception' handler, if it defines one.
  Stmt *ExceptionHandler = nullptr;

  // A temporary i1 alloca that stores whether 'await_resume' threw an
  // exception. If it did, 'true' is stored in this variable, and the coroutine
  // body must be skipped. If the promise type does not define an exception
  // handler, this is null.
  llvm::Value *ResumeEHVar = nullptr;

  // Stores the jump destination just before the coroutine memory is freed.
  // This is the destination that every suspend point jumps to for the cleanup
  // branch.
  CodeGenFunction::JumpDest CleanupJD;

  // Stores the jump destination just before the final suspend. The co_return
  // statements jump to this point after calling the return_xxx promise member.
  CodeGenFunction::JumpDest FinalJD;

  // Stores the llvm.coro.id emitted in the function so that we can supply it
  // as the first argument to the coro.begin, coro.alloc and coro.free
  // intrinsics. Note: llvm.coro.id returns a token that cannot be directly
  // expressed in a builtin.
  llvm::CallInst *CoroId = nullptr;

  // Stores the llvm.coro.begin emitted in the function so that we can replace
  // all coro.frame intrinsics with the direct SSA value of coro.begin, which
  // returns the address of the coroutine frame of the current coroutine.
  llvm::CallInst *CoroBegin = nullptr;

  // Stores the last emitted coro.free for the deallocate expressions. We use
  // it to wrap the dealloc code with if (auto *mem = coro.free) dealloc(mem).
  llvm::CallInst *LastCoroFree = nullptr;

  // If coro.id came from the builtin, remember the expression to give a better
  // diagnostic. If CoroIdExpr is nullptr, the coro.id was created by
  // EmitCoroutineBody.
  CallExpr const *CoroIdExpr = nullptr;
};

// Defining these here allows us to keep CGCoroData private to this file.
clang::CodeGen::CodeGenFunction::CGCoroInfo::CGCoroInfo() {}
CodeGenFunction::CGCoroInfo::~CGCoroInfo() {}

static void createCoroData(CodeGenFunction &CGF,
                           CodeGenFunction::CGCoroInfo &CurCoro,
                           llvm::CallInst *CoroId,
                           CallExpr const *CoroIdExpr = nullptr) {
  if (CurCoro.Data) {
    if (CurCoro.Data->CoroIdExpr)
      CGF.CGM.Error(CoroIdExpr->getBeginLoc(),
                    "only one __builtin_coro_id can be used in a function");
    else if (CoroIdExpr)
      CGF.CGM.Error(CoroIdExpr->getBeginLoc(),
                    "__builtin_coro_id shall not be used in a C++ coroutine");
    else
      llvm_unreachable("EmitCoroutineBodyStatement called twice?");

    return;
  }

  CurCoro.Data = std::make_unique<CGCoroData>();
  CurCoro.Data->CoroId = CoroId;
  CurCoro.Data->CoroIdExpr = CoroIdExpr;
}

// Synthesize a pretty name for a suspend point.
static SmallString<32> buildSuspendPrefixStr(CGCoroData &Coro, AwaitKind Kind) {
  unsigned No = 0;
  switch (Kind) {
  case AwaitKind::Init:
  case AwaitKind::Final:
    break;
  case AwaitKind::Normal:
    No = ++Coro.AwaitNum;
    break;
  case AwaitKind::Yield:
    No = ++Coro.YieldNum;
    break;
  }
  SmallString<32> Prefix(AwaitKindStr[static_cast<unsigned>(Kind)]);
  if (No > 1) {
    Twine(No).toVector(Prefix);
  }
  return Prefix;
}
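
// Purely as an illustration (hypothetical generator<int> coroutine, not part
// of this file), the prefixes produced above look like:
//
//   generator<int> g() {
//     co_yield 1;   // suspend point prefix: "yield"
//     co_yield 2;   // suspend point prefix: "yield2"
//     co_await x;   // suspend point prefix: "await"
//   }
//
// which yields basic-block names such as "yield2.ready", "yield2.suspend" and
// "yield2.cleanup" in the emitted IR. Only the second and later await/yield
// points carry a number, because the counter is appended only when No > 1;
// the init and final suspends are always just "init" and "final".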

// Check if a function can throw based on the prototype noexcept. This also
// works for destructors, which are implicitly noexcept but can be marked
// noexcept(false).
static bool FunctionCanThrow(const FunctionDecl *D) {
  const auto *Proto = D->getType()->getAs<FunctionProtoType>();
  if (!Proto) {
    // The function prototype is not found; we conservatively assume throwing.
    return true;
  }
  return !isNoexceptExceptionSpec(Proto->getExceptionSpecType()) ||
         Proto->canThrow() != CT_Cannot;
}

static bool StmtCanThrow(const Stmt *S) {
  if (const auto *CE = dyn_cast<CallExpr>(S)) {
    const auto *Callee = CE->getDirectCallee();
    if (!Callee)
      // We don't have a direct callee. Conservatively assume throwing.
      return true;

    if (FunctionCanThrow(Callee))
      return true;

    // Fall through to visit the children.
  }

  if (const auto *TE = dyn_cast<CXXBindTemporaryExpr>(S)) {
    // CXXBindTemporaryExpr needs special handling here because the call to the
    // temporary's destructor is not part of `children()` and thus is not
    // covered by the fall through. We need to mark the entire statement as
    // throwing if the destructor of the temporary throws.
    const auto *Dtor = TE->getTemporary()->getDestructor();
    if (FunctionCanThrow(Dtor))
      return true;

    // Fall through to visit the children.
  }

  for (const auto *child : S->children())
    if (StmtCanThrow(child))
      return true;

  return false;
}
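
// Illustrative sketch only (hypothetical awaiter types, not from this file):
//
//   struct NothrowAwaiter {
//     bool await_ready() noexcept;
//     void await_suspend(std::coroutine_handle<>) noexcept;
//     void await_resume() noexcept;   // StmtCanThrow(resume expr) -> false
//   };
//   struct ThrowingAwaiter {
//     bool await_ready();
//     void await_suspend(std::coroutine_handle<>);
//     int await_resume();             // StmtCanThrow(resume expr) -> true
//   };
//
// The result feeds the decisions below: whether the await_suspend intrinsic is
// emitted as an invoke, and whether the init-suspend resume needs the
// exception-tracking IR.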

// Emit a suspend expression which roughly looks like:
//
//   auto && x = CommonExpr();
//   if (!x.await_ready()) {
//      llvm_coro_save();
//      llvm_coro_await_suspend(&x, frame, wrapper) (*) (**)
//      llvm_coro_suspend(); (***)
//   }
//   x.await_resume();
//
// where the result of the entire expression is the result of x.await_resume()
//
//   (*) llvm_coro_await_suspend_{void, bool, handle} is lowered to
//       wrapper(&x, frame) when it's certain not to interfere with the
//       coroutine transform. The await_suspend expression is
//       asynchronous to the coroutine body, and not all analyses
//       and transformations can handle it correctly at the moment.
//
//       The wrapper function encapsulates the x.await_suspend(...) call and
//       looks like:
//
//       auto __await_suspend_wrapper(auto& awaiter, void* frame) {
//         std::coroutine_handle<> handle(frame);
//         return awaiter.await_suspend(handle);
//       }
//
//  (**) If the return type of x.await_suspend is bool, the awaiter may veto
//       a suspend:
//
//      if (x.await_suspend(...))
//        llvm_coro_suspend();
//
//  (***) llvm_coro_suspend() encodes three possible continuations as
//        a switch instruction:
//
//  %where-to = call i8 @llvm.coro.suspend(...)
//  switch i8 %where-to, label %coro.ret [ ; jump to epilogue to suspend
//    i8 0, label %yield.ready      ; go here when resumed
//    i8 1, label %yield.cleanup    ; go here when destroyed
//  ]
//
// See llvm's docs/Coroutines.rst for more details.
//
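// As a concrete, purely illustrative example: for
//
//   co_await std::suspend_always{};
//
// await_ready() returns false, so the coroutine always takes the suspend path,
// and await_suspend() returns void, so the void flavor of
// llvm.coro.await_suspend is used below and no veto branch is emitted.
//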
namespace {
  struct LValueOrRValue {
    LValue LV;
    RValue RV;
  };
}
static LValueOrRValue emitSuspendExpression(CodeGenFunction &CGF, CGCoroData &Coro,
                                            CoroutineSuspendExpr const &S,
                                            AwaitKind Kind, AggValueSlot aggSlot,
                                            bool ignoreResult, bool forLValue) {
  auto *E = S.getCommonExpr();

  auto CommonBinder =
      CodeGenFunction::OpaqueValueMappingData::bind(CGF, S.getOpaqueValue(), E);
  auto UnbindCommonOnExit =
      llvm::make_scope_exit([&] { CommonBinder.unbind(CGF); });

  auto Prefix = buildSuspendPrefixStr(Coro, Kind);
  BasicBlock *ReadyBlock = CGF.createBasicBlock(Prefix + Twine(".ready"));
  BasicBlock *SuspendBlock = CGF.createBasicBlock(Prefix + Twine(".suspend"));
  BasicBlock *CleanupBlock = CGF.createBasicBlock(Prefix + Twine(".cleanup"));

  // If the expression is ready, there is no need to suspend.
  CGF.EmitBranchOnBoolExpr(S.getReadyExpr(), ReadyBlock, SuspendBlock, 0);

  // Otherwise, emit suspend logic.
  CGF.EmitBlock(SuspendBlock);

  auto &Builder = CGF.Builder;
  llvm::Function *CoroSave = CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_save);
  auto *NullPtr = llvm::ConstantPointerNull::get(CGF.CGM.Int8PtrTy);
  auto *SaveCall = Builder.CreateCall(CoroSave, {NullPtr});

  auto SuspendWrapper = CodeGenFunction(CGF.CGM).generateAwaitSuspendWrapper(
      CGF.CurFn->getName(), Prefix, S);

  CGF.CurCoro.InSuspendBlock = true;

  assert(CGF.CurCoro.Data && CGF.CurCoro.Data->CoroBegin &&
         "expected to be called in coroutine context");

  SmallVector<llvm::Value *, 3> SuspendIntrinsicCallArgs;
  SuspendIntrinsicCallArgs.push_back(
      CGF.getOrCreateOpaqueLValueMapping(S.getOpaqueValue()).getPointer(CGF));

  SuspendIntrinsicCallArgs.push_back(CGF.CurCoro.Data->CoroBegin);
  SuspendIntrinsicCallArgs.push_back(SuspendWrapper);

  const auto SuspendReturnType = S.getSuspendReturnType();
  llvm::Intrinsic::ID AwaitSuspendIID;

  switch (SuspendReturnType) {
  case CoroutineSuspendExpr::SuspendReturnType::SuspendVoid:
    AwaitSuspendIID = llvm::Intrinsic::coro_await_suspend_void;
    break;
  case CoroutineSuspendExpr::SuspendReturnType::SuspendBool:
    AwaitSuspendIID = llvm::Intrinsic::coro_await_suspend_bool;
    break;
  case CoroutineSuspendExpr::SuspendReturnType::SuspendHandle:
    AwaitSuspendIID = llvm::Intrinsic::coro_await_suspend_handle;
    break;
  }

  llvm::Function *AwaitSuspendIntrinsic = CGF.CGM.getIntrinsic(AwaitSuspendIID);

  // SuspendHandle might throw since it also resumes the returned handle.
  const bool AwaitSuspendCanThrow =
      SuspendReturnType ==
          CoroutineSuspendExpr::SuspendReturnType::SuspendHandle ||
      StmtCanThrow(S.getSuspendExpr());

  llvm::CallBase *SuspendRet = nullptr;
  // FIXME: add call attributes?
  if (AwaitSuspendCanThrow)
    SuspendRet =
        CGF.EmitCallOrInvoke(AwaitSuspendIntrinsic, SuspendIntrinsicCallArgs);
  else
    SuspendRet = CGF.EmitNounwindRuntimeCall(AwaitSuspendIntrinsic,
                                             SuspendIntrinsicCallArgs);

  assert(SuspendRet);
  CGF.CurCoro.InSuspendBlock = false;

  switch (SuspendReturnType) {
  case CoroutineSuspendExpr::SuspendReturnType::SuspendVoid:
    assert(SuspendRet->getType()->isVoidTy());
    break;
  case CoroutineSuspendExpr::SuspendReturnType::SuspendBool: {
    assert(SuspendRet->getType()->isIntegerTy());

    // Veto suspension if requested by a bool-returning await_suspend.
    BasicBlock *RealSuspendBlock =
        CGF.createBasicBlock(Prefix + Twine(".suspend.bool"));
    CGF.Builder.CreateCondBr(SuspendRet, RealSuspendBlock, ReadyBlock);
    CGF.EmitBlock(RealSuspendBlock);
    break;
  }
  case CoroutineSuspendExpr::SuspendReturnType::SuspendHandle: {
    assert(SuspendRet->getType()->isVoidTy());
    break;
  }
  }

  // Emit the suspend point.
  const bool IsFinalSuspend = (Kind == AwaitKind::Final);
  llvm::Function *CoroSuspend =
      CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_suspend);
  auto *SuspendResult = Builder.CreateCall(
      CoroSuspend, {SaveCall, Builder.getInt1(IsFinalSuspend)});

  // Create a switch capturing three possible continuations.
  auto *Switch = Builder.CreateSwitch(SuspendResult, Coro.SuspendBB, 2);
  Switch->addCase(Builder.getInt8(0), ReadyBlock);
  Switch->addCase(Builder.getInt8(1), CleanupBlock);

  // Emit cleanup for this suspend point.
  CGF.EmitBlock(CleanupBlock);
  CGF.EmitBranchThroughCleanup(Coro.CleanupJD);

  // Emit await_resume expression.
  CGF.EmitBlock(ReadyBlock);

  // Exception handling requires additional IR. If the 'await_resume' function
  // is marked as 'noexcept', we avoid generating this additional IR.
  CXXTryStmt *TryStmt = nullptr;
  if (Coro.ExceptionHandler && Kind == AwaitKind::Init &&
      StmtCanThrow(S.getResumeExpr())) {
    Coro.ResumeEHVar =
        CGF.CreateTempAlloca(Builder.getInt1Ty(), Prefix + Twine("resume.eh"));
    Builder.CreateFlagStore(true, Coro.ResumeEHVar);

    auto Loc = S.getResumeExpr()->getExprLoc();
    auto *Catch = new (CGF.getContext())
        CXXCatchStmt(Loc, /*exDecl=*/nullptr, Coro.ExceptionHandler);
    auto *TryBody = CompoundStmt::Create(CGF.getContext(), S.getResumeExpr(),
                                         FPOptionsOverride(), Loc, Loc);
    TryStmt = CXXTryStmt::Create(CGF.getContext(), Loc, TryBody, Catch);
    CGF.EnterCXXTryStmt(*TryStmt);
    CGF.EmitStmt(TryBody);
    // We don't use EmitCXXTryStmt here because we need to emit the store to
    // ResumeEHVar, which is not part of the body.
    Builder.CreateFlagStore(false, Coro.ResumeEHVar);
    CGF.ExitCXXTryStmt(*TryStmt);
    LValueOrRValue Res;
    // We are not supposed to obtain the value from the init suspend's
    // await_resume().
    Res.RV = RValue::getIgnored();
    return Res;
  }

  LValueOrRValue Res;
  if (forLValue)
    Res.LV = CGF.EmitLValue(S.getResumeExpr());
  else
    Res.RV = CGF.EmitAnyExpr(S.getResumeExpr(), aggSlot, ignoreResult);

  return Res;
}

RValue CodeGenFunction::EmitCoawaitExpr(const CoawaitExpr &E,
                                        AggValueSlot aggSlot,
                                        bool ignoreResult) {
  return emitSuspendExpression(*this, *CurCoro.Data, E,
                               CurCoro.Data->CurrentAwaitKind, aggSlot,
                               ignoreResult, /*forLValue*/false).RV;
}
RValue CodeGenFunction::EmitCoyieldExpr(const CoyieldExpr &E,
                                        AggValueSlot aggSlot,
                                        bool ignoreResult) {
  return emitSuspendExpression(*this, *CurCoro.Data, E, AwaitKind::Yield,
                               aggSlot, ignoreResult, /*forLValue*/false).RV;
}

void CodeGenFunction::EmitCoreturnStmt(CoreturnStmt const &S) {
  ++CurCoro.Data->CoreturnCount;
  const Expr *RV = S.getOperand();
  if (RV && RV->getType()->isVoidType() && !isa<InitListExpr>(RV)) {
    // Make sure to evaluate the non-init-list operand of a co_return
    // with a void expression for its side effects.
    RunCleanupsScope cleanupScope(*this);
    EmitIgnoredExpr(RV);
  }
  EmitStmt(S.getPromiseCall());
  EmitBranchThroughCleanup(CurCoro.Data->FinalJD);
}

#ifndef NDEBUG
static QualType getCoroutineSuspendExprReturnType(const ASTContext &Ctx,
                                                  const CoroutineSuspendExpr *E) {
  const auto *RE = E->getResumeExpr();
  // Is it possible for RE to be a CXXBindTemporaryExpr wrapping
  // a MemberCallExpr?
  assert(isa<CallExpr>(RE) && "unexpected suspend expression type");
  return cast<CallExpr>(RE)->getCallReturnType(Ctx);
}
#endif

llvm::Function *
CodeGenFunction::generateAwaitSuspendWrapper(Twine const &CoroName,
                                             Twine const &SuspendPointName,
                                             CoroutineSuspendExpr const &S) {
  std::string FuncName =
      (CoroName + ".__await_suspend_wrapper__" + SuspendPointName).str();

  ASTContext &C = getContext();

  FunctionArgList args;

  ImplicitParamDecl AwaiterDecl(C, C.VoidPtrTy, ImplicitParamKind::Other);
  ImplicitParamDecl FrameDecl(C, C.VoidPtrTy, ImplicitParamKind::Other);
  QualType ReturnTy = S.getSuspendExpr()->getType();

  args.push_back(&AwaiterDecl);
  args.push_back(&FrameDecl);

  const CGFunctionInfo &FI =
      CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, args);

  llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);

  llvm::Function *Fn = llvm::Function::Create(
      LTy, llvm::GlobalValue::PrivateLinkage, FuncName, &CGM.getModule());

  Fn->addParamAttr(0, llvm::Attribute::AttrKind::NonNull);
  Fn->addParamAttr(0, llvm::Attribute::AttrKind::NoUndef);

  Fn->addParamAttr(1, llvm::Attribute::AttrKind::NoUndef);

  Fn->setMustProgress();
  Fn->addFnAttr(llvm::Attribute::AttrKind::AlwaysInline);

  StartFunction(GlobalDecl(), ReturnTy, Fn, FI, args);

  // FIXME: add TBAA metadata to the loads
  llvm::Value *AwaiterPtr = Builder.CreateLoad(GetAddrOfLocalVar(&AwaiterDecl));
  auto AwaiterLValue =
      MakeNaturalAlignAddrLValue(AwaiterPtr, AwaiterDecl.getType());

  CurAwaitSuspendWrapper.FramePtr =
      Builder.CreateLoad(GetAddrOfLocalVar(&FrameDecl));

  auto AwaiterBinder = CodeGenFunction::OpaqueValueMappingData::bind(
      *this, S.getOpaqueValue(), AwaiterLValue);

  auto *SuspendRet = EmitScalarExpr(S.getSuspendExpr());

  auto UnbindCommonOnExit =
      llvm::make_scope_exit([&] { AwaiterBinder.unbind(*this); });
  if (SuspendRet != nullptr) {
    Fn->addRetAttr(llvm::Attribute::AttrKind::NoUndef);
    Builder.CreateStore(SuspendRet, ReturnValue);
  }

  CurAwaitSuspendWrapper.FramePtr = nullptr;
  FinishFunction();
  return Fn;
}
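
// Illustrative sketch only (hypothetical coroutine "foo" and awaiter): for the
// first plain co_await in a coroutine emitted as @foo, the wrapper built above
// is a private, always-inline function shaped roughly like
//
//   define private ... @foo.__await_suspend_wrapper__await(
//       ptr nonnull noundef %awaiter, ptr noundef %frame)
//
// whose body evaluates awaiter.await_suspend(std::coroutine_handle<>(frame))
// and, when await_suspend returns a value (e.g. bool), stores that value as
// the wrapper's return value.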

LValue
CodeGenFunction::EmitCoawaitLValue(const CoawaitExpr *E) {
  assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");
  return emitSuspendExpression(*this, *CurCoro.Data, *E,
                               CurCoro.Data->CurrentAwaitKind,
                               AggValueSlot::ignored(),
                               /*ignoreResult*/false, /*forLValue*/true).LV;
}

LValue
CodeGenFunction::EmitCoyieldLValue(const CoyieldExpr *E) {
  assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");
  return emitSuspendExpression(*this, *CurCoro.Data, *E,
                               AwaitKind::Yield, AggValueSlot::ignored(),
                               /*ignoreResult*/false, /*forLValue*/true).LV;
}

// Hunts for the parameter reference in the parameter copy/move declaration.
namespace {
struct GetParamRef : public StmtVisitor<GetParamRef> {
public:
  DeclRefExpr *Expr = nullptr;
  GetParamRef() {}
  void VisitDeclRefExpr(DeclRefExpr *E) {
    assert(Expr == nullptr && "multiple declrefs in param move");
    Expr = E;
  }
  void VisitStmt(Stmt *S) {
    for (auto *C : S->children()) {
      if (C)
        Visit(C);
    }
  }
};
}

// This class replaces references to parameters with references to their copies
// by changing the addresses in CGF.LocalDeclMap, and restores the original
// values in its destructor.

namespace {
  struct ParamReferenceReplacerRAII {
    CodeGenFunction::DeclMapTy SavedLocals;
    CodeGenFunction::DeclMapTy &LocalDeclMap;

    ParamReferenceReplacerRAII(CodeGenFunction::DeclMapTy &LocalDeclMap)
        : LocalDeclMap(LocalDeclMap) {}

    void addCopy(DeclStmt const *PM) {
      // Figure out what param it refers to.

      assert(PM->isSingleDecl());
      VarDecl const *VD = static_cast<VarDecl const *>(PM->getSingleDecl());
      Expr const *InitExpr = VD->getInit();
      GetParamRef Visitor;
      Visitor.Visit(const_cast<Expr *>(InitExpr));
      assert(Visitor.Expr);
      DeclRefExpr *DREOrig = Visitor.Expr;
      auto *PD = DREOrig->getDecl();

      auto it = LocalDeclMap.find(PD);
      assert(it != LocalDeclMap.end() && "parameter is not found");
      SavedLocals.insert({PD, it->second});

      auto copyIt = LocalDeclMap.find(VD);
      assert(copyIt != LocalDeclMap.end() && "parameter copy is not found");
      it->second = copyIt->getSecond();
    }

    ~ParamReferenceReplacerRAII() {
      for (auto &&SavedLocal : SavedLocals) {
        LocalDeclMap.insert({SavedLocal.first, SavedLocal.second});
      }
    }
  };
}
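
// Purely illustrative example (hypothetical coroutine type and names): for
//
//   task f(std::string s) { co_return; }
//
// the CoroutineBodyStmt contains a param-move DeclStmt that is conceptually
//
//   std::string __s_copy = static_cast<std::string &&>(s);
//
// GetParamRef finds the DeclRefExpr for 's' inside that initializer, and
// addCopy() remaps 's' in LocalDeclMap to the address of the copy, so the
// user-authored body reads the frame-resident copy rather than the original
// parameter.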

// For the WinEH exception representation, the backend needs to know which
// funclet a coro.end belongs to. That information is passed in a funclet
// bundle.
static SmallVector<llvm::OperandBundleDef, 1>
getBundlesForCoroEnd(CodeGenFunction &CGF) {
  SmallVector<llvm::OperandBundleDef, 1> BundleList;

  if (llvm::Instruction *EHPad = CGF.CurrentFuncletPad)
    BundleList.emplace_back("funclet", EHPad);

  return BundleList;
}

namespace {
// We will insert coro.end to cut any of the destructors for objects that
// do not need to be destroyed once the coroutine is resumed.
// See llvm/docs/Coroutines.rst for more details about coro.end.
struct CallCoroEnd final : public EHScopeStack::Cleanup {
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    auto &CGM = CGF.CGM;
    auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
    llvm::Function *CoroEndFn = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
    // See if we have a funclet bundle to associate coro.end with. (WinEH)
    auto Bundles = getBundlesForCoroEnd(CGF);
    auto *CoroEnd = CGF.Builder.CreateCall(
        CoroEndFn,
        {NullPtr, CGF.Builder.getTrue(),
         llvm::ConstantTokenNone::get(CoroEndFn->getContext())},
        Bundles);
    if (Bundles.empty()) {
      // Otherwise (landingpad model), create a conditional branch that leads
      // either to a cleanup block or a block with an EH resume instruction.
      auto *ResumeBB = CGF.getEHResumeBlock(/*isCleanup=*/true);
      auto *CleanupContBB = CGF.createBasicBlock("cleanup.cont");
      CGF.Builder.CreateCondBr(CoroEnd, ResumeBB, CleanupContBB);
      CGF.EmitBlock(CleanupContBB);
    }
  }
};
}

namespace {
// Make sure to call coro.delete on scope exit.
struct CallCoroDelete final : public EHScopeStack::Cleanup {
  Stmt *Deallocate;

  // Emit "if (coro.free(CoroId, CoroBegin)) Deallocate;"

  // Note: That deallocation will be emitted twice: once for a normal exit and
  // once for exceptional exit. This usage is safe because Deallocate does not
  // contain any declarations. The SubStmtBuilder::makeNewAndDeleteExpr()
  // builds a single call to a deallocation function which is safe to emit
  // multiple times.
  void Emit(CodeGenFunction &CGF, Flags) override {
    // Remember the current point, as we are going to emit the deallocation
    // code first in order to get to the coro.free instruction that is an
    // argument to the delete call.
    BasicBlock *SaveInsertBlock = CGF.Builder.GetInsertBlock();

    auto *FreeBB = CGF.createBasicBlock("coro.free");
    CGF.EmitBlock(FreeBB);
    CGF.EmitStmt(Deallocate);

    auto *AfterFreeBB = CGF.createBasicBlock("after.coro.free");
    CGF.EmitBlock(AfterFreeBB);

    // We should have captured coro.free from the emission of deallocate.
    auto *CoroFree = CGF.CurCoro.Data->LastCoroFree;
    if (!CoroFree) {
      CGF.CGM.Error(Deallocate->getBeginLoc(),
                    "Deallocation expression does not refer to coro.free");
      return;
    }

    // Get back to the block where we were originally and move coro.free there.
    auto *InsertPt = SaveInsertBlock->getTerminator();
    CoroFree->moveBefore(InsertPt->getIterator());
    CGF.Builder.SetInsertPoint(InsertPt);

    // Add if (auto *mem = coro.free) Deallocate;
    auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
    auto *Cond = CGF.Builder.CreateICmpNE(CoroFree, NullPtr);
    CGF.Builder.CreateCondBr(Cond, FreeBB, AfterFreeBB);

    // No longer need the old terminator.
    InsertPt->eraseFromParent();
    CGF.Builder.SetInsertPoint(AfterFreeBB);
  }
  explicit CallCoroDelete(Stmt *DeallocStmt) : Deallocate(DeallocStmt) {}
};
}
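
// For illustration only, the IR around this cleanup ends up shaped roughly
// like (simplified):
//
//   %mem = call ptr @llvm.coro.free(token %id, ptr %hdl)
//   %cond = icmp ne ptr %mem, null
//   br i1 %cond, label %coro.free, label %after.coro.free
//
// coro.free:
//   ; whatever Deallocate lowers to, e.g. a call to operator delete(%mem)
//   br label %after.coro.free
//
// after.coro.free:
//   ...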

namespace {
struct GetReturnObjectManager {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  const CoroutineBodyStmt &S;
  // When true, performs RVO for the return object.
  bool DirectEmit = false;

  Address GroActiveFlag;
  CodeGenFunction::AutoVarEmission GroEmission;

  GetReturnObjectManager(CodeGenFunction &CGF, const CoroutineBodyStmt &S)
      : CGF(CGF), Builder(CGF.Builder), S(S), GroActiveFlag(Address::invalid()),
        GroEmission(CodeGenFunction::AutoVarEmission::invalid()) {
    // The call to get_return_object is sequenced before the call to
    // initial_suspend and is invoked at most once, but there are caveats
    // regarding whether the prvalue result object may be initialized
    // directly/eagerly or delayed, depending on the types involved.
    //
    // More info at https://github.com/cplusplus/papers/issues/1414
    //
    // The general cases (see the illustrative example after this class):
    // 1. Same type of get_return_object and coroutine return type (direct
    //    emission):
    //    - Constructed in the return slot.
    // 2. Different types (delayed emission):
    //    - A temporary object is constructed prior to the initial suspend,
    //      initialized with a call to get_return_object().
    //    - When the coroutine needs to return to the caller, the coroutine's
    //      return value is initialized with the expiring value of the
    //      temporary obtained above.
    //
    // Direct emission for void-returning coroutines or GROs.
    DirectEmit = [&]() {
      auto *RVI = S.getReturnValueInit();
      assert(RVI && "expected RVI");
      auto GroType = RVI->getType();
      return CGF.getContext().hasSameType(GroType, CGF.FnRetTy);
    }();
  }

  // The gro variable has to outlive the coroutine frame and the coroutine
  // promise, but it can only be initialized after the coroutine promise was
  // created; thus, we split its emission into two parts. EmitGroAlloca emits
  // an alloca and sets up the cleanups. Later, when the coroutine promise is
  // available, we initialize the gro and set the flag that the cleanup is now
  // active.
  void EmitGroAlloca() {
    if (DirectEmit)
      return;

    auto *GroDeclStmt = dyn_cast_or_null<DeclStmt>(S.getResultDecl());
    if (!GroDeclStmt) {
      // If get_return_object returns void, no need to do an alloca.
      return;
    }

    auto *GroVarDecl = cast<VarDecl>(GroDeclStmt->getSingleDecl());

    // Set the GRO flag to indicate that it is not initialized yet.
    GroActiveFlag = CGF.CreateTempAlloca(Builder.getInt1Ty(), CharUnits::One(),
                                         "gro.active");
    Builder.CreateStore(Builder.getFalse(), GroActiveFlag);

    GroEmission = CGF.EmitAutoVarAlloca(*GroVarDecl);
    auto *GroAlloca = dyn_cast_or_null<llvm::AllocaInst>(
        GroEmission.getOriginalAllocatedAddress().getPointer());
    assert(GroAlloca && "expected alloca to be emitted");
    GroAlloca->setMetadata(llvm::LLVMContext::MD_coro_outside_frame,
                           llvm::MDNode::get(CGF.CGM.getLLVMContext(), {}));

    // Remember the top of EHStack before emitting the cleanup.
    auto old_top = CGF.EHStack.stable_begin();
    CGF.EmitAutoVarCleanups(GroEmission);
    auto top = CGF.EHStack.stable_begin();

    // Make the cleanup conditional on gro.active.
    for (auto b = CGF.EHStack.find(top), e = CGF.EHStack.find(old_top); b != e;
         b++) {
      if (auto *Cleanup = dyn_cast<EHCleanupScope>(&*b)) {
        assert(!Cleanup->hasActiveFlag() && "cleanup already has active flag?");
        Cleanup->setActiveFlag(GroActiveFlag);
        Cleanup->setTestFlagInEHCleanup();
        Cleanup->setTestFlagInNormalCleanup();
      }
    }
  }

  void EmitGroInit() {
    if (DirectEmit) {
      // ReturnValue should be valid as long as the coroutine's return type
      // is not void. The assertion could help us to reduce the check later.
      assert(CGF.ReturnValue.isValid() == (bool)S.getReturnStmt());
      // Now we have the promise, initialize the GRO.
      // We need to emit `get_return_object` first. According to
      // [dcl.fct.def.coroutine]p7:
      //   The call to get_return_object is sequenced before the call to
      //   initial_suspend and is invoked at most once.
      //
      // So we can't emit the return value when we emit the return statement;
      // otherwise the call to get_return_object wouldn't come before
      // initial_suspend.
      if (CGF.ReturnValue.isValid()) {
        CGF.EmitAnyExprToMem(S.getReturnValue(), CGF.ReturnValue,
                             S.getReturnValue()->getType().getQualifiers(),
                             /*IsInit*/ true);
      }
      return;
    }

    if (!GroActiveFlag.isValid()) {
      // No Gro variable was allocated. Simply emit the call to
      // get_return_object.
      CGF.EmitStmt(S.getResultDecl());
      return;
    }

    CGF.EmitAutoVarInit(GroEmission);
    Builder.CreateStore(Builder.getTrue(), GroActiveFlag);
  }
};
} // namespace
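
// Purely illustrative example (hypothetical types, not part of this file):
//
//   struct task {
//     struct promise_type {
//       task get_return_object();      // same type as the coroutine's return
//       /* ... */                      // type -> DirectEmit == true
//     };
//   };
//
//   struct lazy {
//     struct promise_type {
//       struct proxy { operator lazy(); };
//       proxy get_return_object();     // different type -> DirectEmit ==
//       /* ... */                      // false, delayed emission via gro
//     };
//   };
//
// In the first case the return object is constructed directly in the return
// slot; in the second, EmitGroAlloca creates the gro temporary (guarded by
// gro.active) and the coroutine's return value is later initialized from it.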

static void emitBodyAndFallthrough(CodeGenFunction &CGF,
                                   const CoroutineBodyStmt &S, Stmt *Body) {
  CGF.EmitStmt(Body);
  const bool CanFallthrough = CGF.Builder.GetInsertBlock();
  if (CanFallthrough)
    if (Stmt *OnFallthrough = S.getFallthroughHandler())
      CGF.EmitStmt(OnFallthrough);
}

void CodeGenFunction::EmitCoroutineBody(const CoroutineBodyStmt &S) {
  auto *NullPtr = llvm::ConstantPointerNull::get(Builder.getPtrTy());
  auto &TI = CGM.getContext().getTargetInfo();
  unsigned NewAlign = TI.getNewAlign() / TI.getCharWidth();

  auto *EntryBB = Builder.GetInsertBlock();
  auto *AllocBB = createBasicBlock("coro.alloc");
  auto *InitBB = createBasicBlock("coro.init");
  auto *FinalBB = createBasicBlock("coro.final");
  auto *RetBB = createBasicBlock("coro.ret");

  auto *CoroId = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_id),
      {Builder.getInt32(NewAlign), NullPtr, NullPtr, NullPtr});
  createCoroData(*this, CurCoro, CoroId);
  CurCoro.Data->SuspendBB = RetBB;
  assert(ShouldEmitLifetimeMarkers &&
         "Must emit lifetime intrinsics for coroutines");

  // The backend is allowed to elide memory allocations; to help it, emit
  //   auto mem = coro.alloc() ? 0 : ... allocation code ...;
  auto *CoroAlloc = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_alloc), {CoroId});

  Builder.CreateCondBr(CoroAlloc, AllocBB, InitBB);

  EmitBlock(AllocBB);
  auto *AllocateCall = EmitScalarExpr(S.getAllocate());
  auto *AllocOrInvokeContBB = Builder.GetInsertBlock();

  // Handle allocation failure if 'ReturnStmtOnAllocFailure' was provided.
  if (auto *RetOnAllocFailure = S.getReturnStmtOnAllocFailure()) {
    auto *RetOnFailureBB = createBasicBlock("coro.ret.on.failure");

    // See if allocation was successful.
    auto *NullPtr = llvm::ConstantPointerNull::get(Int8PtrTy);
    auto *Cond = Builder.CreateICmpNE(AllocateCall, NullPtr);
    // Expect the allocation to be successful.
    emitCondLikelihoodViaExpectIntrinsic(Cond, Stmt::LH_Likely);
    Builder.CreateCondBr(Cond, InitBB, RetOnFailureBB);

    // If not, return the OnAllocFailure object.
    EmitBlock(RetOnFailureBB);
    EmitStmt(RetOnAllocFailure);
  } else {
    Builder.CreateBr(InitBB);
  }

  EmitBlock(InitBB);

  // Pass the result of the allocation to coro.begin.
  auto *Phi = Builder.CreatePHI(VoidPtrTy, 2);
  Phi->addIncoming(NullPtr, EntryBB);
  Phi->addIncoming(AllocateCall, AllocOrInvokeContBB);
  auto *CoroBegin = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_begin), {CoroId, Phi});
  CurCoro.Data->CoroBegin = CoroBegin;

  GetReturnObjectManager GroManager(*this, S);
  GroManager.EmitGroAlloca();

  CurCoro.Data->CleanupJD = getJumpDestInCurrentScope(RetBB);
  {
    CGDebugInfo *DI = getDebugInfo();
    ParamReferenceReplacerRAII ParamReplacer(LocalDeclMap);
    CodeGenFunction::RunCleanupsScope ResumeScope(*this);
    EHStack.pushCleanup<CallCoroDelete>(NormalAndEHCleanup, S.getDeallocate());

    // Create a mapping between parameters and copy-params for the coroutine
    // function.
    llvm::ArrayRef<const Stmt *> ParamMoves = S.getParamMoves();
    assert(
        (ParamMoves.size() == 0 || (ParamMoves.size() == FnArgs.size())) &&
        "ParamMoves and FnArgs should be the same size for coroutine function");
    if (ParamMoves.size() == FnArgs.size() && DI)
      for (const auto Pair : llvm::zip(FnArgs, ParamMoves))
        DI->getCoroutineParameterMappings().insert(
            {std::get<0>(Pair), std::get<1>(Pair)});

    // Create parameter copies. We do it before creating a promise, since an
    // evolution of the coroutine TS may allow the promise constructor to
    // observe parameter copies.
    for (const ParmVarDecl *Parm : FnArgs) {
      // If the original param is in an alloca, exclude it from the coroutine
      // frame. The parameter copy will be part of the frame, but the original
      // parameter memory should remain on the stack. This is necessary to
      // ensure that parameters destroyed in callees, as with `trivial_abi` or
      // in the MSVC C++ ABI, are appropriately destroyed after setting up the
      // coroutine.
      Address ParmAddr = GetAddrOfLocalVar(Parm);
      if (auto *ParmAlloca =
              dyn_cast<llvm::AllocaInst>(ParmAddr.getBasePointer())) {
        ParmAlloca->setMetadata(llvm::LLVMContext::MD_coro_outside_frame,
                                llvm::MDNode::get(CGM.getLLVMContext(), {}));
      }
    }
    for (auto *PM : S.getParamMoves()) {
      EmitStmt(PM);
      ParamReplacer.addCopy(cast<DeclStmt>(PM));
      // TODO: if (CoroParam(...)) need to surround ctor and dtor
      // for the copy, so that llvm can elide it if the copy is
      // not needed.
    }

    EmitStmt(S.getPromiseDeclStmt());

    Address PromiseAddr = GetAddrOfLocalVar(S.getPromiseDecl());
    auto *PromiseAddrVoidPtr =
        new llvm::BitCastInst(PromiseAddr.emitRawPointer(*this), VoidPtrTy, "",
                              CoroId->getIterator());
    // Update CoroId to refer to the promise. We could not do it earlier
    // because the promise local variable was not emitted yet.
    CoroId->setArgOperand(1, PromiseAddrVoidPtr);

    // Now we have the promise, initialize the GRO.
    GroManager.EmitGroInit();

    EHStack.pushCleanup<CallCoroEnd>(EHCleanup);

    CurCoro.Data->CurrentAwaitKind = AwaitKind::Init;
    CurCoro.Data->ExceptionHandler = S.getExceptionHandler();
    EmitStmt(S.getInitSuspendStmt());
    CurCoro.Data->FinalJD = getJumpDestInCurrentScope(FinalBB);

    CurCoro.Data->CurrentAwaitKind = AwaitKind::Normal;

    if (CurCoro.Data->ExceptionHandler) {
      // If we generated IR to record whether an exception was thrown from
      // 'await_resume', then use that IR to determine whether the coroutine
      // body should be skipped.
      // If we didn't generate the IR (perhaps because 'await_resume' was
      // marked as 'noexcept'), then we skip this check.
      BasicBlock *ContBB = nullptr;
      if (CurCoro.Data->ResumeEHVar) {
        BasicBlock *BodyBB = createBasicBlock("coro.resumed.body");
        ContBB = createBasicBlock("coro.resumed.cont");
        Value *SkipBody = Builder.CreateFlagLoad(CurCoro.Data->ResumeEHVar,
                                                 "coro.resumed.eh");
        Builder.CreateCondBr(SkipBody, ContBB, BodyBB);
        EmitBlock(BodyBB);
      }

      auto Loc = S.getBeginLoc();
      CXXCatchStmt Catch(Loc, /*exDecl=*/nullptr,
                         CurCoro.Data->ExceptionHandler);
      auto *TryStmt =
          CXXTryStmt::Create(getContext(), Loc, S.getBody(), &Catch);

      EnterCXXTryStmt(*TryStmt);
      emitBodyAndFallthrough(*this, S, TryStmt->getTryBlock());
      ExitCXXTryStmt(*TryStmt);

      if (ContBB)
        EmitBlock(ContBB);
    } else {
      emitBodyAndFallthrough(*this, S, S.getBody());
    }

    // See if we need to generate the final suspend.
    const bool CanFallthrough = Builder.GetInsertBlock();
    const bool HasCoreturns = CurCoro.Data->CoreturnCount > 0;
    if (CanFallthrough || HasCoreturns) {
      EmitBlock(FinalBB);
      CurCoro.Data->CurrentAwaitKind = AwaitKind::Final;
      EmitStmt(S.getFinalSuspendStmt());
    } else {
      // We don't need FinalBB. Emit it to make sure the block is deleted.
      EmitBlock(FinalBB, /*IsFinished=*/true);
    }
  }

  EmitBlock(RetBB);
  // Emit coro.end before getReturnStmt (and parameter destructors), since
  // the resume and destroy parts of the coroutine should not include them.
  llvm::Function *CoroEnd = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
  Builder.CreateCall(CoroEnd,
                     {NullPtr, Builder.getFalse(),
                      llvm::ConstantTokenNone::get(CoroEnd->getContext())});

  if (Stmt *Ret = S.getReturnStmt()) {
    // Since we already emitted the return value above, we shouldn't emit it
    // again here.
    Expr *PreviousRetValue = nullptr;
    if (GroManager.DirectEmit) {
      PreviousRetValue = cast<ReturnStmt>(Ret)->getRetValue();
      cast<ReturnStmt>(Ret)->setRetValue(nullptr);
    }
    EmitStmt(Ret);
    // Set the return value back. The code generator, as the AST consumer,
    // shouldn't change the AST.
    if (PreviousRetValue)
      cast<ReturnStmt>(Ret)->setRetValue(PreviousRetValue);
  }

  // LLVM requires the frontend to mark the coroutine.
  CurFn->setPresplitCoroutine();

  if (CXXRecordDecl *RD = FnRetTy->getAsCXXRecordDecl();
      RD && RD->hasAttr<CoroOnlyDestroyWhenCompleteAttr>())
    CurFn->setCoroDestroyOnlyWhenComplete();
}
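
// For orientation only, the ramp emitted above is laid out roughly as follows
// (illustrative and simplified, not the exact output):
//
//   entry:
//     %id = call token @llvm.coro.id(i32 <new-align>, ptr null, ptr null,
//                                    ptr null)
//     %need.alloc = call i1 @llvm.coro.alloc(token %id)
//     br i1 %need.alloc, label %coro.alloc, label %coro.init
//
//   coro.alloc:
//     ; call to the allocation function from S.getAllocate()
//     br label %coro.init
//
//   coro.init:
//     %mem = phi ptr [ null, %entry ], [ %call, %coro.alloc ]
//     %hdl = call ptr @llvm.coro.begin(token %id, ptr %mem)
//     ; parameter copies, promise, get_return_object, init suspend, body ...
//
//   coro.final:
//     ; final suspend
//
//   coro.ret:
//     call i1 @llvm.coro.end(ptr null, i1 false, token none)
//     ; return statement, parameter destructors, conditional coro.free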

// Emit a coroutine intrinsic and patch up arguments of the token type.
RValue CodeGenFunction::EmitCoroutineIntrinsic(const CallExpr *E,
                                               unsigned int IID) {
  SmallVector<llvm::Value *, 8> Args;
  switch (IID) {
  default:
    break;
  // The coro.frame builtin is replaced with an SSA value of the coro.begin
  // intrinsic.
  case llvm::Intrinsic::coro_frame: {
    if (CurCoro.Data && CurCoro.Data->CoroBegin) {
      return RValue::get(CurCoro.Data->CoroBegin);
    }

    if (CurAwaitSuspendWrapper.FramePtr) {
      return RValue::get(CurAwaitSuspendWrapper.FramePtr);
    }

    CGM.Error(E->getBeginLoc(), "this builtin expects that __builtin_coro_begin"
                                " has been used earlier in this function");
    auto *NullPtr = llvm::ConstantPointerNull::get(Builder.getPtrTy());
    return RValue::get(NullPtr);
  }
  case llvm::Intrinsic::coro_size: {
    auto &Context = getContext();
    CanQualType SizeTy = Context.getSizeType();
    llvm::IntegerType *T = Builder.getIntNTy(Context.getTypeSize(SizeTy));
    llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::coro_size, T);
    return RValue::get(Builder.CreateCall(F));
  }
  case llvm::Intrinsic::coro_align: {
    auto &Context = getContext();
    CanQualType SizeTy = Context.getSizeType();
    llvm::IntegerType *T = Builder.getIntNTy(Context.getTypeSize(SizeTy));
    llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::coro_align, T);
    return RValue::get(Builder.CreateCall(F));
  }
  // The following three intrinsics take a token parameter referring to a token
  // returned by an earlier call to @llvm.coro.id. Since we cannot represent it
  // in builtins, we patch it up here.
  case llvm::Intrinsic::coro_alloc:
  case llvm::Intrinsic::coro_begin:
  case llvm::Intrinsic::coro_free: {
    if (CurCoro.Data && CurCoro.Data->CoroId) {
      Args.push_back(CurCoro.Data->CoroId);
      break;
    }
    CGM.Error(E->getBeginLoc(), "this builtin expects that __builtin_coro_id"
                                " has been used earlier in this function");
    // Fall through to the next case to add TokenNone as the first argument.
    [[fallthrough]];
  }
  // @llvm.coro.suspend takes a token parameter. Add token 'none' as the first
  // argument.
  case llvm::Intrinsic::coro_suspend:
    Args.push_back(llvm::ConstantTokenNone::get(getLLVMContext()));
    break;
  }
  for (const Expr *Arg : E->arguments())
    Args.push_back(EmitScalarExpr(Arg));
  // @llvm.coro.end takes a token parameter. Add token 'none' as the last
  // argument.
  if (IID == llvm::Intrinsic::coro_end)
    Args.push_back(llvm::ConstantTokenNone::get(getLLVMContext()));

  llvm::Function *F = CGM.getIntrinsic(IID);
  llvm::CallInst *Call = Builder.CreateCall(F, Args);

  // Note: The following code enables emitting coro.id and coro.begin by hand,
  // so that one can experiment with coroutines in C.
  // If we see @llvm.coro.id, remember it in the CoroData. We will update the
  // coro.alloc, coro.begin and coro.free intrinsics to refer to it.
  if (IID == llvm::Intrinsic::coro_id) {
    createCoroData(*this, CurCoro, Call, E);
  } else if (IID == llvm::Intrinsic::coro_begin) {
    if (CurCoro.Data)
      CurCoro.Data->CoroBegin = Call;
  } else if (IID == llvm::Intrinsic::coro_free) {
    // Remember the last coro_free as we need it to build the conditional
    // deletion of the coroutine frame.
    if (CurCoro.Data)
      CurCoro.Data->LastCoroFree = Call;
  }
  return RValue::get(Call);
}