ItaniumCXXABI.cpp
1//===------- ItaniumCXXABI.cpp - Emit LLVM Code from ASTs for a Module ----===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This provides C++ code generation targeting the Itanium C++ ABI. The class
10// in this file generates structures that follow the Itanium C++ ABI, which is
11// documented at:
12// https://itanium-cxx-abi.github.io/cxx-abi/abi.html
13// https://itanium-cxx-abi.github.io/cxx-abi/abi-eh.html
14//
15// It also supports the closely-related ARM ABI, documented at:
16// https://developer.arm.com/documentation/ihi0041/g/
17//
18//===----------------------------------------------------------------------===//
19
20#include "CGCXXABI.h"
21#include "CGCleanup.h"
22#include "CGRecordLayout.h"
23#include "CGVTables.h"
24#include "CodeGenFunction.h"
25#include "CodeGenModule.h"
26#include "TargetInfo.h"
27#include "clang/AST/Attr.h"
28#include "clang/AST/Mangle.h"
29#include "clang/AST/StmtCXX.h"
30#include "clang/AST/Type.h"
31#include "clang/CodeGen/ConstantInitBuilder.h"
32#include "llvm/IR/DataLayout.h"
33#include "llvm/IR/GlobalValue.h"
34#include "llvm/IR/Instructions.h"
35#include "llvm/IR/Intrinsics.h"
36#include "llvm/IR/Value.h"
37#include "llvm/Support/ScopedPrinter.h"
38
39#include <optional>
40
41using namespace clang;
42using namespace CodeGen;
43
44namespace {
45class ItaniumCXXABI : public CodeGen::CGCXXABI {
46 /// VTables - All the vtables which have been defined.
47 llvm::DenseMap<const CXXRecordDecl *, llvm::GlobalVariable *> VTables;
48
49 /// All the thread wrapper functions that have been used.
50  llvm::SmallVector<std::pair<const VarDecl *, llvm::Function *>, 8>
51      ThreadWrappers;
52
53protected:
54 bool UseARMMethodPtrABI;
55 bool UseARMGuardVarABI;
56 bool Use32BitVTableOffsetABI;
57
58  ItaniumMangleContext &getMangleContext() {
59    return cast<ItaniumMangleContext>(CodeGen::CGCXXABI::getMangleContext());
60 }
61
62public:
63 ItaniumCXXABI(CodeGen::CodeGenModule &CGM,
64 bool UseARMMethodPtrABI = false,
65 bool UseARMGuardVarABI = false) :
66 CGCXXABI(CGM), UseARMMethodPtrABI(UseARMMethodPtrABI),
67 UseARMGuardVarABI(UseARMGuardVarABI),
68 Use32BitVTableOffsetABI(false) { }
69
70 bool classifyReturnType(CGFunctionInfo &FI) const override;
71
72 RecordArgABI getRecordArgABI(const CXXRecordDecl *RD) const override {
73 // If C++ prohibits us from making a copy, pass by address.
74 if (!RD->canPassInRegisters())
75 return RAA_Indirect;
76 return RAA_Default;
77 }
78
79 bool isThisCompleteObject(GlobalDecl GD) const override {
80 // The Itanium ABI has separate complete-object vs. base-object
81 // variants of both constructors and destructors.
82 if (isa<CXXDestructorDecl>(GD.getDecl())) {
83 switch (GD.getDtorType()) {
84 case Dtor_Complete:
85 case Dtor_Deleting:
86 return true;
87
88 case Dtor_Base:
89 return false;
90
91 case Dtor_Comdat:
92 llvm_unreachable("emitting dtor comdat as function?");
93 }
94 llvm_unreachable("bad dtor kind");
95 }
96 if (isa<CXXConstructorDecl>(GD.getDecl())) {
97 switch (GD.getCtorType()) {
98 case Ctor_Complete:
99 return true;
100
101 case Ctor_Base:
102 return false;
103
104    case Ctor_CopyingClosure:
105    case Ctor_DefaultClosure:
106      llvm_unreachable("closure ctors in Itanium ABI?");
107
108 case Ctor_Comdat:
109 llvm_unreachable("emitting ctor comdat as function?");
110 }
111    llvm_unreachable("bad ctor kind");
112 }
113
114 // No other kinds.
115 return false;
116 }
117
118 bool isZeroInitializable(const MemberPointerType *MPT) override;
119
120 llvm::Type *ConvertMemberPointerType(const MemberPointerType *MPT) override;
121
122  CGCallee
123  EmitLoadOfMemberFunctionPointer(CodeGenFunction &CGF,
124                                  const Expr *E,
125 Address This,
126 llvm::Value *&ThisPtrForCall,
127 llvm::Value *MemFnPtr,
128 const MemberPointerType *MPT) override;
129
130 llvm::Value *
131  EmitMemberDataPointerAddress(CodeGenFunction &CGF, const Expr *E,
132                               Address Base,
133                               llvm::Value *MemPtr,
134 const MemberPointerType *MPT) override;
135
136  llvm::Value *EmitMemberPointerConversion(CodeGenFunction &CGF,
137                                           const CastExpr *E,
138 llvm::Value *Src) override;
139 llvm::Constant *EmitMemberPointerConversion(const CastExpr *E,
140 llvm::Constant *Src) override;
141
142 llvm::Constant *EmitNullMemberPointer(const MemberPointerType *MPT) override;
143
144 llvm::Constant *EmitMemberFunctionPointer(const CXXMethodDecl *MD) override;
145 llvm::Constant *EmitMemberDataPointer(const MemberPointerType *MPT,
146 CharUnits offset) override;
147 llvm::Constant *EmitMemberPointer(const APValue &MP, QualType MPT) override;
148 llvm::Constant *BuildMemberPointer(const CXXMethodDecl *MD,
149                                     CharUnits ThisAdjustment);
150
151  llvm::Value *EmitMemberPointerComparison(CodeGenFunction &CGF,
152                                           llvm::Value *L, llvm::Value *R,
153 const MemberPointerType *MPT,
154 bool Inequality) override;
155
156  llvm::Value *EmitMemberPointerIsNotNull(CodeGenFunction &CGF,
157                                          llvm::Value *Addr,
158 const MemberPointerType *MPT) override;
159
160  void emitVirtualObjectDelete(CodeGenFunction &CGF, const CXXDeleteExpr *DE,
161                               Address Ptr, QualType ElementType,
162 const CXXDestructorDecl *Dtor) override;
163
164 void emitRethrow(CodeGenFunction &CGF, bool isNoReturn) override;
165 void emitThrow(CodeGenFunction &CGF, const CXXThrowExpr *E) override;
166
167 void emitBeginCatch(CodeGenFunction &CGF, const CXXCatchStmt *C) override;
168
169 llvm::CallInst *
170  emitTerminateForUnexpectedException(CodeGenFunction &CGF,
171                                      llvm::Value *Exn) override;
172
173 void EmitFundamentalRTTIDescriptors(const CXXRecordDecl *RD);
174 llvm::Constant *getAddrOfRTTIDescriptor(QualType Ty) override;
175  CatchTypeInfo
176  getAddrOfCXXCatchHandlerType(QualType Ty,
177                               QualType CatchHandlerType) override {
178    return CatchTypeInfo{getAddrOfRTTIDescriptor(Ty), 0};
179  }
180
181 bool shouldTypeidBeNullChecked(QualType SrcRecordTy) override;
182 void EmitBadTypeidCall(CodeGenFunction &CGF) override;
183 llvm::Value *EmitTypeid(CodeGenFunction &CGF, QualType SrcRecordTy,
184 Address ThisPtr,
185 llvm::Type *StdTypeInfoPtrTy) override;
186
187 bool shouldDynamicCastCallBeNullChecked(bool SrcIsPtr,
188 QualType SrcRecordTy) override;
189
190  /// Determine whether we know that all instances of type RecordTy will have
191  /// the same vtable pointer value, which is distinct from all other vtable
192  /// pointers. While this is required by the Itanium ABI, it doesn't happen in
193 /// practice in some cases due to language extensions.
194 bool hasUniqueVTablePointer(QualType RecordTy) {
195 const CXXRecordDecl *RD = RecordTy->getAsCXXRecordDecl();
196
197 // Under -fapple-kext, multiple definitions of the same vtable may be
198 // emitted.
199 if (!CGM.getCodeGenOpts().AssumeUniqueVTables ||
200 getContext().getLangOpts().AppleKext)
201 return false;
202
203 // If the type_info* would be null, the vtable might be merged with that of
204 // another type.
205 if (!CGM.shouldEmitRTTI())
206 return false;
207
208 // If there's only one definition of the vtable in the program, it has a
209 // unique address.
210 if (!llvm::GlobalValue::isWeakForLinker(CGM.getVTableLinkage(RD)))
211 return true;
212
213 // Even if there are multiple definitions of the vtable, they are required
214 // by the ABI to use the same symbol name, so should be merged at load
215 // time. However, if the class has hidden visibility, there can be
216 // different versions of the class in different modules, and the ABI
217 // library might treat them as being the same.
218 if (CGM.GetLLVMVisibility(RD->getVisibility()) !=
219 llvm::GlobalValue::DefaultVisibility)
220 return false;
221
222 return true;
223 }
224
225 bool shouldEmitExactDynamicCast(QualType DestRecordTy) override {
226 return hasUniqueVTablePointer(DestRecordTy);
227 }
228
229  llvm::Value *emitDynamicCastCall(CodeGenFunction &CGF, Address ThisAddr,
230                                   QualType SrcRecordTy, QualType DestTy,
231 QualType DestRecordTy,
232 llvm::BasicBlock *CastEnd) override;
233
234 llvm::Value *emitExactDynamicCast(CodeGenFunction &CGF, Address ThisAddr,
235 QualType SrcRecordTy, QualType DestTy,
236 QualType DestRecordTy,
237 llvm::BasicBlock *CastSuccess,
238 llvm::BasicBlock *CastFail) override;
239
240  llvm::Value *emitDynamicCastToVoid(CodeGenFunction &CGF, Address Value,
241                                     QualType SrcRecordTy) override;
242
243 bool EmitBadCastCall(CodeGenFunction &CGF) override;
244
245 llvm::Value *
246  GetVirtualBaseClassOffset(CodeGenFunction &CGF, Address This,
247                            const CXXRecordDecl *ClassDecl,
248 const CXXRecordDecl *BaseClassDecl) override;
249
250 void EmitCXXConstructors(const CXXConstructorDecl *D) override;
251
252 AddedStructorArgCounts
253  buildStructorSignature(GlobalDecl GD,
254                         SmallVectorImpl<CanQualType> &ArgTys) override;
255
256  bool useThunkForDtorVariant(const CXXDestructorDecl *Dtor,
257                              CXXDtorType DT) const override {
258 // Itanium does not emit any destructor variant as an inline thunk.
259 // Delegating may occur as an optimization, but all variants are either
260 // emitted with external linkage or as linkonce if they are inline and used.
261 return false;
262 }
263
264 void EmitCXXDestructors(const CXXDestructorDecl *D) override;
265
266  void addImplicitStructorParams(CodeGenFunction &CGF, QualType &ResTy,
267                                 FunctionArgList &Params) override;
268
269  void EmitInstanceFunctionProlog(CodeGenFunction &CGF) override;
270
271 AddedStructorArgs getImplicitConstructorArgs(CodeGenFunction &CGF,
272 const CXXConstructorDecl *D,
273                                               CXXCtorType Type,
274                                               bool ForVirtualBase,
275 bool Delegating) override;
276
277  llvm::Value *getCXXDestructorImplicitParam(CodeGenFunction &CGF,
278                                             const CXXDestructorDecl *DD,
279                                             CXXDtorType Type,
280                                             bool ForVirtualBase,
281 bool Delegating) override;
282
283  void EmitDestructorCall(CodeGenFunction &CGF, const CXXDestructorDecl *DD,
284                          CXXDtorType Type, bool ForVirtualBase,
285 bool Delegating, Address This,
286 QualType ThisTy) override;
287
288  void emitVTableDefinitions(CodeGenVTables &CGVT,
289                             const CXXRecordDecl *RD) override;
290
291  bool isVirtualOffsetNeededForVTableField(CodeGenFunction &CGF,
292                                           CodeGenFunction::VPtr Vptr) override;
293
294 bool doStructorsInitializeVPtrs(const CXXRecordDecl *VTableClass) override {
295 return true;
296 }
297
298 llvm::Constant *
299  getVTableAddressPoint(BaseSubobject Base,
300                        const CXXRecordDecl *VTableClass) override;
301
302  llvm::Value *getVTableAddressPointInStructor(
303      CodeGenFunction &CGF, const CXXRecordDecl *VTableClass,
304 BaseSubobject Base, const CXXRecordDecl *NearestVBase) override;
305
306 llvm::Value *getVTableAddressPointInStructorWithVTT(
307 CodeGenFunction &CGF, const CXXRecordDecl *VTableClass,
308 BaseSubobject Base, const CXXRecordDecl *NearestVBase);
309
310 llvm::GlobalVariable *getAddrOfVTable(const CXXRecordDecl *RD,
311 CharUnits VPtrOffset) override;
312
313  CGCallee getVirtualFunctionPointer(CodeGenFunction &CGF, GlobalDecl GD,
314                                     Address This, llvm::Type *Ty,
315 SourceLocation Loc) override;
316
317 llvm::Value *
318  EmitVirtualDestructorCall(CodeGenFunction &CGF, const CXXDestructorDecl *Dtor,
319                            CXXDtorType DtorType, Address This,
320 DeleteOrMemberCallExpr E,
321 llvm::CallBase **CallOrInvoke) override;
322
323 void emitVirtualInheritanceTables(const CXXRecordDecl *RD) override;
324
325 bool canSpeculativelyEmitVTable(const CXXRecordDecl *RD) const override;
326 bool canSpeculativelyEmitVTableAsBaseClass(const CXXRecordDecl *RD) const;
327
328 void setThunkLinkage(llvm::Function *Thunk, bool ForVTable, GlobalDecl GD,
329 bool ReturnAdjustment) override {
330 // Allow inlining of thunks by emitting them with available_externally
331 // linkage together with vtables when needed.
332 if (ForVTable && !Thunk->hasLocalLinkage())
333 Thunk->setLinkage(llvm::GlobalValue::AvailableExternallyLinkage);
334 CGM.setGVProperties(Thunk, GD);
335 }
336
337 bool exportThunk() override { return true; }
338
339 llvm::Value *performThisAdjustment(CodeGenFunction &CGF, Address This,
340 const CXXRecordDecl *UnadjustedThisClass,
341 const ThunkInfo &TI) override;
342
343 llvm::Value *performReturnAdjustment(CodeGenFunction &CGF, Address Ret,
344 const CXXRecordDecl *UnadjustedRetClass,
345 const ReturnAdjustment &RA) override;
346
347  size_t getSrcArgforCopyCtor(const CXXConstructorDecl *,
348                              FunctionArgList &Args) const override {
349 assert(!Args.empty() && "expected the arglist to not be empty!");
350 return Args.size() - 1;
351 }
352
353 StringRef GetPureVirtualCallName() override { return "__cxa_pure_virtual"; }
354 StringRef GetDeletedVirtualCallName() override
355 { return "__cxa_deleted_virtual"; }
356
357 CharUnits getArrayCookieSizeImpl(QualType elementType) override;
358  Address InitializeArrayCookie(CodeGenFunction &CGF,
359                                Address NewPtr,
360 llvm::Value *NumElements,
361 const CXXNewExpr *expr,
362 QualType ElementType) override;
363 llvm::Value *readArrayCookieImpl(CodeGenFunction &CGF,
364 Address allocPtr,
365 CharUnits cookieSize) override;
366
367 void EmitGuardedInit(CodeGenFunction &CGF, const VarDecl &D,
368 llvm::GlobalVariable *DeclPtr,
369 bool PerformInit) override;
370  void registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
371                          llvm::FunctionCallee dtor,
372 llvm::Constant *addr) override;
373
374 llvm::Function *getOrCreateThreadLocalWrapper(const VarDecl *VD,
375 llvm::Value *Val);
376  void EmitThreadLocalInitFuncs(
377      CodeGenModule &CGM,
378 ArrayRef<const VarDecl *> CXXThreadLocals,
379 ArrayRef<llvm::Function *> CXXThreadLocalInits,
380 ArrayRef<const VarDecl *> CXXThreadLocalInitVars) override;
381
382 bool usesThreadWrapperFunction(const VarDecl *VD) const override {
383    return !isEmittedWithConstantInitializer(VD) ||
384           mayNeedDestruction(VD);
385  }
386  LValue EmitThreadLocalVarDeclLValue(CodeGenFunction &CGF, const VarDecl *VD,
387                                      QualType LValType) override;
388
389 bool NeedsVTTParameter(GlobalDecl GD) override;
390
391 llvm::Constant *
392 getOrCreateVirtualFunctionPointerThunk(const CXXMethodDecl *MD);
393
394 /**************************** RTTI Uniqueness ******************************/
395
396protected:
397 /// Returns true if the ABI requires RTTI type_info objects to be unique
398 /// across a program.
399 virtual bool shouldRTTIBeUnique() const { return true; }
400
401public:
402 /// What sort of unique-RTTI behavior should we use?
403 enum RTTIUniquenessKind {
404 /// We are guaranteeing, or need to guarantee, that the RTTI string
405 /// is unique.
406 RUK_Unique,
407
408 /// We are not guaranteeing uniqueness for the RTTI string, so we
409 /// can demote to hidden visibility but must use string comparisons.
410 RUK_NonUniqueHidden,
411
412 /// We are not guaranteeing uniqueness for the RTTI string, so we
413 /// have to use string comparisons, but we also have to emit it with
414 /// non-hidden visibility.
415 RUK_NonUniqueVisible
416 };
417
418 /// Return the required visibility status for the given type and linkage in
419 /// the current ABI.
420 RTTIUniquenessKind
421 classifyRTTIUniqueness(QualType CanTy,
422 llvm::GlobalValue::LinkageTypes Linkage) const;
423 friend class ItaniumRTTIBuilder;
424
425 void emitCXXStructor(GlobalDecl GD) override;
426
427 std::pair<llvm::Value *, const CXXRecordDecl *>
428  LoadVTablePtr(CodeGenFunction &CGF, Address This,
429                const CXXRecordDecl *RD) override;
430
431 private:
432 llvm::Constant *
433 getSignedVirtualMemberFunctionPointer(const CXXMethodDecl *MD);
434
435 bool hasAnyUnusedVirtualInlineFunction(const CXXRecordDecl *RD) const {
436 const auto &VtableLayout =
437 CGM.getItaniumVTableContext().getVTableLayout(RD);
438
439 for (const auto &VtableComponent : VtableLayout.vtable_components()) {
440 // Skip empty slot.
441 if (!VtableComponent.isUsedFunctionPointerKind())
442 continue;
443
444 const CXXMethodDecl *Method = VtableComponent.getFunctionDecl();
445 const FunctionDecl *FD = Method->getDefinition();
446 const bool IsInlined =
447 Method->getCanonicalDecl()->isInlined() || (FD && FD->isInlined());
448 if (!IsInlined)
449 continue;
450
451 StringRef Name = CGM.getMangledName(VtableComponent.getGlobalDecl());
452 auto *Entry = CGM.GetGlobalValue(Name);
453    // This checks whether the virtual inline function has already been emitted.
454 // Note that it is possible that this inline function would be emitted
455 // after trying to emit vtable speculatively. Because of this we do
456 // an extra pass after emitting all deferred vtables to find and emit
457 // these vtables opportunistically.
458 if (!Entry || Entry->isDeclaration())
459 return true;
460 }
461 return false;
462 }
463
464 bool isVTableHidden(const CXXRecordDecl *RD) const {
465 const auto &VtableLayout =
466 CGM.getItaniumVTableContext().getVTableLayout(RD);
467
468 for (const auto &VtableComponent : VtableLayout.vtable_components()) {
469 if (VtableComponent.isRTTIKind()) {
470 const CXXRecordDecl *RTTIDecl = VtableComponent.getRTTIDecl();
471 if (RTTIDecl->getVisibility() == Visibility::HiddenVisibility)
472 return true;
473 } else if (VtableComponent.isUsedFunctionPointerKind()) {
474 const CXXMethodDecl *Method = VtableComponent.getFunctionDecl();
475 if (Method->getVisibility() == Visibility::HiddenVisibility &&
476 !Method->isDefined())
477 return true;
478 }
479 }
480 return false;
481 }
482};
483
484class ARMCXXABI : public ItaniumCXXABI {
485public:
486 ARMCXXABI(CodeGen::CodeGenModule &CGM) :
487 ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
488 /*UseARMGuardVarABI=*/true) {}
489
490 bool constructorsAndDestructorsReturnThis() const override { return true; }
491
492 void EmitReturnFromThunk(CodeGenFunction &CGF, RValue RV,
493 QualType ResTy) override;
494
495 CharUnits getArrayCookieSizeImpl(QualType elementType) override;
496 Address InitializeArrayCookie(CodeGenFunction &CGF,
497 Address NewPtr,
498 llvm::Value *NumElements,
499 const CXXNewExpr *expr,
500 QualType ElementType) override;
501 llvm::Value *readArrayCookieImpl(CodeGenFunction &CGF, Address allocPtr,
502 CharUnits cookieSize) override;
503};
504
505class AppleARM64CXXABI : public ARMCXXABI {
506public:
507 AppleARM64CXXABI(CodeGen::CodeGenModule &CGM) : ARMCXXABI(CGM) {
508 Use32BitVTableOffsetABI = true;
509 }
510
511 // ARM64 libraries are prepared for non-unique RTTI.
512 bool shouldRTTIBeUnique() const override { return false; }
513};
514
515class FuchsiaCXXABI final : public ItaniumCXXABI {
516public:
517 explicit FuchsiaCXXABI(CodeGen::CodeGenModule &CGM)
518 : ItaniumCXXABI(CGM) {}
519
520private:
521 bool constructorsAndDestructorsReturnThis() const override { return true; }
522};
523
524class WebAssemblyCXXABI final : public ItaniumCXXABI {
525public:
526 explicit WebAssemblyCXXABI(CodeGen::CodeGenModule &CGM)
527 : ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
528 /*UseARMGuardVarABI=*/true) {}
529 void emitBeginCatch(CodeGenFunction &CGF, const CXXCatchStmt *C) override;
530 llvm::CallInst *
531 emitTerminateForUnexpectedException(CodeGenFunction &CGF,
532 llvm::Value *Exn) override;
533
534private:
535 bool constructorsAndDestructorsReturnThis() const override { return true; }
536 bool canCallMismatchedFunctionType() const override { return false; }
537};
538
539class XLCXXABI final : public ItaniumCXXABI {
540public:
541 explicit XLCXXABI(CodeGen::CodeGenModule &CGM)
542 : ItaniumCXXABI(CGM) {}
543
544 void registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
545 llvm::FunctionCallee dtor,
546 llvm::Constant *addr) override;
547
548 bool useSinitAndSterm() const override { return true; }
549
550private:
551 void emitCXXStermFinalizer(const VarDecl &D, llvm::Function *dtorStub,
552 llvm::Constant *addr);
553};
554}
555
556CodeGen::CGCXXABI *CodeGen::CreateItaniumCXXABI(CodeGenModule &CGM) {
557  switch (CGM.getContext().getCXXABIKind()) {
558 // For IR-generation purposes, there's no significant difference
559 // between the ARM and iOS ABIs.
560 case TargetCXXABI::GenericARM:
561 case TargetCXXABI::iOS:
562 case TargetCXXABI::WatchOS:
563 return new ARMCXXABI(CGM);
564
565 case TargetCXXABI::AppleARM64:
566 return new AppleARM64CXXABI(CGM);
567
568 case TargetCXXABI::Fuchsia:
569 return new FuchsiaCXXABI(CGM);
570
571 // Note that AArch64 uses the generic ItaniumCXXABI class since it doesn't
572 // include the other 32-bit ARM oddities: constructor/destructor return values
573 // and array cookies.
574 case TargetCXXABI::GenericAArch64:
575 return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
576 /*UseARMGuardVarABI=*/true);
577
578 case TargetCXXABI::GenericMIPS:
579 return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true);
580
581 case TargetCXXABI::WebAssembly:
582 return new WebAssemblyCXXABI(CGM);
583
584 case TargetCXXABI::XL:
585 return new XLCXXABI(CGM);
586
587 case TargetCXXABI::GenericItanium:
588 return new ItaniumCXXABI(CGM);
589
590 case TargetCXXABI::Microsoft:
591 llvm_unreachable("Microsoft ABI is not Itanium-based");
592 }
593 llvm_unreachable("bad ABI kind");
594}
595
596llvm::Type *
597ItaniumCXXABI::ConvertMemberPointerType(const MemberPointerType *MPT) {
598 if (MPT->isMemberDataPointer())
599 return CGM.PtrDiffTy;
600 return llvm::StructType::get(CGM.PtrDiffTy, CGM.PtrDiffTy);
601}
602
603/// In the Itanium and ARM ABIs, method pointers have the form:
604/// struct { ptrdiff_t ptr; ptrdiff_t adj; } memptr;
605///
606/// In the Itanium ABI:
607/// - method pointers are virtual if (memptr.ptr & 1) is nonzero
608/// - the this-adjustment is (memptr.adj)
609/// - the virtual offset is (memptr.ptr - 1)
610///
611/// In the ARM ABI:
612/// - method pointers are virtual if (memptr.adj & 1) is nonzero
613/// - the this-adjustment is (memptr.adj >> 1)
614/// - the virtual offset is (memptr.ptr)
615/// ARM uses 'adj' for the virtual flag because Thumb functions
616/// may be only single-byte aligned.
617///
618/// If the member is virtual, the adjusted 'this' pointer points
619/// to a vtable pointer from which the virtual offset is applied.
620///
621/// If the member is non-virtual, memptr.ptr is the address of
622/// the function to call.
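///
/// For example (illustrative only; exact values depend on the target and the
/// vtable layout), given `struct S { virtual void f(); void g(); };` on a
/// typical 64-bit Itanium target:
///   &S::f  ~>  { ptr = vtable offset of f + 1, adj = 0 }
///   &S::g  ~>  { ptr = (ptrdiff_t)&S::g,       adj = 0 }
/// Under the ARM variant the virtual flag moves into adj instead:
///   &S::f  ~>  { ptr = vtable offset of f, adj = 2*0 + 1 }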
623CGCallee ItaniumCXXABI::EmitLoadOfMemberFunctionPointer(
624 CodeGenFunction &CGF, const Expr *E, Address ThisAddr,
625 llvm::Value *&ThisPtrForCall,
626 llvm::Value *MemFnPtr, const MemberPointerType *MPT) {
627 CGBuilderTy &Builder = CGF.Builder;
628
629 const FunctionProtoType *FPT =
630      MPT->getPointeeType()->castAs<FunctionProtoType>();
631  auto *RD =
632 cast<CXXRecordDecl>(MPT->getClass()->castAs<RecordType>()->getDecl());
633
634 llvm::Constant *ptrdiff_1 = llvm::ConstantInt::get(CGM.PtrDiffTy, 1);
635
636 llvm::BasicBlock *FnVirtual = CGF.createBasicBlock("memptr.virtual");
637 llvm::BasicBlock *FnNonVirtual = CGF.createBasicBlock("memptr.nonvirtual");
638 llvm::BasicBlock *FnEnd = CGF.createBasicBlock("memptr.end");
639
640 // Extract memptr.adj, which is in the second field.
641 llvm::Value *RawAdj = Builder.CreateExtractValue(MemFnPtr, 1, "memptr.adj");
642
643 // Compute the true adjustment.
644 llvm::Value *Adj = RawAdj;
645 if (UseARMMethodPtrABI)
646 Adj = Builder.CreateAShr(Adj, ptrdiff_1, "memptr.adj.shifted");
647
648 // Apply the adjustment and cast back to the original struct type
649 // for consistency.
650 llvm::Value *This = ThisAddr.emitRawPointer(CGF);
651 This = Builder.CreateInBoundsGEP(Builder.getInt8Ty(), This, Adj);
652 ThisPtrForCall = This;
653
654 // Load the function pointer.
655 llvm::Value *FnAsInt = Builder.CreateExtractValue(MemFnPtr, 0, "memptr.ptr");
656
657 // If the LSB in the function pointer is 1, the function pointer points to
658 // a virtual function.
659 llvm::Value *IsVirtual;
660 if (UseARMMethodPtrABI)
661 IsVirtual = Builder.CreateAnd(RawAdj, ptrdiff_1);
662 else
663 IsVirtual = Builder.CreateAnd(FnAsInt, ptrdiff_1);
664 IsVirtual = Builder.CreateIsNotNull(IsVirtual, "memptr.isvirtual");
665 Builder.CreateCondBr(IsVirtual, FnVirtual, FnNonVirtual);
666
667 // In the virtual path, the adjustment left 'This' pointing to the
668 // vtable of the correct base subobject. The "function pointer" is an
669 // offset within the vtable (+1 for the virtual flag on non-ARM).
670 CGF.EmitBlock(FnVirtual);
671
672 // Cast the adjusted this to a pointer to vtable pointer and load.
673 llvm::Type *VTableTy = CGF.CGM.GlobalsInt8PtrTy;
674 CharUnits VTablePtrAlign =
675 CGF.CGM.getDynamicOffsetAlignment(ThisAddr.getAlignment(), RD,
676 CGF.getPointerAlign());
677 llvm::Value *VTable = CGF.GetVTablePtr(
678 Address(This, ThisAddr.getElementType(), VTablePtrAlign), VTableTy, RD);
679
680 // Apply the offset.
681 // On ARM64, to reserve extra space in virtual member function pointers,
682 // we only pay attention to the low 32 bits of the offset.
683 llvm::Value *VTableOffset = FnAsInt;
684 if (!UseARMMethodPtrABI)
685 VTableOffset = Builder.CreateSub(VTableOffset, ptrdiff_1);
686 if (Use32BitVTableOffsetABI) {
687 VTableOffset = Builder.CreateTrunc(VTableOffset, CGF.Int32Ty);
688 VTableOffset = Builder.CreateZExt(VTableOffset, CGM.PtrDiffTy);
689 }
690
691 // Check the address of the function pointer if CFI on member function
692 // pointers is enabled.
693 llvm::Constant *CheckSourceLocation;
694 llvm::Constant *CheckTypeDesc;
695 bool ShouldEmitCFICheck = CGF.SanOpts.has(SanitizerKind::CFIMFCall) &&
696 CGM.HasHiddenLTOVisibility(RD);
697 bool ShouldEmitVFEInfo = CGM.getCodeGenOpts().VirtualFunctionElimination &&
698 CGM.HasHiddenLTOVisibility(RD);
699 bool ShouldEmitWPDInfo =
700 CGM.getCodeGenOpts().WholeProgramVTables &&
701 // Don't insert type tests if we are forcing public visibility.
702 !CGM.AlwaysHasLTOVisibilityPublic(RD);
703 llvm::Value *VirtualFn = nullptr;
704
705 {
706 CodeGenFunction::SanitizerScope SanScope(&CGF);
707 llvm::Value *TypeId = nullptr;
708 llvm::Value *CheckResult = nullptr;
709
710 if (ShouldEmitCFICheck || ShouldEmitVFEInfo || ShouldEmitWPDInfo) {
711 // If doing CFI, VFE or WPD, we will need the metadata node to check
712 // against.
713 llvm::Metadata *MD =
714 CGM.CreateMetadataIdentifierForVirtualMemPtrType(QualType(MPT, 0));
715 TypeId = llvm::MetadataAsValue::get(CGF.getLLVMContext(), MD);
716 }
717
718 if (ShouldEmitVFEInfo) {
719 llvm::Value *VFPAddr =
720 Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
721
722 // If doing VFE, load from the vtable with a type.checked.load intrinsic
723 // call. Note that we use the GEP to calculate the address to load from
724 // and pass 0 as the offset to the intrinsic. This is because every
725 // vtable slot of the correct type is marked with matching metadata, and
726 // we know that the load must be from one of these slots.
727 llvm::Value *CheckedLoad = Builder.CreateCall(
728 CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
729 {VFPAddr, llvm::ConstantInt::get(CGM.Int32Ty, 0), TypeId});
730 CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);
731 VirtualFn = Builder.CreateExtractValue(CheckedLoad, 0);
732 } else {
733 // When not doing VFE, emit a normal load, as it allows more
734 // optimisations than type.checked.load.
735 if (ShouldEmitCFICheck || ShouldEmitWPDInfo) {
736 llvm::Value *VFPAddr =
737 Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
738 llvm::Intrinsic::ID IID = CGM.HasHiddenLTOVisibility(RD)
739 ? llvm::Intrinsic::type_test
740 : llvm::Intrinsic::public_type_test;
741
742 CheckResult =
743 Builder.CreateCall(CGM.getIntrinsic(IID), {VFPAddr, TypeId});
744 }
745
746 if (CGM.getItaniumVTableContext().isRelativeLayout()) {
747 VirtualFn = CGF.Builder.CreateCall(
748 CGM.getIntrinsic(llvm::Intrinsic::load_relative,
749 {VTableOffset->getType()}),
750 {VTable, VTableOffset});
751 } else {
752 llvm::Value *VFPAddr =
753 CGF.Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
754 VirtualFn = CGF.Builder.CreateAlignedLoad(CGF.UnqualPtrTy, VFPAddr,
755 CGF.getPointerAlign(),
756 "memptr.virtualfn");
757 }
758 }
759    assert(VirtualFn && "Virtual function pointer not created!");
760 assert((!ShouldEmitCFICheck || !ShouldEmitVFEInfo || !ShouldEmitWPDInfo ||
761 CheckResult) &&
762 "Check result required but not created!");
763
764 if (ShouldEmitCFICheck) {
765 // If doing CFI, emit the check.
766 CheckSourceLocation = CGF.EmitCheckSourceLocation(E->getBeginLoc());
767 CheckTypeDesc = CGF.EmitCheckTypeDescriptor(QualType(MPT, 0));
768 llvm::Constant *StaticData[] = {
769 llvm::ConstantInt::get(CGF.Int8Ty, CodeGenFunction::CFITCK_VMFCall),
770 CheckSourceLocation,
771 CheckTypeDesc,
772 };
773
774 if (CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIMFCall)) {
775 CGF.EmitTrapCheck(CheckResult, SanitizerHandler::CFICheckFail);
776 } else {
777 llvm::Value *AllVtables = llvm::MetadataAsValue::get(
778 CGM.getLLVMContext(),
779 llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
780 llvm::Value *ValidVtable = Builder.CreateCall(
781 CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, AllVtables});
782 CGF.EmitCheck(std::make_pair(CheckResult, SanitizerKind::SO_CFIMFCall),
783 SanitizerHandler::CFICheckFail, StaticData,
784 {VTable, ValidVtable});
785 }
786
787 FnVirtual = Builder.GetInsertBlock();
788 }
789 } // End of sanitizer scope
790
791 CGF.EmitBranch(FnEnd);
792
793 // In the non-virtual path, the function pointer is actually a
794 // function pointer.
795 CGF.EmitBlock(FnNonVirtual);
796 llvm::Value *NonVirtualFn =
797 Builder.CreateIntToPtr(FnAsInt, CGF.UnqualPtrTy, "memptr.nonvirtualfn");
798
799 // Check the function pointer if CFI on member function pointers is enabled.
800 if (ShouldEmitCFICheck) {
802 if (RD->hasDefinition()) {
803 CodeGenFunction::SanitizerScope SanScope(&CGF);
804
805 llvm::Constant *StaticData[] = {
806 llvm::ConstantInt::get(CGF.Int8Ty, CodeGenFunction::CFITCK_NVMFCall),
807 CheckSourceLocation,
808 CheckTypeDesc,
809 };
810
811 llvm::Value *Bit = Builder.getFalse();
812 for (const CXXRecordDecl *Base : CGM.getMostBaseClasses(RD)) {
813 llvm::Metadata *MD = CGM.CreateMetadataIdentifierForType(
814 getContext().getMemberPointerType(
815 MPT->getPointeeType(),
816 getContext().getRecordType(Base).getTypePtr()));
817 llvm::Value *TypeId =
818 llvm::MetadataAsValue::get(CGF.getLLVMContext(), MD);
819
820 llvm::Value *TypeTest =
821 Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
822 {NonVirtualFn, TypeId});
823 Bit = Builder.CreateOr(Bit, TypeTest);
824 }
825
826 CGF.EmitCheck(std::make_pair(Bit, SanitizerKind::SO_CFIMFCall),
827 SanitizerHandler::CFICheckFail, StaticData,
828 {NonVirtualFn, llvm::UndefValue::get(CGF.IntPtrTy)});
829
830 FnNonVirtual = Builder.GetInsertBlock();
831 }
832 }
833
834 // We're done.
835 CGF.EmitBlock(FnEnd);
836 llvm::PHINode *CalleePtr = Builder.CreatePHI(CGF.UnqualPtrTy, 2);
837 CalleePtr->addIncoming(VirtualFn, FnVirtual);
838 CalleePtr->addIncoming(NonVirtualFn, FnNonVirtual);
839
840 CGPointerAuthInfo PointerAuth;
841
842 if (const auto &Schema =
843 CGM.getCodeGenOpts().PointerAuth.CXXMemberFunctionPointers) {
844 llvm::PHINode *DiscriminatorPHI = Builder.CreatePHI(CGF.IntPtrTy, 2);
845 DiscriminatorPHI->addIncoming(llvm::ConstantInt::get(CGF.IntPtrTy, 0),
846 FnVirtual);
847 const auto &AuthInfo =
848 CGM.getMemberFunctionPointerAuthInfo(QualType(MPT, 0));
849 assert(Schema.getKey() == AuthInfo.getKey() &&
850 "Keys for virtual and non-virtual member functions must match");
851 auto *NonVirtualDiscriminator = AuthInfo.getDiscriminator();
852 DiscriminatorPHI->addIncoming(NonVirtualDiscriminator, FnNonVirtual);
853 PointerAuth = CGPointerAuthInfo(
854 Schema.getKey(), Schema.getAuthenticationMode(), Schema.isIsaPointer(),
855 Schema.authenticatesNullValues(), DiscriminatorPHI);
856 }
857
858 CGCallee Callee(FPT, CalleePtr, PointerAuth);
859 return Callee;
860}
861
862/// Compute an l-value by applying the given pointer-to-member to a
863/// base object.
864llvm::Value *ItaniumCXXABI::EmitMemberDataPointerAddress(
865 CodeGenFunction &CGF, const Expr *E, Address Base, llvm::Value *MemPtr,
866 const MemberPointerType *MPT) {
867 assert(MemPtr->getType() == CGM.PtrDiffTy);
868
869 CGBuilderTy &Builder = CGF.Builder;
870
871 // Apply the offset, which we assume is non-null.
872 return Builder.CreateInBoundsGEP(CGF.Int8Ty, Base.emitRawPointer(CGF), MemPtr,
873 "memptr.offset");
874}
875
876// See if it's possible to return a constant signed pointer.
877static llvm::Constant *pointerAuthResignConstant(
878 llvm::Value *Ptr, const CGPointerAuthInfo &CurAuthInfo,
879 const CGPointerAuthInfo &NewAuthInfo, CodeGenModule &CGM) {
880 const auto *CPA = dyn_cast<llvm::ConstantPtrAuth>(Ptr);
881
882 if (!CPA)
883 return nullptr;
884
885 assert(CPA->getKey()->getZExtValue() == CurAuthInfo.getKey() &&
886 CPA->getAddrDiscriminator()->isZeroValue() &&
887 CPA->getDiscriminator() == CurAuthInfo.getDiscriminator() &&
888 "unexpected key or discriminators");
889
890 return CGM.getConstantSignedPointer(
891 CPA->getPointer(), NewAuthInfo.getKey(), nullptr,
892 cast<llvm::ConstantInt>(NewAuthInfo.getDiscriminator()));
893}
894
895/// Perform a bitcast, derived-to-base, or base-to-derived member pointer
896/// conversion.
897///
898/// Bitcast conversions are always a no-op under Itanium.
899///
900/// Obligatory offset/adjustment diagram:
901/// <-- offset --> <-- adjustment -->
902/// |--------------------------|----------------------|--------------------|
903/// ^Derived address point ^Base address point ^Member address point
904///
905/// So when converting a base member pointer to a derived member pointer,
906/// we add the offset to the adjustment because the address point has
907/// decreased; and conversely, when converting a derived MP to a base MP
908/// we subtract the offset from the adjustment because the address point
909/// has increased.
910///
911/// The standard forbids (at compile time) conversion to and from
912/// virtual bases, which is why we don't have to consider them here.
913///
914/// The standard forbids (at run time) casting a derived MP to a base
915/// MP when the derived MP does not point to a member of the base.
916/// This is why -1 is a reasonable choice for null data member
917/// pointers.
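///
/// As a small illustration (not part of the ABI text): with
///   struct A { int a; };  struct B { int b; };  struct C : A, B {};
/// the B subobject typically sits at a non-zero offset inside C, so
/// converting an `int B::*` to an `int C::*` adds that offset to the stored
/// ptrdiff_t (unless the source is the null value -1), and the
/// derived-to-base direction subtracts it again.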
918llvm::Value *
919ItaniumCXXABI::EmitMemberPointerConversion(CodeGenFunction &CGF,
920 const CastExpr *E,
921 llvm::Value *src) {
922 // Use constant emission if we can.
923 if (isa<llvm::Constant>(src))
924 return EmitMemberPointerConversion(E, cast<llvm::Constant>(src));
925
926 assert(E->getCastKind() == CK_DerivedToBaseMemberPointer ||
927 E->getCastKind() == CK_BaseToDerivedMemberPointer ||
928 E->getCastKind() == CK_ReinterpretMemberPointer);
929
930 CGBuilderTy &Builder = CGF.Builder;
931 QualType DstType = E->getType();
932
933 if (DstType->isMemberFunctionPointerType()) {
934 if (const auto &NewAuthInfo =
935 CGM.getMemberFunctionPointerAuthInfo(DstType)) {
936 QualType SrcType = E->getSubExpr()->getType();
937 assert(SrcType->isMemberFunctionPointerType());
938 const auto &CurAuthInfo = CGM.getMemberFunctionPointerAuthInfo(SrcType);
939 llvm::Value *MemFnPtr = Builder.CreateExtractValue(src, 0, "memptr.ptr");
940 llvm::Type *OrigTy = MemFnPtr->getType();
941
942 llvm::BasicBlock *StartBB = Builder.GetInsertBlock();
943 llvm::BasicBlock *ResignBB = CGF.createBasicBlock("resign");
944 llvm::BasicBlock *MergeBB = CGF.createBasicBlock("merge");
945
946 // Check whether we have a virtual offset or a pointer to a function.
947 assert(UseARMMethodPtrABI && "ARM ABI expected");
948 llvm::Value *Adj = Builder.CreateExtractValue(src, 1, "memptr.adj");
949 llvm::Constant *Ptrdiff_1 = llvm::ConstantInt::get(CGM.PtrDiffTy, 1);
950 llvm::Value *AndVal = Builder.CreateAnd(Adj, Ptrdiff_1);
951 llvm::Value *IsVirtualOffset =
952 Builder.CreateIsNotNull(AndVal, "is.virtual.offset");
953 Builder.CreateCondBr(IsVirtualOffset, MergeBB, ResignBB);
954
955 CGF.EmitBlock(ResignBB);
956 llvm::Type *PtrTy = llvm::PointerType::getUnqual(CGM.Int8Ty);
957 MemFnPtr = Builder.CreateIntToPtr(MemFnPtr, PtrTy);
958 MemFnPtr =
959 CGF.emitPointerAuthResign(MemFnPtr, SrcType, CurAuthInfo, NewAuthInfo,
960 isa<llvm::Constant>(src));
961 MemFnPtr = Builder.CreatePtrToInt(MemFnPtr, OrigTy);
962 llvm::Value *ResignedVal = Builder.CreateInsertValue(src, MemFnPtr, 0);
963 ResignBB = Builder.GetInsertBlock();
964
965 CGF.EmitBlock(MergeBB);
966 llvm::PHINode *NewSrc = Builder.CreatePHI(src->getType(), 2);
967 NewSrc->addIncoming(src, StartBB);
968 NewSrc->addIncoming(ResignedVal, ResignBB);
969 src = NewSrc;
970 }
971 }
972
973 // Under Itanium, reinterprets don't require any additional processing.
974 if (E->getCastKind() == CK_ReinterpretMemberPointer) return src;
975
976 llvm::Constant *adj = getMemberPointerAdjustment(E);
977 if (!adj) return src;
978
979 bool isDerivedToBase = (E->getCastKind() == CK_DerivedToBaseMemberPointer);
980
981 const MemberPointerType *destTy =
982    E->getType()->castAs<MemberPointerType>();
983
984 // For member data pointers, this is just a matter of adding the
985 // offset if the source is non-null.
986 if (destTy->isMemberDataPointer()) {
987 llvm::Value *dst;
988 if (isDerivedToBase)
989 dst = Builder.CreateNSWSub(src, adj, "adj");
990 else
991 dst = Builder.CreateNSWAdd(src, adj, "adj");
992
993 // Null check.
994 llvm::Value *null = llvm::Constant::getAllOnesValue(src->getType());
995 llvm::Value *isNull = Builder.CreateICmpEQ(src, null, "memptr.isnull");
996 return Builder.CreateSelect(isNull, src, dst);
997 }
998
999 // The this-adjustment is left-shifted by 1 on ARM.
1000 if (UseARMMethodPtrABI) {
1001 uint64_t offset = cast<llvm::ConstantInt>(adj)->getZExtValue();
1002 offset <<= 1;
1003 adj = llvm::ConstantInt::get(adj->getType(), offset);
1004 }
1005
1006 llvm::Value *srcAdj = Builder.CreateExtractValue(src, 1, "src.adj");
1007 llvm::Value *dstAdj;
1008 if (isDerivedToBase)
1009 dstAdj = Builder.CreateNSWSub(srcAdj, adj, "adj");
1010 else
1011 dstAdj = Builder.CreateNSWAdd(srcAdj, adj, "adj");
1012
1013 return Builder.CreateInsertValue(src, dstAdj, 1);
1014}
1015
1016static llvm::Constant *
1017pointerAuthResignMemberFunctionPointer(llvm::Constant *Src, QualType DestType,
1018                                       QualType SrcType, CodeGenModule &CGM) {
1019 assert(DestType->isMemberFunctionPointerType() &&
1020 SrcType->isMemberFunctionPointerType() &&
1021 "member function pointers expected");
1022 if (DestType == SrcType)
1023 return Src;
1024
1025 const auto &NewAuthInfo = CGM.getMemberFunctionPointerAuthInfo(DestType);
1026 const auto &CurAuthInfo = CGM.getMemberFunctionPointerAuthInfo(SrcType);
1027
1028 if (!NewAuthInfo && !CurAuthInfo)
1029 return Src;
1030
1031 llvm::Constant *MemFnPtr = Src->getAggregateElement(0u);
1032 if (MemFnPtr->getNumOperands() == 0) {
1033 // src must be a pair of null pointers.
1034 assert(isa<llvm::ConstantInt>(MemFnPtr) && "constant int expected");
1035 return Src;
1036 }
1037
1038 llvm::Constant *ConstPtr = pointerAuthResignConstant(
1039 cast<llvm::User>(MemFnPtr)->getOperand(0), CurAuthInfo, NewAuthInfo, CGM);
1040 ConstPtr = llvm::ConstantExpr::getPtrToInt(ConstPtr, MemFnPtr->getType());
1041 return ConstantFoldInsertValueInstruction(Src, ConstPtr, 0);
1042}
1043
1044llvm::Constant *
1045ItaniumCXXABI::EmitMemberPointerConversion(const CastExpr *E,
1046 llvm::Constant *src) {
1047 assert(E->getCastKind() == CK_DerivedToBaseMemberPointer ||
1048 E->getCastKind() == CK_BaseToDerivedMemberPointer ||
1049 E->getCastKind() == CK_ReinterpretMemberPointer);
1050
1051 QualType DstType = E->getType();
1052
1053 if (DstType->isMemberFunctionPointerType())
1054    return pointerAuthResignMemberFunctionPointer(
1055        src, DstType, E->getSubExpr()->getType(), CGM);
1056
1057 // Under Itanium, reinterprets don't require any additional processing.
1058 if (E->getCastKind() == CK_ReinterpretMemberPointer) return src;
1059
1060 // If the adjustment is trivial, we don't need to do anything.
1061 llvm::Constant *adj = getMemberPointerAdjustment(E);
1062 if (!adj) return src;
1063
1064 bool isDerivedToBase = (E->getCastKind() == CK_DerivedToBaseMemberPointer);
1065
1066 const MemberPointerType *destTy =
1067    E->getType()->castAs<MemberPointerType>();
1068
1069 // For member data pointers, this is just a matter of adding the
1070 // offset if the source is non-null.
1071 if (destTy->isMemberDataPointer()) {
1072 // null maps to null.
1073 if (src->isAllOnesValue()) return src;
1074
1075 if (isDerivedToBase)
1076 return llvm::ConstantExpr::getNSWSub(src, adj);
1077 else
1078 return llvm::ConstantExpr::getNSWAdd(src, adj);
1079 }
1080
1081 // The this-adjustment is left-shifted by 1 on ARM.
1082 if (UseARMMethodPtrABI) {
1083 uint64_t offset = cast<llvm::ConstantInt>(adj)->getZExtValue();
1084 offset <<= 1;
1085 adj = llvm::ConstantInt::get(adj->getType(), offset);
1086 }
1087
1088 llvm::Constant *srcAdj = src->getAggregateElement(1);
1089 llvm::Constant *dstAdj;
1090 if (isDerivedToBase)
1091 dstAdj = llvm::ConstantExpr::getNSWSub(srcAdj, adj);
1092 else
1093 dstAdj = llvm::ConstantExpr::getNSWAdd(srcAdj, adj);
1094
1095 llvm::Constant *res = ConstantFoldInsertValueInstruction(src, dstAdj, 1);
1096 assert(res != nullptr && "Folding must succeed");
1097 return res;
1098}
1099
1100llvm::Constant *
1101ItaniumCXXABI::EmitNullMemberPointer(const MemberPointerType *MPT) {
1102 // Itanium C++ ABI 2.3:
1103 // A NULL pointer is represented as -1.
1104 if (MPT->isMemberDataPointer())
1105 return llvm::ConstantInt::get(CGM.PtrDiffTy, -1ULL, /*isSigned=*/true);
1106
1107 llvm::Constant *Zero = llvm::ConstantInt::get(CGM.PtrDiffTy, 0);
1108 llvm::Constant *Values[2] = { Zero, Zero };
1109 return llvm::ConstantStruct::getAnon(Values);
1110}
1111
1112llvm::Constant *
1113ItaniumCXXABI::EmitMemberDataPointer(const MemberPointerType *MPT,
1114 CharUnits offset) {
1115 // Itanium C++ ABI 2.3:
1116 // A pointer to data member is an offset from the base address of
1117 // the class object containing it, represented as a ptrdiff_t
1118 return llvm::ConstantInt::get(CGM.PtrDiffTy, offset.getQuantity());
1119}
1120
1121llvm::Constant *
1122ItaniumCXXABI::EmitMemberFunctionPointer(const CXXMethodDecl *MD) {
1123 return BuildMemberPointer(MD, CharUnits::Zero());
1124}
1125
1126llvm::Constant *ItaniumCXXABI::BuildMemberPointer(const CXXMethodDecl *MD,
1127                                                  CharUnits ThisAdjustment) {
1128  assert(MD->isInstance() && "Member function must not be static!");
1129
1130 CodeGenTypes &Types = CGM.getTypes();
1131
1132 // Get the function pointer (or index if this is a virtual function).
1133 llvm::Constant *MemPtr[2];
1134 if (MD->isVirtual()) {
1135 uint64_t Index = CGM.getItaniumVTableContext().getMethodVTableIndex(MD);
1136 uint64_t VTableOffset;
1137 if (CGM.getItaniumVTableContext().isRelativeLayout()) {
1138 // Multiply by 4-byte relative offsets.
1139 VTableOffset = Index * 4;
1140 } else {
1141 const ASTContext &Context = getContext();
1142 CharUnits PointerWidth = Context.toCharUnitsFromBits(
1143 Context.getTargetInfo().getPointerWidth(LangAS::Default));
1144 VTableOffset = Index * PointerWidth.getQuantity();
1145 }
1146
1147 if (UseARMMethodPtrABI) {
1148 // ARM C++ ABI 3.2.1:
1149 // This ABI specifies that adj contains twice the this
1150 // adjustment, plus 1 if the member function is virtual. The
1151 // least significant bit of adj then makes exactly the same
1152 // discrimination as the least significant bit of ptr does for
1153 // Itanium.
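      // For example (illustrative): a virtual function at vtable offset 16
      // with a zero this-adjustment is encoded here as { ptr = 16, adj = 1 },
      // whereas plain Itanium would encode it as { ptr = 17, adj = 0 }.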
1154
1155 // We cannot use the Itanium ABI's representation for virtual member
1156 // function pointers under pointer authentication because it would
1157 // require us to store both the virtual offset and the constant
1158 // discriminator in the pointer, which would be immediately vulnerable
1159 // to attack. Instead we introduce a thunk that does the virtual dispatch
1160 // and store it as if it were a non-virtual member function. This means
1161 // that virtual function pointers may not compare equal anymore, but
1162 // fortunately they aren't required to by the standard, and we do make
1163 // a best-effort attempt to re-use the thunk.
1164 //
1165 // To support interoperation with code in which pointer authentication
1166      // is disabled, dereferencing a member function pointer must still handle
1167 // the virtual case, but it can use a discriminator which should never
1168 // be valid.
1169 const auto &Schema =
1170 CGM.getCodeGenOpts().PointerAuth.CXXMemberFunctionPointers;
1171 if (Schema)
1172 MemPtr[0] = llvm::ConstantExpr::getPtrToInt(
1173 getSignedVirtualMemberFunctionPointer(MD), CGM.PtrDiffTy);
1174 else
1175 MemPtr[0] = llvm::ConstantInt::get(CGM.PtrDiffTy, VTableOffset);
1176 // Don't set the LSB of adj to 1 if pointer authentication for member
1177 // function pointers is enabled.
1178 MemPtr[1] = llvm::ConstantInt::get(
1179 CGM.PtrDiffTy, 2 * ThisAdjustment.getQuantity() + !Schema);
1180 } else {
1181 // Itanium C++ ABI 2.3:
1182 // For a virtual function, [the pointer field] is 1 plus the
1183 // virtual table offset (in bytes) of the function,
1184 // represented as a ptrdiff_t.
1185 MemPtr[0] = llvm::ConstantInt::get(CGM.PtrDiffTy, VTableOffset + 1);
1186 MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy,
1187 ThisAdjustment.getQuantity());
1188 }
1189 } else {
1190 const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
1191 llvm::Type *Ty;
1192 // Check whether the function has a computable LLVM signature.
1193 if (Types.isFuncTypeConvertible(FPT)) {
1194 // The function has a computable LLVM signature; use the correct type.
1195 Ty = Types.GetFunctionType(Types.arrangeCXXMethodDeclaration(MD));
1196 } else {
1197 // Use an arbitrary non-function type to tell GetAddrOfFunction that the
1198 // function type is incomplete.
1199 Ty = CGM.PtrDiffTy;
1200 }
1201 llvm::Constant *addr = CGM.getMemberFunctionPointer(MD, Ty);
1202
1203 MemPtr[0] = llvm::ConstantExpr::getPtrToInt(addr, CGM.PtrDiffTy);
1204 MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy,
1205 (UseARMMethodPtrABI ? 2 : 1) *
1206 ThisAdjustment.getQuantity());
1207 }
1208
1209 return llvm::ConstantStruct::getAnon(MemPtr);
1210}
1211
1212llvm::Constant *ItaniumCXXABI::EmitMemberPointer(const APValue &MP,
1213 QualType MPType) {
1214 const MemberPointerType *MPT = MPType->castAs<MemberPointerType>();
1215 const ValueDecl *MPD = MP.getMemberPointerDecl();
1216 if (!MPD)
1217 return EmitNullMemberPointer(MPT);
1218
1219 CharUnits ThisAdjustment = getContext().getMemberPointerPathAdjustment(MP);
1220
1221 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MPD)) {
1222 llvm::Constant *Src = BuildMemberPointer(MD, ThisAdjustment);
1223 QualType SrcType = getContext().getMemberPointerType(
1224 MD->getType(), MD->getParent()->getTypeForDecl());
1225 return pointerAuthResignMemberFunctionPointer(Src, MPType, SrcType, CGM);
1226 }
1227
1228 CharUnits FieldOffset =
1229 getContext().toCharUnitsFromBits(getContext().getFieldOffset(MPD));
1230 return EmitMemberDataPointer(MPT, ThisAdjustment + FieldOffset);
1231}
1232
1233/// The comparison algorithm is pretty easy: the member pointers are
1234/// the same if they're either bitwise identical *or* both null.
1235///
1236/// ARM is different here only because null-ness is more complicated.
1237llvm::Value *
1238ItaniumCXXABI::EmitMemberPointerComparison(CodeGenFunction &CGF,
1239 llvm::Value *L,
1240 llvm::Value *R,
1241 const MemberPointerType *MPT,
1242 bool Inequality) {
1243 CGBuilderTy &Builder = CGF.Builder;
1244
1245 llvm::ICmpInst::Predicate Eq;
1246 llvm::Instruction::BinaryOps And, Or;
1247 if (Inequality) {
1248 Eq = llvm::ICmpInst::ICMP_NE;
1249 And = llvm::Instruction::Or;
1250 Or = llvm::Instruction::And;
1251 } else {
1252 Eq = llvm::ICmpInst::ICMP_EQ;
1253 And = llvm::Instruction::And;
1254 Or = llvm::Instruction::Or;
1255 }
1256
1257 // Member data pointers are easy because there's a unique null
1258 // value, so it just comes down to bitwise equality.
1259 if (MPT->isMemberDataPointer())
1260 return Builder.CreateICmp(Eq, L, R);
1261
1262 // For member function pointers, the tautologies are more complex.
1263 // The Itanium tautology is:
1264 // (L == R) <==> (L.ptr == R.ptr && (L.ptr == 0 || L.adj == R.adj))
1265 // The ARM tautology is:
1266 // (L == R) <==> (L.ptr == R.ptr &&
1267 // (L.adj == R.adj ||
1268 // (L.ptr == 0 && ((L.adj|R.adj) & 1) == 0)))
1269 // The inequality tautologies have exactly the same structure, except
1270 // applying De Morgan's laws.
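  //
  // Concretely (illustrative): two null member function pointers compare
  // equal under Itanium even if their adj fields differ, because ptr == 0
  // short-circuits the adj comparison; under ARM the low bits of both adj
  // fields must additionally be clear, since a set low bit marks a virtual
  // (and therefore non-null) member function pointer.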
1271
1272 llvm::Value *LPtr = Builder.CreateExtractValue(L, 0, "lhs.memptr.ptr");
1273 llvm::Value *RPtr = Builder.CreateExtractValue(R, 0, "rhs.memptr.ptr");
1274
1275 // This condition tests whether L.ptr == R.ptr. This must always be
1276 // true for equality to hold.
1277 llvm::Value *PtrEq = Builder.CreateICmp(Eq, LPtr, RPtr, "cmp.ptr");
1278
1279 // This condition, together with the assumption that L.ptr == R.ptr,
1280 // tests whether the pointers are both null. ARM imposes an extra
1281 // condition.
1282 llvm::Value *Zero = llvm::Constant::getNullValue(LPtr->getType());
1283 llvm::Value *EqZero = Builder.CreateICmp(Eq, LPtr, Zero, "cmp.ptr.null");
1284
1285 // This condition tests whether L.adj == R.adj. If this isn't
1286 // true, the pointers are unequal unless they're both null.
1287 llvm::Value *LAdj = Builder.CreateExtractValue(L, 1, "lhs.memptr.adj");
1288 llvm::Value *RAdj = Builder.CreateExtractValue(R, 1, "rhs.memptr.adj");
1289 llvm::Value *AdjEq = Builder.CreateICmp(Eq, LAdj, RAdj, "cmp.adj");
1290
1291 // Null member function pointers on ARM clear the low bit of Adj,
1292 // so the zero condition has to check that neither low bit is set.
1293 if (UseARMMethodPtrABI) {
1294 llvm::Value *One = llvm::ConstantInt::get(LPtr->getType(), 1);
1295
1296 // Compute (l.adj | r.adj) & 1 and test it against zero.
1297 llvm::Value *OrAdj = Builder.CreateOr(LAdj, RAdj, "or.adj");
1298 llvm::Value *OrAdjAnd1 = Builder.CreateAnd(OrAdj, One);
1299 llvm::Value *OrAdjAnd1EqZero = Builder.CreateICmp(Eq, OrAdjAnd1, Zero,
1300 "cmp.or.adj");
1301 EqZero = Builder.CreateBinOp(And, EqZero, OrAdjAnd1EqZero);
1302 }
1303
1304 // Tie together all our conditions.
1305 llvm::Value *Result = Builder.CreateBinOp(Or, EqZero, AdjEq);
1306 Result = Builder.CreateBinOp(And, PtrEq, Result,
1307 Inequality ? "memptr.ne" : "memptr.eq");
1308 return Result;
1309}
1310
1311llvm::Value *
1312ItaniumCXXABI::EmitMemberPointerIsNotNull(CodeGenFunction &CGF,
1313 llvm::Value *MemPtr,
1314 const MemberPointerType *MPT) {
1315 CGBuilderTy &Builder = CGF.Builder;
1316
1317 /// For member data pointers, this is just a check against -1.
1318 if (MPT->isMemberDataPointer()) {
1319 assert(MemPtr->getType() == CGM.PtrDiffTy);
1320 llvm::Value *NegativeOne =
1321 llvm::Constant::getAllOnesValue(MemPtr->getType());
1322 return Builder.CreateICmpNE(MemPtr, NegativeOne, "memptr.tobool");
1323 }
1324
1325 // In Itanium, a member function pointer is not null if 'ptr' is not null.
1326 llvm::Value *Ptr = Builder.CreateExtractValue(MemPtr, 0, "memptr.ptr");
1327
1328 llvm::Constant *Zero = llvm::ConstantInt::get(Ptr->getType(), 0);
1329 llvm::Value *Result = Builder.CreateICmpNE(Ptr, Zero, "memptr.tobool");
1330
1331 // On ARM, a member function pointer is also non-null if the low bit of 'adj'
1332 // (the virtual bit) is set.
1333 if (UseARMMethodPtrABI) {
1334 llvm::Constant *One = llvm::ConstantInt::get(Ptr->getType(), 1);
1335 llvm::Value *Adj = Builder.CreateExtractValue(MemPtr, 1, "memptr.adj");
1336 llvm::Value *VirtualBit = Builder.CreateAnd(Adj, One, "memptr.virtualbit");
1337 llvm::Value *IsVirtual = Builder.CreateICmpNE(VirtualBit, Zero,
1338 "memptr.isvirtual");
1339 Result = Builder.CreateOr(Result, IsVirtual);
1340 }
1341
1342 return Result;
1343}
1344
1345bool ItaniumCXXABI::classifyReturnType(CGFunctionInfo &FI) const {
1346 const CXXRecordDecl *RD = FI.getReturnType()->getAsCXXRecordDecl();
1347 if (!RD)
1348 return false;
1349
1350 // If C++ prohibits us from making a copy, return by address.
1351 if (!RD->canPassInRegisters()) {
1352 auto Align = CGM.getContext().getTypeAlignInChars(FI.getReturnType());
1353 FI.getReturnInfo() = ABIArgInfo::getIndirect(Align, /*ByVal=*/false);
1354 return true;
1355 }
1356 return false;
1357}
1358
1359/// The Itanium ABI requires non-zero initialization only for data
1360/// member pointers, for which '0' is a valid offset.
1361bool ItaniumCXXABI::isZeroInitializable(const MemberPointerType *MPT) {
1362 return MPT->isMemberFunctionPointer();
1363}
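// For example, an `int S::*` designating the first non-static data member of
// S is stored as offset 0, so a zero-filled object would wrongly read as a
// non-null member data pointer; the null value is -1 instead. Member function
// pointers, by contrast, use ptr == 0 for null and can be zero-initialized.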
1364
1365/// The Itanium ABI always places an offset to the complete object
1366/// at entry -2 in the vtable.
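/// Relative to the address point, a typical (non-relative) vtable group is
/// laid out roughly as:
///   [-2] offset-to-top   (ptrdiff_t; 0 for the primary vtable of the
///        complete object)
///   [-1] type_info pointer
///   [ 0] first virtual function slot, ...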
1367void ItaniumCXXABI::emitVirtualObjectDelete(CodeGenFunction &CGF,
1368 const CXXDeleteExpr *DE,
1369 Address Ptr,
1370 QualType ElementType,
1371 const CXXDestructorDecl *Dtor) {
1372 bool UseGlobalDelete = DE->isGlobalDelete();
1373 if (UseGlobalDelete) {
1374 // Derive the complete-object pointer, which is what we need
1375 // to pass to the deallocation function.
1376
1377 // Grab the vtable pointer as an intptr_t*.
1378 auto *ClassDecl =
1379 cast<CXXRecordDecl>(ElementType->castAs<RecordType>()->getDecl());
1380 llvm::Value *VTable = CGF.GetVTablePtr(Ptr, CGF.UnqualPtrTy, ClassDecl);
1381
1382 // Track back to entry -2 and pull out the offset there.
1383 llvm::Value *OffsetPtr = CGF.Builder.CreateConstInBoundsGEP1_64(
1384 CGF.IntPtrTy, VTable, -2, "complete-offset.ptr");
1385 llvm::Value *Offset = CGF.Builder.CreateAlignedLoad(CGF.IntPtrTy, OffsetPtr,
1386 CGF.getPointerAlign());
1387
1388 // Apply the offset.
1389 llvm::Value *CompletePtr = Ptr.emitRawPointer(CGF);
1390 CompletePtr =
1391 CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, CompletePtr, Offset);
1392
1393 // If we're supposed to call the global delete, make sure we do so
1394 // even if the destructor throws.
1395 CGF.pushCallObjectDeleteCleanup(DE->getOperatorDelete(), CompletePtr,
1396 ElementType);
1397 }
1398
1399 // FIXME: Provide a source location here even though there's no
1400 // CXXMemberCallExpr for dtor call.
1401 CXXDtorType DtorType = UseGlobalDelete ? Dtor_Complete : Dtor_Deleting;
1402 EmitVirtualDestructorCall(CGF, Dtor, DtorType, Ptr, DE,
1403 /*CallOrInvoke=*/nullptr);
1404
1405 if (UseGlobalDelete)
1406 CGF.PopCleanupBlock();
1407}
1408
1409void ItaniumCXXABI::emitRethrow(CodeGenFunction &CGF, bool isNoReturn) {
1410 // void __cxa_rethrow();
1411
1412 llvm::FunctionType *FTy =
1413 llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1414
1415 llvm::FunctionCallee Fn = CGM.CreateRuntimeFunction(FTy, "__cxa_rethrow");
1416
1417 if (isNoReturn)
1418    CGF.EmitNoreturnRuntimeCallOrInvoke(Fn, {});
1419  else
1420    CGF.EmitRuntimeCallOrInvoke(Fn);
1421}
1422
1423static llvm::FunctionCallee getAllocateExceptionFn(CodeGenModule &CGM) {
1424 // void *__cxa_allocate_exception(size_t thrown_size);
1425
1426 llvm::FunctionType *FTy =
1427 llvm::FunctionType::get(CGM.Int8PtrTy, CGM.SizeTy, /*isVarArg=*/false);
1428
1429 return CGM.CreateRuntimeFunction(FTy, "__cxa_allocate_exception");
1430}
1431
1432static llvm::FunctionCallee getThrowFn(CodeGenModule &CGM) {
1433 // void __cxa_throw(void *thrown_exception, std::type_info *tinfo,
1434 // void (*dest) (void *));
1435
1436 llvm::Type *Args[3] = { CGM.Int8PtrTy, CGM.GlobalsInt8PtrTy, CGM.Int8PtrTy };
1437 llvm::FunctionType *FTy =
1438 llvm::FunctionType::get(CGM.VoidTy, Args, /*isVarArg=*/false);
1439
1440 return CGM.CreateRuntimeFunction(FTy, "__cxa_throw");
1441}
1442
1443void ItaniumCXXABI::emitThrow(CodeGenFunction &CGF, const CXXThrowExpr *E) {
1444 QualType ThrowType = E->getSubExpr()->getType();
1445 // Now allocate the exception object.
1446 llvm::Type *SizeTy = CGF.ConvertType(getContext().getSizeType());
1447 uint64_t TypeSize = getContext().getTypeSizeInChars(ThrowType).getQuantity();
1448
1449 llvm::FunctionCallee AllocExceptionFn = getAllocateExceptionFn(CGM);
1450 llvm::CallInst *ExceptionPtr = CGF.EmitNounwindRuntimeCall(
1451 AllocExceptionFn, llvm::ConstantInt::get(SizeTy, TypeSize), "exception");
1452
1453 CharUnits ExnAlign = CGF.getContext().getExnObjectAlignment();
1454 CGF.EmitAnyExprToExn(
1455 E->getSubExpr(), Address(ExceptionPtr, CGM.Int8Ty, ExnAlign));
1456
1457 // Now throw the exception.
1458 llvm::Constant *TypeInfo = CGM.GetAddrOfRTTIDescriptor(ThrowType,
1459 /*ForEH=*/true);
1460
1461 // The address of the destructor. If the exception type has a
1462 // trivial destructor (or isn't a record), we just pass null.
1463 llvm::Constant *Dtor = nullptr;
1464 if (const RecordType *RecordTy = ThrowType->getAs<RecordType>()) {
1465 CXXRecordDecl *Record = cast<CXXRecordDecl>(RecordTy->getDecl());
1466 if (!Record->hasTrivialDestructor()) {
1467 // __cxa_throw is declared to take its destructor as void (*)(void *). We
1468 // must match that if function pointers can be authenticated with a
1469 // discriminator based on their type.
1470 const ASTContext &Ctx = getContext();
1471 QualType DtorTy = Ctx.getFunctionType(Ctx.VoidTy, {Ctx.VoidPtrTy},
1472                                        FunctionProtoType::ExtProtoInfo());
1473
1474 CXXDestructorDecl *DtorD = Record->getDestructor();
1475 Dtor = CGM.getAddrOfCXXStructor(GlobalDecl(DtorD, Dtor_Complete));
1476 Dtor = CGM.getFunctionPointer(Dtor, DtorTy);
1477 }
1478 }
1479 if (!Dtor) Dtor = llvm::Constant::getNullValue(CGM.Int8PtrTy);
1480
1481 llvm::Value *args[] = { ExceptionPtr, TypeInfo, Dtor };
1482  CGF.EmitNoreturnRuntimeCallOrInvoke(getThrowFn(CGM), args);
1483}
1484
1485static llvm::FunctionCallee getItaniumDynamicCastFn(CodeGenFunction &CGF) {
1486 // void *__dynamic_cast(const void *sub,
1487 // global_as const abi::__class_type_info *src,
1488 // global_as const abi::__class_type_info *dst,
1489 // std::ptrdiff_t src2dst_offset);
1490
1491 llvm::Type *Int8PtrTy = CGF.Int8PtrTy;
1492 llvm::Type *GlobInt8PtrTy = CGF.GlobalsInt8PtrTy;
1493 llvm::Type *PtrDiffTy =
1494      CGF.ConvertType(CGF.getContext().getPointerDiffType());
1495
1496 llvm::Type *Args[4] = { Int8PtrTy, GlobInt8PtrTy, GlobInt8PtrTy, PtrDiffTy };
1497
1498 llvm::FunctionType *FTy = llvm::FunctionType::get(Int8PtrTy, Args, false);
1499
1500 // Mark the function as nounwind willreturn readonly.
1501 llvm::AttrBuilder FuncAttrs(CGF.getLLVMContext());
1502 FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
1503 FuncAttrs.addAttribute(llvm::Attribute::WillReturn);
1504 FuncAttrs.addMemoryAttr(llvm::MemoryEffects::readOnly());
1505 llvm::AttributeList Attrs = llvm::AttributeList::get(
1506 CGF.getLLVMContext(), llvm::AttributeList::FunctionIndex, FuncAttrs);
1507
1508 return CGF.CGM.CreateRuntimeFunction(FTy, "__dynamic_cast", Attrs);
1509}
1510
1511static llvm::FunctionCallee getBadCastFn(CodeGenFunction &CGF) {
1512 // void __cxa_bad_cast();
1513 llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false);
1514 return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_cast");
1515}
1516
1517/// Compute the src2dst_offset hint as described in the
1518/// Itanium C++ ABI [2.9.7]
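///
/// The resulting hint values are (a sketch of the cases handled below):
///   >= 0 : static offset of Src within Dst when Src is a unique public,
///          non-virtual base of Dst;
///    -1  : no hint (some path to Src goes through a virtual base);
///    -2  : Src is not a public base of Dst (or not a base at all);
///    -3  : Src appears as a public base of Dst more than once.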
1519static CharUnits computeOffsetHint(ASTContext &Context,
1520                                   const CXXRecordDecl *Src,
1521 const CXXRecordDecl *Dst) {
1522 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
1523 /*DetectVirtual=*/false);
1524
1525 // If Dst is not derived from Src we can skip the whole computation below and
1526 // return that Src is not a public base of Dst. Record all inheritance paths.
1527 if (!Dst->isDerivedFrom(Src, Paths))
1528 return CharUnits::fromQuantity(-2ULL);
1529
1530 unsigned NumPublicPaths = 0;
1531 CharUnits Offset;
1532
1533 // Now walk all possible inheritance paths.
1534 for (const CXXBasePath &Path : Paths) {
1535 if (Path.Access != AS_public) // Ignore non-public inheritance.
1536 continue;
1537
1538 ++NumPublicPaths;
1539
1540 for (const CXXBasePathElement &PathElement : Path) {
1541 // If the path contains a virtual base class we can't give any hint.
1542 // -1: no hint.
1543 if (PathElement.Base->isVirtual())
1544 return CharUnits::fromQuantity(-1ULL);
1545
1546 if (NumPublicPaths > 1) // Won't use offsets, skip computation.
1547 continue;
1548
1549 // Accumulate the base class offsets.
1550 const ASTRecordLayout &L = Context.getASTRecordLayout(PathElement.Class);
1551 Offset += L.getBaseClassOffset(
1552 PathElement.Base->getType()->getAsCXXRecordDecl());
1553 }
1554 }
1555
1556 // -2: Src is not a public base of Dst.
1557 if (NumPublicPaths == 0)
1558 return CharUnits::fromQuantity(-2ULL);
1559
1560 // -3: Src is a multiple public base type but never a virtual base type.
1561 if (NumPublicPaths > 1)
1562 return CharUnits::fromQuantity(-3ULL);
1563
1564 // Otherwise, the Src type is a unique public nonvirtual base type of Dst.
1565 // Return the offset of Src from the origin of Dst.
1566 return Offset;
1567}
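//
// Worked example (illustrative; the concrete offset depends on the target):
//
//   struct A { virtual ~A(); };
//   struct B { virtual ~B(); };
//   struct C : A, B { };
//
// computeOffsetHint(Ctx, /*Src=*/B, /*Dst=*/C) yields B's offset within C
// (typically 8 on a 64-bit layout), computeOffsetHint(Ctx, A, B) yields -2
// because B does not derive from A, a path through a virtual base yields -1,
// and multiple public paths yield -3, matching the cases above.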
1568
1569static llvm::FunctionCallee getBadTypeidFn(CodeGenFunction &CGF) {
1570 // void __cxa_bad_typeid();
1571 llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false);
1572
1573 return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_typeid");
1574}
1575
1576bool ItaniumCXXABI::shouldTypeidBeNullChecked(QualType SrcRecordTy) {
1577 return true;
1578}
1579
1580void ItaniumCXXABI::EmitBadTypeidCall(CodeGenFunction &CGF) {
1581 llvm::FunctionCallee Fn = getBadTypeidFn(CGF);
1582 llvm::CallBase *Call = CGF.EmitRuntimeCallOrInvoke(Fn);
1583 Call->setDoesNotReturn();
1584 CGF.Builder.CreateUnreachable();
1585}
1586
1587llvm::Value *ItaniumCXXABI::EmitTypeid(CodeGenFunction &CGF,
1588 QualType SrcRecordTy,
1589 Address ThisPtr,
1590 llvm::Type *StdTypeInfoPtrTy) {
1591 auto *ClassDecl =
1592 cast<CXXRecordDecl>(SrcRecordTy->castAs<RecordType>()->getDecl());
1593 llvm::Value *Value = CGF.GetVTablePtr(ThisPtr, CGM.GlobalsInt8PtrTy,
1594 ClassDecl);
1595
1596 if (CGM.getItaniumVTableContext().isRelativeLayout()) {
1597 // Load the type info.
1598 Value = CGF.Builder.CreateCall(
1599 CGM.getIntrinsic(llvm::Intrinsic::load_relative, {CGM.Int32Ty}),
1600 {Value, llvm::ConstantInt::get(CGM.Int32Ty, -4)});
1601 } else {
1602 // Load the type info.
1603 Value =
1604 CGF.Builder.CreateConstInBoundsGEP1_64(StdTypeInfoPtrTy, Value, -1ULL);
1605 }
1606 return CGF.Builder.CreateAlignedLoad(StdTypeInfoPtrTy, Value,
1607 CGF.getPointerAlign());
1608}
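//
// Illustrative lowering (a sketch, assuming the non-relative vtable layout):
// for
//
//   struct A { virtual ~A(); };
//   const std::type_info &ti = typeid(*p);   // p has type A*
//
// the code above loads the vptr of *p and reads the std::type_info pointer
// stored one slot before the vtable's address point:
//
//   %vtable = load ptr, ptr %p
//   %ti.slot = getelementptr inbounds ptr, ptr %vtable, i64 -1
//   %ti = load ptr, ptr %ti.slot
//
// With the relative layout the slot is instead read via llvm.load.relative
// at byte offset -4, as handled in the branch above.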
1609
1610bool ItaniumCXXABI::shouldDynamicCastCallBeNullChecked(bool SrcIsPtr,
1611 QualType SrcRecordTy) {
1612 return SrcIsPtr;
1613}
1614
1615llvm::Value *ItaniumCXXABI::emitDynamicCastCall(
1616 CodeGenFunction &CGF, Address ThisAddr, QualType SrcRecordTy,
1617 QualType DestTy, QualType DestRecordTy, llvm::BasicBlock *CastEnd) {
1618 llvm::Type *PtrDiffLTy =
1619 CGF.ConvertType(CGF.getContext().getPointerDiffType());
1620
1621 llvm::Value *SrcRTTI =
1622 CGF.CGM.GetAddrOfRTTIDescriptor(SrcRecordTy.getUnqualifiedType());
1623 llvm::Value *DestRTTI =
1624 CGF.CGM.GetAddrOfRTTIDescriptor(DestRecordTy.getUnqualifiedType());
1625
1626 // Compute the offset hint.
1627 const CXXRecordDecl *SrcDecl = SrcRecordTy->getAsCXXRecordDecl();
1628 const CXXRecordDecl *DestDecl = DestRecordTy->getAsCXXRecordDecl();
1629 llvm::Value *OffsetHint = llvm::ConstantInt::get(
1630 PtrDiffLTy,
1631 computeOffsetHint(CGF.getContext(), SrcDecl, DestDecl).getQuantity());
1632
1633 // Emit the call to __dynamic_cast.
1634 llvm::Value *Value = ThisAddr.emitRawPointer(CGF);
1635 if (CGM.getCodeGenOpts().PointerAuth.CXXVTablePointers) {
1636 // We perform a no-op load of the vtable pointer here to force an
1637 // authentication. In environments that do not support pointer
1638 // authentication this is an actual no-op that will be elided. When
1639 // pointer authentication is supported and enforced on vtable pointers this
1640 // load can trap.
1641 llvm::Value *Vtable =
1642 CGF.GetVTablePtr(ThisAddr, CGM.Int8PtrTy, SrcDecl,
1643 CodeGenFunction::VTableAuthMode::MustTrap);
1644 assert(Vtable);
1645 (void)Vtable;
1646 }
1647
1648 llvm::Value *args[] = {Value, SrcRTTI, DestRTTI, OffsetHint};
1649 Value = CGF.EmitNounwindRuntimeCall(getItaniumDynamicCastFn(CGF), args);
1650
1651 /// C++ [expr.dynamic.cast]p9:
1652 /// A failed cast to reference type throws std::bad_cast
1653 if (DestTy->isReferenceType()) {
1654 llvm::BasicBlock *BadCastBlock =
1655 CGF.createBasicBlock("dynamic_cast.bad_cast");
1656
1657 llvm::Value *IsNull = CGF.Builder.CreateIsNull(Value);
1658 CGF.Builder.CreateCondBr(IsNull, BadCastBlock, CastEnd);
1659
1660 CGF.EmitBlock(BadCastBlock);
1661 EmitBadCastCall(CGF);
1662 }
1663
1664 return Value;
1665}
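//
// Sketch of the resulting control flow (illustrative IR, assuming the hint
// computed above is 0):
//
//   struct A { virtual ~A(); };
//   struct B : A { };
//   B &b = dynamic_cast<B &>(a);   // a has type A&
//
//   %res = call ptr @__dynamic_cast(ptr %a, ptr @_ZTI1A, ptr @_ZTI1B, i64 0)
//   %isnull = icmp eq ptr %res, null
//   br i1 %isnull, label %dynamic_cast.bad_cast, label %dynamic_cast.end
//
// where the bad_cast block calls __cxa_bad_cast(); for pointer casts the
// extra null check is omitted and a null result is simply returned.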
1666
1667llvm::Value *ItaniumCXXABI::emitExactDynamicCast(
1668 CodeGenFunction &CGF, Address ThisAddr, QualType SrcRecordTy,
1669 QualType DestTy, QualType DestRecordTy, llvm::BasicBlock *CastSuccess,
1670 llvm::BasicBlock *CastFail) {
1671 ASTContext &Context = getContext();
1672
1673 // Find all the inheritance paths.
1674 const CXXRecordDecl *SrcDecl = SrcRecordTy->getAsCXXRecordDecl();
1675 const CXXRecordDecl *DestDecl = DestRecordTy->getAsCXXRecordDecl();
1676 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
1677 /*DetectVirtual=*/false);
1678 (void)DestDecl->isDerivedFrom(SrcDecl, Paths);
1679
1680 // Find an offset within `DestDecl` where a `SrcDecl` instance and its vptr
1681 // might appear.
1682 std::optional<CharUnits> Offset;
1683 for (const CXXBasePath &Path : Paths) {
1684 // dynamic_cast only finds public inheritance paths.
1685 if (Path.Access != AS_public)
1686 continue;
1687
1688 CharUnits PathOffset;
1689 for (const CXXBasePathElement &PathElement : Path) {
1690 // Find the offset along this inheritance step.
1691 const CXXRecordDecl *Base =
1692 PathElement.Base->getType()->getAsCXXRecordDecl();
1693 if (PathElement.Base->isVirtual()) {
1694 // For a virtual base class, we know that the derived class is exactly
1695 // DestDecl, so we can use the vbase offset from its layout.
1696 const ASTRecordLayout &L = Context.getASTRecordLayout(DestDecl);
1697 PathOffset = L.getVBaseClassOffset(Base);
1698 } else {
1699 const ASTRecordLayout &L =
1700 Context.getASTRecordLayout(PathElement.Class);
1701 PathOffset += L.getBaseClassOffset(Base);
1702 }
1703 }
1704
1705 if (!Offset)
1706 Offset = PathOffset;
1707 else if (Offset != PathOffset) {
1708 // Base appears in at least two different places. Find the most-derived
1709 // object and see if it's a DestDecl. Note that the most-derived object
1710 // must be at least as aligned as this base class subobject, and must
1711 // have a vptr at offset 0.
1712 ThisAddr = Address(emitDynamicCastToVoid(CGF, ThisAddr, SrcRecordTy),
1713 CGF.VoidPtrTy, ThisAddr.getAlignment());
1714 SrcDecl = DestDecl;
1715 Offset = CharUnits::Zero();
1716 break;
1717 }
1718 }
1719
1720 if (!Offset) {
1721 // If there are no public inheritance paths, the cast always fails.
1722 CGF.EmitBranch(CastFail);
1723 return llvm::PoisonValue::get(CGF.VoidPtrTy);
1724 }
1725
1726 // Compare the vptr against the expected vptr for the destination type at
1727 // this offset. Note that we do not know what type ThisAddr points to in
1728 // the case where the derived class multiply inherits from the base class,
1729 // so we can't use GetVTablePtr; we load the vptr directly instead.
1730 llvm::Instruction *VPtr = CGF.Builder.CreateLoad(
1731 ThisAddr.withElementType(CGF.VoidPtrPtrTy), "vtable");
1732 CGM.DecorateInstructionWithTBAA(
1733 VPtr, CGM.getTBAAVTablePtrAccessInfo(CGF.VoidPtrPtrTy));
1734 llvm::Value *Success = CGF.Builder.CreateICmpEQ(
1735 VPtr, getVTableAddressPoint(BaseSubobject(SrcDecl, *Offset), DestDecl));
1736 llvm::Value *Result = ThisAddr.emitRawPointer(CGF);
1737 if (!Offset->isZero())
1738 Result = CGF.Builder.CreateInBoundsGEP(
1739 CGF.CharTy, Result,
1740 {llvm::ConstantInt::get(CGF.PtrDiffTy, -Offset->getQuantity())});
1741 CGF.Builder.CreateCondBr(Success, CastSuccess, CastFail);
1742 return Result;
1743}
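//
// Illustrative sketch: when the destination class is known to have no further
// derived classes (e.g. it is 'final'), the cast degenerates to a vptr
// comparison instead of a call to __dynamic_cast:
//
//   struct A { virtual ~A(); };
//   struct B final : A { };
//   B *bp = dynamic_cast<B *>(ap);   // ap has type A*
//
// conceptually becomes
//
//   %vtable = load ptr, ptr %ap
//   %ok = icmp eq ptr %vtable, <address point of B's vtable for this base>
//   br i1 %ok, label %cast.success, label %cast.fail
//
// with a negative byte offset applied to the result when the source base
// sits at a nonzero offset inside the destination class.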
1744
1745llvm::Value *ItaniumCXXABI::emitDynamicCastToVoid(CodeGenFunction &CGF,
1746 Address ThisAddr,
1747 QualType SrcRecordTy) {
1748 auto *ClassDecl =
1749 cast<CXXRecordDecl>(SrcRecordTy->castAs<RecordType>()->getDecl());
1750 llvm::Value *OffsetToTop;
1751 if (CGM.getItaniumVTableContext().isRelativeLayout()) {
1752 // Get the vtable pointer.
1753 llvm::Value *VTable =
1754 CGF.GetVTablePtr(ThisAddr, CGF.UnqualPtrTy, ClassDecl);
1755
1756 // Get the offset-to-top from the vtable.
1757 OffsetToTop =
1758 CGF.Builder.CreateConstInBoundsGEP1_32(CGM.Int32Ty, VTable, -2U);
1759 OffsetToTop = CGF.Builder.CreateAlignedLoad(
1760 CGM.Int32Ty, OffsetToTop, CharUnits::fromQuantity(4), "offset.to.top");
1761 } else {
1762 llvm::Type *PtrDiffLTy =
1764
1765 // Get the vtable pointer.
1766 llvm::Value *VTable =
1767 CGF.GetVTablePtr(ThisAddr, CGF.UnqualPtrTy, ClassDecl);
1768
1769 // Get the offset-to-top from the vtable.
1770 OffsetToTop =
1771 CGF.Builder.CreateConstInBoundsGEP1_64(PtrDiffLTy, VTable, -2ULL);
1772 OffsetToTop = CGF.Builder.CreateAlignedLoad(
1773 PtrDiffLTy, OffsetToTop, CGF.getPointerAlign(), "offset.to.top");
1774 }
1775 // Finally, add the offset to the pointer.
1776 return CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, ThisAddr.emitRawPointer(CGF),
1777 OffsetToTop);
1778}
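//
// Illustrative sketch (assuming a 64-bit, non-relative vtable layout):
// dynamic_cast<void *>(p) never calls the runtime; it adds the "offset to
// top" stored two slots before the vtable's address point to the pointer:
//
//   %vtable = load ptr, ptr %p
//   %ott.slot = getelementptr inbounds i64, ptr %vtable, i64 -2
//   %ott = load i64, ptr %ott.slot
//   %result = getelementptr inbounds i8, ptr %p, i64 %ott
//
// With the relative layout the slot is a 32-bit entry at index -2, as in the
// first branch above.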
1779
1780bool ItaniumCXXABI::EmitBadCastCall(CodeGenFunction &CGF) {
1781 llvm::FunctionCallee Fn = getBadCastFn(CGF);
1782 llvm::CallBase *Call = CGF.EmitRuntimeCallOrInvoke(Fn);
1783 Call->setDoesNotReturn();
1784 CGF.Builder.CreateUnreachable();
1785 return true;
1786}
1787
1788llvm::Value *
1789ItaniumCXXABI::GetVirtualBaseClassOffset(CodeGenFunction &CGF,
1790 Address This,
1791 const CXXRecordDecl *ClassDecl,
1792 const CXXRecordDecl *BaseClassDecl) {
1793 llvm::Value *VTablePtr = CGF.GetVTablePtr(This, CGM.Int8PtrTy, ClassDecl);
1794 CharUnits VBaseOffsetOffset =
1795 CGM.getItaniumVTableContext().getVirtualBaseOffsetOffset(ClassDecl,
1796 BaseClassDecl);
1797 llvm::Value *VBaseOffsetPtr =
1798 CGF.Builder.CreateConstGEP1_64(
1799 CGF.Int8Ty, VTablePtr, VBaseOffsetOffset.getQuantity(),
1800 "vbase.offset.ptr");
1801
1802 llvm::Value *VBaseOffset;
1803 if (CGM.getItaniumVTableContext().isRelativeLayout()) {
1804 VBaseOffset = CGF.Builder.CreateAlignedLoad(
1805 CGF.Int32Ty, VBaseOffsetPtr, CharUnits::fromQuantity(4),
1806 "vbase.offset");
1807 } else {
1808 VBaseOffset = CGF.Builder.CreateAlignedLoad(
1809 CGM.PtrDiffTy, VBaseOffsetPtr, CGF.getPointerAlign(), "vbase.offset");
1810 }
1811 return VBaseOffset;
1812}
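//
// Illustrative sketch: converting a Derived* to a pointer to a virtual base
// requires the vbase offset stored in the vtable, since the base's position
// depends on the most-derived object. The <vbase-offset-offset> below stands
// for the statically known (usually negative) byte offset, not a real value:
//
//   struct Base { virtual ~Base(); };
//   struct Derived : virtual Base { };
//
//   %vtable = load ptr, ptr %this
//   %slot = getelementptr i8, ptr %vtable, i64 <vbase-offset-offset>
//   %vbase.offset = load i64, ptr %slot
//   %base = getelementptr i8, ptr %this, i64 %vbase.offset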
1813
1814void ItaniumCXXABI::EmitCXXConstructors(const CXXConstructorDecl *D) {
1815 // Just make sure we're in sync with TargetCXXABI.
1816 assert(CGM.getTarget().getCXXABI().hasConstructorVariants());
1817
1818 // The constructor used for constructing this as a base class;
1819 // ignores virtual bases.
1820 CGM.EmitGlobal(GlobalDecl(D, Ctor_Base));
1821
1822 // The constructor used for constructing this as a complete class;
1823 // constructs the virtual bases, then calls the base constructor.
1824 if (!D->getParent()->isAbstract()) {
1825 // We don't need to emit the complete ctor if the class is abstract.
1826 CGM.EmitGlobal(GlobalDecl(D, Ctor_Complete));
1827 }
1828}
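//
// Example of the resulting symbols (Itanium mangling): for
//
//   struct S { S(); };
//
// this emits _ZN1SC2Ev, the base-object constructor, and _ZN1SC1Ev, the
// complete-object constructor; the complete variant is skipped when S is
// abstract, as checked above.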
1829
1830CGCXXABI::AddedStructorArgCounts
1831ItaniumCXXABI::buildStructorSignature(GlobalDecl GD,
1832 SmallVectorImpl<CanQualType> &ArgTys) {
1833 ASTContext &Context = getContext();
1834
1835 // All parameters are already in place except VTT, which goes after 'this'.
1836 // These are Clang types, so we don't need to worry about sret yet.
1837
1838 // Check if we need to add a VTT parameter (which has type global void **).
1839 if ((isa<CXXConstructorDecl>(GD.getDecl()) ? GD.getCtorType() == Ctor_Base
1840 : GD.getDtorType() == Dtor_Base) &&
1841 cast<CXXMethodDecl>(GD.getDecl())->getParent()->getNumVBases() != 0) {
1842 LangAS AS = CGM.GetGlobalVarAddressSpace(nullptr);
1843 QualType Q = Context.getAddrSpaceQualType(Context.VoidPtrTy, AS);
1844 ArgTys.insert(ArgTys.begin() + 1,
1845 Context.getPointerType(CanQualType::CreateUnsafe(Q)));
1846 return AddedStructorArgCounts::prefix(1);
1847 }
1848 return AddedStructorArgCounts{};
1849}
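//
// Illustrative example: for
//
//   struct A { virtual ~A(); };
//   struct B : virtual A { B(); };
//
// the base-object constructor _ZN1BC2Ev receives an extra VTT parameter
// directly after 'this', so its effective signature is roughly
//
//   void _ZN1BC2Ev(B *this, const void **vtt);
//
// while the complete-object constructor _ZN1BC1Ev takes no VTT, matching the
// Ctor_Base/Dtor_Base check above.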
1850
1851void ItaniumCXXABI::EmitCXXDestructors(const CXXDestructorDecl *D) {
1852 // The destructor used for destructing this as a base class; ignores
1853 // virtual bases.
1854 CGM.EmitGlobal(GlobalDecl(D, Dtor_Base));
1855
1856 // The destructor used for destructing this as a most-derived class;
1857 // calls the base destructor and then destructs any virtual bases.
1858 CGM.EmitGlobal(GlobalDecl(D, Dtor_Complete));
1859
1860 // The destructor in a virtual table is always a 'deleting'
1861 // destructor, which calls the complete destructor and then uses the
1862 // appropriate operator delete.
1863 if (D->isVirtual())
1864 CGM.EmitGlobal(GlobalDecl(D, Dtor_Deleting));
1865}
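//
// Example of the resulting symbols (Itanium mangling): for
//
//   struct S { virtual ~S(); };
//
// this emits _ZN1SD2Ev (base-object destructor), _ZN1SD1Ev (complete-object
// destructor) and, because the destructor is virtual, _ZN1SD0Ev (deleting
// destructor, which runs the complete destructor and then the appropriate
// operator delete).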
1866
1867void ItaniumCXXABI::addImplicitStructorParams(CodeGenFunction &CGF,
1868 QualType &ResTy,
1869 FunctionArgList &Params) {
1870 const CXXMethodDecl *MD = cast<CXXMethodDecl>(CGF.CurGD.getDecl());
1871 assert(isa<CXXConstructorDecl>(MD) || isa<CXXDestructorDecl>(MD));
1872
1873 // Check if we need a VTT parameter as well.
1874 if (NeedsVTTParameter(CGF.CurGD)) {
1875 ASTContext &Context = getContext();
1876
1877 // FIXME: avoid the fake decl
1878 LangAS AS = CGM.GetGlobalVarAddressSpace(nullptr);
1879 QualType Q = Context.getAddrSpaceQualType(Context.VoidPtrTy, AS);
1880 QualType T = Context.getPointerType(Q);
1881 auto *VTTDecl = ImplicitParamDecl::Create(
1882 Context, /*DC=*/nullptr, MD->getLocation(), &Context.Idents.get("vtt"),
1883 T, ImplicitParamKind::CXXVTT);
1884 Params.insert(Params.begin() + 1, VTTDecl);
1885 getStructorImplicitParamDecl(CGF) = VTTDecl;
1886 }
1887}
1888
1889void ItaniumCXXABI::EmitInstanceFunctionProlog(CodeGenFunction &CGF) {
1890 // Naked functions have no prolog.
1891 if (CGF.CurFuncDecl && CGF.CurFuncDecl->hasAttr<NakedAttr>())
1892 return;
1893
1894 /// Initialize the 'this' slot. In the Itanium C++ ABI, no prologue
1895 /// adjustments are required, because they are all handled by thunks.
1896 setCXXABIThisValue(CGF, loadIncomingCXXThis(CGF));
1897
1898 /// Initialize the 'vtt' slot if needed.
1899 if (getStructorImplicitParamDecl(CGF)) {
1900 getStructorImplicitParamValue(CGF) = CGF.Builder.CreateLoad(
1901 CGF.GetAddrOfLocalVar(getStructorImplicitParamDecl(CGF)), "vtt");
1902 }
1903
1904 /// If this is a function that the ABI specifies returns 'this', initialize
1905 /// the return slot to 'this' at the start of the function.
1906 ///
1907 /// Unlike the setting of return types, this is done within the ABI
1908 /// implementation instead of by clients of CGCXXABI because:
1909 /// 1) getThisValue is currently protected
1910 /// 2) in theory, an ABI could implement 'this' returns some other way;
1911 /// HasThisReturn only specifies a contract, not the implementation
1912 if (HasThisReturn(CGF.CurGD))
1913 CGF.Builder.CreateStore(getThisValue(CGF), CGF.ReturnValue);
1914}
1915
1916CGCXXABI::AddedStructorArgs ItaniumCXXABI::getImplicitConstructorArgs(
1917 CodeGenFunction &CGF, const CXXConstructorDecl *D, CXXCtorType Type,
1918 bool ForVirtualBase, bool Delegating) {
1919 if (!NeedsVTTParameter(GlobalDecl(D, Type)))
1920 return AddedStructorArgs{};
1921
1922 // Insert the implicit 'vtt' argument as the second argument. Make sure to
1923 // correctly reflect its address space, which can differ from generic on
1924 // some targets.
1925 llvm::Value *VTT =
1926 CGF.GetVTTParameter(GlobalDecl(D, Type), ForVirtualBase, Delegating);
1927 LangAS AS = CGM.GetGlobalVarAddressSpace(nullptr);
1928 QualType Q = getContext().getAddrSpaceQualType(getContext().VoidPtrTy, AS);
1929 QualType VTTTy = getContext().getPointerType(Q);
1930 return AddedStructorArgs::prefix({{VTT, VTTTy}});
1931}
1932
1933llvm::Value *ItaniumCXXABI::getCXXDestructorImplicitParam(
1934 CodeGenFunction &CGF, const CXXDestructorDecl *DD, CXXDtorType Type,
1935 bool ForVirtualBase, bool Delegating) {
1936 GlobalDecl GD(DD, Type);
1937 return CGF.GetVTTParameter(GD, ForVirtualBase, Delegating);
1938}
1939
1940void ItaniumCXXABI::EmitDestructorCall(CodeGenFunction &CGF,
1941 const CXXDestructorDecl *DD,
1942 CXXDtorType Type, bool ForVirtualBase,
1943 bool Delegating, Address This,
1944 QualType ThisTy) {
1945 GlobalDecl GD(DD, Type);
1946 llvm::Value *VTT =
1947 getCXXDestructorImplicitParam(CGF, DD, Type, ForVirtualBase, Delegating);
1948 QualType VTTTy = getContext().getPointerType(getContext().VoidPtrTy);
1949
1951 if (getContext().getLangOpts().AppleKext &&
1952 Type != Dtor_Base && DD->isVirtual())
1954 else
1955 Callee = CGCallee::forDirect(CGM.getAddrOfCXXStructor(GD), GD);
1956
1957 CGF.EmitCXXDestructorCall(GD, Callee, CGF.getAsNaturalPointerTo(This, ThisTy),
1958 ThisTy, VTT, VTTTy, nullptr);
1959}
1960
1961// Check if any non-inline method has the specified attribute.
1962template <typename T>
1963static bool CXXRecordNonInlineHasAttr(const CXXRecordDecl *RD) {
1964 for (const auto *D : RD->noload_decls()) {
1965 if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
1966 if (FD->isInlined() || FD->doesThisDeclarationHaveABody() ||
1967 FD->isPureVirtual())
1968 continue;
1969 if (D->hasAttr<T>())
1970 return true;
1971 }
1972 }
1973
1974 return false;
1975}
1976
1977static void setVTableSelectiveDLLImportExport(CodeGenModule &CGM,
1978 llvm::GlobalVariable *VTable,
1979 const CXXRecordDecl *RD) {
1980 if (VTable->getDLLStorageClass() !=
1981 llvm::GlobalVariable::DefaultStorageClass ||
1982 RD->hasAttr<DLLImportAttr>() || RD->hasAttr<DLLExportAttr>())
1983 return;
1984
1985 if (CGM.getVTables().isVTableExternal(RD)) {
1986 if (CXXRecordNonInlineHasAttr<DLLImportAttr>(RD))
1987 VTable->setDLLStorageClass(llvm::GlobalValue::DLLImportStorageClass);
1988 } else if (CXXRecordNonInlineHasAttr<DLLExportAttr>(RD))
1989 VTable->setDLLStorageClass(llvm::GlobalValue::DLLExportStorageClass);
1990}
1991
1992void ItaniumCXXABI::emitVTableDefinitions(CodeGenVTables &CGVT,
1993 const CXXRecordDecl *RD) {
1994 llvm::GlobalVariable *VTable = getAddrOfVTable(RD, CharUnits());
1995 if (VTable->hasInitializer())
1996 return;
1997
1998 ItaniumVTableContext &VTContext = CGM.getItaniumVTableContext();
1999 const VTableLayout &VTLayout = VTContext.getVTableLayout(RD);
2000 llvm::GlobalVariable::LinkageTypes Linkage = CGM.getVTableLinkage(RD);
2001 llvm::Constant *RTTI =
2002 CGM.GetAddrOfRTTIDescriptor(CGM.getContext().getTagDeclType(RD));
2003
2004 // Create and set the initializer.
2005 ConstantInitBuilder builder(CGM);
2006 auto components = builder.beginStruct();
2007 CGVT.createVTableInitializer(components, VTLayout, RTTI,
2008 llvm::GlobalValue::isLocalLinkage(Linkage));
2009 components.finishAndSetAsInitializer(VTable);
2010
2011 // Set the correct linkage.
2012 VTable->setLinkage(Linkage);
2013
2014 if (CGM.supportsCOMDAT() && VTable->isWeakForLinker())
2015 VTable->setComdat(CGM.getModule().getOrInsertComdat(VTable->getName()));
2016
2017 if (CGM.getTarget().hasPS4DLLImportExport())
2018 setVTableSelectiveDLLImportExport(CGM, VTable, RD);
2019
2020 // Set the right visibility.
2021 CGM.setGVProperties(VTable, RD);
2022
2023 // If this is the magic class __cxxabiv1::__fundamental_type_info,
2024 // we will emit the typeinfo for the fundamental types. This is the
2025 // same behaviour as GCC.
2026 const DeclContext *DC = RD->getDeclContext();
2027 if (RD->getIdentifier() &&
2028 RD->getIdentifier()->isStr("__fundamental_type_info") &&
2029 isa<NamespaceDecl>(DC) && cast<NamespaceDecl>(DC)->getIdentifier() &&
2030 cast<NamespaceDecl>(DC)->getIdentifier()->isStr("__cxxabiv1") &&
2031 DC->getParent()->isTranslationUnit())
2032 EmitFundamentalRTTIDescriptors(RD);
2033
2034 // Always emit type metadata on non-available_externally definitions, and on
2035 // available_externally definitions if we are performing whole program
2036 // devirtualization. For WPD we need the type metadata on all vtable
2037 // definitions to ensure we associate derived classes with base classes
2038 // defined in headers but with a strong definition only in a shared library.
2039 if (!VTable->isDeclarationForLinker() ||
2040 CGM.getCodeGenOpts().WholeProgramVTables) {
2041 CGM.EmitVTableTypeMetadata(RD, VTable, VTLayout);
2042 // For available_externally definitions, add the vtable to
2043 // @llvm.compiler.used so that it isn't deleted before whole program
2044 // analysis.
2045 if (VTable->isDeclarationForLinker()) {
2046 assert(CGM.getCodeGenOpts().WholeProgramVTables);
2047 CGM.addCompilerUsedGlobal(VTable);
2048 }
2049 }
2050
2051 if (VTContext.isRelativeLayout()) {
2052 CGVT.RemoveHwasanMetadata(VTable);
2053 if (!VTable->isDSOLocal())
2054 CGVT.GenerateRelativeVTableAlias(VTable, VTable->getName());
2055 }
2056}
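//
// Illustrative result (a sketch for a typical 64-bit target; exact linkage
// and metadata depend on the configuration): for
//
//   struct A { virtual void f(); };
//
// the emitted vtable looks roughly like
//
//   @_ZTV1A = unnamed_addr constant { [3 x ptr] }
//       { [3 x ptr] [ptr null, ptr @_ZTI1A, ptr @_ZN1A1fEv] }, !type !0
//   !0 = !{i64 16, !"_ZTS1A"}
//
// where the !type metadata added above is what CFI and whole-program
// devirtualization consume.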
2057
2058bool ItaniumCXXABI::isVirtualOffsetNeededForVTableField(
2059 CodeGenFunction &CGF, CodeGenFunction::VPtr Vptr) {
2060 if (Vptr.NearestVBase == nullptr)
2061 return false;
2062 return NeedsVTTParameter(CGF.CurGD);
2063}
2064
2065llvm::Value *ItaniumCXXABI::getVTableAddressPointInStructor(
2066 CodeGenFunction &CGF, const CXXRecordDecl *VTableClass, BaseSubobject Base,
2067 const CXXRecordDecl *NearestVBase) {
2068
2069 if ((Base.getBase()->getNumVBases() || NearestVBase != nullptr) &&
2070 NeedsVTTParameter(CGF.CurGD)) {
2071 return getVTableAddressPointInStructorWithVTT(CGF, VTableClass, Base,
2072 NearestVBase);
2073 }
2074 return getVTableAddressPoint(Base, VTableClass);
2075}
2076
2077llvm::Constant *
2078ItaniumCXXABI::getVTableAddressPoint(BaseSubobject Base,
2079 const CXXRecordDecl *VTableClass) {
2080 llvm::GlobalValue *VTable = getAddrOfVTable(VTableClass, CharUnits());
2081
2082 // Find the appropriate vtable within the vtable group, and the address point
2083 // within that vtable.
2084 const VTableLayout &Layout =
2085 CGM.getItaniumVTableContext().getVTableLayout(VTableClass);
2086 VTableLayout::AddressPointLocation AddressPoint =
2087 Layout.getAddressPoint(Base);
2088 llvm::Value *Indices[] = {
2089 llvm::ConstantInt::get(CGM.Int32Ty, 0),
2090 llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.VTableIndex),
2091 llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.AddressPointIndex),
2092 };
2093
2094 // Add inrange attribute to indicate that only the VTableIndex can be
2095 // accessed.
2096 unsigned ComponentSize =
2097 CGM.getDataLayout().getTypeAllocSize(CGM.getVTableComponentType());
2098 unsigned VTableSize =
2099 ComponentSize * Layout.getVTableSize(AddressPoint.VTableIndex);
2100 unsigned Offset = ComponentSize * AddressPoint.AddressPointIndex;
2101 llvm::ConstantRange InRange(
2102 llvm::APInt(32, (int)-Offset, true),
2103 llvm::APInt(32, (int)(VTableSize - Offset), true));
2104 return llvm::ConstantExpr::getGetElementPtr(
2105 VTable->getValueType(), VTable, Indices, /*InBounds=*/true, InRange);
2106}
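//
// Illustrative example (indices are placeholders): for a class whose vtable
// group contains several vtables, the address point is a constant GEP into
// the initializer struct,
//
//   getelementptr inbounds ({ [5 x ptr], [4 x ptr] }, ptr @_ZTV1C,
//                           i32 0, i32 <VTableIndex>, i32 <AddressPointIndex>)
//
// annotated with the 'inrange' bound computed above so that later pointer
// arithmetic cannot escape the selected vtable within the group.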
2107
2108llvm::Value *ItaniumCXXABI::getVTableAddressPointInStructorWithVTT(
2109 CodeGenFunction &CGF, const CXXRecordDecl *VTableClass, BaseSubobject Base,
2110 const CXXRecordDecl *NearestVBase) {
2111 assert((Base.getBase()->getNumVBases() || NearestVBase != nullptr) &&
2112 NeedsVTTParameter(CGF.CurGD) && "This class doesn't have VTT");
2113
2114 // Get the secondary vpointer index.
2115 uint64_t VirtualPointerIndex =
2116 CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);
2117