diff --git a/clang/lib/CIR/CodeGen/CIRGenItaniumCXXABI.cpp b/clang/lib/CIR/CodeGen/CIRGenItaniumCXXABI.cpp
index b447e981fd40..ae3941fe627e 100644
--- a/clang/lib/CIR/CodeGen/CIRGenItaniumCXXABI.cpp
+++ b/clang/lib/CIR/CodeGen/CIRGenItaniumCXXABI.cpp
@@ -1562,8 +1562,7 @@ mlir::Attribute CIRGenItaniumRTTIBuilder::BuildTypeInfo(mlir::Location loc,
   if (OldGV && !OldGV.isDeclaration()) {
     assert(!OldGV.hasAvailableExternallyLinkage() &&
            "available_externally typeinfos not yet implemented");
-    return CGM.getBuilder().getGlobalViewAttr(CGM.getBuilder().getUInt8PtrTy(),
-                                              OldGV);
+    return CGM.getBuilder().getGlobalViewAttr(CGM.GlobalsUInt8PtrTy, OldGV);
   }

   // Check if there is already an external RTTI descriptor for this type.
@@ -1718,8 +1717,7 @@ void CIRGenItaniumRTTIBuilder::BuildVTablePointer(mlir::Location loc,
   if (CGM.getItaniumVTableContext().isRelativeLayout())
     llvm_unreachable("NYI");
   if (!VTable) {
-    VTable = CGM.getOrInsertGlobal(loc, VTableName,
-                                   CGM.getBuilder().getUInt8PtrTy());
+    VTable = CGM.getOrInsertGlobal(loc, VTableName, CGM.GlobalsUInt8PtrTy);
   }

   if (cir::MissingFeatures::setDSOLocal())
@@ -1733,8 +1731,8 @@ void CIRGenItaniumRTTIBuilder::BuildVTablePointer(mlir::Location loc,
     SmallVector<mlir::Attribute> offsets{
        CGM.getBuilder().getI32IntegerAttr(2)};
     auto indices = mlir::ArrayAttr::get(builder.getContext(), offsets);
-    field = CGM.getBuilder().getGlobalViewAttr(CGM.getBuilder().getUInt8PtrTy(),
-                                               VTable, indices);
+    field = CGM.getBuilder().getGlobalViewAttr(CGM.GlobalsUInt8PtrTy, VTable,
+                                               indices);
   }

   assert(field && "expected attribute");
@@ -1942,7 +1940,7 @@ CIRGenItaniumRTTIBuilder::GetAddrOfExternalRTTIDescriptor(mlir::Location loc,
     // From LLVM codegen => Note for the future: If we would ever like to do
     // deferred emission of RTTI, check if emitting vtables opportunistically
     // need any adjustment.
-    GV = CIRGenModule::createGlobalOp(CGM, loc, Name, builder.getUInt8PtrTy(),
+    GV = CIRGenModule::createGlobalOp(CGM, loc, Name, CGM.GlobalsUInt8PtrTy,
                                       /*isConstant=*/true);
     const CXXRecordDecl *RD = Ty->getAsCXXRecordDecl();
     CGM.setGVProperties(GV, RD);
@@ -1953,7 +1951,7 @@ CIRGenItaniumRTTIBuilder::GetAddrOfExternalRTTIDescriptor(mlir::Location loc,
     llvm_unreachable("NYI");
   }

-  return builder.getGlobalViewAttr(builder.getUInt8PtrTy(), GV);
+  return builder.getGlobalViewAttr(CGM.GlobalsUInt8PtrTy, GV);
 }

 mlir::Attribute CIRGenItaniumRTTIBuilder::BuildTypeInfo(
@@ -1978,8 +1976,7 @@ mlir::Attribute CIRGenItaniumRTTIBuilder::BuildTypeInfo(
     // for global pointers. This is very ARM64-specific.
     llvm_unreachable("NYI");
   } else {
-    TypeNameField =
-        builder.getGlobalViewAttr(builder.getUInt8PtrTy(), TypeName);
+    TypeNameField = builder.getGlobalViewAttr(CGM.GlobalsUInt8PtrTy, TypeName);
   }

   Fields.push_back(TypeNameField);
@@ -2144,7 +2141,7 @@ mlir::Attribute CIRGenItaniumRTTIBuilder::BuildTypeInfo(
   assert(!cir::MissingFeatures::setDSOLocal());
   CIRGenModule::setInitializer(GV, init);

-  return builder.getGlobalViewAttr(builder.getUInt8PtrTy(), GV);
+  return builder.getGlobalViewAttr(CGM.GlobalsUInt8PtrTy, GV);
   ;
 }

@@ -2606,7 +2603,7 @@ static cir::FuncOp getItaniumDynamicCastFn(CIRGenFunction &CGF) {
   //                       std::ptrdiff_t src2dst_offset);

   mlir::Type VoidPtrTy = CGF.VoidPtrTy;
-  mlir::Type RTTIPtrTy = CGF.getBuilder().getUInt8PtrTy();
+  mlir::Type RTTIPtrTy = CGF.GlobalsUInt8PtrTy;
   mlir::Type PtrDiffTy = CGF.convertType(CGF.getContext().getPointerDiffType());

   // TODO(cir): mark the function as nowind readonly.
@@ -2704,9 +2701,10 @@ static mlir::Value emitExactDynamicCast(CIRGenItaniumCXXABI &ABI,
   mlir::Value ExpectedVPtr =
       ABI.getVTableAddressPoint(BaseSubobject(SrcDecl, *Offset), DestDecl);

-  // TODO(cir): handle address space here.
-  assert(!cir::MissingFeatures::addressSpace());
+  mlir::ptr::MemorySpaceAttrInterface srcAS =
+      mlir::dyn_cast<cir::PointerType>(Src.getPointer().getType())
+          .getAddrSpace();
   mlir::Type VPtrTy = ExpectedVPtr.getType();
   mlir::Type VPtrPtrTy = builder.getPointerTo(VPtrTy);
   Address SrcVPtrPtr(builder.createBitcast(Src.getPointer(), VPtrPtrTy),
@@ -2723,9 +2721,7 @@ static mlir::Value emitExactDynamicCast(CIRGenItaniumCXXABI &ABI,
   if (Offset->isZero())
     return builder.createBitcast(Src.getPointer(), DestCIRTy);

-  // TODO(cir): handle address space here.
-  assert(!cir::MissingFeatures::addressSpace());
-  mlir::Type U8PtrTy = builder.getPointerTo(builder.getUInt8Ty());
+  mlir::Type U8PtrTy = builder.getPointerTo(builder.getUInt8Ty(), srcAS);

   mlir::Value StrideToApply =
       builder.getConstInt(Loc, builder.getUInt64Ty(), Offset->getQuantity());
diff --git a/clang/lib/CIR/CodeGen/CIRGenModule.cpp b/clang/lib/CIR/CodeGen/CIRGenModule.cpp
index 8bb25c9157eb..862ca6a7b3a2 100644
--- a/clang/lib/CIR/CodeGen/CIRGenModule.cpp
+++ b/clang/lib/CIR/CodeGen/CIRGenModule.cpp
@@ -119,6 +119,8 @@ CIRGenModule::CIRGenModule(mlir::MLIRContext &mlirContext,
   unsigned charSize = astContext.getTargetInfo().getCharWidth();
   unsigned intSize = astContext.getTargetInfo().getIntWidth();
   unsigned sizeTSize = astContext.getTargetInfo().getMaxPointerWidth();
+  mlir::ptr::MemorySpaceAttrInterface CIRGlobalsAS = cir::toCIRLangAddressSpaceAttr(
+      &getMLIRContext(), getGlobalVarAddressSpace(nullptr));
   auto typeSizeInfo =
       cir::TypeSizeInfoAttr::get(&mlirContext, charSize, intSize, sizeTSize);

@@ -168,7 +170,7 @@ CIRGenModule::CIRGenModule(mlir::MLIRContext &mlirContext,
   UInt8PtrPtrTy = builder.getPointerTo(UInt8PtrTy);
   AllocaInt8PtrTy = UInt8PtrTy;
   AllocaVoidPtrTy = VoidPtrTy;
-  // TODO: GlobalsInt8PtrTy
+  GlobalsUInt8PtrTy = builder.getPointerTo(UInt8Ty, CIRGlobalsAS);
   // TODO: ConstGlobalsPtrTy

   CIRAllocaAddressSpace = getTargetCIRGenInfo().getCIRAllocaAddressSpace();
@@ -823,6 +825,12 @@ cir::GlobalOp CIRGenModule::createGlobalOp(
   {
     mlir::OpBuilder::InsertionGuard guard(builder);

+    mlir::ptr::MemorySpaceAttrInterface cirAS =
+        addrSpace
+            ? addrSpace
+            : toCIRLangAddressSpaceAttr(&cgm.getMLIRContext(),
+                                        cgm.getGlobalVarAddressSpace(nullptr));
+
     // Some global emissions are triggered while emitting a function, e.g.
     //   void s() { const char *s = "yolo"; ... }
     //
@@ -834,7 +842,7 @@ cir::GlobalOp CIRGenModule::createGlobalOp(
     builder.clearInsertionPoint();

     g = cir::GlobalOp::create(builder, loc, name, t, isConstant, linkage,
-                              addrSpace);
+                              cirAS);

     // Manually insert at the correct location
     if (curCGF) {
diff --git a/clang/lib/CIR/CodeGen/CIRGenTypeCache.h b/clang/lib/CIR/CodeGen/CIRGenTypeCache.h
index fb18ba1cd593..47b20f4fedb2 100644
--- a/clang/lib/CIR/CodeGen/CIRGenTypeCache.h
+++ b/clang/lib/CIR/CodeGen/CIRGenTypeCache.h
@@ -61,7 +61,7 @@ struct CIRGenTypeCache {
   /// void* in address space 0
   cir::PointerType VoidPtrTy;
-  cir::PointerType UInt8PtrTy;
+  cir::PointerType UInt8PtrTy; // TODO: this should carry the target address space.

   /// void** in address space 0
   cir::PointerType VoidPtrPtrTy;
@@ -72,10 +72,10 @@ struct CIRGenTypeCache {
   cir::PointerType AllocaInt8PtrTy;

   /// void* in default globals address space
-  // union {
-  //   cir::PointerType GlobalsVoidPtrTy;
-  //   cir::PointerType GlobalsInt8PtrTy;
-  // };
+  union {
+    // cir::PointerType GlobalsVoidPtrTy;
+    cir::PointerType GlobalsUInt8PtrTy;
+  };

   /// void* in the address space for constant globals
   // cir::PointerType ConstGlobalsPtrTy;
diff --git a/clang/test/CIR/CodeGen/dynamic-cast-address-space.cpp b/clang/test/CIR/CodeGen/dynamic-cast-address-space.cpp
index 417575c60d69..c14f600dc5b0 100644
--- a/clang/test/CIR/CodeGen/dynamic-cast-address-space.cpp
+++ b/clang/test/CIR/CodeGen/dynamic-cast-address-space.cpp
@@ -1,4 +1,3 @@
-// RUN: %clang_cc1 -triple amdgcn-amd-amdhsa -std=c++20 -fclangir -emit-cir -mmlir --mlir-print-ir-before=cir-lowering-prepare %s -o %t.cir 2>&1 | FileCheck %s --check-prefix=CIR-BEFORE
 // RUN: %clang_cc1 -triple amdgcn-amd-amdhsa -std=c++20 -fclangir -emit-cir %s -o %t.cir
 // RUN: FileCheck %s --input-file=%t.cir --check-prefix=CIR
 // RUN: %clang_cc1 -triple amdgcn-amd-amdhsa -std=c++20 -fclangir -emit-llvm -fno-clangir-call-conv-lowering %s -o %t.ll
@@ -12,68 +11,55 @@ struct Base {

 struct Derived : Base {};

-// Test dynamic_cast to void* with address space attribute.
-// The result pointer should preserve the address space of the source pointer.
+// Check that RTTI globals are emitted in the correct address space (addrspace(1) for AMDGCN).
+// CIR-DAG: cir.global {{.*}} lang_address_space(offload_global) @_ZTI4Base : !cir.ptr
+// CIR-DAG: cir.global {{.*}} lang_address_space(offload_global) @_ZTVN10__cxxabiv120__si_class_type_infoE : !cir.ptr>
+// CIR-DAG: cir.global {{.*}} lang_address_space(offload_global) @_ZTS7Derived = {{.*}} : !cir.array
+// CIR-DAG: cir.global {{.*}} lang_address_space(offload_global) @_ZTI7Derived = #cir.typeinfo<{{{.*}}}> : !rec_{{.*}}

-// CIR-BEFORE: cir.func {{.*}} @_Z30ptr_cast_to_complete_addrspaceP4Base
-// CIR-BEFORE: %{{.+}} = cir.dyn_cast ptr %{{.+}} : !cir.ptr -> !cir.ptr
-// CIR-BEFORE: }
+// Check the __dynamic_cast function signature uses globals address space for RTTI pointers.
+// CIR: cir.func private @__dynamic_cast(!cir.ptr, !cir.ptr, !cir.ptr, !s64i) -> !cir.ptr

-// CIR: cir.func {{.*}} @_Z30ptr_cast_to_complete_addrspaceP4Base
-// CIR: %[[#SRC:]] = cir.load{{.*}} %{{.+}} : !cir.ptr>, !cir.ptr
-// CIR: %[[#SRC_IS_NOT_NULL:]] = cir.cast ptr_to_bool %[[#SRC]] : !cir.ptr -> !cir.bool
-// CIR: %{{.+}} = cir.ternary(%[[#SRC_IS_NOT_NULL]], true {
-// CIR: %[[#SRC_BYTES_PTR:]] = cir.cast bitcast %{{.+}} : !cir.ptr -> !cir.ptr
-// CIR: %[[#DST_BYTES_PTR:]] = cir.ptr_stride %[[#SRC_BYTES_PTR]], %{{.+}} : (!cir.ptr, !s64i) -> !cir.ptr
-// CIR: %[[#CASTED_PTR:]] = cir.cast bitcast %[[#DST_BYTES_PTR]] : !cir.ptr -> !cir.ptr
-// CIR: cir.yield %[[#CASTED_PTR]] : !cir.ptr
-// CIR: }, false {
-// CIR: %[[#NULL_PTR:]] = cir.const #cir.ptr : !cir.ptr
-// CIR: cir.yield %[[#NULL_PTR]] : !cir.ptr
-// CIR: }) : (!cir.bool) -> !cir.ptr
-// CIR: }
+// LLVM-DAG: @_ZTI4Base = external addrspace(1) constant ptr addrspace(1)
+// LLVM-DAG: @_ZTVN10__cxxabiv120__si_class_type_infoE = external addrspace(1) global
+// LLVM-DAG: @_ZTS7Derived = {{.*}}addrspace(1) constant [{{.*}} x i8]
+// LLVM-DAG: @_ZTI7Derived = {{.*}}addrspace(1) constant { ptr addrspace(1), ptr addrspace(1), ptr addrspace(1) }

-// LLVM: define dso_local ptr @_Z30ptr_cast_to_complete_addrspaceP4Base
-// LLVM-SAME: (ptr %{{.+}})
-// LLVM-DAG: alloca ptr, {{.*}}addrspace(5)
-// LLVM-DAG: %[[#RETVAL_ALLOCA:]] = alloca ptr, {{.*}}addrspace(5)
-// LLVM-DAG: %[[#RETVAL_ASCAST:]] = addrspacecast ptr addrspace(5) %[[#RETVAL_ALLOCA]] to ptr
-// LLVM-DAG: %[[#PTR_ASCAST:]] = addrspacecast ptr addrspace(5) %{{.+}} to ptr
-// LLVM: store ptr %{{.+}}, ptr %[[#PTR_ASCAST]], align 8
-// LLVM: %[[#SRC:]] = load ptr, ptr %[[#PTR_ASCAST]], align 8
-// LLVM: %[[#SRC_IS_NOT_NULL:]] = icmp ne ptr %[[#SRC]], null
-// LLVM: br i1 %[[#SRC_IS_NOT_NULL]], label %[[#TRUE_BLOCK:]], label %[[#FALSE_BLOCK:]]
-// LLVM: [[#TRUE_BLOCK]]:
-// LLVM: %[[#VTABLE:]] = load ptr, ptr %[[#SRC]], align 8
-// LLVM: %[[#OFFSET_PTR:]] = getelementptr i64, ptr %[[#VTABLE]], i64 -2
-// LLVM: %[[#OFFSET:]] = load i64, ptr %[[#OFFSET_PTR]], align 8
-// LLVM: %[[#RESULT:]] = getelementptr i8, ptr %[[#SRC]], i64 %[[#OFFSET]]
-// LLVM: br label %[[#MERGE:]]
-// LLVM: [[#FALSE_BLOCK]]:
-// LLVM: br label %[[#MERGE]]
-// LLVM: [[#MERGE]]:
-// LLVM: %[[#PHI:]] = phi ptr [ null, %[[#FALSE_BLOCK]] ], [ %[[#RESULT]], %[[#TRUE_BLOCK]] ]
-// LLVM: store ptr %[[#PHI]], ptr %[[#RETVAL_ASCAST]], align 8
-// LLVM: %[[#RET:]] = load ptr, ptr %[[#RETVAL_ASCAST]], align 8
-// LLVM: ret ptr %[[#RET]]
-// LLVM: }
+// OGCG-DAG: @_ZTI4Base = external addrspace(1) constant ptr addrspace(1)
+// OGCG-DAG: @_ZTVN10__cxxabiv120__si_class_type_infoE = external addrspace(1) global [0 x ptr addrspace(1)]
+// OGCG-DAG: @_ZTS7Derived = {{.*}} addrspace(1) constant [{{.*}} x i8]
+// OGCG-DAG: @_ZTI7Derived = {{.*}} addrspace(1) constant { ptr addrspace(1), ptr addrspace(1), ptr addrspace(1) }

-// OGCG: define dso_local noundef ptr @_Z30ptr_cast_to_complete_addrspaceP4Base
-// OGCG-SAME: (ptr noundef %{{.+}})
-// OGCG: %[[RETVAL_ASCAST:[a-z0-9.]+]] = addrspacecast ptr addrspace(5) %{{.+}} to ptr
-// OGCG: %[[PTR_ASCAST:[a-z0-9.]+]] = addrspacecast ptr addrspace(5) %{{.+}} to ptr
-// OGCG: store ptr %{{.+}}, ptr %[[PTR_ASCAST]], align 8
-// OGCG: %[[SRC:[0-9]+]] = load ptr, ptr %[[PTR_ASCAST]], align 8
-// OGCG: icmp eq ptr %[[SRC]], null
-// OGCG: dynamic_cast.notnull:
-// OGCG: %[[VTABLE:[a-z0-9]+]] = load ptr, ptr %[[SRC]], align 8
-// OGCG: getelementptr inbounds i64, ptr %[[VTABLE]], i64 -2
-// OGCG: %[[OFFSET:[a-z0-9.]+]] = load i64, ptr %{{.+}}, align 8
-// OGCG: %[[RESULT:[0-9]+]] = getelementptr inbounds i8, ptr %[[SRC]], i64 %[[OFFSET]]
-// OGCG: dynamic_cast.end:
-// OGCG: %[[PHI:[0-9]+]] = phi ptr [ %[[RESULT]], %dynamic_cast.notnull ], [ null, %dynamic_cast.null ]
-// OGCG: ret ptr %[[PHI]]
-// OGCG: }
-void *ptr_cast_to_complete_addrspace(Base *ptr) {
-  return dynamic_cast<void *>(ptr);
-}
\ No newline at end of file
+// Test dynamic_cast lowering to the __dynamic_cast runtime call.
+// The RTTI pointers passed to __dynamic_cast should be in the globals address space.
+
+// CIR-LABEL: cir.func {{.*}} @_Z8ptr_castP4Base
+// CIR: cir.call @__dynamic_cast({{.*}}, {{.*}}, {{.*}}, {{.*}}) : (!cir.ptr, !cir.ptr, !cir.ptr, !s64i) -> !cir.ptr
+
+// LLVM-LABEL: define {{.*}} @_Z8ptr_castP4Base
+// LLVM: call ptr @__dynamic_cast(ptr {{.*}}, ptr addrspace(1) @_ZTI4Base, ptr addrspace(1) @_ZTI7Derived, i64 0)
+
+// OGCG-LABEL: define {{.*}} @_Z8ptr_castP4Base
+// OGCG: call ptr @__dynamic_cast(ptr {{.*}}, ptr addrspace(1) @_ZTI4Base, ptr addrspace(1) @_ZTI7Derived, i64 0)
+Derived *ptr_cast(Base *b) {
+  return dynamic_cast<Derived *>(b);
+}
+
+// Test reference dynamic_cast, which calls __cxa_bad_cast on failure.
+// The RTTI pointers passed to __dynamic_cast should be in the globals address space.
+
+// CIR-LABEL: cir.func {{.*}} @_Z8ref_castR4Base
+// CIR: cir.call @__dynamic_cast({{.*}}, {{.*}}, {{.*}}, {{.*}}) : (!cir.ptr, !cir.ptr, !cir.ptr, !s64i) -> !cir.ptr
+// CIR: cir.call @__cxa_bad_cast()
+// CIR: cir.unreachable
+
+// LLVM-LABEL: define {{.*}} @_Z8ref_castR4Base
+// LLVM: call ptr @__dynamic_cast(ptr {{.*}}, ptr addrspace(1) @_ZTI4Base, ptr addrspace(1) @_ZTI7Derived, i64 0)
+// LLVM: call void @__cxa_bad_cast()
+
+// OGCG-LABEL: define {{.*}} @_Z8ref_castR4Base
+// OGCG: call ptr @__dynamic_cast(ptr {{.*}}, ptr addrspace(1) @_ZTI4Base, ptr addrspace(1) @_ZTI7Derived, i64 0)
+// OGCG: call void @__cxa_bad_cast()
Derived &ref_cast(Base &b) {
  return dynamic_cast<Derived &>(b);
}
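
Reviewer note (not part of the patch): below is a minimal, standalone sketch of the address-space bookkeeping this change introduces. On amdgcn-amd-amdhsa, global variables lower to LLVM addrspace(1) and stack allocas to addrspace(5), which is why the FileCheck lines above expect "ptr addrspace(1)" for every RTTI object handed to __dynamic_cast while the object pointer itself stays a generic "ptr". The enum and helpers below (LangAS, targetAddrSpace, u8PtrIn) are illustrative stand-ins rather than ClangIR APIs; only the numeric address spaces and the printed pointer spellings come from the test expectations.

// addrspace_sketch.cpp: illustrative only; builds with any C++11 compiler.
#include <cstdio>
#include <string>

// Stand-in for the language-level address spaces the patch cares about.
enum class LangAS { Default, OffloadGlobal, OffloadPrivate };

// Stand-in for what getGlobalVarAddressSpace / toCIRLangAddressSpaceAttr
// ultimately resolve to on the AMDGPU target.
static unsigned targetAddrSpace(LangAS as) {
  switch (as) {
  case LangAS::OffloadGlobal:
    return 1; // AMDGPU global memory (globals, vtables, RTTI)
  case LangAS::OffloadPrivate:
    return 5; // AMDGPU private memory (stack allocas)
  case LangAS::Default:
    return 0; // generic / flat
  }
  return 0;
}

// Spell a "u8 pointer in address space N" the way the LLVM/OGCG checks do.
static std::string u8PtrIn(LangAS as) {
  unsigned n = targetAddrSpace(as);
  if (n == 0)
    return "ptr";
  return "ptr addrspace(" + std::to_string(n) + ")";
}

int main() {
  // The globals u8 pointer type (e.g. the type used for @_ZTI4Base) lowers
  // to ptr addrspace(1) on this target.
  std::printf("globals / RTTI pointer: %s\n", u8PtrIn(LangAS::OffloadGlobal).c_str());
  // Allocas land in addrspace(5), as the old test's addrspacecast checks showed.
  std::printf("alloca pointer:         %s\n", u8PtrIn(LangAS::OffloadPrivate).c_str());
  // Ordinary object pointers keep the generic address space.
  std::printf("object pointer:         %s\n", u8PtrIn(LangAS::Default).c_str());
  return 0;
}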