@@ -5034,7 +5034,8 @@ RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
                                  ReturnValueSlot ReturnValue,
                                  const CallArgList &CallArgs,
                                  llvm::CallBase **callOrInvoke, bool IsMustTail,
-                                 SourceLocation Loc) {
+                                 SourceLocation Loc,
+                                 bool IsVirtualFunctionPointerThunk) {
   // FIXME: We no longer need the types from CallArgs; lift up and simplify.
 
   assert(Callee.isOrdinary() || Callee.isVirtual());
@@ -5098,7 +5099,11 @@ RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
   RawAddress SRetAlloca = RawAddress::invalid();
   llvm::Value *UnusedReturnSizePtr = nullptr;
   if (RetAI.isIndirect() || RetAI.isInAlloca() || RetAI.isCoerceAndExpand()) {
-    if (!ReturnValue.isNull()) {
+    if (IsVirtualFunctionPointerThunk && RetAI.isIndirect()) {
+      SRetPtr = makeNaturalAddressForPointer(CurFn->arg_begin() +
+                                                 IRFunctionArgs.getSRetArgNo(),
+                                             RetTy, CharUnits::fromQuantity(1));
+    } else if (!ReturnValue.isNull()) {
       SRetPtr = ReturnValue.getAddress();
     } else {
       SRetPtr = CreateMemTemp(RetTy, "tmp", &SRetAlloca);
@@ -5877,119 +5882,131 @@ RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
   CallArgs.freeArgumentMemory(*this);
 
   // Extract the return value.
-  RValue Ret = [&] {
-    switch (RetAI.getKind()) {
-    case ABIArgInfo::CoerceAndExpand: {
-      auto coercionType = RetAI.getCoerceAndExpandType();
-
-      Address addr = SRetPtr.withElementType(coercionType);
-
-      assert(CI->getType() == RetAI.getUnpaddedCoerceAndExpandType());
-      bool requiresExtract = isa<llvm::StructType>(CI->getType());
+  RValue Ret;
 
-      unsigned unpaddedIndex = 0;
-      for (unsigned i = 0, e = coercionType->getNumElements(); i != e; ++i) {
-        llvm::Type *eltType = coercionType->getElementType(i);
-        if (ABIArgInfo::isPaddingForCoerceAndExpand(eltType)) continue;
-        Address eltAddr = Builder.CreateStructGEP(addr, i);
-        llvm::Value *elt = CI;
-        if (requiresExtract)
-          elt = Builder.CreateExtractValue(elt, unpaddedIndex++);
-        else
-          assert(unpaddedIndex == 0);
-        Builder.CreateStore(elt, eltAddr);
+  // If the current function is a virtual function pointer thunk, avoid copying
+  // the return value of the musttail call to a temporary.
+  if (IsVirtualFunctionPointerThunk) {
+    Ret = RValue::get(CI);
+  } else {
+    Ret = [&] {
+      switch (RetAI.getKind()) {
+      case ABIArgInfo::CoerceAndExpand: {
+        auto coercionType = RetAI.getCoerceAndExpandType();
+
+        Address addr = SRetPtr.withElementType(coercionType);
+
+        assert(CI->getType() == RetAI.getUnpaddedCoerceAndExpandType());
+        bool requiresExtract = isa<llvm::StructType>(CI->getType());
+
+        unsigned unpaddedIndex = 0;
+        for (unsigned i = 0, e = coercionType->getNumElements(); i != e; ++i) {
+          llvm::Type *eltType = coercionType->getElementType(i);
+          if (ABIArgInfo::isPaddingForCoerceAndExpand(eltType))
+            continue;
+          Address eltAddr = Builder.CreateStructGEP(addr, i);
+          llvm::Value *elt = CI;
+          if (requiresExtract)
+            elt = Builder.CreateExtractValue(elt, unpaddedIndex++);
+          else
+            assert(unpaddedIndex == 0);
+          Builder.CreateStore(elt, eltAddr);
+        }
+        [[fallthrough]];
       }
-      [[fallthrough]];
-    }
-
-    case ABIArgInfo::InAlloca:
-    case ABIArgInfo::Indirect: {
-      RValue ret = convertTempToRValue(SRetPtr, RetTy, SourceLocation());
-      if (UnusedReturnSizePtr)
-        PopCleanupBlock();
-      return ret;
-    }
 
-    case ABIArgInfo::Ignore:
-      // If we are ignoring an argument that had a result, make sure to
-      // construct the appropriate return value for our caller.
-      return GetUndefRValue(RetTy);
+      case ABIArgInfo::InAlloca:
+      case ABIArgInfo::Indirect: {
+        RValue ret = convertTempToRValue(SRetPtr, RetTy, SourceLocation());
+        if (UnusedReturnSizePtr)
+          PopCleanupBlock();
+        return ret;
+      }
 
-    case ABIArgInfo::Extend:
-    case ABIArgInfo::Direct: {
-      llvm::Type *RetIRTy = ConvertType(RetTy);
-      if (RetAI.getCoerceToType() == RetIRTy && RetAI.getDirectOffset() == 0) {
-        switch (getEvaluationKind(RetTy)) {
-        case TEK_Complex: {
-          llvm::Value *Real = Builder.CreateExtractValue(CI, 0);
-          llvm::Value *Imag = Builder.CreateExtractValue(CI, 1);
-          return RValue::getComplex(std::make_pair(Real, Imag));
-        }
-        case TEK_Aggregate: {
-          Address DestPtr = ReturnValue.getAddress();
-          bool DestIsVolatile = ReturnValue.isVolatile();
+      case ABIArgInfo::Ignore:
+        // If we are ignoring an argument that had a result, make sure to
+        // construct the appropriate return value for our caller.
+        return GetUndefRValue(RetTy);
+
+      case ABIArgInfo::Extend:
+      case ABIArgInfo::Direct: {
+        llvm::Type *RetIRTy = ConvertType(RetTy);
+        if (RetAI.getCoerceToType() == RetIRTy &&
+            RetAI.getDirectOffset() == 0) {
+          switch (getEvaluationKind(RetTy)) {
+          case TEK_Complex: {
+            llvm::Value *Real = Builder.CreateExtractValue(CI, 0);
+            llvm::Value *Imag = Builder.CreateExtractValue(CI, 1);
+            return RValue::getComplex(std::make_pair(Real, Imag));
+          }
+          case TEK_Aggregate: {
+            Address DestPtr = ReturnValue.getAddress();
+            bool DestIsVolatile = ReturnValue.isVolatile();
 
-          if (!DestPtr.isValid()) {
-            DestPtr = CreateMemTemp(RetTy, "agg.tmp");
-            DestIsVolatile = false;
+            if (!DestPtr.isValid()) {
+              DestPtr = CreateMemTemp(RetTy, "agg.tmp");
+              DestIsVolatile = false;
+            }
+            EmitAggregateStore(CI, DestPtr, DestIsVolatile);
+            return RValue::getAggregate(DestPtr);
+          }
+          case TEK_Scalar: {
+            // If the argument doesn't match, perform a bitcast to coerce it.
+            // This can happen due to trivial type mismatches.
+            llvm::Value *V = CI;
+            if (V->getType() != RetIRTy)
+              V = Builder.CreateBitCast(V, RetIRTy);
+            return RValue::get(V);
           }
-          EmitAggregateStore(CI, DestPtr, DestIsVolatile);
-          return RValue::getAggregate(DestPtr);
+          }
+          llvm_unreachable("bad evaluation kind");
         }
-        case TEK_Scalar: {
-          // If the argument doesn't match, perform a bitcast to coerce it. This
-          // can happen due to trivial type mismatches.
+
+        // If coercing a fixed vector from a scalable vector for ABI
+        // compatibility, and the types match, use the llvm.vector.extract
+        // intrinsic to perform the conversion.
+        if (auto *FixedDstTy = dyn_cast<llvm::FixedVectorType>(RetIRTy)) {
          llvm::Value *V = CI;
-          if (V->getType() != RetIRTy)
-            V = Builder.CreateBitCast(V, RetIRTy);
-          return RValue::get(V);
-        }
+          if (auto *ScalableSrcTy =
+                  dyn_cast<llvm::ScalableVectorType>(V->getType())) {
+            if (FixedDstTy->getElementType() ==
+                ScalableSrcTy->getElementType()) {
+              llvm::Value *Zero = llvm::Constant::getNullValue(CGM.Int64Ty);
+              V = Builder.CreateExtractVector(FixedDstTy, V, Zero,
+                                              "cast.fixed");
+              return RValue::get(V);
+            }
+          }
        }
-        llvm_unreachable("bad evaluation kind");
-      }
 
-      // If coercing a fixed vector from a scalable vector for ABI
-      // compatibility, and the types match, use the llvm.vector.extract
-      // intrinsic to perform the conversion.
-      if (auto *FixedDstTy = dyn_cast<llvm::FixedVectorType>(RetIRTy)) {
-        llvm::Value *V = CI;
-        if (auto *ScalableSrcTy =
-                dyn_cast<llvm::ScalableVectorType>(V->getType())) {
-          if (FixedDstTy->getElementType() == ScalableSrcTy->getElementType()) {
-            llvm::Value *Zero = llvm::Constant::getNullValue(CGM.Int64Ty);
-            V = Builder.CreateExtractVector(FixedDstTy, V, Zero, "cast.fixed");
-            return RValue::get(V);
-          }
+        Address DestPtr = ReturnValue.getValue();
+        bool DestIsVolatile = ReturnValue.isVolatile();
+
+        if (!DestPtr.isValid()) {
+          DestPtr = CreateMemTemp(RetTy, "coerce");
+          DestIsVolatile = false;
         }
-      }
 
-      Address DestPtr = ReturnValue.getValue();
-      bool DestIsVolatile = ReturnValue.isVolatile();
+        // An empty record can overlap other data (if declared with
+        // no_unique_address); omit the store for such types - as there is no
+        // actual data to store.
+        if (!isEmptyRecord(getContext(), RetTy, true)) {
+          // If the value is offset in memory, apply the offset now.
+          Address StorePtr = emitAddressAtOffset(*this, DestPtr, RetAI);
+          CreateCoercedStore(CI, StorePtr, DestIsVolatile, *this);
+        }
 
-      if (!DestPtr.isValid()) {
-        DestPtr = CreateMemTemp(RetTy, "coerce");
-        DestIsVolatile = false;
+        return convertTempToRValue(DestPtr, RetTy, SourceLocation());
      }
 
-      // An empty record can overlap other data (if declared with
-      // no_unique_address); omit the store for such types - as there is no
-      // actual data to store.
-      if (!isEmptyRecord(getContext(), RetTy, true)) {
-        // If the value is offset in memory, apply the offset now.
-        Address StorePtr = emitAddressAtOffset(*this, DestPtr, RetAI);
-        CreateCoercedStore(CI, StorePtr, DestIsVolatile, *this);
+      case ABIArgInfo::Expand:
+      case ABIArgInfo::IndirectAliased:
+        llvm_unreachable("Invalid ABI kind for return argument");
      }
 
-      return convertTempToRValue(DestPtr, RetTy, SourceLocation());
-    }
-
-    case ABIArgInfo::Expand:
-    case ABIArgInfo::IndirectAliased:
-      llvm_unreachable("Invalid ABI kind for return argument");
-    }
-
-    llvm_unreachable("Unhandled ABIArgInfo::Kind");
-  }();
+      llvm_unreachable("Unhandled ABIArgInfo::Kind");
+    }();
+  }
 
   // Emit the assume_aligned check on the return value.
   if (Ret.isScalar() && TargetDecl) {
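
Note: the new IsVirtualFunctionPointerThunk path matters because such a thunk forwards to the real virtual function with a musttail call, and musttail requires the thunk to reuse its own incoming sret argument rather than a fresh local; copying the result into a temporary afterwards would also be wasted work, which is why the thunk case takes the call's value directly via RValue::get(CI). Below is a minimal C++ sketch of the kind of source that could exercise this path, assuming an ABI (for example, one using pointer authentication) where taking the address of a virtual member function resolves to such a thunk; the type and function names are illustrative, not taken from this patch:

// Illustrative only. Under the assumed ABI, &Base::get yields a thunk that
// musttail-calls through the vtable; for the sret return type below, the
// thunk must forward its own incoming sret pointer unchanged.
struct Big { char data[128]; };              // large enough to be returned via sret

struct Base {
  virtual ~Base() = default;
  virtual Big get() const { return Big{}; }  // indirect (sret) return
};

Big callViaMemberPointer(const Base &b) {
  Big (Base::*pmf)() const = &Base::get;     // may resolve to a thunk
  return (b.*pmf)();                         // the thunk musttail-calls Base::get
}

With this change, the thunk's EmitCall binds SRetPtr to the function's own sret parameter (conservatively at alignment 1, per CharUnits::fromQuantity(1)) instead of allocating a "tmp" slot.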
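The llvm.vector.extract handling in the lambda is moved, not new: it covers call results whose IR type is a scalable vector while the source-level type is a fixed-length vector. A hedged sketch of code that could reach it, assuming an AArch64 SVE target built with -msve-vector-bits=512 (the typedef and function names are hypothetical):

#include <arm_sve.h>

// 512-bit fixed-length view of the scalable svint32_t type.
typedef svint32_t fixed_int32_t __attribute__((arm_sve_vector_bits(512)));

fixed_int32_t source();  // returned in a scalable SVE register for ABI purposes

fixed_int32_t forward() {
  // The call yields <vscale x 4 x i32>; the fixed <16 x i32> result is
  // recovered with llvm.vector.extract (the "cast.fixed" value above).
  return source();
}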