@@ -1602,6 +1602,7 @@ void ClangToLLVMArgMapping::construct(const ASTContext &Context,
       IRArgs.PaddingArgIndex = IRArgNo++;

     switch (AI.getKind()) {
+    case ABIArgInfo::TargetSpecific:
     case ABIArgInfo::Extend:
     case ABIArgInfo::Direct: {
       // FIXME: handle sseregparm someday...
@@ -1712,6 +1713,7 @@ llvm::FunctionType *CodeGenTypes::GetFunctionType(const CGFunctionInfo &FI) {
   case ABIArgInfo::IndirectAliased:
     llvm_unreachable("Invalid ABI kind for return argument");

+  case ABIArgInfo::TargetSpecific:
   case ABIArgInfo::Extend:
   case ABIArgInfo::Direct:
     resultType = retAI.getCoerceToType();
@@ -1784,6 +1786,7 @@ llvm::FunctionType *CodeGenTypes::GetFunctionType(const CGFunctionInfo &FI) {
       ArgTypes[FirstIRArg] = llvm::PointerType::get(
           getLLVMContext(), ArgInfo.getIndirectAddrSpace());
       break;
+    case ABIArgInfo::TargetSpecific:
     case ABIArgInfo::Extend:
     case ABIArgInfo::Direct: {
       // Fast-isel and the optimizer generally like scalar values better than
@@ -2697,6 +2700,7 @@ void CodeGenModule::ConstructAttributeList(StringRef Name,
     else
       RetAttrs.addAttribute(llvm::Attribute::NoExt);
     [[fallthrough]];
+  case ABIArgInfo::TargetSpecific:
   case ABIArgInfo::Direct:
     if (RetAI.getInReg())
       RetAttrs.addAttribute(llvm::Attribute::InReg);
@@ -2838,6 +2842,7 @@ void CodeGenModule::ConstructAttributeList(StringRef Name,
       else
         Attrs.addAttribute(llvm::Attribute::NoExt);
       [[fallthrough]];
+    case ABIArgInfo::TargetSpecific:
     case ABIArgInfo::Direct:
       if (ArgNo == 0 && FI.isChainCall())
         Attrs.addAttribute(llvm::Attribute::Nest);
@@ -3335,17 +3340,6 @@ void CodeGenFunction::EmitFunctionProlog(const CGFunctionInfo &FI,
         }
       }

-      // Struct of fixed-length vectors and struct of array of fixed-length
-      // vector in VLS calling convention are coerced to vector tuple
-      // type(represented as TargetExtType) and scalable vector type
-      // respectively, they're no longer handled as struct.
-      if (ArgI.isDirect() && isa<llvm::StructType>(ConvertType(Ty)) &&
-          (isa<llvm::TargetExtType>(ArgI.getCoerceToType()) ||
-           isa<llvm::ScalableVectorType>(ArgI.getCoerceToType()))) {
-        ArgVals.push_back(ParamValue::forDirect(AI));
-        break;
-      }
-
       llvm::StructType *STy =
           dyn_cast<llvm::StructType>(ArgI.getCoerceToType());
       Address Alloca =
@@ -3486,6 +3480,25 @@ void CodeGenFunction::EmitFunctionProlog(const CGFunctionInfo &FI,
       break;
     }

+    case ABIArgInfo::TargetSpecific: {
+      auto *AI = Fn->getArg(FirstIRArg);
+      AI->setName(Arg->getName() + ".target_coerce");
+      Address Alloca =
+          CreateMemTemp(Ty, getContext().getDeclAlign(Arg), Arg->getName());
+      Address Ptr = emitAddressAtOffset(*this, Alloca, ArgI);
+      CGM.getABIInfo().CreateCoercedStore(AI, Ptr, ArgI, false, *this);
+      if (CodeGenFunction::hasScalarEvaluationKind(Ty)) {
+        llvm::Value *V =
+            EmitLoadOfScalar(Alloca, false, Ty, Arg->getBeginLoc());
+        if (isPromoted) {
+          V = emitArgumentDemotion(*this, Arg, V);
+        }
+        ArgVals.push_back(ParamValue::forDirect(V));
+      } else {
+        ArgVals.push_back(ParamValue::forIndirect(Alloca));
+      }
+      break;
+    }
     case ABIArgInfo::Ignore:
       assert(NumIRArgs == 0);
       // Initialize the local variable appropriately.
@@ -4114,6 +4127,11 @@ void CodeGenFunction::EmitFunctionEpilog(
     }
     break;
   }
+  case ABIArgInfo::TargetSpecific: {
+    Address V = emitAddressAtOffset(*this, ReturnValue, RetAI);
+    RV = CGM.getABIInfo().CreateCoercedLoad(V, RetAI, *this);
+    break;
+  }
   case ABIArgInfo::Expand:
   case ABIArgInfo::IndirectAliased:
     llvm_unreachable("Invalid ABI kind for return argument");
@@ -5691,6 +5709,24 @@ RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
       assert(IRArgPos == FirstIRArg + NumIRArgs);
       break;
     }
+
+    case ABIArgInfo::TargetSpecific: {
+      Address Src = Address::invalid();
+      if (!I->isAggregate()) {
+        Src = CreateMemTemp(I->Ty, "target_coerce");
+        I->copyInto(*this, Src);
+      } else {
+        Src = I->hasLValue() ? I->getKnownLValue().getAddress()
+                             : I->getKnownRValue().getAggregateAddress();
+      }
+
+      // If the value is offset in memory, apply the offset now.
+      Src = emitAddressAtOffset(*this, Src, ArgInfo);
+      llvm::Value *Load =
+          CGM.getABIInfo().CreateCoercedLoad(Src, ArgInfo, *this);
+      IRCallArgs[FirstIRArg] = Load;
+      break;
+    }
     }
   }

@@ -6177,6 +6213,19 @@ RValue CodeGenFunction::EmitCall(const CGFunctionInfo &CallInfo,
       return convertTempToRValue(DestPtr, RetTy, SourceLocation());
     }

+    case ABIArgInfo::TargetSpecific: {
+      Address DestPtr = ReturnValue.getValue();
+      Address StorePtr = emitAddressAtOffset(*this, DestPtr, RetAI);
+      bool DestIsVolatile = ReturnValue.isVolatile();
+      if (!DestPtr.isValid()) {
+        DestPtr = CreateMemTemp(RetTy, "target_coerce");
+        DestIsVolatile = false;
+      }
+      CGM.getABIInfo().CreateCoercedStore(CI, StorePtr, RetAI, DestIsVolatile,
+                                          *this);
+      return convertTempToRValue(DestPtr, RetTy, SourceLocation());
+    }
+
     case ABIArgInfo::Expand:
     case ABIArgInfo::IndirectAliased:
       llvm_unreachable("Invalid ABI kind for return argument");
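Note on the pattern used by the new ABIArgInfo::TargetSpecific paths above: in every hunk the value is routed through a memory temporary, and the target's ABIInfo is asked (via CGM.getABIInfo().CreateCoercedLoad / CreateCoercedStore) to move the bytes between that temporary and the target-chosen IR argument or return type. The snippet below is a minimal standalone sketch of that coerce-through-memory idea only, not clang code; the names Pair, coercedLoad, and coercedStore are illustrative, and using uint64_t as the "register" type is an assumption.

// Standalone sketch (illustrative names, not clang API): coerce a small
// aggregate through a memory temporary into a single "register" value and
// back, mirroring what the coerced load/store calls above do.
#include <cassert>
#include <cstdint>
#include <cstring>

struct Pair { int32_t a, b; };   // source-level aggregate, 8 bytes
static_assert(sizeof(Pair) == sizeof(uint64_t), "coercion assumes equal size");

// Analogue of a coerced load: read the aggregate's bytes out of the memory
// temporary as the ABI-chosen register type.
uint64_t coercedLoad(const Pair &P) {
  uint64_t Reg = 0;
  std::memcpy(&Reg, &P, sizeof(P));
  return Reg;                     // the value now travels as one scalar
}

// Analogue of a coerced store: write an incoming register value back into
// the memory temporary so the aggregate can be used normally afterwards.
Pair coercedStore(uint64_t Reg) {
  Pair P;
  std::memcpy(&P, &Reg, sizeof(P));
  return P;
}

int main() {
  Pair In{1, 2};
  Pair Out = coercedStore(coercedLoad(In));
  assert(Out.a == 1 && Out.b == 2);  // round trip preserves the aggregate
  return 0;
}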