Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[mono] Add SwiftError support for Swift reverse pinvokes #101122

Merged
merged 7 commits into from
May 6, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 26 additions & 10 deletions src/mono/mono/mini/mini-amd64.c
Original file line number Diff line number Diff line change
Expand Up @@ -1990,13 +1990,18 @@ mono_arch_allocate_vars (MonoCompile *cfg)
}
case ArgSwiftError: {
ins->opcode = OP_REGOFFSET;
ins->inst_basereg = cfg->frame_reg;
ins->inst_offset = ainfo->offset + ARGS_OFFSET;
offset = ALIGN_TO (offset, sizeof (target_mgreg_t));
ins->inst_basereg = cfg->frame_reg;
ins->inst_offset = offset;
Copy link
Member

@kotlarmilos kotlarmilos Apr 25, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why doesn't it include the ARGS_OFFSET offset between fp and the first argument in the callee?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ainfo->offset was always 0 here. So it was always allocated at ARGS_OFFSET from the beginning. I think that offset already includes information about the distance.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The Swift types are specified either at the beginning or the end of signature. We don't enforce strict signature rules, but it would be beneficial to make them more general if possible. Can it still function if SwiftError is specified as the last argument, using ainfo->offset instead of offset?

@amanasifkhalid, what are the limitations from the CoreCLR side?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We don't enforce any signature rules, either. The Swift special register types can appear anywhere in the signature's parameter list.

Copy link
Member Author

@jkurdek jkurdek Apr 25, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@kotlarmilos i think for that to work with ainfo->offset without any modifications, the error would have to be the first argument. We could probably update ainfo->offset in get_call_info so that it works for every position. I'm not sure what the benefits of using ainfo->offset are, though, as the current solution works for every position.

Copy link
Member

@kotlarmilos kotlarmilos Apr 26, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ok, sounds good. I forgot that the ainfo->offset is always 0 for swifterror in the get_call_info.

offset += sizeof (target_mgreg_t);

cfg->arch.swift_error_var = ins;
cfg->used_int_regs |= (size_t)(1 << AMD64_R12);

/* In the n2m case, the error register functions as an extra return register
* and is thus not treated as callee-saved.
*/
if (cfg->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE)
cfg->used_int_regs |= (size_t)(1 << AMD64_R12);
}
break;
default:
Expand Down Expand Up @@ -4280,8 +4285,13 @@ emit_move_return_value (MonoCompile *cfg, MonoInst *ins, guint8 *code)
guint32 quad;

if (cfg->arch.swift_error_var) {
amd64_mov_reg_membase (code, AMD64_R11, cfg->arch.swift_error_var->inst_basereg, cfg->arch.swift_error_var->inst_offset, sizeof (target_mgreg_t));
amd64_mov_membase_reg (code, AMD64_R11, 0, AMD64_R12, sizeof (target_mgreg_t));
if (cfg->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE) {
amd64_mov_reg_membase (code, AMD64_R11, cfg->arch.swift_error_var->inst_basereg, cfg->arch.swift_error_var->inst_offset, sizeof (target_mgreg_t));
amd64_mov_membase_reg (code, AMD64_R11, 0, AMD64_R12, sizeof (target_mgreg_t));
}
else if (cfg->method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
amd64_mov_reg_membase (code, AMD64_R12, cfg->arch.swift_error_var->inst_basereg, cfg->arch.swift_error_var->inst_offset, sizeof (target_mgreg_t));
}
}

/* Move return value to the target register */
Expand Down Expand Up @@ -8248,11 +8258,17 @@ MONO_RESTORE_WARNING
amd64_mov_membase_reg (code, ins->inst_basereg, ins->inst_offset, ainfo->reg, 8);
break;
case ArgSwiftError:
if (ainfo->offset) {
amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, ARGS_OFFSET + ainfo->offset, 8);
amd64_mov_membase_reg (code, cfg->arch.swift_error_var->inst_basereg, cfg->arch.swift_error_var->inst_offset, AMD64_R11, sizeof (target_mgreg_t));
} else {
amd64_mov_membase_reg (code, cfg->arch.swift_error_var->inst_basereg, cfg->arch.swift_error_var->inst_offset, ainfo->reg, sizeof (target_mgreg_t));
if (cfg->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE) {
if (ainfo->offset) {
amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, ARGS_OFFSET + ainfo->offset, 8);
amd64_mov_membase_reg (code, cfg->arch.swift_error_var->inst_basereg, cfg->arch.swift_error_var->inst_offset, AMD64_R11, sizeof (target_mgreg_t));
} else {
amd64_mov_membase_reg (code, cfg->arch.swift_error_var->inst_basereg, cfg->arch.swift_error_var->inst_offset, ainfo->reg, sizeof (target_mgreg_t));
}
} else if (cfg->method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
/* Relies on arguments being passed on the stack */
amd64_lea_membase (code, AMD64_R11, cfg->arch.swift_error_var->inst_basereg, cfg->arch.swift_error_var->inst_offset);
amd64_mov_membase_reg (code, ins->inst_basereg, ins->inst_offset, AMD64_R11, 8);
}
break;
default:
Expand Down
32 changes: 24 additions & 8 deletions src/mono/mono/mini/mini-arm64.c
Original file line number Diff line number Diff line change
Expand Up @@ -2887,7 +2887,12 @@ mono_arch_allocate_vars (MonoCompile *cfg)
offset += size;

cfg->arch.swift_error_var = ins;
cfg->used_int_regs |= 1 << ARMREG_R21;

/* In the n2m case, the error register functions as an extra return register
* and is thus not treated as callee-saved.
*/
if (cfg->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE)
cfg->used_int_regs |= 1 << ARMREG_R21;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It might be helpful to explain here that in the NATIVE_TO_MANAGED case, the error register is functioning as an extra return register, and thus isn't treated as callee-save.

break;
}
default:
Expand Down Expand Up @@ -3731,8 +3736,13 @@ emit_move_return_value (MonoCompile *cfg, guint8 * code, MonoInst *ins)
MonoCallInst *call;

if (cfg->arch.swift_error_var) {
code = emit_ldrx (code, ARMREG_IP0, cfg->arch.swift_error_var->inst_basereg, GTMREG_TO_INT (cfg->arch.swift_error_var->inst_offset));
code = emit_strx (code, ARMREG_R21, ARMREG_IP0, 0);
if (cfg->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE) {
code = emit_ldrx (code, ARMREG_IP0, cfg->arch.swift_error_var->inst_basereg, GTMREG_TO_INT (cfg->arch.swift_error_var->inst_offset));
code = emit_strx (code, ARMREG_R21, ARMREG_IP0, 0);
} else if (cfg->method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
/* Load the value of SwiftError into R21 */
code = emit_ldrx (code, ARMREG_R21, cfg->arch.swift_error_var->inst_basereg, GTMREG_TO_INT (cfg->arch.swift_error_var->inst_offset));
}
}

call = (MonoCallInst*)ins;
Expand Down Expand Up @@ -5937,11 +5947,17 @@ emit_move_args (MonoCompile *cfg, guint8 *code)
code = emit_strfpq (code, ainfo->reg, ins->inst_basereg, GTMREG_TO_INT (ins->inst_offset));
break;
case ArgSwiftError:
if (ainfo->offset) {
code = emit_ldrx (code, ARMREG_IP0, cfg->arch.args_reg, ainfo->offset);
code = emit_strx (code, ARMREG_IP0, cfg->arch.swift_error_var->inst_basereg, GTMREG_TO_INT (cfg->arch.swift_error_var->inst_offset));
} else {
code = emit_strx (code, ainfo->reg, cfg->arch.swift_error_var->inst_basereg, GTMREG_TO_INT (cfg->arch.swift_error_var->inst_offset));
if (cfg->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE) {
if (ainfo->offset) {
code = emit_ldrx (code, ARMREG_IP0, cfg->arch.args_reg, ainfo->offset);
code = emit_strx (code, ARMREG_IP0, cfg->arch.swift_error_var->inst_basereg, GTMREG_TO_INT (cfg->arch.swift_error_var->inst_offset));
} else {
code = emit_strx (code, ainfo->reg, cfg->arch.swift_error_var->inst_basereg, GTMREG_TO_INT (cfg->arch.swift_error_var->inst_offset));
}
} else if (cfg->method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
arm_addx_imm (code, ARMREG_IP0, cfg->arch.swift_error_var->inst_basereg, GTMREG_TO_INT (cfg->arch.swift_error_var->inst_offset));
/* Relies on arguments being passed on the stack */
code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, GTMREG_TO_INT (ins->inst_offset));
}
break;
default:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,6 @@ public unsafe static void TestSwiftErrorOnStackNotThrown()
}

[Fact]
[SkipOnMono("needs reverse P/Invoke support")]
public static unsafe void TestUnmanagedCallersOnly()
{
SwiftError error;
Expand All @@ -144,7 +143,6 @@ public static unsafe void TestUnmanagedCallersOnly()
}

[Fact]
[SkipOnMono("needs reverse P/Invoke support")]
public static unsafe void TestUnmanagedCallersOnlyWithReturn()
{
SwiftError error;
Expand Down
3 changes: 3 additions & 0 deletions src/tests/issues.targets
Original file line number Diff line number Diff line change
Expand Up @@ -2142,6 +2142,9 @@
<ExcludeList Include = "$(XUnitTestBinBase)/JIT/Directed/Arrays/nintindexoutofrange/**">
<Issue>https://github.com/dotnet/runtime/issues/71656</Issue>
</ExcludeList>
<ExcludeList Include="$(XunitTestBinBase)/Interop/Swift/SwiftErrorHandling/**">
<Issue>Reverse P/Invokes not supported yet</Issue>
</ExcludeList>
<!-- End interpreter issues -->
</ItemGroup>

Expand Down
Loading