Line | Branch | Exec | Source |
---|---|---|---|
1 | | | // Copyright (c) 2021-2025 ChilliBits. All rights reserved. |
2 | | | |
3 | | | #include "IRGenerator.h" |
4 | | | |
5 | | | #include <driver/Driver.h> |
6 | | | |
7 | | | namespace spice::compiler { |
8 | | | |
9 | | 1 | std::string IRGenerator::getSysCallAsmString(uint8_t numRegs) const { |
10 | | | // For each architecture we have a mapping of operand index -> register. |
11 | 1/2 | 1 | std::stringstream asmString; |
12 | 3/10 | 1 | if (cliOptions.targetArch == "x86_64" || cliOptions.targetArch == "amd64") { |
13 | | | static constexpr const char *regs[] = {"%rax", "%rdi", "%rsi", "%rdx", "%r10", "%r8", "%r9"}; |
14 | 2/2 | 5 | for (uint8_t i = 0; i < numRegs; ++i) |
15 | 5/10 | 4 | asmString << "movq $" << std::to_string(i) << ", " << regs[i] << "\n"; |
16 | 1/2 | 1 | asmString << "syscall\n"; |
17 | | ✗ | } else if (cliOptions.targetArch == "x86" || cliOptions.targetArch == "i386") { |
18 | | | // Note: Using movl for 32-bit registers. |
19 | | | static constexpr const char *regs[] = {"%eax", "%ebx", "%ecx", "%edx", "%esi", "%edi", "%ebp"}; |
20 | | ✗ | for (uint8_t i = 0; i < numRegs; ++i) |
21 | | ✗ | asmString << "movl $" << std::to_string(i) << ", " << regs[i] << "\n"; |
22 | | ✗ | asmString << "int $$0x80\n"; |
23 | | ✗ | } else if (cliOptions.targetArch == "aarch64" || cliOptions.targetArch == "arm64") { |
24 | | | // Mapping: operand 0 -> x8, then operands 1..6 -> x0,x1,...,x5. |
25 | | | static constexpr const char *regs[] = {"x8", "x0", "x1", "x2", "x3", "x4", "x5"}; |
26 | | ✗ | for (uint8_t i = 0; i < numRegs; ++i) |
27 | | ✗ | asmString << "mov " << regs[i] << ", $" << std::to_string(i) << "\n"; |
28 | | ✗ | asmString << "svc 0\n"; |
29 | | | } else { // LCOV_EXCL_LINE |
30 | | − | assert_fail("Unsupported target for inline assembly"); // LCOV_EXCL_LINE |
31 | | | } // LCOV_EXCL_LINE |
32 | 1/2 | 2 | return asmString.str(); |
33 | | 1 | } |
34 | |||
35 | 1 | std::string IRGenerator::getSysCallConstraintString(uint8_t numRegs) const { | |
36 |
1/2✓ Branch 0 (2→3) taken 1 times.
✗ Branch 1 (2→68) not taken.
|
1 | std::stringstream constraints; |
37 | |||
38 | // Generate a comma-separated constraint string: first the operand constraints, | ||
39 | // then the corresponding clobbers, then extra clobbers. | ||
40 |
3/10✓ Branch 0 (3→4) taken 1 times.
✗ Branch 1 (3→66) not taken.
✗ Branch 2 (4→5) not taken.
✓ Branch 3 (4→7) taken 1 times.
✗ Branch 4 (5→6) not taken.
✗ Branch 5 (5→66) not taken.
✗ Branch 6 (6→7) not taken.
✗ Branch 7 (6→8) not taken.
✓ Branch 8 (9→10) taken 1 times.
✗ Branch 9 (9→22) not taken.
|
1 | if (cliOptions.targetArch == "x86_64" || cliOptions.targetArch == "amd64") { |
41 | // Mapping of operand i to its clobber. | ||
42 | static constexpr const char *clobbers[] = {"~{rax}", "~{rdi}", "~{rsi}", "~{rdx}", "~{r10}", "~{r8}", "~{r9}"}; | ||
43 | // Operand constraints: "r" for each operand. | ||
44 |
2/2✓ Branch 0 (15→11) taken 4 times.
✓ Branch 1 (15→16) taken 1 times.
|
5 | for (uint8_t i = 0; i < numRegs; i++) { |
45 |
1/2✓ Branch 0 (11→12) taken 4 times.
✗ Branch 1 (11→66) not taken.
|
4 | constraints << "r"; |
46 |
2/2✓ Branch 0 (12→13) taken 3 times.
✓ Branch 1 (12→14) taken 1 times.
|
4 | if (i != numRegs - 1) |
47 |
1/2✓ Branch 0 (13→14) taken 3 times.
✗ Branch 1 (13→66) not taken.
|
3 | constraints << ","; |
48 | } | ||
49 | // Append corresponding clobbers. | ||
50 |
2/2✓ Branch 0 (20→17) taken 4 times.
✓ Branch 1 (20→21) taken 1 times.
|
5 | for (uint8_t i = 0; i < numRegs; i++) |
51 |
2/4✓ Branch 0 (17→18) taken 4 times.
✗ Branch 1 (17→66) not taken.
✓ Branch 2 (18→19) taken 4 times.
✗ Branch 3 (18→66) not taken.
|
4 | constraints << "," << clobbers[i]; |
52 | // Append extra clobbers. | ||
53 |
1/2✓ Branch 0 (21→61) taken 1 times.
✗ Branch 1 (21→66) not taken.
|
1 | constraints << ",~{dirflag},~{fpsr},~{flags}"; |
54 | | ✗ | } else if (cliOptions.targetArch == "x86" || cliOptions.targetArch == "i386") { |
55 | | | static constexpr const char *clobbers[] = {"~{eax}", "~{ebx}", "~{ecx}", "~{edx}", "~{esi}", "~{edi}", "~{ebp}"}; |
56 | | ✗ | for (uint8_t i = 0; i < numRegs; i++) { |
57 | | ✗ | constraints << "r"; |
58 | | ✗ | if (i != numRegs - 1) |
59 | | ✗ | constraints << ","; |
60 | | | } |
61 | | ✗ | for (uint8_t i = 0; i < numRegs; i++) |
62 | | ✗ | constraints << "," << clobbers[i]; |
63 | | ✗ | constraints << ",~{dirflag},~{fpsr},~{flags}"; |
64 | | ✗ | } else if (cliOptions.targetArch == "aarch64" || cliOptions.targetArch == "arm64") { |
65 | | | static constexpr const char *clobbers[] = {"~{x8}", "~{x0}", "~{x1}", "~{x2}", "~{x3}", "~{x4}", "~{x5}"}; |
66 | | ✗ | for (uint8_t i = 0; i < numRegs; i++) { |
67 | | ✗ | constraints << "r"; |
68 | | ✗ | if (i != numRegs - 1) |
69 | | ✗ | constraints << ","; |
70 | | | } |
71 | | ✗ | for (uint8_t i = 0; i < numRegs; i++) |
72 | | ✗ | constraints << "," << clobbers[i]; |
73 | | ✗ | constraints << ",~{dirflag},~{fpsr},~{flags}"; |
74 | | | } else { // LCOV_EXCL_LINE |
75 | | − | assert_fail("Unsupported target for inline assembly"); // LCOV_EXCL_LINE |
76 | | | } // LCOV_EXCL_LINE |
77 | 1/2 | 2 | return constraints.str(); |
78 | | 1 | } |
79 | | | |
80 | | | } // namespace spice::compiler |
81 | | | |
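
For context, the two functions in the listing only produce the assembly template and its matching constraint list; the surrounding IRGenerator still has to wrap those strings in an `llvm::InlineAsm` value and emit a call to it. The sketch below shows one plausible way to do that with LLVM's public API. It is not taken from IRGenerator.cpp: the helper name `emitSysCall`, the `void` return type, and the use of `i64` operand types (which only fits the 64-bit targets) are illustrative assumptions.

```cpp
#include <llvm/ADT/ArrayRef.h>
#include <llvm/IR/IRBuilder.h>
#include <llvm/IR/InlineAsm.h>

#include <string>
#include <vector>

// Hypothetical helper (not part of IRGenerator): emit one syscall from the
// strings built by getSysCallAsmString() / getSysCallConstraintString().
// operands[0] is the syscall number, operands[1..] are its arguments, matching
// the operand-index -> register mapping referenced as $0, $1, ... in the template.
llvm::CallInst *emitSysCall(llvm::IRBuilder<> &builder, llvm::LLVMContext &context,
                            const std::string &asmString, const std::string &constraints,
                            llvm::ArrayRef<llvm::Value *> operands) {
  // Each operand is bound to one "r" constraint; assume i64 values (64-bit targets).
  const std::vector<llvm::Type *> argTypes(operands.size(), llvm::Type::getInt64Ty(context));
  llvm::FunctionType *fctType = llvm::FunctionType::get(llvm::Type::getVoidTy(context), argTypes, /*isVarArg=*/false);
  // hasSideEffects=true keeps the optimizer from discarding the syscall.
  llvm::InlineAsm *inlineAsm = llvm::InlineAsm::get(fctType, asmString, constraints, /*hasSideEffects=*/true);
  return builder.CreateCall(fctType, inlineAsm, operands);
}
```

Because the generated constraint string declares no output operand and instead clobbers the result register, the call is modeled here as returning `void`; a variant that needs the syscall's return value would prepend an output constraint such as `=r` and read the call's result instead.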