/*
 * Copyright (c) 2023, Andreas Kling <kling@serenityos.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */
  6. #pragma once
  7. #include <AK/Platform.h>
  8. #include <AK/Vector.h>
  9. #if ARCH(X86_64)
  10. namespace JIT {
  11. struct Assembler {
  12. Assembler(Vector<u8>& output)
  13. : m_output(output)
  14. {
  15. }
  16. Vector<u8>& m_output;
  17. enum class Reg {
  18. RAX = 0,
  19. RCX = 1,
  20. RDX = 2,
  21. RBX = 3,
  22. RSP = 4,
  23. RBP = 5,
  24. RSI = 6,
  25. RDI = 7,
  26. R8 = 8,
  27. R9 = 9,
  28. R10 = 10,
  29. R11 = 11,
  30. R12 = 12,
  31. R13 = 13,
  32. R14 = 14,
  33. R15 = 15,
  34. };
  35. struct Operand {
  36. enum class Type {
  37. Reg,
  38. Imm,
  39. Mem64BaseAndOffset,
  40. };
  41. Type type {};
  42. Reg reg {};
  43. u64 offset_or_immediate { 0 };
  44. static Operand Register(Reg reg)
  45. {
  46. Operand operand;
  47. operand.type = Type::Reg;
  48. operand.reg = reg;
  49. return operand;
  50. }
  51. static Operand Imm(u64 imm)
  52. {
  53. Operand operand;
  54. operand.type = Type::Imm;
  55. operand.offset_or_immediate = imm;
  56. return operand;
  57. }
  58. static Operand Mem64BaseAndOffset(Reg base, u64 offset)
  59. {
  60. Operand operand;
  61. operand.type = Type::Mem64BaseAndOffset;
  62. operand.reg = base;
  63. operand.offset_or_immediate = offset;
  64. return operand;
  65. }
  66. bool is_register_or_memory() const
  67. {
  68. return type == Type::Reg || type == Type::Mem64BaseAndOffset;
  69. }
  70. bool fits_in_u8() const
  71. {
  72. VERIFY(type == Type::Imm);
  73. return offset_or_immediate <= NumericLimits<u8>::max();
  74. }
  75. bool fits_in_u32() const
  76. {
  77. VERIFY(type == Type::Imm);
  78. return offset_or_immediate <= NumericLimits<u32>::max();
  79. }
  80. bool fits_in_i8() const
  81. {
  82. VERIFY(type == Type::Imm);
  83. return (offset_or_immediate <= NumericLimits<i8>::max()) || (((~offset_or_immediate) & NumericLimits<i8>::min()) == 0);
  84. }
  85. bool fits_in_i32() const
  86. {
  87. VERIFY(type == Type::Imm);
  88. return (offset_or_immediate <= NumericLimits<i32>::max()) || (((~offset_or_immediate) & NumericLimits<i32>::min()) == 0);
  89. }
  90. };
  91. enum class Condition {
  92. EqualTo = 0x4,
  93. NotEqualTo = 0x5,
  94. UnsignedGreaterThan = 0x7,
  95. UnsignedGreaterThanOrEqualTo = 0x3,
  96. UnsignedLessThan = 0x2,
  97. UnsignedLessThanOrEqualTo = 0x6,
  98. SignedGreaterThan = 0xF,
  99. SignedGreaterThanOrEqualTo = 0xD,
  100. SignedLessThan = 0xC,
  101. SignedLessThanOrEqualTo = 0xE,
  102. };
  103. static constexpr u8 encode_reg(Reg reg)
  104. {
  105. return to_underlying(reg) & 0x7;
  106. }
  107. enum class Patchable {
  108. Yes,
  109. No,
  110. };
  111. union ModRM {
  112. static constexpr u8 Mem = 0b00;
  113. static constexpr u8 MemDisp8 = 0b01;
  114. static constexpr u8 MemDisp32 = 0b10;
  115. static constexpr u8 Reg = 0b11;
  116. struct {
  117. u8 rm : 3;
  118. u8 reg : 3;
  119. u8 mode : 2;
  120. };
  121. u8 raw;
  122. };
  123. void emit_modrm_slash(u8 slash, Operand rm, Patchable patchable = Patchable::No)
  124. {
  125. ModRM raw;
  126. raw.rm = encode_reg(rm.reg);
  127. raw.reg = slash;
  128. emit_modrm(raw, rm, patchable);
  129. }
  130. void emit_modrm_rm(Operand dst, Operand src, Patchable patchable = Patchable::No)
  131. {
  132. VERIFY(dst.type == Operand::Type::Reg);
  133. ModRM raw {};
  134. raw.reg = encode_reg(dst.reg);
  135. raw.rm = encode_reg(src.reg);
  136. emit_modrm(raw, src, patchable);
  137. }
  138. void emit_modrm_mr(Operand dst, Operand src, Patchable patchable = Patchable::No)
  139. {
  140. VERIFY(src.type == Operand::Type::Reg);
  141. ModRM raw {};
  142. raw.reg = encode_reg(src.reg);
  143. raw.rm = encode_reg(dst.reg);
  144. emit_modrm(raw, dst, patchable);
  145. }
  146. void emit_modrm(ModRM raw, Operand rm, Patchable patchable)
  147. {
  148. // FIXME: rm:100 (RSP) is reserved as the SIB marker
  149. VERIFY(rm.type != Operand::Type::Imm);
  150. switch (rm.type) {
  151. case Operand::Type::Reg:
  152. // FIXME: There is mod:00,rm:101(EBP?) -> disp32, that might be something else
  153. raw.mode = ModRM::Reg;
  154. emit8(raw.raw);
  155. break;
  156. case Operand::Type::Mem64BaseAndOffset: {
  157. auto disp = rm.offset_or_immediate;
  158. if (patchable == Patchable::Yes) {
  159. raw.mode = ModRM::MemDisp32;
  160. emit8(raw.raw);
  161. emit32(disp);
  162. } else if (disp == 0) {
  163. raw.mode = ModRM::Mem;
  164. emit8(raw.raw);
  165. } else if (static_cast<i64>(disp) >= -128 && disp <= 127) {
  166. raw.mode = ModRM::MemDisp8;
  167. emit8(raw.raw);
  168. emit8(disp & 0xff);
  169. } else {
  170. raw.mode = ModRM::MemDisp32;
  171. emit8(raw.raw);
  172. emit32(disp);
  173. }
  174. break;
  175. }
  176. case Operand::Type::Imm:
  177. VERIFY_NOT_REACHED();
  178. }
  179. }
  180. union REX {
  181. struct {
  182. u8 B : 1; // ModRM::RM
  183. u8 X : 1; // SIB::Index
  184. u8 R : 1; // ModRM::Reg
  185. u8 W : 1; // Operand size override
  186. u8 _ : 4 { 0b0100 };
  187. };
  188. u8 raw;
  189. };
  190. enum class REX_W : bool {
  191. No = 0,
  192. Yes = 1
  193. };
  194. void emit_rex_for_OI(Operand arg, REX_W W)
  195. {
  196. emit_rex_for_slash(arg, W);
  197. }
  198. void emit_rex_for_slash(Operand arg, REX_W W)
  199. {
  200. VERIFY(arg.is_register_or_memory());
  201. if (W == REX_W::No && to_underlying(arg.reg) < 8)
  202. return;
  203. REX rex {
  204. .B = to_underlying(arg.reg) >= 8,
  205. .X = 0,
  206. .R = 0,
  207. .W = to_underlying(W)
  208. };
  209. emit8(rex.raw);
  210. }
  211. void emit_rex_for_mr(Operand dst, Operand src, REX_W W)
  212. {
  213. VERIFY(dst.is_register_or_memory());
  214. VERIFY(src.type == Operand::Type::Reg);
  215. if (W == REX_W::No && to_underlying(dst.reg) < 8 && to_underlying(src.reg) < 8)
  216. return;
  217. REX rex {
  218. .B = to_underlying(dst.reg) >= 8,
  219. .X = 0,
  220. .R = to_underlying(src.reg) >= 8,
  221. .W = to_underlying(W)
  222. };
  223. emit8(rex.raw);
  224. }
  225. void emit_rex_for_rm(Operand dst, Operand src, REX_W W)
  226. {
  227. VERIFY(src.is_register_or_memory());
  228. VERIFY(dst.type == Operand::Type::Reg);
  229. if (W == REX_W::No && to_underlying(dst.reg) < 8 && to_underlying(src.reg) < 8)
  230. return;
  231. REX rex {
  232. .B = to_underlying(src.reg) >= 8,
  233. .X = 0,
  234. .R = to_underlying(dst.reg) >= 8,
  235. .W = to_underlying(W)
  236. };
  237. emit8(rex.raw);
  238. }
  239. void shift_right(Operand dst, Operand count)
  240. {
  241. VERIFY(dst.type == Operand::Type::Reg);
  242. VERIFY(count.type == Operand::Type::Imm);
  243. VERIFY(count.fits_in_u8());
  244. emit_rex_for_slash(dst, REX_W::Yes);
  245. emit8(0xc1);
  246. emit_modrm_slash(5, dst);
  247. emit8(count.offset_or_immediate);
  248. }
  249. void mov(Operand dst, Operand src, Patchable patchable = Patchable::No)
  250. {
  251. if (dst.is_register_or_memory() && src.type == Operand::Type::Reg) {
  252. if (src.type == Operand::Type::Reg && src.reg == dst.reg)
  253. return;
  254. emit_rex_for_mr(dst, src, REX_W::Yes);
  255. emit8(0x89);
  256. emit_modrm_mr(dst, src, patchable);
  257. return;
  258. }
  259. if (dst.type == Operand::Type::Reg && src.type == Operand::Type::Imm) {
  260. if (patchable == Patchable::No) {
  261. if (src.offset_or_immediate == 0) {
  262. // xor dst, dst
  263. // Note: Operand size does not matter here, as the result is 0-extended to 64 bit,
  264. // so we don't have to set the W flag in the REX prefix,
  265. // or use it at all in case we dont use REX addressed registers (elision is implemented in the helper)
  266. emit_rex_for_mr(dst, dst, REX_W::No);
  267. emit8(0x31);
  268. emit_modrm_mr(dst, dst, patchable);
  269. return;
  270. }
  271. if (src.fits_in_u32()) {
  272. emit_rex_for_OI(dst, REX_W::No);
  273. emit8(0xb8 | encode_reg(dst.reg));
  274. emit32(src.offset_or_immediate);
  275. return;
  276. }
  277. }
  278. emit_rex_for_OI(dst, REX_W::Yes);
  279. emit8(0xb8 | encode_reg(dst.reg));
  280. emit64(src.offset_or_immediate);
  281. return;
  282. }
  283. if (dst.type == Operand::Type::Reg && src.is_register_or_memory()) {
  284. emit_rex_for_rm(dst, src, REX_W::Yes);
  285. emit8(0x8b);
  286. emit_modrm_rm(dst, src, patchable);
  287. return;
  288. }
  289. VERIFY_NOT_REACHED();
  290. }
  291. void emit8(u8 value)
  292. {
  293. m_output.append(value);
  294. }
  295. void emit32(u32 value)
  296. {
  297. m_output.append((value >> 0) & 0xff);
  298. m_output.append((value >> 8) & 0xff);
  299. m_output.append((value >> 16) & 0xff);
  300. m_output.append((value >> 24) & 0xff);
  301. }
  302. void emit64(u64 value)
  303. {
  304. m_output.append((value >> 0) & 0xff);
  305. m_output.append((value >> 8) & 0xff);
  306. m_output.append((value >> 16) & 0xff);
  307. m_output.append((value >> 24) & 0xff);
  308. m_output.append((value >> 32) & 0xff);
  309. m_output.append((value >> 40) & 0xff);
  310. m_output.append((value >> 48) & 0xff);
  311. m_output.append((value >> 56) & 0xff);
  312. }
  313. struct Label {
  314. Optional<size_t> offset_of_label_in_instruction_stream;
  315. Vector<size_t> jump_slot_offsets_in_instruction_stream;
  316. void add_jump(Assembler& assembler, size_t offset)
  317. {
  318. jump_slot_offsets_in_instruction_stream.append(offset);
  319. if (offset_of_label_in_instruction_stream.has_value())
  320. link_jump(assembler, offset);
  321. }
  322. void link(Assembler& assembler)
  323. {
  324. link_to(assembler, assembler.m_output.size());
  325. }
  326. void link_to(Assembler& assembler, size_t link_offset)
  327. {
  328. VERIFY(!offset_of_label_in_instruction_stream.has_value());
  329. offset_of_label_in_instruction_stream = link_offset;
  330. for (auto offset_in_instruction_stream : jump_slot_offsets_in_instruction_stream)
  331. link_jump(assembler, offset_in_instruction_stream);
  332. }
  333. private:
  334. void link_jump(Assembler& assembler, size_t offset_in_instruction_stream)
  335. {
  336. auto offset = offset_of_label_in_instruction_stream.value() - offset_in_instruction_stream;
  337. auto jump_slot = offset_in_instruction_stream - 4;
  338. assembler.m_output[jump_slot + 0] = (offset >> 0) & 0xff;
  339. assembler.m_output[jump_slot + 1] = (offset >> 8) & 0xff;
  340. assembler.m_output[jump_slot + 2] = (offset >> 16) & 0xff;
  341. assembler.m_output[jump_slot + 3] = (offset >> 24) & 0xff;
  342. }
  343. };
  344. [[nodiscard]] Label jump()
  345. {
  346. // jmp target (RIP-relative 32-bit offset)
  347. emit8(0xe9);
  348. emit32(0xdeadbeef);
  349. Assembler::Label label {};
  350. label.add_jump(*this, m_output.size());
  351. return label;
  352. }
  353. void jump(Label& label)
  354. {
  355. // jmp target (RIP-relative 32-bit offset)
  356. emit8(0xe9);
  357. emit32(0xdeadbeef);
  358. label.add_jump(*this, m_output.size());
  359. }
  360. void jump(Operand op)
  361. {
  362. emit_rex_for_slash(op, REX_W::No);
  363. emit8(0xff);
  364. emit_modrm_slash(4, op);
  365. }
  366. void verify_not_reached()
  367. {
  368. // ud2
  369. emit8(0x0f);
  370. emit8(0x0b);
  371. }
  372. void cmp(Operand lhs, Operand rhs)
  373. {
  374. if (lhs.type == Operand::Type::Reg && rhs.type == Operand::Type::Imm && rhs.offset_or_immediate == 0) {
  375. test(lhs, lhs);
  376. } else if (lhs.is_register_or_memory() && rhs.type == Operand::Type::Reg) {
  377. emit_rex_for_mr(lhs, rhs, REX_W::Yes);
  378. emit8(0x39);
  379. emit_modrm_mr(lhs, rhs);
  380. } else if (lhs.is_register_or_memory() && rhs.type == Operand::Type::Imm && rhs.fits_in_i8()) {
  381. emit_rex_for_slash(lhs, REX_W::Yes);
  382. emit8(0x83);
  383. emit_modrm_slash(7, lhs);
  384. emit8(rhs.offset_or_immediate);
  385. } else if (lhs.is_register_or_memory() && rhs.type == Operand::Type::Imm && rhs.fits_in_i32()) {
  386. emit_rex_for_slash(lhs, REX_W::Yes);
  387. emit8(0x81);
  388. emit_modrm_slash(7, lhs);
  389. emit32(rhs.offset_or_immediate);
  390. } else {
  391. VERIFY_NOT_REACHED();
  392. }
  393. }
  394. void test(Operand lhs, Operand rhs)
  395. {
  396. if (lhs.is_register_or_memory() && rhs.type == Operand::Type::Reg) {
  397. emit_rex_for_mr(lhs, rhs, REX_W::Yes);
  398. emit8(0x85);
  399. emit_modrm_mr(lhs, rhs);
  400. } else if (lhs.type != Operand::Type::Imm && rhs.type == Operand::Type::Imm) {
  401. VERIFY(rhs.fits_in_i32());
  402. emit_rex_for_slash(lhs, REX_W::Yes);
  403. emit8(0xf7);
  404. emit_modrm_slash(0, lhs);
  405. emit32(rhs.offset_or_immediate);
  406. } else {
  407. VERIFY_NOT_REACHED();
  408. }
  409. }
  410. void jump_if(Operand lhs, Condition condition, Operand rhs, Label& label)
  411. {
  412. cmp(lhs, rhs);
  413. emit8(0x0F);
  414. emit8(0x80 | to_underlying(condition));
  415. emit32(0xdeadbeef);
  416. label.add_jump(*this, m_output.size());
  417. }
  418. void sign_extend_32_to_64_bits(Reg reg)
  419. {
  420. // movsxd (reg as 64-bit), (reg as 32-bit)
  421. emit_rex_for_rm(Operand::Register(reg), Operand::Register(reg), REX_W::Yes);
  422. emit8(0x63);
  423. emit_modrm_rm(Operand::Register(reg), Operand::Register(reg));
  424. }
  425. void bitwise_and(Operand dst, Operand src)
  426. {
  427. // and dst,src
  428. if (dst.is_register_or_memory() && src.type == Operand::Type::Reg) {
  429. emit_rex_for_mr(dst, src, REX_W::Yes);
  430. emit8(0x21);
  431. emit_modrm_mr(dst, src);
  432. } else if (dst.type == Operand::Type::Reg && src.type == Operand::Type::Imm && src.fits_in_i8()) {
  433. emit_rex_for_slash(dst, REX_W::Yes);
  434. emit8(0x83);
  435. emit_modrm_slash(4, dst);
  436. emit8(src.offset_or_immediate);
  437. } else if (dst.type == Operand::Type::Reg && src.type == Operand::Type::Imm && src.fits_in_i32()) {
  438. emit_rex_for_slash(dst, REX_W::Yes);
  439. emit8(0x81);
  440. emit_modrm_slash(4, dst);
  441. emit32(src.offset_or_immediate);
  442. } else {
  443. VERIFY_NOT_REACHED();
  444. }
  445. }
  446. void bitwise_or(Operand dst, Operand src)
  447. {
  448. // or dst,src
  449. if (dst.is_register_or_memory() && src.type == Operand::Type::Reg) {
  450. emit_rex_for_mr(dst, src, REX_W::Yes);
  451. emit8(0x09);
  452. emit_modrm_mr(dst, src);
  453. } else if (dst.type == Operand::Type::Reg && src.type == Operand::Type::Imm && src.fits_in_i8()) {
  454. emit_rex_for_slash(dst, REX_W::Yes);
  455. emit8(0x83);
  456. emit_modrm_slash(1, dst);
  457. emit8(src.offset_or_immediate);
  458. } else if (dst.type == Operand::Type::Reg && src.type == Operand::Type::Imm && src.fits_in_i32()) {
  459. emit_rex_for_slash(dst, REX_W::Yes);
  460. emit8(0x81);
  461. emit_modrm_slash(1, dst);
  462. emit32(src.offset_or_immediate);
  463. } else {
  464. VERIFY_NOT_REACHED();
  465. }
  466. }
  467. void enter()
  468. {
  469. push_callee_saved_registers();
  470. push(Operand::Register(Reg::RBP));
  471. mov(Operand::Register(Reg::RBP), Operand::Register(Reg::RSP));
  472. }
  473. void exit()
  474. {
  475. // leave
  476. emit8(0xc9);
  477. pop_callee_saved_registers();
  478. // ret
  479. emit8(0xc3);
  480. }
  481. void push_callee_saved_registers()
  482. {
  483. // FIXME: Don't push RBX twice :^)
  484. push(Operand::Register(Reg::RBX));
  485. push(Operand::Register(Reg::RBX));
  486. push(Operand::Register(Reg::R12));
  487. push(Operand::Register(Reg::R13));
  488. push(Operand::Register(Reg::R14));
  489. push(Operand::Register(Reg::R15));
  490. }
  491. void pop_callee_saved_registers()
  492. {
  493. pop(Operand::Register(Reg::R15));
  494. pop(Operand::Register(Reg::R14));
  495. pop(Operand::Register(Reg::R13));
  496. pop(Operand::Register(Reg::R12));
  497. // FIXME: Don't pop RBX twice :^)
  498. pop(Operand::Register(Reg::RBX));
  499. pop(Operand::Register(Reg::RBX));
  500. }
  501. void push(Operand op)
  502. {
  503. if (op.type == Operand::Type::Reg) {
  504. emit_rex_for_OI(op, REX_W::No);
  505. emit8(0x50 | encode_reg(op.reg));
  506. } else if (op.type == Operand::Type::Imm) {
  507. if (op.fits_in_i8()) {
  508. emit8(0x6a);
  509. emit8(op.offset_or_immediate);
  510. } else if (op.fits_in_i32()) {
  511. emit8(0x68);
  512. emit32(op.offset_or_immediate);
  513. } else {
  514. VERIFY_NOT_REACHED();
  515. }
  516. } else {
  517. VERIFY_NOT_REACHED();
  518. }
  519. }
  520. void pop(Operand op)
  521. {
  522. if (op.type == Operand::Type::Reg) {
  523. emit_rex_for_OI(op, REX_W::No);
  524. emit8(0x58 | encode_reg(op.reg));
  525. } else {
  526. VERIFY_NOT_REACHED();
  527. }
  528. }
  529. void add(Operand dst, Operand src)
  530. {
  531. if (dst.is_register_or_memory() && src.type == Operand::Type::Reg) {
  532. emit_rex_for_mr(dst, src, REX_W::Yes);
  533. emit8(0x01);
  534. emit_modrm_mr(dst, src);
  535. } else if (dst.is_register_or_memory() && src.type == Operand::Type::Imm && src.fits_in_i8()) {
  536. emit_rex_for_slash(dst, REX_W::Yes);
  537. emit8(0x83);
  538. emit_modrm_slash(0, dst);
  539. emit8(src.offset_or_immediate);
  540. } else if (dst.is_register_or_memory() && src.type == Operand::Type::Imm && src.fits_in_i32()) {
  541. emit_rex_for_slash(dst, REX_W::Yes);
  542. emit8(0x81);
  543. emit_modrm_slash(0, dst);
  544. emit32(src.offset_or_immediate);
  545. } else {
  546. VERIFY_NOT_REACHED();
  547. }
  548. }
  549. void add32(Operand dst, Operand src, Optional<Label&> label)
  550. {
  551. if (dst.is_register_or_memory() && src.type == Operand::Type::Reg) {
  552. emit_rex_for_mr(dst, src, REX_W::No);
  553. emit8(0x01);
  554. emit_modrm_mr(dst, src);
  555. } else if (dst.is_register_or_memory() && src.type == Operand::Type::Imm && src.fits_in_i8()) {
  556. emit_rex_for_slash(dst, REX_W::No);
  557. emit8(0x83);
  558. emit_modrm_slash(0, dst);
  559. emit8(src.offset_or_immediate);
  560. } else if (dst.is_register_or_memory() && src.type == Operand::Type::Imm && src.fits_in_i32()) {
  561. emit_rex_for_slash(dst, REX_W::No);
  562. emit8(0x81);
  563. emit_modrm_slash(0, dst);
  564. emit32(src.offset_or_immediate);
  565. } else {
  566. VERIFY_NOT_REACHED();
  567. }
  568. if (label.has_value()) {
  569. // jo label (RIP-relative 32-bit offset)
  570. emit8(0x0f);
  571. emit8(0x80);
  572. emit32(0xdeadbeef);
  573. label->add_jump(*this, m_output.size());
  574. }
  575. }
  576. void sub(Operand dst, Operand src)
  577. {
  578. if (dst.is_register_or_memory() && src.type == Operand::Type::Reg) {
  579. emit_rex_for_mr(dst, src, REX_W::Yes);
  580. emit8(0x29);
  581. emit_modrm_mr(dst, src);
  582. } else if (dst.is_register_or_memory() && src.type == Operand::Type::Imm && src.fits_in_i8()) {
  583. emit_rex_for_slash(dst, REX_W::Yes);
  584. emit8(0x83);
  585. emit_modrm_slash(5, dst);
  586. emit8(src.offset_or_immediate);
  587. } else if (dst.is_register_or_memory() && src.type == Operand::Type::Imm && src.fits_in_i32()) {
  588. emit_rex_for_slash(dst, REX_W::Yes);
  589. emit8(0x81);
  590. emit_modrm_slash(5, dst);
  591. emit32(src.offset_or_immediate);
  592. } else {
  593. VERIFY_NOT_REACHED();
  594. }
  595. }
  596. // NOTE: It's up to the caller of this function to preserve registers as needed.
  597. void native_call(void* callee, Vector<Operand> const& stack_arguments = {})
  598. {
  599. // Preserve 16-byte stack alignment for non-even amount of stack-passed arguments
  600. if ((stack_arguments.size() % 2) == 1)
  601. push(Operand::Imm(0));
  602. for (auto const& stack_argument : stack_arguments.in_reverse())
  603. push(stack_argument);
  604. // load callee into RAX
  605. mov(Operand::Register(Reg::RAX), Operand::Imm(bit_cast<u64>(callee)));
  606. // call RAX
  607. emit8(0xff);
  608. emit_modrm_slash(2, Operand::Register(Reg::RAX));
  609. if (!stack_arguments.is_empty())
  610. add(Operand::Register(Reg::RSP), Operand::Imm(align_up_to(stack_arguments.size(), 2) * sizeof(void*)));
  611. }
  612. void trap()
  613. {
  614. // int3
  615. emit8(0xcc);
  616. }
  617. };
  618. }
  619. #endif