SoftCPU.cpp 66 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139
  1. /*
  2. * Copyright (c) 2020, Andreas Kling <kling@serenityos.org>
  3. * All rights reserved.
  4. *
  5. * Redistribution and use in source and binary forms, with or without
  6. * modification, are permitted provided that the following conditions are met:
  7. *
  8. * 1. Redistributions of source code must retain the above copyright notice, this
  9. * list of conditions and the following disclaimer.
  10. *
  11. * 2. Redistributions in binary form must reproduce the above copyright notice,
  12. * this list of conditions and the following disclaimer in the documentation
  13. * and/or other materials provided with the distribution.
  14. *
  15. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
  16. * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
  17. * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
  18. * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
  19. * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
  20. * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
  21. * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
  22. * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
  23. * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  24. * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  25. */
  26. #include "SoftCPU.h"
  27. #include "Emulator.h"
  28. #include <AK/Assertions.h>
  29. #include <stdio.h>
  30. #include <string.h>
  31. #if defined(__GNUC__) && !defined(__clang__)
  32. # pragma GCC optimize("O3")
  33. #endif
  34. //#define MEMORY_DEBUG
// Stamps out the nine SoftCPU entry points for one shift/rotate mnemonic
// (e.g. SHL): RM8/RM16/RM32, each in the _1 (implicit count of 1),
// _CL (count in CL) and _imm8 (immediate count) encodings. Each handler
// forwards to the matching generic_RM*_* helper with the width-specialized
// `op` functor; the imm8 forms pass update_dest=true so the result is
// written back.
#define DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(mnemonic, op) \
    void SoftCPU::mnemonic##_RM8_1(const X86::Instruction& insn) { generic_RM8_1(op<u8>, insn); } \
    void SoftCPU::mnemonic##_RM8_CL(const X86::Instruction& insn) { generic_RM8_CL(op<u8>, insn); } \
    void SoftCPU::mnemonic##_RM8_imm8(const X86::Instruction& insn) { generic_RM8_imm8<true>(op<u8>, insn); } \
    void SoftCPU::mnemonic##_RM16_1(const X86::Instruction& insn) { generic_RM16_1(op<u16>, insn); } \
    void SoftCPU::mnemonic##_RM16_CL(const X86::Instruction& insn) { generic_RM16_CL(op<u16>, insn); } \
    void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) { generic_RM16_imm8<true>(op<u16>, insn); } \
    void SoftCPU::mnemonic##_RM32_1(const X86::Instruction& insn) { generic_RM32_1(op<u32>, insn); } \
    void SoftCPU::mnemonic##_RM32_CL(const X86::Instruction& insn) { generic_RM32_CL(op<u32>, insn); } \
    void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) { generic_RM32_imm8<true>(op<u32>, insn); }
  45. namespace UserspaceEmulator {
  46. template<typename T, typename U>
  47. inline constexpr T sign_extended_to(U value)
  48. {
  49. if (!(value & X86::TypeTrivia<U>::sign_bit))
  50. return value;
  51. return (X86::TypeTrivia<T>::mask & ~X86::TypeTrivia<U>::mask) | value;
  52. }
// Initializes the emulated CPU: zeroes all general-purpose registers and
// installs the fixed flat-model segment selectors used by the emulator
// (CS=0x18 for code, DS/ES/SS=0x20 for data/stack, GS=0x28).
// NOTE(review): FS is left at 0 here -- confirm that is intended, since the
// memory-access asserts below accept only 0x18/0x20/0x28.
SoftCPU::SoftCPU(Emulator& emulator)
    : m_emulator(emulator)
{
    memset(m_gpr, 0, sizeof(m_gpr));
    m_segment[(int)X86::SegmentRegister::CS] = 0x18;
    m_segment[(int)X86::SegmentRegister::DS] = 0x20;
    m_segment[(int)X86::SegmentRegister::ES] = 0x20;
    m_segment[(int)X86::SegmentRegister::SS] = 0x20;
    m_segment[(int)X86::SegmentRegister::GS] = 0x28;
}
// Prints a one-shot human-readable dump of the emulated CPU state to stdout:
// the eight GPRs followed by the O/S/Z/A/P/C status flags.
void SoftCPU::dump() const
{
    printf("eax=%08x ebx=%08x ecx=%08x edx=%08x ", eax(), ebx(), ecx(), edx());
    printf("ebp=%08x esp=%08x esi=%08x edi=%08x ", ebp(), esp(), esi(), edi());
    printf("o=%u s=%u z=%u a=%u p=%u c=%u\n", of(), sf(), zf(), af(), pf(), cf());
}
  69. void SoftCPU::did_receive_secret_data()
  70. {
  71. if (m_secret_data[0] == 1) {
  72. if (auto* tracer = m_emulator.malloc_tracer())
  73. tracer->target_did_malloc({}, m_secret_data[2], m_secret_data[1]);
  74. } else if (m_secret_data[0] == 2) {
  75. if (auto* tracer = m_emulator.malloc_tracer())
  76. tracer->target_did_free({}, m_secret_data[1]);
  77. } else {
  78. ASSERT_NOT_REACHED();
  79. }
  80. }
// Refreshes the cached instruction-fetch window after a control transfer:
// locates the MMU region containing CS:EIP and caches direct pointers to the
// current fetch position and the end of that region, so instruction bytes
// can be read without a full MMU lookup per byte.
void SoftCPU::update_code_cache()
{
    auto* region = m_emulator.mmu().find_region({ cs(), eip() });
    ASSERT(region); // executing from unmapped memory is fatal
    m_cached_code_ptr = region->cacheable_ptr(eip() - region->base());
    m_cached_code_end = region->cacheable_ptr(region->size());
}
  88. u8 SoftCPU::read_memory8(X86::LogicalAddress address)
  89. {
  90. ASSERT(address.selector() == 0x18 || address.selector() == 0x20 || address.selector() == 0x28);
  91. auto value = m_emulator.mmu().read8(address);
  92. #ifdef MEMORY_DEBUG
  93. printf("\033[36;1mread_memory8: @%08x:%08x -> %02x\033[0m\n", address.selector(), address.offset(), value);
  94. #endif
  95. return value;
  96. }
  97. u16 SoftCPU::read_memory16(X86::LogicalAddress address)
  98. {
  99. ASSERT(address.selector() == 0x18 || address.selector() == 0x20 || address.selector() == 0x28);
  100. auto value = m_emulator.mmu().read16(address);
  101. #ifdef MEMORY_DEBUG
  102. printf("\033[36;1mread_memory16: @%04x:%08x -> %04x\033[0m\n", address.selector(), address.offset(), value);
  103. #endif
  104. return value;
  105. }
  106. u32 SoftCPU::read_memory32(X86::LogicalAddress address)
  107. {
  108. ASSERT(address.selector() == 0x18 || address.selector() == 0x20 || address.selector() == 0x28);
  109. auto value = m_emulator.mmu().read32(address);
  110. #ifdef MEMORY_DEBUG
  111. printf("\033[36;1mread_memory32: @%04x:%08x -> %08x\033[0m\n", address.selector(), address.offset(), value);
  112. #endif
  113. return value;
  114. }
  115. void SoftCPU::write_memory8(X86::LogicalAddress address, u8 value)
  116. {
  117. ASSERT(address.selector() == 0x20 || address.selector() == 0x28);
  118. #ifdef MEMORY_DEBUG
  119. printf("\033[35;1mwrite_memory8: @%04x:%08x <- %02x\033[0m\n", address.selector(), address.offset(), value);
  120. #endif
  121. m_emulator.mmu().write8(address, value);
  122. }
  123. void SoftCPU::write_memory16(X86::LogicalAddress address, u16 value)
  124. {
  125. ASSERT(address.selector() == 0x20 || address.selector() == 0x28);
  126. #ifdef MEMORY_DEBUG
  127. printf("\033[35;1mwrite_memory16: @%04x:%08x <- %04x\033[0m\n", address.selector(), address.offset(), value);
  128. #endif
  129. m_emulator.mmu().write16(address, value);
  130. }
  131. void SoftCPU::write_memory32(X86::LogicalAddress address, u32 value)
  132. {
  133. ASSERT(address.selector() == 0x20 || address.selector() == 0x28);
  134. #ifdef MEMORY_DEBUG
  135. printf("\033[35;1mwrite_memory32: @%04x:%08x <- %08x\033[0m\n", address.selector(), address.offset(), value);
  136. #endif
  137. m_emulator.mmu().write32(address, value);
  138. }
  139. void SoftCPU::push_string(const StringView& string)
  140. {
  141. size_t space_to_allocate = round_up_to_power_of_two(string.length() + 1, 16);
  142. set_esp(esp() - space_to_allocate);
  143. m_emulator.mmu().copy_to_vm(esp(), string.characters_without_null_termination(), string.length());
  144. m_emulator.mmu().write8({ 0x20, esp() + string.length() }, '\0');
  145. }
  146. void SoftCPU::push32(u32 value)
  147. {
  148. set_esp(esp() - sizeof(value));
  149. write_memory32({ ss(), esp() }, value);
  150. }
  151. u32 SoftCPU::pop32()
  152. {
  153. auto value = read_memory32({ ss(), esp() });
  154. set_esp(esp() + sizeof(value));
  155. return value;
  156. }
// Runs `callback` once, or -- when the instruction carries a REP prefix --
// repeatedly while the count register is nonzero, decrementing it after each
// iteration. With an address-size override the 16-bit CX is the counter,
// otherwise ECX. When check_zf is set (REPZ/REPNZ string ops), the loop
// additionally terminates on the ZF condition *after* the iteration, per
// x86 REP semantics.
template<bool check_zf, typename Callback>
void SoftCPU::do_once_or_repeat(const X86::Instruction& insn, Callback callback)
{
    if (!insn.has_rep_prefix())
        return callback();
    if (insn.has_address_size_override_prefix()) {
        // 16-bit counter variant (CX).
        while (cx()) {
            callback();
            set_cx(cx() - 1);
            if constexpr (check_zf) {
                if (insn.rep_prefix() == X86::Prefix::REPZ && !zf())
                    break;
                if (insn.rep_prefix() == X86::Prefix::REPNZ && zf())
                    break;
            }
        }
        return;
    }
    // 32-bit counter variant (ECX).
    while (ecx()) {
        callback();
        set_ecx(ecx() - 1);
        if constexpr (check_zf) {
            if (insn.rep_prefix() == X86::Prefix::REPZ && !zf())
                break;
            if (insn.rep_prefix() == X86::Prefix::REPNZ && zf())
                break;
        }
    }
}
// Emulates INC by executing the native inc{b,w,l} on the host and harvesting
// the host EFLAGS via pushf. INC does not affect CF, hence set_flags_oszap
// (O/S/Z/A/P only). Host is assumed to be ia32 (32-bit registers, pop %ebx).
// NOTE(review): the flags are captured in a *separate* asm statement; the
// compiler may schedule flag-clobbering code between the two blocks --
// confirm this is safe at the optimization settings in use.
template<typename T>
ALWAYS_INLINE static T op_inc(SoftCPU& cpu, T data)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("incl %%eax\n"
                     : "=a"(result)
                     : "a"(data));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("incw %%ax\n"
                     : "=a"(result)
                     : "a"(data));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("incb %%al\n"
                     : "=a"(result)
                     : "a"(data));
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszap(new_flags);
    return result;
}
// Emulates DEC via the native dec{b,w,l}, then harvests host EFLAGS with
// pushf. Like INC, DEC leaves CF untouched, hence set_flags_oszap.
// NOTE(review): flags are read in a separate asm statement from the dec --
// see the caveat on op_inc.
template<typename T>
ALWAYS_INLINE static T op_dec(SoftCPU& cpu, T data)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("decl %%eax\n"
                     : "=a"(result)
                     : "a"(data));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("decw %%ax\n"
                     : "=a"(result)
                     : "a"(data));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("decb %%al\n"
                     : "=a"(result)
                     : "a"(data));
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszap(new_flags);
    return result;
}
// Emulates XOR via the native instruction and captures host EFLAGS.
// Logical ops update O/S/Z/P/C (set_flags_oszpc); AF is undefined for XOR.
template<typename T>
ALWAYS_INLINE static T op_xor(SoftCPU& cpu, const T& dest, const T& src)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("xorl %%ecx, %%eax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u32)src));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("xor %%cx, %%ax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u16)src));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("xorb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u8)src));
    } else {
        ASSERT_NOT_REACHED();
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszpc(new_flags);
    return result;
}
// Emulates OR via the native instruction and captures host EFLAGS.
// Like XOR, updates O/S/Z/P/C only (set_flags_oszpc).
template<typename T>
ALWAYS_INLINE static T op_or(SoftCPU& cpu, const T& dest, const T& src)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("orl %%ecx, %%eax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u32)src));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("or %%cx, %%ax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u16)src));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("orb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u8)src));
    } else {
        ASSERT_NOT_REACHED();
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszpc(new_flags);
    return result;
}
// Emulates SUB via the native instruction and captures host EFLAGS.
// Arithmetic ops update the full set O/S/Z/A/P/C (set_flags_oszapc).
template<typename T>
ALWAYS_INLINE static T op_sub(SoftCPU& cpu, const T& dest, const T& src)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("subl %%ecx, %%eax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u32)src));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("subw %%cx, %%ax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u16)src));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("subb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u8)src));
    } else {
        ASSERT_NOT_REACHED();
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszapc(new_flags);
    return result;
}
// Emulates SBB (subtract with borrow) for a compile-time-known incoming CF:
// the host carry flag is primed with stc/clc, the native sbb executes, and
// the resulting EFLAGS are harvested. Dispatched from op_sbb below.
// NOTE(review): correctness relies on nothing clobbering CF between the
// stc/clc and the sbb, and between the sbb and the pushf -- these are
// separate asm statements; confirm with the build's optimization settings.
template<typename T, bool cf>
ALWAYS_INLINE static T op_sbb_impl(SoftCPU& cpu, const T& dest, const T& src)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (cf)
        asm volatile("stc");
    else
        asm volatile("clc");
    if constexpr (sizeof(T) == 4) {
        asm volatile("sbbl %%ecx, %%eax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u32)src));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("sbbw %%cx, %%ax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u16)src));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("sbbb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u8)src));
    } else {
        ASSERT_NOT_REACHED();
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszapc(new_flags);
    return result;
}
  348. template<typename T>
  349. ALWAYS_INLINE static T op_sbb(SoftCPU& cpu, T& dest, const T& src)
  350. {
  351. if (cpu.cf())
  352. return op_sbb_impl<T, true>(cpu, dest, src);
  353. return op_sbb_impl<T, false>(cpu, dest, src);
  354. }
// Emulates ADD via the native instruction and captures host EFLAGS
// (full O/S/Z/A/P/C set).
template<typename T>
ALWAYS_INLINE static T op_add(SoftCPU& cpu, T& dest, const T& src)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("addl %%ecx, %%eax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u32)src));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("addw %%cx, %%ax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u16)src));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("addb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u8)src));
    } else {
        ASSERT_NOT_REACHED();
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszapc(new_flags);
    return result;
}
// Emulates ADC (add with carry) for a compile-time-known incoming CF:
// primes the host carry flag with stc/clc, runs the native adc, then
// harvests EFLAGS. Dispatched from op_adc below.
// NOTE(review): same separate-asm-statement CF caveat as op_sbb_impl.
template<typename T, bool cf>
ALWAYS_INLINE static T op_adc_impl(SoftCPU& cpu, T& dest, const T& src)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (cf)
        asm volatile("stc");
    else
        asm volatile("clc");
    if constexpr (sizeof(T) == 4) {
        asm volatile("adcl %%ecx, %%eax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u32)src));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("adcw %%cx, %%ax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u16)src));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("adcb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u8)src));
    } else {
        ASSERT_NOT_REACHED();
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszapc(new_flags);
    return result;
}
  413. template<typename T>
  414. ALWAYS_INLINE static T op_adc(SoftCPU& cpu, T& dest, const T& src)
  415. {
  416. if (cpu.cf())
  417. return op_adc_impl<T, true>(cpu, dest, src);
  418. return op_adc_impl<T, false>(cpu, dest, src);
  419. }
// Emulates AND via the native instruction and captures host EFLAGS.
// Logical op: updates O/S/Z/P/C only (set_flags_oszpc).
template<typename T>
ALWAYS_INLINE static T op_and(SoftCPU& cpu, const T& dest, const T& src)
{
    T result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("andl %%ecx, %%eax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u32)src));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("andw %%cx, %%ax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u16)src));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("andb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(dest), "c"((u8)src));
    } else {
        ASSERT_NOT_REACHED();
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszpc(new_flags);
    return result;
}
// Emulates signed multiply (IMUL) and captures host EFLAGS; only OF and CF
// are meaningful for IMUL (set_flags_oc). The sizeof(T)==8 branch runs the
// one-operand 32x32->64 form (result split across EDX:EAX, reassembled
// below); the 4- and 2-byte branches use the two-operand truncating form.
// NOTE(review): there is no sizeof(T)==1 branch -- confirm no caller
// instantiates this with an 8-bit type.
template<typename T>
ALWAYS_INLINE static T op_imul(SoftCPU& cpu, const T& dest, const T& src)
{
    u32 result_high = 0;
    u32 result_low = 0;
    T result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 8) {
        asm volatile("imull %%ecx"
                     : "=a"(result_low), "=d"(result_high)
                     : "a"((i32)dest), "c"((i32)src));
    } else if constexpr (sizeof(T) == 4) {
        asm volatile("imull %%ecx, %%eax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((i32)src));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("imulw %%cx, %%ax\n"
                     : "=a"(result)
                     : "a"(dest), "c"((i16)src));
    } else {
        ASSERT_NOT_REACHED();
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    if constexpr (sizeof(T) == 8)
        result = ((u64)result_high << 32) | result_low; // reassemble EDX:EAX
    cpu.set_flags_oc(new_flags);
    return result;
}
// Emulates SHR (logical right shift) with the shift count in CL, capturing
// host EFLAGS. A zero count returns the data unchanged *without* touching
// flags, matching x86 (a zero-count shift leaves EFLAGS unmodified).
template<typename T>
ALWAYS_INLINE static T op_shr(SoftCPU& cpu, T data, u8 steps)
{
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("shrl %%cl, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("shrw %%cl, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("shrb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszapc(new_flags);
    return result; // implicitly truncated to T's width
}
// Emulates SHL (left shift) with the shift count in CL, capturing host
// EFLAGS. A zero count returns the data unchanged without touching flags,
// matching x86 zero-count shift semantics.
template<typename T>
ALWAYS_INLINE static T op_shl(SoftCPU& cpu, T data, u8 steps)
{
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("shll %%cl, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("shlw %%cl, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("shlb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszapc(new_flags);
    return result; // implicitly truncated to T's width
}
// Emulates SHRD (double-precision right shift): shifts `data` right by
// `steps`, filling the vacated high bits from `extra_bits`. Zero count is a
// flag-preserving no-op. Only 32- and 16-bit forms exist (SHRD has no 8-bit
// encoding on x86).
template<typename T>
ALWAYS_INLINE static T op_shrd(SoftCPU& cpu, T data, T extra_bits, u8 steps)
{
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("shrd %%cl, %%edx, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "d"(extra_bits), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("shrd %%cl, %%dx, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "d"(extra_bits), "c"(steps));
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszapc(new_flags);
    return result;
}
// Emulates SHLD (double-precision left shift): shifts `data` left by
// `steps`, filling the vacated low bits from `extra_bits`. Zero count is a
// flag-preserving no-op. Only 32- and 16-bit forms exist (SHLD has no 8-bit
// encoding on x86).
template<typename T>
ALWAYS_INLINE static T op_shld(SoftCPU& cpu, T data, T extra_bits, u8 steps)
{
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    if constexpr (sizeof(T) == 4) {
        asm volatile("shld %%cl, %%edx, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "d"(extra_bits), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("shld %%cl, %%dx, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "d"(extra_bits), "c"(steps));
    }
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    cpu.set_flags_oszapc(new_flags);
    return result;
}
  578. template<bool update_dest, typename Op>
  579. ALWAYS_INLINE void SoftCPU::generic_AL_imm8(Op op, const X86::Instruction& insn)
  580. {
  581. auto dest = al();
  582. auto src = insn.imm8();
  583. auto result = op(*this, dest, src);
  584. if (update_dest)
  585. set_al(result);
  586. }
  587. template<bool update_dest, typename Op>
  588. ALWAYS_INLINE void SoftCPU::generic_AX_imm16(Op op, const X86::Instruction& insn)
  589. {
  590. auto dest = ax();
  591. auto src = insn.imm16();
  592. auto result = op(*this, dest, src);
  593. if (update_dest)
  594. set_ax(result);
  595. }
  596. template<bool update_dest, typename Op>
  597. ALWAYS_INLINE void SoftCPU::generic_EAX_imm32(Op op, const X86::Instruction& insn)
  598. {
  599. auto dest = eax();
  600. auto src = insn.imm32();
  601. auto result = op(*this, dest, src);
  602. if (update_dest)
  603. set_eax(result);
  604. }
  605. template<bool update_dest, typename Op>
  606. ALWAYS_INLINE void SoftCPU::generic_RM16_imm16(Op op, const X86::Instruction& insn)
  607. {
  608. auto dest = insn.modrm().read16(*this, insn);
  609. auto src = insn.imm16();
  610. auto result = op(*this, dest, src);
  611. if (update_dest)
  612. insn.modrm().write16(*this, insn, result);
  613. }
  614. template<bool update_dest, typename Op>
  615. ALWAYS_INLINE void SoftCPU::generic_RM16_imm8(Op op, const X86::Instruction& insn)
  616. {
  617. auto dest = insn.modrm().read16(*this, insn);
  618. auto src = sign_extended_to<u16>(insn.imm8());
  619. auto result = op(*this, dest, src);
  620. if (update_dest)
  621. insn.modrm().write16(*this, insn, result);
  622. }
  623. template<bool update_dest, typename Op>
  624. ALWAYS_INLINE void SoftCPU::generic_RM16_reg16(Op op, const X86::Instruction& insn)
  625. {
  626. auto dest = insn.modrm().read16(*this, insn);
  627. auto src = gpr16(insn.reg16());
  628. auto result = op(*this, dest, src);
  629. if (update_dest)
  630. insn.modrm().write16(*this, insn, result);
  631. }
  632. template<bool update_dest, typename Op>
  633. ALWAYS_INLINE void SoftCPU::generic_RM32_imm32(Op op, const X86::Instruction& insn)
  634. {
  635. auto dest = insn.modrm().read32(*this, insn);
  636. auto src = insn.imm32();
  637. auto result = op(*this, dest, src);
  638. if (update_dest)
  639. insn.modrm().write32(*this, insn, result);
  640. }
  641. template<bool update_dest, typename Op>
  642. ALWAYS_INLINE void SoftCPU::generic_RM32_imm8(Op op, const X86::Instruction& insn)
  643. {
  644. auto dest = insn.modrm().read32(*this, insn);
  645. auto src = sign_extended_to<u32>(insn.imm8());
  646. auto result = op(*this, dest, src);
  647. if (update_dest)
  648. insn.modrm().write32(*this, insn, result);
  649. }
  650. template<bool update_dest, typename Op>
  651. ALWAYS_INLINE void SoftCPU::generic_RM32_reg32(Op op, const X86::Instruction& insn)
  652. {
  653. auto dest = insn.modrm().read32(*this, insn);
  654. auto src = gpr32(insn.reg32());
  655. auto result = op(*this, dest, src);
  656. if (update_dest)
  657. insn.modrm().write32(*this, insn, result);
  658. }
  659. template<bool update_dest, typename Op>
  660. ALWAYS_INLINE void SoftCPU::generic_RM8_imm8(Op op, const X86::Instruction& insn)
  661. {
  662. auto dest = insn.modrm().read8(*this, insn);
  663. auto src = insn.imm8();
  664. auto result = op(*this, dest, src);
  665. if (update_dest)
  666. insn.modrm().write8(*this, insn, result);
  667. }
  668. template<bool update_dest, typename Op>
  669. ALWAYS_INLINE void SoftCPU::generic_RM8_reg8(Op op, const X86::Instruction& insn)
  670. {
  671. auto dest = insn.modrm().read8(*this, insn);
  672. auto src = gpr8(insn.reg8());
  673. auto result = op(*this, dest, src);
  674. if (update_dest)
  675. insn.modrm().write8(*this, insn, result);
  676. }
  677. template<bool update_dest, typename Op>
  678. ALWAYS_INLINE void SoftCPU::generic_reg16_RM16(Op op, const X86::Instruction& insn)
  679. {
  680. auto dest = gpr16(insn.reg16());
  681. auto src = insn.modrm().read16(*this, insn);
  682. auto result = op(*this, dest, src);
  683. if (update_dest)
  684. gpr16(insn.reg16()) = result;
  685. }
  686. template<bool update_dest, typename Op>
  687. ALWAYS_INLINE void SoftCPU::generic_reg32_RM32(Op op, const X86::Instruction& insn)
  688. {
  689. auto dest = gpr32(insn.reg32());
  690. auto src = insn.modrm().read32(*this, insn);
  691. auto result = op(*this, dest, src);
  692. if (update_dest)
  693. gpr32(insn.reg32()) = result;
  694. }
  695. template<bool update_dest, typename Op>
  696. ALWAYS_INLINE void SoftCPU::generic_reg8_RM8(Op op, const X86::Instruction& insn)
  697. {
  698. auto dest = gpr8(insn.reg8());
  699. auto src = insn.modrm().read8(*this, insn);
  700. auto result = op(*this, dest, src);
  701. if (update_dest)
  702. gpr8(insn.reg8()) = result;
  703. }
  704. template<typename Op>
  705. ALWAYS_INLINE void SoftCPU::generic_RM8_1(Op op, const X86::Instruction& insn)
  706. {
  707. auto data = insn.modrm().read8(*this, insn);
  708. insn.modrm().write8(*this, insn, op(*this, data, 1));
  709. }
  710. template<typename Op>
  711. ALWAYS_INLINE void SoftCPU::generic_RM8_CL(Op op, const X86::Instruction& insn)
  712. {
  713. auto data = insn.modrm().read8(*this, insn);
  714. insn.modrm().write8(*this, insn, op(*this, data, cl()));
  715. }
  716. template<typename Op>
  717. ALWAYS_INLINE void SoftCPU::generic_RM16_1(Op op, const X86::Instruction& insn)
  718. {
  719. auto data = insn.modrm().read16(*this, insn);
  720. insn.modrm().write16(*this, insn, op(*this, data, 1));
  721. }
  722. template<typename Op>
  723. ALWAYS_INLINE void SoftCPU::generic_RM16_CL(Op op, const X86::Instruction& insn)
  724. {
  725. auto data = insn.modrm().read16(*this, insn);
  726. insn.modrm().write16(*this, insn, op(*this, data, cl()));
  727. }
  728. template<typename Op>
  729. ALWAYS_INLINE void SoftCPU::generic_RM32_1(Op op, const X86::Instruction& insn)
  730. {
  731. auto data = insn.modrm().read32(*this, insn);
  732. insn.modrm().write32(*this, insn, op(*this, data, 1));
  733. }
  734. template<typename Op>
  735. ALWAYS_INLINE void SoftCPU::generic_RM32_CL(Op op, const X86::Instruction& insn)
  736. {
  737. auto data = insn.modrm().read32(*this, insn);
  738. insn.modrm().write32(*this, insn, op(*this, data, cl()));
  739. }
// ASCII/BCD-adjust instructions and the protected-mode ARPL/BOUND are not implemented yet.
void SoftCPU::AAA(const X86::Instruction&) { TODO(); }
void SoftCPU::AAD(const X86::Instruction&) { TODO(); }
void SoftCPU::AAM(const X86::Instruction&) { TODO(); }
void SoftCPU::AAS(const X86::Instruction&) { TODO(); }
void SoftCPU::ARPL(const X86::Instruction&) { TODO(); }
void SoftCPU::BOUND(const X86::Instruction&) { TODO(); }
// NOTE(review): BSF/BSR are currently no-ops — they leave the destination
// register and flags untouched instead of scanning for a set bit. TODO confirm
// this is intentional rather than a missing implementation.
void SoftCPU::BSF_reg16_RM16(const X86::Instruction&) { }
void SoftCPU::BSF_reg32_RM32(const X86::Instruction&) { }
void SoftCPU::BSR_reg16_RM16(const X86::Instruction&) { }
void SoftCPU::BSR_reg32_RM32(const X86::Instruction&) { }
  750. void SoftCPU::BSWAP_reg32(const X86::Instruction& insn)
  751. {
  752. gpr32(insn.reg32()) = __builtin_bswap32(gpr32(insn.reg32()));
  753. }
  754. template<typename T>
  755. ALWAYS_INLINE static T op_bt(T value, T)
  756. {
  757. return value;
  758. }
  759. template<typename T>
  760. ALWAYS_INLINE static T op_bts(T value, T bit_mask)
  761. {
  762. return value | bit_mask;
  763. }
  764. template<typename T>
  765. ALWAYS_INLINE static T op_btr(T value, T bit_mask)
  766. {
  767. return value & ~bit_mask;
  768. }
  769. template<typename T>
  770. ALWAYS_INLINE static T op_btc(T value, T bit_mask)
  771. {
  772. return value ^ bit_mask;
  773. }
// Shared implementation for BT/BTS/BTR/BTC r/m16, r16.
// For a register bit base, the bit offset is taken modulo 16.
// For a memory bit base, the register offset may address bits beyond the
// operand, so the access is done byte-granular at (address + offset/8).
// In all cases CF receives the original value of the selected bit; the
// result is written back only for the mutating forms (should_update).
template<bool should_update, typename Op>
ALWAYS_INLINE void BTx_RM16_reg16(SoftCPU& cpu, const X86::Instruction& insn, Op op)
{
    if (insn.modrm().is_register()) {
        unsigned bit_index = cpu.gpr16(insn.reg16()) & (X86::TypeTrivia<u16>::bits - 1);
        u16 original = insn.modrm().read16(cpu, insn);
        u16 bit_mask = 1 << bit_index;
        u16 result = op(original, bit_mask);
        // CF reports the pre-operation state of the bit.
        cpu.set_cf((original & bit_mask) != 0);
        if (should_update)
            insn.modrm().write16(cpu, insn, result);
        return;
    }
    // FIXME: Is this supposed to perform a full 16-bit read/modify/write?
    // NOTE(review): the register offset is treated as unsigned here; hardware
    // interprets it as a signed displacement — TODO confirm.
    unsigned bit_offset_in_array = cpu.gpr16(insn.reg16()) / 8;
    unsigned bit_offset_in_byte = cpu.gpr16(insn.reg16()) & 7;
    auto address = insn.modrm().resolve(cpu, insn);
    address.set_offset(address.offset() + bit_offset_in_array);
    u8 dest = cpu.read_memory8(address);
    u8 bit_mask = 1 << bit_offset_in_byte;
    u8 result = op(dest, bit_mask);
    cpu.set_cf((dest & bit_mask) != 0);
    if (should_update)
        cpu.write_memory8(address, result);
}

// Shared implementation for BT/BTS/BTR/BTC r/m32, r32 (same scheme as above,
// with the register bit offset taken modulo 32).
template<bool should_update, typename Op>
ALWAYS_INLINE void BTx_RM32_reg32(SoftCPU& cpu, const X86::Instruction& insn, Op op)
{
    if (insn.modrm().is_register()) {
        unsigned bit_index = cpu.gpr32(insn.reg32()) & (X86::TypeTrivia<u32>::bits - 1);
        u32 original = insn.modrm().read32(cpu, insn);
        u32 bit_mask = 1 << bit_index;
        u32 result = op(original, bit_mask);
        cpu.set_cf((original & bit_mask) != 0);
        if (should_update)
            insn.modrm().write32(cpu, insn, result);
        return;
    }
    // FIXME: Is this supposed to perform a full 32-bit read/modify/write?
    unsigned bit_offset_in_array = cpu.gpr32(insn.reg32()) / 8;
    unsigned bit_offset_in_byte = cpu.gpr32(insn.reg32()) & 7;
    auto address = insn.modrm().resolve(cpu, insn);
    address.set_offset(address.offset() + bit_offset_in_array);
    u8 dest = cpu.read_memory8(address);
    u8 bit_mask = 1 << bit_offset_in_byte;
    u8 result = op(dest, bit_mask);
    cpu.set_cf((dest & bit_mask) != 0);
    if (should_update)
        cpu.write_memory8(address, result);
}
  824. template<bool should_update, typename Op>
  825. ALWAYS_INLINE void BTx_RM16_imm8(SoftCPU& cpu, const X86::Instruction& insn, Op op)
  826. {
  827. unsigned bit_index = insn.imm8() & (X86::TypeTrivia<u16>::mask);
  828. // FIXME: Support higher bit indices
  829. ASSERT(bit_index < 16);
  830. u16 original = insn.modrm().read16(cpu, insn);
  831. u16 bit_mask = 1 << bit_index;
  832. u16 result = op(original, bit_mask);
  833. cpu.set_cf((original & bit_mask) != 0);
  834. if (should_update)
  835. insn.modrm().write16(cpu, insn, result);
  836. }
  837. template<bool should_update, typename Op>
  838. ALWAYS_INLINE void BTx_RM32_imm8(SoftCPU& cpu, const X86::Instruction& insn, Op op)
  839. {
  840. unsigned bit_index = insn.imm8() & (X86::TypeTrivia<u32>::mask);
  841. // FIXME: Support higher bit indices
  842. ASSERT(bit_index < 32);
  843. u32 original = insn.modrm().read32(cpu, insn);
  844. u32 bit_mask = 1 << bit_index;
  845. u32 result = op(original, bit_mask);
  846. cpu.set_cf((original & bit_mask) != 0);
  847. if (should_update)
  848. insn.modrm().write32(cpu, insn, result);
  849. }
// Stamps out the four handler overloads (RM32/reg32, RM16/reg16, RM32/imm8,
// RM16/imm8) for one bit-test mnemonic. update_dest is false only for BT,
// which tests the bit without writing the operand back.
#define DEFINE_GENERIC_BTx_INSN_HANDLERS(mnemonic, op, update_dest) \
    void SoftCPU::mnemonic##_RM32_reg32(const X86::Instruction& insn) { BTx_RM32_reg32<update_dest>(*this, insn, op<u32>); } \
    void SoftCPU::mnemonic##_RM16_reg16(const X86::Instruction& insn) { BTx_RM16_reg16<update_dest>(*this, insn, op<u16>); } \
    void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) { BTx_RM32_imm8<update_dest>(*this, insn, op<u32>); } \
    void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) { BTx_RM16_imm8<update_dest>(*this, insn, op<u16>); }
DEFINE_GENERIC_BTx_INSN_HANDLERS(BTS, op_bts, true);
DEFINE_GENERIC_BTx_INSN_HANDLERS(BTR, op_btr, true);
DEFINE_GENERIC_BTx_INSN_HANDLERS(BTC, op_btc, true);
DEFINE_GENERIC_BTx_INSN_HANDLERS(BT, op_bt, false);
// Far calls and 16-bit near indirect calls are not implemented yet.
void SoftCPU::CALL_FAR_mem16(const X86::Instruction&)
{
    TODO();
}
void SoftCPU::CALL_FAR_mem32(const X86::Instruction&) { TODO(); }
void SoftCPU::CALL_RM16(const X86::Instruction&) { TODO(); }
  865. void SoftCPU::CALL_RM32(const X86::Instruction& insn)
  866. {
  867. push32(eip());
  868. set_eip(insn.modrm().read32(*this, insn));
  869. }
// 16-bit and far immediate call forms are not implemented yet.
void SoftCPU::CALL_imm16(const X86::Instruction&) { TODO(); }
void SoftCPU::CALL_imm16_imm16(const X86::Instruction&) { TODO(); }
void SoftCPU::CALL_imm16_imm32(const X86::Instruction&) { TODO(); }
  873. void SoftCPU::CALL_imm32(const X86::Instruction& insn)
  874. {
  875. push32(eip());
  876. set_eip(eip() + (i32)insn.imm32());
  877. }
  878. void SoftCPU::CBW(const X86::Instruction&)
  879. {
  880. set_ah((al() & 0x80) ? 0xff : 0x00);
  881. }
  882. void SoftCPU::CDQ(const X86::Instruction&)
  883. {
  884. if (eax() & 0x80000000)
  885. set_edx(0xffffffff);
  886. else
  887. set_edx(0x00000000);
  888. }
  889. void SoftCPU::CLC(const X86::Instruction&)
  890. {
  891. set_cf(false);
  892. }
  893. void SoftCPU::CLD(const X86::Instruction&)
  894. {
  895. set_df(false);
  896. }
// Interrupt-flag and carry-complement instructions are not implemented yet.
void SoftCPU::CLI(const X86::Instruction&) { TODO(); }
void SoftCPU::CLTS(const X86::Instruction&) { TODO(); }
void SoftCPU::CMC(const X86::Instruction&) { TODO(); }
  900. void SoftCPU::CMOVcc_reg16_RM16(const X86::Instruction& insn)
  901. {
  902. if (evaluate_condition(insn.cc()))
  903. gpr16(insn.reg16()) = insn.modrm().read16(*this, insn);
  904. }
  905. void SoftCPU::CMOVcc_reg32_RM32(const X86::Instruction& insn)
  906. {
  907. if (evaluate_condition(insn.cc()))
  908. gpr32(insn.reg32()) = insn.modrm().read32(*this, insn);
  909. }
// Compare-string instructions are not implemented yet.
void SoftCPU::CMPSB(const X86::Instruction&) { TODO(); }
void SoftCPU::CMPSD(const X86::Instruction&) { TODO(); }
void SoftCPU::CMPSW(const X86::Instruction&) { TODO(); }
  913. void SoftCPU::CMPXCHG_RM16_reg16(const X86::Instruction& insn)
  914. {
  915. auto current = insn.modrm().read16(*this, insn);
  916. if (current == eax()) {
  917. set_zf(true);
  918. insn.modrm().write16(*this, insn, gpr16(insn.reg16()));
  919. } else {
  920. set_zf(false);
  921. set_eax(current);
  922. }
  923. }
  924. void SoftCPU::CMPXCHG_RM32_reg32(const X86::Instruction& insn)
  925. {
  926. auto current = insn.modrm().read32(*this, insn);
  927. if (current == eax()) {
  928. set_zf(true);
  929. insn.modrm().write32(*this, insn, gpr32(insn.reg32()));
  930. } else {
  931. set_zf(false);
  932. set_eax(current);
  933. }
  934. }
  935. void SoftCPU::CMPXCHG_RM8_reg8(const X86::Instruction& insn)
  936. {
  937. auto current = insn.modrm().read8(*this, insn);
  938. if (current == eax()) {
  939. set_zf(true);
  940. insn.modrm().write8(*this, insn, gpr8(insn.reg8()));
  941. } else {
  942. set_zf(false);
  943. set_eax(current);
  944. }
  945. }
// CPUID is not implemented yet.
void SoftCPU::CPUID(const X86::Instruction&) { TODO(); }
  947. void SoftCPU::CWD(const X86::Instruction&)
  948. {
  949. set_dx((ax() & 0x8000) ? 0xffff : 0x0000);
  950. }
  951. void SoftCPU::CWDE(const X86::Instruction&)
  952. {
  953. set_eax(sign_extended_to<u32>(ax()));
  954. }
// Decimal-adjust instructions are not implemented yet.
void SoftCPU::DAA(const X86::Instruction&) { TODO(); }
void SoftCPU::DAS(const X86::Instruction&) { TODO(); }
  957. void SoftCPU::DEC_RM16(const X86::Instruction& insn)
  958. {
  959. insn.modrm().write16(*this, insn, op_dec(*this, insn.modrm().read16(*this, insn)));
  960. }
  961. void SoftCPU::DEC_RM32(const X86::Instruction& insn)
  962. {
  963. insn.modrm().write32(*this, insn, op_dec(*this, insn.modrm().read32(*this, insn)));
  964. }
  965. void SoftCPU::DEC_RM8(const X86::Instruction& insn)
  966. {
  967. insn.modrm().write8(*this, insn, op_dec(*this, insn.modrm().read8(*this, insn)));
  968. }
  969. void SoftCPU::DEC_reg16(const X86::Instruction& insn)
  970. {
  971. gpr16(insn.reg16()) = op_dec(*this, gpr16(insn.reg16()));
  972. }
  973. void SoftCPU::DEC_reg32(const X86::Instruction& insn)
  974. {
  975. gpr32(insn.reg32()) = op_dec(*this, gpr32(insn.reg32()));
  976. }
// 16-bit unsigned divide is not implemented yet.
void SoftCPU::DIV_RM16(const X86::Instruction&) { TODO(); }
  978. void SoftCPU::DIV_RM32(const X86::Instruction& insn)
  979. {
  980. auto divisor = insn.modrm().read32(*this, insn);
  981. if (divisor == 0) {
  982. warn() << "Divide by zero";
  983. TODO();
  984. }
  985. u64 dividend = ((u64)edx() << 32) | eax();
  986. auto result = dividend / divisor;
  987. if (result > NumericLimits<u32>::max()) {
  988. warn() << "Divide overflow";
  989. TODO();
  990. }
  991. set_eax(result);
  992. set_edx(dividend % divisor);
  993. }
// 8-bit divide and ENTER are not implemented yet.
void SoftCPU::DIV_RM8(const X86::Instruction&) { TODO(); }
void SoftCPU::ENTER16(const X86::Instruction&) { TODO(); }
void SoftCPU::ENTER32(const X86::Instruction&) { TODO(); }
// x87 escape opcodes: no FPU emulation yet, so dump a backtrace for
// diagnosis and bail.
void SoftCPU::ESCAPE(const X86::Instruction&)
{
    dbg() << "FIXME: x87 floating-point support";
    m_emulator.dump_backtrace();
    TODO();
}
// HLT and 16-bit signed divide are not implemented yet.
void SoftCPU::HLT(const X86::Instruction&) { TODO(); }
void SoftCPU::IDIV_RM16(const X86::Instruction&) { TODO(); }
  1005. void SoftCPU::IDIV_RM32(const X86::Instruction& insn)
  1006. {
  1007. auto divisor = insn.modrm().read32(*this, insn);
  1008. if (divisor == 0) {
  1009. warn() << "Divide by zero";
  1010. TODO();
  1011. }
  1012. i64 dividend = ((i64)edx() << 32) | eax();
  1013. auto result = dividend / divisor;
  1014. if (result > NumericLimits<i32>::max()) {
  1015. warn() << "Divide overflow";
  1016. TODO();
  1017. }
  1018. set_eax(result);
  1019. set_edx(dividend % divisor);
  1020. }
// 8-bit signed divide and 16-bit one-operand IMUL are not implemented yet.
void SoftCPU::IDIV_RM8(const X86::Instruction&) { TODO(); }
void SoftCPU::IMUL_RM16(const X86::Instruction&) { TODO(); }
  1023. void SoftCPU::IMUL_RM32(const X86::Instruction& insn)
  1024. {
  1025. i64 value = op_imul<i64>(*this, insn.modrm().read32(*this, insn), eax());
  1026. set_edx(value >> 32);
  1027. set_eax(value & 0xffffffff);
  1028. }
  1029. void SoftCPU::IMUL_RM8(const X86::Instruction& insn)
  1030. {
  1031. set_ax(op_imul<i16>(*this, insn.modrm().read8(*this, insn), al()));
  1032. }
  1033. void SoftCPU::IMUL_reg16_RM16(const X86::Instruction& insn)
  1034. {
  1035. gpr16(insn.reg16()) = op_imul<i16>(*this, gpr16(insn.reg16()), insn.modrm().read16(*this, insn));
  1036. }
  1037. void SoftCPU::IMUL_reg16_RM16_imm16(const X86::Instruction& insn)
  1038. {
  1039. gpr16(insn.reg16()) = op_imul<i16>(*this, insn.modrm().read16(*this, insn), insn.imm16());
  1040. }
  1041. void SoftCPU::IMUL_reg16_RM16_imm8(const X86::Instruction& insn)
  1042. {
  1043. gpr16(insn.reg16()) = op_imul<i16>(*this, insn.modrm().read16(*this, insn), sign_extended_to<i16>(insn.imm8()));
  1044. }
  1045. void SoftCPU::IMUL_reg32_RM32(const X86::Instruction& insn)
  1046. {
  1047. gpr32(insn.reg32()) = op_imul<i32>(*this, gpr32(insn.reg32()), insn.modrm().read32(*this, insn));
  1048. }
  1049. void SoftCPU::IMUL_reg32_RM32_imm32(const X86::Instruction& insn)
  1050. {
  1051. gpr32(insn.reg32()) = op_imul<i32>(*this, insn.modrm().read32(*this, insn), insn.imm32());
  1052. }
  1053. void SoftCPU::IMUL_reg32_RM32_imm8(const X86::Instruction& insn)
  1054. {
  1055. gpr32(insn.reg32()) = op_imul<i32>(*this, insn.modrm().read32(*this, insn), sign_extended_to<i32>(insn.imm8()));
  1056. }
  1057. void SoftCPU::INC_RM16(const X86::Instruction& insn)
  1058. {
  1059. insn.modrm().write16(*this, insn, op_inc(*this, insn.modrm().read16(*this, insn)));
  1060. }
  1061. void SoftCPU::INC_RM32(const X86::Instruction& insn)
  1062. {
  1063. insn.modrm().write32(*this, insn, op_inc(*this, insn.modrm().read32(*this, insn)));
  1064. }
  1065. void SoftCPU::INC_RM8(const X86::Instruction& insn)
  1066. {
  1067. insn.modrm().write8(*this, insn, op_inc(*this, insn.modrm().read8(*this, insn)));
  1068. }
  1069. void SoftCPU::INC_reg16(const X86::Instruction& insn)
  1070. {
  1071. gpr16(insn.reg16()) = op_inc(*this, gpr16(insn.reg16()));
  1072. }
  1073. void SoftCPU::INC_reg32(const X86::Instruction& insn)
  1074. {
  1075. gpr32(insn.reg32()) = op_inc(*this, gpr32(insn.reg32()));
  1076. }
// Port-input string ops and the fixed interrupt forms are not implemented yet.
void SoftCPU::INSB(const X86::Instruction&) { TODO(); }
void SoftCPU::INSD(const X86::Instruction&) { TODO(); }
void SoftCPU::INSW(const X86::Instruction&) { TODO(); }
void SoftCPU::INT3(const X86::Instruction&) { TODO(); }
void SoftCPU::INTO(const X86::Instruction&) { TODO(); }
// Software interrupt: only the Serenity syscall vector (int 0x82) is
// supported. The syscall number is in EAX and the three arguments travel in
// EDX, ECX, EBX; the result is returned in EAX.
void SoftCPU::INT_imm8(const X86::Instruction& insn)
{
    ASSERT(insn.imm8() == 0x82);
    set_eax(m_emulator.virt_syscall(eax(), edx(), ecx(), ebx()));
}
// TLB management, port input, IRET, and the 16-bit/far jump forms are not
// implemented yet.
void SoftCPU::INVLPG(const X86::Instruction&) { TODO(); }
void SoftCPU::IN_AL_DX(const X86::Instruction&) { TODO(); }
void SoftCPU::IN_AL_imm8(const X86::Instruction&) { TODO(); }
void SoftCPU::IN_AX_DX(const X86::Instruction&) { TODO(); }
void SoftCPU::IN_AX_imm8(const X86::Instruction&) { TODO(); }
void SoftCPU::IN_EAX_DX(const X86::Instruction&) { TODO(); }
void SoftCPU::IN_EAX_imm8(const X86::Instruction&) { TODO(); }
void SoftCPU::IRET(const X86::Instruction&) { TODO(); }
void SoftCPU::JCXZ_imm8(const X86::Instruction&) { TODO(); }
void SoftCPU::JMP_FAR_mem16(const X86::Instruction&) { TODO(); }
void SoftCPU::JMP_FAR_mem32(const X86::Instruction&) { TODO(); }
void SoftCPU::JMP_RM16(const X86::Instruction&) { TODO(); }
  1099. void SoftCPU::JMP_RM32(const X86::Instruction& insn)
  1100. {
  1101. set_eip(insn.modrm().read32(*this, insn));
  1102. }
  1103. void SoftCPU::JMP_imm16(const X86::Instruction& insn)
  1104. {
  1105. set_eip(eip() + (i16)insn.imm16());
  1106. }
// Direct far jumps are not implemented yet.
void SoftCPU::JMP_imm16_imm16(const X86::Instruction&) { TODO(); }
void SoftCPU::JMP_imm16_imm32(const X86::Instruction&) { TODO(); }
  1109. void SoftCPU::JMP_imm32(const X86::Instruction& insn)
  1110. {
  1111. set_eip(eip() + (i32)insn.imm32());
  1112. }
  1113. void SoftCPU::JMP_short_imm8(const X86::Instruction& insn)
  1114. {
  1115. set_eip(eip() + (i8)insn.imm8());
  1116. }
  1117. void SoftCPU::Jcc_NEAR_imm(const X86::Instruction& insn)
  1118. {
  1119. if (evaluate_condition(insn.cc()))
  1120. set_eip(eip() + (i32)insn.imm32());
  1121. }
  1122. void SoftCPU::Jcc_imm8(const X86::Instruction& insn)
  1123. {
  1124. if (evaluate_condition(insn.cc()))
  1125. set_eip(eip() + (i8)insn.imm8());
  1126. }
// LAHF, descriptor-access and far-pointer loads, and 16-bit LEAVE are not
// implemented yet.
void SoftCPU::LAHF(const X86::Instruction&) { TODO(); }
void SoftCPU::LAR_reg16_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::LAR_reg32_RM32(const X86::Instruction&) { TODO(); }
void SoftCPU::LDS_reg16_mem16(const X86::Instruction&) { TODO(); }
void SoftCPU::LDS_reg32_mem32(const X86::Instruction&) { TODO(); }
void SoftCPU::LEAVE16(const X86::Instruction&) { TODO(); }
  1133. void SoftCPU::LEAVE32(const X86::Instruction&)
  1134. {
  1135. u32 new_ebp = read_memory32({ ss(), ebp() });
  1136. set_esp(ebp() + 4);
  1137. set_ebp(new_ebp);
  1138. }
  1139. void SoftCPU::LEA_reg16_mem16(const X86::Instruction& insn)
  1140. {
  1141. gpr16(insn.reg16()) = insn.modrm().resolve(*this, insn).offset();
  1142. }
  1143. void SoftCPU::LEA_reg32_mem32(const X86::Instruction& insn)
  1144. {
  1145. gpr32(insn.reg32()) = insn.modrm().resolve(*this, insn).offset();
  1146. }
// Far-pointer loads, descriptor-table loads, load-string ops, LOOP variants,
// and segment-limit instructions are not implemented yet.
void SoftCPU::LES_reg16_mem16(const X86::Instruction&) { TODO(); }
void SoftCPU::LES_reg32_mem32(const X86::Instruction&) { TODO(); }
void SoftCPU::LFS_reg16_mem16(const X86::Instruction&) { TODO(); }
void SoftCPU::LFS_reg32_mem32(const X86::Instruction&) { TODO(); }
void SoftCPU::LGDT(const X86::Instruction&) { TODO(); }
void SoftCPU::LGS_reg16_mem16(const X86::Instruction&) { TODO(); }
void SoftCPU::LGS_reg32_mem32(const X86::Instruction&) { TODO(); }
void SoftCPU::LIDT(const X86::Instruction&) { TODO(); }
void SoftCPU::LLDT_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::LMSW_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::LODSB(const X86::Instruction&) { TODO(); }
void SoftCPU::LODSD(const X86::Instruction&) { TODO(); }
void SoftCPU::LODSW(const X86::Instruction&) { TODO(); }
void SoftCPU::LOOPNZ_imm8(const X86::Instruction&) { TODO(); }
void SoftCPU::LOOPZ_imm8(const X86::Instruction&) { TODO(); }
void SoftCPU::LOOP_imm8(const X86::Instruction&) { TODO(); }
void SoftCPU::LSL_reg16_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::LSL_reg32_RM32(const X86::Instruction&) { TODO(); }
void SoftCPU::LSS_reg16_mem16(const X86::Instruction&) { TODO(); }
void SoftCPU::LSS_reg32_mem32(const X86::Instruction&) { TODO(); }
void SoftCPU::LTR_RM16(const X86::Instruction&) { TODO(); }
// Byte string move: [src_segment:(E)SI] -> [ES:(E)DI], stepping by +/-1
// depending on the direction flag; do_once_or_repeat handles any REP prefix.
// The source segment defaults to DS but honors a segment-override prefix.
void SoftCPU::MOVSB(const X86::Instruction& insn)
{
    auto src_segment = segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS));
    if (insn.has_address_size_override_prefix()) {
        // 0x67 prefix: 16-bit addressing, index with SI/DI.
        do_once_or_repeat<false>(insn, [&] {
            auto src = read_memory8({ src_segment, si() });
            write_memory8({ es(), di() }, src);
            set_di(di() + (df() ? -1 : 1));
            set_si(si() + (df() ? -1 : 1));
        });
    } else {
        do_once_or_repeat<false>(insn, [&] {
            auto src = read_memory8({ src_segment, esi() });
            write_memory8({ es(), edi() }, src);
            set_edi(edi() + (df() ? -1 : 1));
            set_esi(esi() + (df() ? -1 : 1));
        });
    }
}
// Dword string move: same scheme as MOVSB but 32-bit elements, stepping by
// +/-4 according to the direction flag.
void SoftCPU::MOVSD(const X86::Instruction& insn)
{
    auto src_segment = segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS));
    if (insn.has_address_size_override_prefix()) {
        // 0x67 prefix: 16-bit addressing, index with SI/DI.
        do_once_or_repeat<false>(insn, [&] {
            auto src = read_memory32({ src_segment, si() });
            write_memory32({ es(), di() }, src);
            set_di(di() + (df() ? -4 : 4));
            set_si(si() + (df() ? -4 : 4));
        });
    } else {
        do_once_or_repeat<false>(insn, [&] {
            auto src = read_memory32({ src_segment, esi() });
            write_memory32({ es(), edi() }, src);
            set_edi(edi() + (df() ? -4 : 4));
            set_esi(esi() + (df() ? -4 : 4));
        });
    }
}
// Word string move: same scheme as MOVSB but 16-bit elements, stepping by
// +/-2 according to the direction flag.
void SoftCPU::MOVSW(const X86::Instruction& insn)
{
    auto src_segment = segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS));
    if (insn.has_address_size_override_prefix()) {
        // 0x67 prefix: 16-bit addressing, index with SI/DI.
        do_once_or_repeat<false>(insn, [&] {
            auto src = read_memory16({ src_segment, si() });
            write_memory16({ es(), di() }, src);
            set_di(di() + (df() ? -2 : 2));
            set_si(si() + (df() ? -2 : 2));
        });
    } else {
        do_once_or_repeat<false>(insn, [&] {
            auto src = read_memory16({ src_segment, esi() });
            write_memory16({ es(), edi() }, src);
            set_edi(edi() + (df() ? -2 : 2));
            set_esi(esi() + (df() ? -2 : 2));
        });
    }
}
  1225. void SoftCPU::MOVSX_reg16_RM8(const X86::Instruction& insn)
  1226. {
  1227. gpr16(insn.reg16()) = sign_extended_to<u16>(insn.modrm().read8(*this, insn));
  1228. }
  1229. void SoftCPU::MOVSX_reg32_RM16(const X86::Instruction& insn)
  1230. {
  1231. gpr32(insn.reg32()) = sign_extended_to<u32>(insn.modrm().read16(*this, insn));
  1232. }
  1233. void SoftCPU::MOVSX_reg32_RM8(const X86::Instruction& insn)
  1234. {
  1235. gpr32(insn.reg32()) = sign_extended_to<u32>(insn.modrm().read8(*this, insn));
  1236. }
  1237. void SoftCPU::MOVZX_reg16_RM8(const X86::Instruction& insn)
  1238. {
  1239. gpr16(insn.reg16()) = insn.modrm().read8(*this, insn);
  1240. }
  1241. void SoftCPU::MOVZX_reg32_RM16(const X86::Instruction& insn)
  1242. {
  1243. gpr32(insn.reg32()) = insn.modrm().read16(*this, insn);
  1244. }
  1245. void SoftCPU::MOVZX_reg32_RM8(const X86::Instruction& insn)
  1246. {
  1247. gpr32(insn.reg32()) = insn.modrm().read8(*this, insn);
  1248. }
  1249. void SoftCPU::MOV_AL_moff8(const X86::Instruction& insn)
  1250. {
  1251. set_al(read_memory8({ segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS)), insn.imm_address() }));
  1252. }
  1253. void SoftCPU::MOV_AX_moff16(const X86::Instruction& insn)
  1254. {
  1255. set_ax(read_memory16({ segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS)), insn.imm_address() }));
  1256. }
// Control/debug register moves are not implemented yet.
void SoftCPU::MOV_CR_reg32(const X86::Instruction&) { TODO(); }
void SoftCPU::MOV_DR_reg32(const X86::Instruction&) { TODO(); }
  1259. void SoftCPU::MOV_EAX_moff32(const X86::Instruction& insn)
  1260. {
  1261. set_eax(read_memory32({ segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS)), insn.imm_address() }));
  1262. }
  1263. void SoftCPU::MOV_RM16_imm16(const X86::Instruction& insn)
  1264. {
  1265. insn.modrm().write16(*this, insn, insn.imm16());
  1266. }
  1267. void SoftCPU::MOV_RM16_reg16(const X86::Instruction& insn)
  1268. {
  1269. insn.modrm().write16(*this, insn, gpr16(insn.reg16()));
  1270. }
// Segment-register stores are not implemented yet.
void SoftCPU::MOV_RM16_seg(const X86::Instruction&) { TODO(); }
  1272. void SoftCPU::MOV_RM32_imm32(const X86::Instruction& insn)
  1273. {
  1274. insn.modrm().write32(*this, insn, insn.imm32());
  1275. }
  1276. void SoftCPU::MOV_RM32_reg32(const X86::Instruction& insn)
  1277. {
  1278. insn.modrm().write32(*this, insn, gpr32(insn.reg32()));
  1279. }
  1280. void SoftCPU::MOV_RM8_imm8(const X86::Instruction& insn)
  1281. {
  1282. insn.modrm().write8(*this, insn, insn.imm8());
  1283. }
  1284. void SoftCPU::MOV_RM8_reg8(const X86::Instruction& insn)
  1285. {
  1286. insn.modrm().write8(*this, insn, gpr8(insn.reg8()));
  1287. }
  1288. void SoftCPU::MOV_moff16_AX(const X86::Instruction& insn)
  1289. {
  1290. write_memory16({ segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS)), insn.imm_address() }, ax());
  1291. }
  1292. void SoftCPU::MOV_moff32_EAX(const X86::Instruction& insn)
  1293. {
  1294. write_memory32({ segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS)), insn.imm_address() }, eax());
  1295. }
  1296. void SoftCPU::MOV_moff8_AL(const X86::Instruction& insn)
  1297. {
  1298. write_memory8({ segment(insn.segment_prefix().value_or(X86::SegmentRegister::DS)), insn.imm_address() }, al());
  1299. }
  1300. void SoftCPU::MOV_reg16_RM16(const X86::Instruction& insn)
  1301. {
  1302. gpr16(insn.reg16()) = insn.modrm().read16(*this, insn);
  1303. }
  1304. void SoftCPU::MOV_reg16_imm16(const X86::Instruction& insn)
  1305. {
  1306. gpr16(insn.reg16()) = insn.imm16();
  1307. }
// Control/debug register reads are not implemented yet.
void SoftCPU::MOV_reg32_CR(const X86::Instruction&) { TODO(); }
void SoftCPU::MOV_reg32_DR(const X86::Instruction&) { TODO(); }
  1310. void SoftCPU::MOV_reg32_RM32(const X86::Instruction& insn)
  1311. {
  1312. gpr32(insn.reg32()) = insn.modrm().read32(*this, insn);
  1313. }
  1314. void SoftCPU::MOV_reg32_imm32(const X86::Instruction& insn)
  1315. {
  1316. gpr32(insn.reg32()) = insn.imm32();
  1317. }
  1318. void SoftCPU::MOV_reg8_RM8(const X86::Instruction& insn)
  1319. {
  1320. gpr8(insn.reg8()) = insn.modrm().read8(*this, insn);
  1321. }
  1322. void SoftCPU::MOV_reg8_imm8(const X86::Instruction& insn)
  1323. {
  1324. gpr8(insn.reg8()) = insn.imm8();
  1325. }
// Segment-register loads and 16-bit one-operand MUL are not implemented yet.
void SoftCPU::MOV_seg_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::MOV_seg_RM32(const X86::Instruction&) { TODO(); }
void SoftCPU::MUL_RM16(const X86::Instruction&) { TODO(); }
  1329. void SoftCPU::MUL_RM32(const X86::Instruction& insn)
  1330. {
  1331. u64 result = (u64)eax() * (u64)insn.modrm().read32(*this, insn);
  1332. set_eax(result & 0xffffffff);
  1333. set_edx(result >> 32);
  1334. set_cf(edx() != 0);
  1335. set_of(edx() != 0);
  1336. }
// 8-bit one-operand MUL is not implemented yet.
void SoftCPU::MUL_RM8(const X86::Instruction&) { TODO(); }
  1338. void SoftCPU::NEG_RM16(const X86::Instruction& insn)
  1339. {
  1340. insn.modrm().write16(*this, insn, op_sub<u16>(*this, 0, insn.modrm().read16(*this, insn)));
  1341. }
  1342. void SoftCPU::NEG_RM32(const X86::Instruction& insn)
  1343. {
  1344. insn.modrm().write32(*this, insn, op_sub<u32>(*this, 0, insn.modrm().read32(*this, insn)));
  1345. }
  1346. void SoftCPU::NEG_RM8(const X86::Instruction& insn)
  1347. {
  1348. insn.modrm().write8(*this, insn, op_sub<u8>(*this, 0, insn.modrm().read8(*this, insn)));
  1349. }
  1350. void SoftCPU::NOP(const X86::Instruction&)
  1351. {
  1352. }
  1353. void SoftCPU::NOT_RM16(const X86::Instruction& insn)
  1354. {
  1355. insn.modrm().write16(*this, insn, ~insn.modrm().read16(*this, insn));
  1356. }
  1357. void SoftCPU::NOT_RM32(const X86::Instruction& insn)
  1358. {
  1359. insn.modrm().write32(*this, insn, ~insn.modrm().read32(*this, insn));
  1360. }
  1361. void SoftCPU::NOT_RM8(const X86::Instruction& insn)
  1362. {
  1363. insn.modrm().write8(*this, insn, ~insn.modrm().read8(*this, insn));
  1364. }
// Port output, MMX packed adds, POPA(D), and 16-bit POPF are not
// implemented yet.
void SoftCPU::OUTSB(const X86::Instruction&) { TODO(); }
void SoftCPU::OUTSD(const X86::Instruction&) { TODO(); }
void SoftCPU::OUTSW(const X86::Instruction&) { TODO(); }
void SoftCPU::OUT_DX_AL(const X86::Instruction&) { TODO(); }
void SoftCPU::OUT_DX_AX(const X86::Instruction&) { TODO(); }
void SoftCPU::OUT_DX_EAX(const X86::Instruction&) { TODO(); }
void SoftCPU::OUT_imm8_AL(const X86::Instruction&) { TODO(); }
void SoftCPU::OUT_imm8_AX(const X86::Instruction&) { TODO(); }
void SoftCPU::OUT_imm8_EAX(const X86::Instruction&) { TODO(); }
void SoftCPU::PADDB_mm1_mm2m64(const X86::Instruction&) { TODO(); }
void SoftCPU::PADDW_mm1_mm2m64(const X86::Instruction&) { TODO(); }
void SoftCPU::PADDD_mm1_mm2m64(const X86::Instruction&) { TODO(); }
void SoftCPU::POPA(const X86::Instruction&) { TODO(); }
void SoftCPU::POPAD(const X86::Instruction&) { TODO(); }
void SoftCPU::POPF(const X86::Instruction&) { TODO(); }
  1380. void SoftCPU::POPFD(const X86::Instruction&)
  1381. {
  1382. m_eflags &= ~0x00fcffff;
  1383. m_eflags |= pop32() & 0x00fcffff;
  1384. }
// Segment pops, r/m pops, and 16-bit register pops are not implemented yet.
void SoftCPU::POP_DS(const X86::Instruction&) { TODO(); }
void SoftCPU::POP_ES(const X86::Instruction&) { TODO(); }
void SoftCPU::POP_FS(const X86::Instruction&) { TODO(); }
void SoftCPU::POP_GS(const X86::Instruction&) { TODO(); }
void SoftCPU::POP_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::POP_RM32(const X86::Instruction&) { TODO(); }
void SoftCPU::POP_SS(const X86::Instruction&) { TODO(); }
void SoftCPU::POP_reg16(const X86::Instruction&) { TODO(); }
  1393. void SoftCPU::POP_reg32(const X86::Instruction& insn)
  1394. {
  1395. gpr32(insn.reg32()) = pop32();
  1396. }
// PUSHA(D) and 16-bit PUSHF are not implemented yet.
void SoftCPU::PUSHA(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSHAD(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSHF(const X86::Instruction&) { TODO(); }
  1400. void SoftCPU::PUSHFD(const X86::Instruction&)
  1401. {
  1402. push32(m_eflags & 0x00fcffff);
  1403. }
// Segment pushes and 16-bit r/m pushes are not implemented yet.
void SoftCPU::PUSH_CS(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSH_DS(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSH_ES(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSH_FS(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSH_GS(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSH_RM16(const X86::Instruction&) { TODO(); }
  1410. void SoftCPU::PUSH_RM32(const X86::Instruction& insn)
  1411. {
  1412. push32(insn.modrm().read32(*this, insn));
  1413. }
// Legacy PUSH SP quirk, PUSH SS, and 16-bit immediate pushes are not
// implemented yet.
void SoftCPU::PUSH_SP_8086_80186(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSH_SS(const X86::Instruction&) { TODO(); }
void SoftCPU::PUSH_imm16(const X86::Instruction&) { TODO(); }
  1417. void SoftCPU::PUSH_imm32(const X86::Instruction& insn)
  1418. {
  1419. push32(insn.imm32());
  1420. }
  1421. void SoftCPU::PUSH_imm8(const X86::Instruction& insn)
  1422. {
  1423. ASSERT(!insn.has_operand_size_override_prefix());
  1424. push32(sign_extended_to<i32>(insn.imm8()));
  1425. }
// 16-bit register pushes are not implemented yet.
void SoftCPU::PUSH_reg16(const X86::Instruction&) { TODO(); }
// PUSH r32, doubling as the receive channel of the emulator's "secret
// handshake": once the handshake has advanced to state 2, the next three
// pushed register values are captured into m_secret_data, after which the
// state machine resets and did_receive_secret_data() delivers the payload.
void SoftCPU::PUSH_reg32(const X86::Instruction& insn)
{
    push32(gpr32(insn.reg32()));
    // Capture handshake payload words (states 2 and 3 advance, 4 completes).
    if (m_secret_handshake_state == 2) {
        m_secret_data[0] = gpr32(insn.reg32());
        ++m_secret_handshake_state;
    } else if (m_secret_handshake_state == 3) {
        m_secret_data[1] = gpr32(insn.reg32());
        ++m_secret_handshake_state;
    } else if (m_secret_handshake_state == 4) {
        m_secret_data[2] = gpr32(insn.reg32());
        m_secret_handshake_state = 0;
        did_receive_secret_data();
    }
}
// Rotate-through-carry left, delegated to the host CPU's rcl instruction.
// The incoming carry flag is a template parameter so it can be materialized
// on the host with stc/clc immediately before the rotate.
template<typename T, bool cf>
ALWAYS_INLINE static T op_rcl_impl(SoftCPU& cpu, T data, u8 steps)
{
    // A zero rotate count leaves both the data and the flags untouched.
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    // Seed the host carry flag with the emulated CF.
    if constexpr (cf)
        asm volatile("stc");
    else
        asm volatile("clc");
    // Select the host rotate matching the operand width (1/2/4 bytes).
    if constexpr (sizeof(T) == 4) {
        asm volatile("rcll %%cl, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("rclw %%cl, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("rclb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    }
    // Capture the host EFLAGS produced by the rotate via pushf/pop.
    // NOTE(review): this assumes the compiler emits nothing flag-clobbering
    // between the rotate above and the pushf below — confirm.
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    // Per its name, set_flags_oc presumably applies only OF/CF from the
    // captured image — the flags RCL defines.
    cpu.set_flags_oc(new_flags);
    return result;
}
  1473. template<typename T>
  1474. ALWAYS_INLINE static T op_rcl(SoftCPU& cpu, T data, u8 steps)
  1475. {
  1476. if (cpu.cf())
  1477. return op_rcl_impl<T, true>(cpu, data, steps);
  1478. return op_rcl_impl<T, false>(cpu, data, steps);
  1479. }
  1480. DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(RCL, op_rcl)
// Rotate-through-carry right, delegated to the host CPU's rcr instruction.
// Mirrors op_rcl_impl: the incoming carry flag is a template parameter so it
// can be materialized with stc/clc just before the rotate.
template<typename T, bool cf>
ALWAYS_INLINE static T op_rcr_impl(SoftCPU& cpu, T data, u8 steps)
{
    // A zero rotate count leaves both the data and the flags untouched.
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    // Seed the host carry flag with the emulated CF.
    if constexpr (cf)
        asm volatile("stc");
    else
        asm volatile("clc");
    // Select the host rotate matching the operand width (1/2/4 bytes).
    if constexpr (sizeof(T) == 4) {
        asm volatile("rcrl %%cl, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("rcrw %%cl, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("rcrb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    }
    // Capture the host EFLAGS produced by the rotate via pushf/pop.
    // NOTE(review): assumes no flag-clobbering code is scheduled between the
    // rotate and the pushf — confirm.
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    // Per its name, set_flags_oc presumably applies only OF/CF.
    cpu.set_flags_oc(new_flags);
    return result;
}
  1512. template<typename T>
  1513. ALWAYS_INLINE static T op_rcr(SoftCPU& cpu, T data, u8 steps)
  1514. {
  1515. if (cpu.cf())
  1516. return op_rcr_impl<T, true>(cpu, data, steps);
  1517. return op_rcr_impl<T, false>(cpu, data, steps);
  1518. }
  1519. DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(RCR, op_rcr)
// Unimplemented timestamp-counter read.
void SoftCPU::RDTSC(const X86::Instruction&) { TODO(); }
  1521. void SoftCPU::RET(const X86::Instruction& insn)
  1522. {
  1523. ASSERT(!insn.has_operand_size_override_prefix());
  1524. set_eip(pop32());
  1525. }
// Unimplemented far returns.
void SoftCPU::RETF(const X86::Instruction&) { TODO(); }
void SoftCPU::RETF_imm16(const X86::Instruction&) { TODO(); }
  1528. void SoftCPU::RET_imm16(const X86::Instruction& insn)
  1529. {
  1530. ASSERT(!insn.has_operand_size_override_prefix());
  1531. set_eip(pop32());
  1532. set_esp(esp() + insn.imm16());
  1533. }
// Rotate left, delegated to the host CPU's rol instruction.
template<typename T>
ALWAYS_INLINE static T op_rol(SoftCPU& cpu, T data, u8 steps)
{
    // A zero rotate count leaves both the data and the flags untouched.
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    // Select the host rotate matching the operand width (1/2/4 bytes).
    if constexpr (sizeof(T) == 4) {
        asm volatile("roll %%cl, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("rolw %%cl, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("rolb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    }
    // Capture the host EFLAGS produced by the rotate via pushf/pop.
    // NOTE(review): assumes nothing flag-clobbering runs between the rotate
    // and the pushf — confirm.
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    // Per its name, set_flags_oc presumably applies only OF/CF.
    cpu.set_flags_oc(new_flags);
    return result;
}
DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(ROL, op_rol)
// Rotate right, delegated to the host CPU's ror instruction.
template<typename T>
ALWAYS_INLINE static T op_ror(SoftCPU& cpu, T data, u8 steps)
{
    // A zero rotate count leaves both the data and the flags untouched.
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    // Select the host rotate matching the operand width (1/2/4 bytes).
    if constexpr (sizeof(T) == 4) {
        asm volatile("rorl %%cl, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("rorw %%cl, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("rorb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    }
    // Capture the host EFLAGS produced by the rotate via pushf/pop.
    // NOTE(review): assumes nothing flag-clobbering runs between the rotate
    // and the pushf — confirm.
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    // Per its name, set_flags_oc presumably applies only OF/CF.
    cpu.set_flags_oc(new_flags);
    return result;
}
DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(ROR, op_ror)
// Unimplemented store-AH-into-flags.
void SoftCPU::SAHF(const X86::Instruction&) { TODO(); }
  1591. void SoftCPU::SALC(const X86::Instruction&)
  1592. {
  1593. set_al(cf() ? 0xff : 0x00);
  1594. if (m_secret_handshake_state < 2)
  1595. ++m_secret_handshake_state;
  1596. else
  1597. m_secret_handshake_state = 0;
  1598. }
// Arithmetic shift right, delegated to the host CPU's sar instruction.
template<typename T>
static T op_sar(SoftCPU& cpu, T data, u8 steps)
{
    // A zero shift count leaves both the data and the flags untouched.
    if (steps == 0)
        return data;
    u32 result = 0;
    u32 new_flags = 0;
    // Select the host shift matching the operand width (1/2/4 bytes).
    if constexpr (sizeof(T) == 4) {
        asm volatile("sarl %%cl, %%eax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 2) {
        asm volatile("sarw %%cl, %%ax\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    } else if constexpr (sizeof(T) == 1) {
        asm volatile("sarb %%cl, %%al\n"
                     : "=a"(result)
                     : "a"(data), "c"(steps));
    }
    // Capture the host EFLAGS produced by the shift via pushf/pop.
    // NOTE(review): assumes nothing flag-clobbering runs between the shift
    // and the pushf — confirm.
    asm volatile(
        "pushf\n"
        "pop %%ebx"
        : "=b"(new_flags));
    // Unlike the rotates, SAR updates the full arithmetic flag set here
    // (per the name: OF/SF/ZF/AF/PF/CF).
    cpu.set_flags_oszapc(new_flags);
    return result;
}
DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(SAR, op_sar)
// Unimplemented string-scan instructions.
void SoftCPU::SCASB(const X86::Instruction&) { TODO(); }
void SoftCPU::SCASD(const X86::Instruction&) { TODO(); }
void SoftCPU::SCASW(const X86::Instruction&) { TODO(); }
  1630. void SoftCPU::SETcc_RM8(const X86::Instruction& insn)
  1631. {
  1632. insn.modrm().write8(*this, insn, evaluate_condition(insn.cc()));
  1633. }
// Unimplemented store-GDT-register.
void SoftCPU::SGDT(const X86::Instruction&) { TODO(); }
  1635. void SoftCPU::SHLD_RM16_reg16_CL(const X86::Instruction& insn)
  1636. {
  1637. insn.modrm().write16(*this, insn, op_shld(*this, insn.modrm().read16(*this, insn), gpr16(insn.reg16()), cl()));
  1638. }
  1639. void SoftCPU::SHLD_RM16_reg16_imm8(const X86::Instruction& insn)
  1640. {
  1641. insn.modrm().write16(*this, insn, op_shld(*this, insn.modrm().read16(*this, insn), gpr16(insn.reg16()), insn.imm8()));
  1642. }
  1643. void SoftCPU::SHLD_RM32_reg32_CL(const X86::Instruction& insn)
  1644. {
  1645. insn.modrm().write32(*this, insn, op_shld(*this, insn.modrm().read32(*this, insn), gpr32(insn.reg32()), cl()));
  1646. }
  1647. void SoftCPU::SHLD_RM32_reg32_imm8(const X86::Instruction& insn)
  1648. {
  1649. insn.modrm().write32(*this, insn, op_shld(*this, insn.modrm().read32(*this, insn), gpr32(insn.reg32()), insn.imm8()));
  1650. }
  1651. DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(SHL, op_shl)
  1652. void SoftCPU::SHRD_RM16_reg16_CL(const X86::Instruction& insn)
  1653. {
  1654. insn.modrm().write16(*this, insn, op_shrd(*this, insn.modrm().read16(*this, insn), gpr16(insn.reg16()), cl()));
  1655. }
  1656. void SoftCPU::SHRD_RM16_reg16_imm8(const X86::Instruction& insn)
  1657. {
  1658. insn.modrm().write16(*this, insn, op_shrd(*this, insn.modrm().read16(*this, insn), gpr16(insn.reg16()), insn.imm8()));
  1659. }
  1660. void SoftCPU::SHRD_RM32_reg32_CL(const X86::Instruction& insn)
  1661. {
  1662. insn.modrm().write32(*this, insn, op_shrd(*this, insn.modrm().read32(*this, insn), gpr32(insn.reg32()), cl()));
  1663. }
  1664. void SoftCPU::SHRD_RM32_reg32_imm8(const X86::Instruction& insn)
  1665. {
  1666. insn.modrm().write32(*this, insn, op_shrd(*this, insn.modrm().read32(*this, insn), gpr32(insn.reg32()), insn.imm8()));
  1667. }
  1668. DEFINE_GENERIC_SHIFT_ROTATE_INSN_HANDLERS(SHR, op_shr)
// Unimplemented descriptor-table / machine-status stores.
void SoftCPU::SIDT(const X86::Instruction&) { TODO(); }
void SoftCPU::SLDT_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::SMSW_RM16(const X86::Instruction&) { TODO(); }
// STC: set the carry flag.
void SoftCPU::STC(const X86::Instruction&)
{
    set_cf(true);
}
// STD: set the direction flag (string ops will decrement).
void SoftCPU::STD(const X86::Instruction&)
{
    set_df(true);
}
// STI (enable interrupts) is not implemented.
void SoftCPU::STI(const X86::Instruction&) { TODO(); }
  1681. void SoftCPU::STOSB(const X86::Instruction& insn)
  1682. {
  1683. if (insn.has_address_size_override_prefix()) {
  1684. do_once_or_repeat<false>(insn, [&] {
  1685. write_memory8({ es(), di() }, al());
  1686. set_di(di() + (df() ? -1 : 1));
  1687. });
  1688. } else {
  1689. do_once_or_repeat<false>(insn, [&] {
  1690. write_memory8({ es(), edi() }, al());
  1691. set_edi(edi() + (df() ? -1 : 1));
  1692. });
  1693. }
  1694. }
  1695. void SoftCPU::STOSD(const X86::Instruction& insn)
  1696. {
  1697. if (insn.has_address_size_override_prefix()) {
  1698. do_once_or_repeat<false>(insn, [&] {
  1699. write_memory32({ es(), di() }, eax());
  1700. set_di(di() + (df() ? -4 : 4));
  1701. });
  1702. } else {
  1703. do_once_or_repeat<false>(insn, [&] {
  1704. write_memory32({ es(), edi() }, eax());
  1705. set_edi(edi() + (df() ? -4 : 4));
  1706. });
  1707. }
  1708. }
  1709. void SoftCPU::STOSW(const X86::Instruction& insn)
  1710. {
  1711. if (insn.has_address_size_override_prefix()) {
  1712. do_once_or_repeat<false>(insn, [&] {
  1713. write_memory16({ es(), di() }, ax());
  1714. set_di(di() + (df() ? -2 : 2));
  1715. });
  1716. } else {
  1717. do_once_or_repeat<false>(insn, [&] {
  1718. write_memory16({ es(), edi() }, ax());
  1719. set_edi(edi() + (df() ? -2 : 2));
  1720. });
  1721. }
  1722. }
// Unimplemented system / invalid-opcode / miscellaneous instructions.
void SoftCPU::STR_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::UD0(const X86::Instruction&) { TODO(); }
void SoftCPU::UD1(const X86::Instruction&) { TODO(); }
void SoftCPU::UD2(const X86::Instruction&) { TODO(); }
void SoftCPU::VERR_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::VERW_RM16(const X86::Instruction&) { TODO(); }
void SoftCPU::WAIT(const X86::Instruction&) { TODO(); }
void SoftCPU::WBINVD(const X86::Instruction&) { TODO(); }
  1731. void SoftCPU::XADD_RM16_reg16(const X86::Instruction& insn)
  1732. {
  1733. auto dest = insn.modrm().read16(*this, insn);
  1734. auto src = gpr16(insn.reg16());
  1735. auto result = op_add(*this, dest, src);
  1736. gpr16(insn.reg16()) = dest;
  1737. insn.modrm().write16(*this, insn, result);
  1738. }
  1739. void SoftCPU::XADD_RM32_reg32(const X86::Instruction& insn)
  1740. {
  1741. auto dest = insn.modrm().read32(*this, insn);
  1742. auto src = gpr32(insn.reg32());
  1743. auto result = op_add(*this, dest, src);
  1744. gpr32(insn.reg32()) = dest;
  1745. insn.modrm().write32(*this, insn, result);
  1746. }
  1747. void SoftCPU::XADD_RM8_reg8(const X86::Instruction& insn)
  1748. {
  1749. auto dest = insn.modrm().read8(*this, insn);
  1750. auto src = gpr8(insn.reg8());
  1751. auto result = op_add(*this, dest, src);
  1752. gpr8(insn.reg8()) = dest;
  1753. insn.modrm().write8(*this, insn, result);
  1754. }
  1755. void SoftCPU::XCHG_AX_reg16(const X86::Instruction& insn)
  1756. {
  1757. auto temp = gpr16(insn.reg16());
  1758. gpr16(insn.reg16()) = eax();
  1759. set_eax(temp);
  1760. }
  1761. void SoftCPU::XCHG_EAX_reg32(const X86::Instruction& insn)
  1762. {
  1763. auto temp = gpr32(insn.reg32());
  1764. gpr32(insn.reg32()) = eax();
  1765. set_eax(temp);
  1766. }
  1767. void SoftCPU::XCHG_reg16_RM16(const X86::Instruction& insn)
  1768. {
  1769. auto temp = insn.modrm().read16(*this, insn);
  1770. insn.modrm().write16(*this, insn, gpr16(insn.reg16()));
  1771. gpr16(insn.reg16()) = temp;
  1772. }
  1773. void SoftCPU::XCHG_reg32_RM32(const X86::Instruction& insn)
  1774. {
  1775. auto temp = insn.modrm().read32(*this, insn);
  1776. insn.modrm().write32(*this, insn, gpr32(insn.reg32()));
  1777. gpr32(insn.reg32()) = temp;
  1778. }
  1779. void SoftCPU::XCHG_reg8_RM8(const X86::Instruction& insn)
  1780. {
  1781. auto temp = insn.modrm().read8(*this, insn);
  1782. insn.modrm().write8(*this, insn, gpr8(insn.reg8()));
  1783. gpr8(insn.reg8()) = temp;
  1784. }
// Unimplemented table-lookup translate.
void SoftCPU::XLAT(const X86::Instruction&) { TODO(); }
// Stamps out the nine handler definitions common to every two-operand ALU
// instruction (accumulator-with-immediate plus the r/m-with-register and
// r/m-with-immediate forms). `op` is the flag-computing primitive (op_add,
// op_xor, ...) and `update_dest` selects whether the result is written back
// (true for ADD etc., false for compare-style ops like CMP/TEST).
#define DEFINE_GENERIC_INSN_HANDLERS_PARTIAL(mnemonic, op, update_dest) \
    void SoftCPU::mnemonic##_AL_imm8(const X86::Instruction& insn) { generic_AL_imm8<update_dest>(op<u8>, insn); } \
    void SoftCPU::mnemonic##_AX_imm16(const X86::Instruction& insn) { generic_AX_imm16<update_dest>(op<u16>, insn); } \
    void SoftCPU::mnemonic##_EAX_imm32(const X86::Instruction& insn) { generic_EAX_imm32<update_dest>(op<u32>, insn); } \
    void SoftCPU::mnemonic##_RM16_imm16(const X86::Instruction& insn) { generic_RM16_imm16<update_dest>(op<u16>, insn); } \
    void SoftCPU::mnemonic##_RM16_reg16(const X86::Instruction& insn) { generic_RM16_reg16<update_dest>(op<u16>, insn); } \
    void SoftCPU::mnemonic##_RM32_imm32(const X86::Instruction& insn) { generic_RM32_imm32<update_dest>(op<u32>, insn); } \
    void SoftCPU::mnemonic##_RM32_reg32(const X86::Instruction& insn) { generic_RM32_reg32<update_dest>(op<u32>, insn); } \
    void SoftCPU::mnemonic##_RM8_imm8(const X86::Instruction& insn) { generic_RM8_imm8<update_dest>(op<u8>, insn); } \
    void SoftCPU::mnemonic##_RM8_reg8(const X86::Instruction& insn) { generic_RM8_reg8<update_dest>(op<u8>, insn); }
// Full handler set: the partial forms above plus the sign-extended-imm8 and
// reg-with-r/m source forms that compare-style ops like TEST do not have.
#define DEFINE_GENERIC_INSN_HANDLERS(mnemonic, op, update_dest) \
    DEFINE_GENERIC_INSN_HANDLERS_PARTIAL(mnemonic, op, update_dest) \
    void SoftCPU::mnemonic##_RM16_imm8(const X86::Instruction& insn) { generic_RM16_imm8<update_dest>(op<u16>, insn); } \
    void SoftCPU::mnemonic##_RM32_imm8(const X86::Instruction& insn) { generic_RM32_imm8<update_dest>(op<u32>, insn); } \
    void SoftCPU::mnemonic##_reg16_RM16(const X86::Instruction& insn) { generic_reg16_RM16<update_dest>(op<u16>, insn); } \
    void SoftCPU::mnemonic##_reg32_RM32(const X86::Instruction& insn) { generic_reg32_RM32<update_dest>(op<u32>, insn); } \
    void SoftCPU::mnemonic##_reg8_RM8(const X86::Instruction& insn) { generic_reg8_RM8<update_dest>(op<u8>, insn); }
// Instantiate the ALU instruction families. CMP and TEST compute flags only
// (update_dest = false); TEST lacks the extra forms, hence PARTIAL.
DEFINE_GENERIC_INSN_HANDLERS(XOR, op_xor, true)
DEFINE_GENERIC_INSN_HANDLERS(OR, op_or, true)
DEFINE_GENERIC_INSN_HANDLERS(ADD, op_add, true)
DEFINE_GENERIC_INSN_HANDLERS(ADC, op_adc, true)
DEFINE_GENERIC_INSN_HANDLERS(SUB, op_sub, true)
DEFINE_GENERIC_INSN_HANDLERS(SBB, op_sbb, true)
DEFINE_GENERIC_INSN_HANDLERS(AND, op_and, true)
DEFINE_GENERIC_INSN_HANDLERS(CMP, op_sub, false)
DEFINE_GENERIC_INSN_HANDLERS_PARTIAL(TEST, op_and, false)
// Unimplemented MMX instructions and shift/rotate opcode-group wrappers.
void SoftCPU::MOVQ_mm1_mm2m64(const X86::Instruction&) { TODO(); }
void SoftCPU::EMMS(const X86::Instruction&) { TODO(); }
void SoftCPU::MOVQ_mm1_m64_mm2(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xC0(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xC1_16(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xC1_32(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xD0(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xD1_16(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xD1_32(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xD2(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xD3_16(const X86::Instruction&) { TODO(); }
void SoftCPU::wrap_0xD3_32(const X86::Instruction&) { TODO(); }
  1824. }