Parser.cpp

  1. /*
  2. * Copyright (c) 2021, Hunter Salyer <thefalsehonesty@gmail.com>
  3. * Copyright (c) 2022, Gregory Bertilson <zaggy1024@gmail.com>
  4. *
  5. * SPDX-License-Identifier: BSD-2-Clause
  6. */
  7. #include <AK/String.h>
  8. #include <LibGfx/Point.h>
  9. #include <LibGfx/Size.h>
  10. #include "Decoder.h"
  11. #include "Parser.h"
  12. #include "Utilities.h"
  13. namespace Video::VP9 {
  14. #define TRY_READ(expression) DECODER_TRY(DecoderErrorCategory::Corrupted, expression)
  15. Parser::Parser(Decoder& decoder)
  16. : m_probability_tables(make<ProbabilityTables>())
  17. , m_tree_parser(make<TreeParser>(*this))
  18. , m_decoder(decoder)
  19. {
  20. }
  21. Parser::~Parser()
  22. {
  23. }
  24. Vector<size_t> Parser::parse_superframe_sizes(ReadonlyBytes frame_data)
  25. {
  26. if (frame_data.size() < 1)
  27. return {};
  28. // The decoder determines the presence of a superframe by:
  29. // 1. parsing the final byte of the chunk and checking that the superframe_marker equals 0b110,
  30. // If the checks in steps 1 and 3 both pass, then the chunk is determined to contain a superframe and each
  31. // frame in the superframe is passed to the decoding process in turn.
  32. // Otherwise, the chunk is determined to not contain a superframe, and the whole chunk is passed to the
  33. // decoding process.
  34. // NOTE: Reading from span data will be quicker than spinning up a BitStream.
  35. u8 superframe_byte = frame_data[frame_data.size() - 1];
  36. // NOTE: We have to read out of the byte from the little end first, hence the padding bits in the masks below.
  37. u8 superframe_marker = superframe_byte & 0b1110'0000;
  38. if (superframe_marker == 0b1100'0000) {
  39. u8 bytes_per_framesize = ((superframe_byte >> 3) & 0b11) + 1;
  40. u8 frames_in_superframe = (superframe_byte & 0b111) + 1;
  41. // 2. setting the total size of the superframe_index SzIndex equal to 2 + NumFrames * SzBytes,
  42. size_t index_size = 2 + bytes_per_framesize * frames_in_superframe;
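// The marker byte also encodes the index layout: bits 7-5 hold the 0b110 marker,
// bits 4-3 hold SzBytes - 1 and bits 2-0 hold NumFrames - 1. For example, a marker
// byte of 0b110'01'001 describes 2 frames with 2-byte sizes, so the index occupies
// 2 + 2 * 2 = 6 bytes.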
  43. if (index_size > frame_data.size())
  44. return {};
  45. auto superframe_header_data = frame_data.data() + frame_data.size() - index_size;
  46. u8 start_superframe_byte = *(superframe_header_data++);
  47. // 3. checking that the first byte of the superframe_index matches the final byte.
  48. if (superframe_byte != start_superframe_byte)
  49. return {};
  50. Vector<size_t> result;
  51. for (u8 i = 0; i < frames_in_superframe; i++) {
  52. size_t frame_size = 0;
  53. for (u8 j = 0; j < bytes_per_framesize; j++)
  54. frame_size |= (static_cast<size_t>(*(superframe_header_data++)) << (j * 8));
  55. result.append(frame_size);
  56. }
  57. return result;
  58. }
  59. return {};
  60. }
  61. /* (6.1) */
  62. DecoderErrorOr<void> Parser::parse_frame(ReadonlyBytes frame_data)
  63. {
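// Frames are parsed in two stages: the uncompressed header is read as plain bits,
// then init_bool() starts the boolean (arithmetic) decoder over the compressed
// header before the tile data is decoded.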
  64. m_bit_stream = make<BitStream>(frame_data.data(), frame_data.size());
  65. m_syntax_element_counter = make<SyntaxElementCounter>();
  66. TRY(uncompressed_header());
  67. if (!trailing_bits())
  68. return DecoderError::corrupted("Trailing bits were non-zero"sv);
  69. if (m_header_size_in_bytes == 0)
  70. return DecoderError::corrupted("Frame header is zero-sized"sv);
  71. m_probability_tables->load_probs(m_frame_context_idx);
  72. m_probability_tables->load_probs2(m_frame_context_idx);
  73. m_syntax_element_counter->clear_counts();
  74. TRY_READ(m_bit_stream->init_bool(m_header_size_in_bytes));
  75. TRY(compressed_header());
  76. TRY_READ(m_bit_stream->exit_bool());
  77. TRY(m_decoder.allocate_buffers());
  78. TRY(decode_tiles());
  79. TRY(refresh_probs());
  80. return {};
  81. }
  82. bool Parser::trailing_bits()
  83. {
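// The headers are byte-aligned, so every padding bit up to the next byte boundary
// must be zero; a non-zero bit is treated as corruption by the caller.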
  84. while (m_bit_stream->bits_remaining() & 7u) {
  85. if (MUST(m_bit_stream->read_bit()))
  86. return false;
  87. }
  88. return true;
  89. }
  90. DecoderErrorOr<void> Parser::refresh_probs()
  91. {
  92. if (!m_error_resilient_mode && !m_frame_parallel_decoding_mode) {
  93. m_probability_tables->load_probs(m_frame_context_idx);
  94. TRY(m_decoder.adapt_coef_probs());
  95. if (!m_frame_is_intra) {
  96. m_probability_tables->load_probs2(m_frame_context_idx);
  97. TRY(m_decoder.adapt_non_coef_probs());
  98. }
  99. }
  100. if (m_refresh_frame_context)
  101. m_probability_tables->save_probs(m_frame_context_idx);
  102. return {};
  103. }
  104. DecoderErrorOr<FrameType> Parser::read_frame_type()
  105. {
  106. if (TRY_READ(m_bit_stream->read_bit()))
  107. return NonKeyFrame;
  108. return KeyFrame;
  109. }
  110. DecoderErrorOr<ColorRange> Parser::read_color_range()
  111. {
  112. if (TRY_READ(m_bit_stream->read_bit()))
  113. return ColorRange::Full;
  114. return ColorRange::Studio;
  115. }
  116. /* (6.2) */
  117. DecoderErrorOr<void> Parser::uncompressed_header()
  118. {
  119. auto frame_marker = TRY_READ(m_bit_stream->read_bits(2));
  120. if (frame_marker != 2)
  121. return DecoderError::corrupted("uncompressed_header: Frame marker must be 2"sv);
  122. auto profile_low_bit = TRY_READ(m_bit_stream->read_bit());
  123. auto profile_high_bit = TRY_READ(m_bit_stream->read_bit());
  124. m_profile = (profile_high_bit << 1u) + profile_low_bit;
  125. if (m_profile == 3 && TRY_READ(m_bit_stream->read_bit()))
  126. return DecoderError::corrupted("uncompressed_header: Profile 3 reserved bit was non-zero"sv);
  127. m_show_existing_frame = TRY_READ(m_bit_stream->read_bit());
  128. if (m_show_existing_frame) {
  129. m_frame_to_show_map_index = TRY_READ(m_bit_stream->read_bits(3));
  130. m_header_size_in_bytes = 0;
  131. m_refresh_frame_flags = 0;
  132. m_loop_filter_level = 0;
  133. return {};
  134. }
  135. m_last_frame_type = m_frame_type;
  136. m_frame_type = TRY(read_frame_type());
  137. m_show_frame = TRY_READ(m_bit_stream->read_bit());
  138. m_error_resilient_mode = TRY_READ(m_bit_stream->read_bit());
  139. if (m_frame_type == KeyFrame) {
  140. TRY(frame_sync_code());
  141. TRY(color_config());
  142. m_frame_size = TRY(frame_size());
  143. m_render_size = TRY(render_size(m_frame_size));
  144. m_refresh_frame_flags = 0xFF;
  145. m_frame_is_intra = true;
  146. } else {
  147. m_frame_is_intra = !m_show_frame && TRY_READ(m_bit_stream->read_bit());
  148. if (!m_error_resilient_mode) {
  149. m_reset_frame_context = TRY_READ(m_bit_stream->read_bits(2));
  150. } else {
  151. m_reset_frame_context = 0;
  152. }
  153. if (m_frame_is_intra) {
  154. TRY(frame_sync_code());
  155. if (m_profile > 0) {
  156. TRY(color_config());
  157. } else {
  158. m_color_space = Bt601;
  159. m_subsampling_x = true;
  160. m_subsampling_y = true;
  161. m_bit_depth = 8;
  162. }
  163. m_refresh_frame_flags = TRY_READ(m_bit_stream->read_f8());
  164. m_frame_size = TRY(frame_size());
  165. m_render_size = TRY(render_size(m_frame_size));
  166. } else {
  167. m_refresh_frame_flags = TRY_READ(m_bit_stream->read_f8());
  168. for (auto i = 0; i < 3; i++) {
  169. m_ref_frame_idx[i] = TRY_READ(m_bit_stream->read_bits(3));
  170. m_ref_frame_sign_bias[LastFrame + i] = TRY_READ(m_bit_stream->read_bit());
  171. }
  172. m_frame_size = TRY(frame_size_with_refs());
  173. m_render_size = TRY(render_size(m_frame_size));
  174. m_allow_high_precision_mv = TRY_READ(m_bit_stream->read_bit());
  175. TRY(read_interpolation_filter());
  176. }
  177. }
  178. compute_image_size();
  179. if (!m_error_resilient_mode) {
  180. m_refresh_frame_context = TRY_READ(m_bit_stream->read_bit());
  181. m_frame_parallel_decoding_mode = TRY_READ(m_bit_stream->read_bit());
  182. } else {
  183. m_refresh_frame_context = false;
  184. m_frame_parallel_decoding_mode = true;
  185. }
  186. m_frame_context_idx = TRY_READ(m_bit_stream->read_bits(2));
  187. if (m_frame_is_intra || m_error_resilient_mode) {
  188. setup_past_independence();
  189. if (m_frame_type == KeyFrame || m_error_resilient_mode || m_reset_frame_context == 3) {
  190. for (auto i = 0; i < 4; i++) {
  191. m_probability_tables->save_probs(i);
  192. }
  193. } else if (m_reset_frame_context == 2) {
  194. m_probability_tables->save_probs(m_frame_context_idx);
  195. }
  196. m_frame_context_idx = 0;
  197. }
  198. TRY(loop_filter_params());
  199. TRY(quantization_params());
  200. TRY(segmentation_params());
  201. TRY(tile_info());
  202. m_header_size_in_bytes = TRY_READ(m_bit_stream->read_f16());
  203. return {};
  204. }
  205. DecoderErrorOr<void> Parser::frame_sync_code()
  206. {
  207. if (TRY_READ(m_bit_stream->read_f8()) != 0x49)
  208. return DecoderError::corrupted("frame_sync_code: Byte 0 was not 0x49."sv);
  209. if (TRY_READ(m_bit_stream->read_f8()) != 0x83)
  210. return DecoderError::corrupted("frame_sync_code: Byte 1 was not 0x83."sv);
  211. if (TRY_READ(m_bit_stream->read_f8()) != 0x42)
  212. return DecoderError::corrupted("frame_sync_code: Byte 2 was not 0x42."sv);
  213. return {};
  214. }
  215. DecoderErrorOr<void> Parser::color_config()
  216. {
  217. if (m_profile >= 2) {
  218. m_bit_depth = TRY_READ(m_bit_stream->read_bit()) ? 12 : 10;
  219. } else {
  220. m_bit_depth = 8;
  221. }
  222. auto color_space = TRY_READ(m_bit_stream->read_bits(3));
  223. VERIFY(color_space <= RGB);
  224. m_color_space = static_cast<ColorSpace>(color_space);
  225. if (color_space != RGB) {
  226. m_color_range = TRY(read_color_range());
  227. if (m_profile == 1 || m_profile == 3) {
  228. m_subsampling_x = TRY_READ(m_bit_stream->read_bit());
  229. m_subsampling_y = TRY_READ(m_bit_stream->read_bit());
  230. if (TRY_READ(m_bit_stream->read_bit()))
  231. return DecoderError::corrupted("color_config: Subsampling reserved zero was set"sv);
  232. } else {
  233. m_subsampling_x = true;
  234. m_subsampling_y = true;
  235. }
  236. } else {
  237. m_color_range = ColorRange::Full;
  238. if (m_profile == 1 || m_profile == 3) {
  239. m_subsampling_x = false;
  240. m_subsampling_y = false;
  241. if (TRY_READ(m_bit_stream->read_bit()))
  242. return DecoderError::corrupted("color_config: RGB reserved zero was set"sv);
  243. }
  244. }
  245. return {};
  246. }
  247. DecoderErrorOr<Gfx::Size<u32>> Parser::frame_size()
  248. {
  249. return Gfx::Size<u32> { TRY_READ(m_bit_stream->read_f16()) + 1, TRY_READ(m_bit_stream->read_f16()) + 1 };
  250. }
  251. DecoderErrorOr<Gfx::Size<u32>> Parser::render_size(Gfx::Size<u32> frame_size)
  252. {
  253. if (!TRY_READ(m_bit_stream->read_bit()))
  254. return frame_size;
  255. return Gfx::Size<u32> { TRY_READ(m_bit_stream->read_f16()) + 1, TRY_READ(m_bit_stream->read_f16()) + 1 };
  256. }
  257. DecoderErrorOr<Gfx::Size<u32>> Parser::frame_size_with_refs()
  258. {
  259. Optional<Gfx::Size<u32>> size;
  260. for (auto frame_index : m_ref_frame_idx) {
  261. if (TRY_READ(m_bit_stream->read_bit())) {
  262. size.emplace(m_ref_frame_size[frame_index]);
  263. break;
  264. }
  265. }
  266. if (size.has_value())
  267. return size.value();
  268. return TRY(frame_size());
  269. }
  270. void Parser::compute_image_size()
  271. {
  272. auto new_cols = (m_frame_size.width() + 7u) >> 3u;
  273. auto new_rows = (m_frame_size.height() + 7u) >> 3u;
  274. // 7.2.6 Compute image size semantics
  275. // When compute_image_size is invoked, the following ordered steps occur:
  276. // 1. If this is the first time compute_image_size is invoked, or if either FrameWidth or FrameHeight have
  277. // changed in value compared to the previous time this function was invoked, then the segmentation map is
  278. // cleared to all zeros by setting SegmentId[ row ][ col ] equal to 0 for row = 0..MiRows-1 and col =
  279. // 0..MiCols-1.
  280. bool first_invoke = !m_mi_cols && !m_mi_rows;
  281. bool same_size = m_mi_cols == new_cols && m_mi_rows == new_rows;
  282. if (first_invoke || !same_size) {
  283. // m_segment_ids will be resized from decode_tiles() later.
  284. m_segment_ids.clear_with_capacity();
  285. }
  286. // 2. The variable UsePrevFrameMvs is set equal to 1 if all of the following conditions are true:
  287. // a. This is not the first time compute_image_size is invoked.
  288. // b. Both FrameWidth and FrameHeight have the same value compared to the previous time this function
  289. // was invoked.
  290. // c. show_frame was equal to 1 the previous time this function was invoked.
  291. // d. error_resilient_mode is equal to 0.
  292. // e. FrameIsIntra is equal to 0.
  293. // Otherwise, UsePrevFrameMvs is set equal to 0.
  294. m_use_prev_frame_mvs = !first_invoke && same_size && m_prev_show_frame && !m_error_resilient_mode && !m_frame_is_intra;
  295. m_prev_show_frame = m_show_frame;
  296. m_mi_cols = new_cols;
  297. m_mi_rows = new_rows;
  298. m_sb64_cols = (m_mi_cols + 7u) >> 3u;
  299. m_sb64_rows = (m_mi_rows + 7u) >> 3u;
  300. }
  301. DecoderErrorOr<void> Parser::read_interpolation_filter()
  302. {
  303. if (TRY_READ(m_bit_stream->read_bit())) {
  304. m_interpolation_filter = Switchable;
  305. } else {
  306. m_interpolation_filter = literal_to_type[TRY_READ(m_bit_stream->read_bits(2))];
  307. }
  308. return {};
  309. }
  310. DecoderErrorOr<void> Parser::loop_filter_params()
  311. {
  312. m_loop_filter_level = TRY_READ(m_bit_stream->read_bits(6));
  313. m_loop_filter_sharpness = TRY_READ(m_bit_stream->read_bits(3));
  314. m_loop_filter_delta_enabled = TRY_READ(m_bit_stream->read_bit());
  315. if (m_loop_filter_delta_enabled) {
  316. if (TRY_READ(m_bit_stream->read_bit())) {
  317. for (auto& loop_filter_ref_delta : m_loop_filter_ref_deltas) {
  318. if (TRY_READ(m_bit_stream->read_bit()))
  319. loop_filter_ref_delta = TRY_READ(m_bit_stream->read_s(6));
  320. }
  321. for (auto& loop_filter_mode_delta : m_loop_filter_mode_deltas) {
  322. if (TRY_READ(m_bit_stream->read_bit()))
  323. loop_filter_mode_delta = TRY_READ(m_bit_stream->read_s(6));
  324. }
  325. }
  326. }
  327. return {};
  328. }
  329. DecoderErrorOr<void> Parser::quantization_params()
  330. {
  331. m_base_q_idx = TRY_READ(m_bit_stream->read_f8());
  332. m_delta_q_y_dc = TRY(read_delta_q());
  333. m_delta_q_uv_dc = TRY(read_delta_q());
  334. m_delta_q_uv_ac = TRY(read_delta_q());
  335. m_lossless = m_base_q_idx == 0 && m_delta_q_y_dc == 0 && m_delta_q_uv_dc == 0 && m_delta_q_uv_ac == 0;
  336. return {};
  337. }
  338. DecoderErrorOr<i8> Parser::read_delta_q()
  339. {
  340. if (TRY_READ(m_bit_stream->read_bit()))
  341. return TRY_READ(m_bit_stream->read_s(4));
  342. return 0;
  343. }
  344. DecoderErrorOr<void> Parser::segmentation_params()
  345. {
  346. m_segmentation_enabled = TRY_READ(m_bit_stream->read_bit());
  347. if (!m_segmentation_enabled)
  348. return {};
  349. m_segmentation_update_map = TRY_READ(m_bit_stream->read_bit());
  350. if (m_segmentation_update_map) {
  351. for (auto& segmentation_tree_prob : m_segmentation_tree_probs)
  352. segmentation_tree_prob = TRY(read_prob());
  353. m_segmentation_temporal_update = TRY_READ(m_bit_stream->read_bit());
  354. for (auto& segmentation_pred_prob : m_segmentation_pred_prob)
  355. segmentation_pred_prob = m_segmentation_temporal_update ? TRY(read_prob()) : 255;
  356. }
  357. auto segmentation_update_data = (TRY_READ(m_bit_stream->read_bit()));
  358. if (!segmentation_update_data)
  359. return {};
  360. m_segmentation_abs_or_delta_update = TRY_READ(m_bit_stream->read_bit());
  361. for (auto i = 0; i < MAX_SEGMENTS; i++) {
  362. for (auto j = 0; j < SEG_LVL_MAX; j++) {
  363. auto feature_value = 0;
  364. auto feature_enabled = TRY_READ(m_bit_stream->read_bit());
  365. m_feature_enabled[i][j] = feature_enabled;
  366. if (feature_enabled) {
  367. auto bits_to_read = segmentation_feature_bits[j];
  368. feature_value = TRY_READ(m_bit_stream->read_bits(bits_to_read));
  369. if (segmentation_feature_signed[j]) {
  370. if (TRY_READ(m_bit_stream->read_bit()))
  371. feature_value = -feature_value;
  372. }
  373. }
  374. m_feature_data[i][j] = feature_value;
  375. }
  376. }
  377. return {};
  378. }
  379. DecoderErrorOr<u8> Parser::read_prob()
  380. {
  381. if (TRY_READ(m_bit_stream->read_bit()))
  382. return TRY_READ(m_bit_stream->read_f8());
  383. return 255;
  384. }
  385. DecoderErrorOr<void> Parser::tile_info()
  386. {
  387. auto min_log2_tile_cols = calc_min_log2_tile_cols();
  388. auto max_log2_tile_cols = calc_max_log2_tile_cols();
  389. m_tile_cols_log2 = min_log2_tile_cols;
  390. while (m_tile_cols_log2 < max_log2_tile_cols) {
  391. if (TRY_READ(m_bit_stream->read_bit()))
  392. m_tile_cols_log2++;
  393. else
  394. break;
  395. }
  396. m_tile_rows_log2 = TRY_READ(m_bit_stream->read_bit());
  397. if (m_tile_rows_log2) {
  398. m_tile_rows_log2 += TRY_READ(m_bit_stream->read_bit());
  399. }
  400. return {};
  401. }
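// The two helpers below clamp the tile column count so that each tile spans at
// least MIN_TILE_WIDTH_B64 and at most MAX_TILE_WIDTH_B64 64x64 superblocks.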
  402. u16 Parser::calc_min_log2_tile_cols()
  403. {
  404. auto min_log_2 = 0u;
  405. while ((u32)(MAX_TILE_WIDTH_B64 << min_log_2) < m_sb64_cols)
  406. min_log_2++;
  407. return min_log_2;
  408. }
  409. u16 Parser::calc_max_log2_tile_cols()
  410. {
  411. u16 max_log_2 = 1;
  412. while ((m_sb64_cols >> max_log_2) >= MIN_TILE_WIDTH_B64)
  413. max_log_2++;
  414. return max_log_2 - 1;
  415. }
  416. void Parser::setup_past_independence()
  417. {
  418. for (auto i = 0; i < 8; i++) {
  419. for (auto j = 0; j < 4; j++) {
  420. m_feature_data[i][j] = 0;
  421. m_feature_enabled[i][j] = false;
  422. }
  423. }
  424. m_segmentation_abs_or_delta_update = false;
  425. m_prev_segment_ids.clear_with_capacity();
  426. m_prev_segment_ids.resize_and_keep_capacity(m_mi_rows * m_mi_cols);
  427. m_loop_filter_delta_enabled = true;
  428. m_loop_filter_ref_deltas[IntraFrame] = 1;
  429. m_loop_filter_ref_deltas[LastFrame] = 0;
  430. m_loop_filter_ref_deltas[GoldenFrame] = -1;
  431. m_loop_filter_ref_deltas[AltRefFrame] = -1;
  432. for (auto& loop_filter_mode_delta : m_loop_filter_mode_deltas)
  433. loop_filter_mode_delta = 0;
  434. m_probability_tables->reset_probs();
  435. }
  436. DecoderErrorOr<void> Parser::compressed_header()
  437. {
  438. TRY(read_tx_mode());
  439. if (m_tx_mode == TXModeSelect)
  440. TRY(tx_mode_probs());
  441. TRY(read_coef_probs());
  442. TRY(read_skip_prob());
  443. if (!m_frame_is_intra) {
  444. TRY(read_inter_mode_probs());
  445. if (m_interpolation_filter == Switchable)
  446. TRY(read_interp_filter_probs());
  447. TRY(read_is_inter_probs());
  448. TRY(frame_reference_mode());
  449. TRY(frame_reference_mode_probs());
  450. TRY(read_y_mode_probs());
  451. TRY(read_partition_probs());
  452. TRY(mv_probs());
  453. }
  454. return {};
  455. }
  456. DecoderErrorOr<void> Parser::read_tx_mode()
  457. {
  458. if (m_lossless) {
  459. m_tx_mode = Only_4x4;
  460. } else {
  461. auto tx_mode = TRY_READ(m_bit_stream->read_literal(2));
  462. if (tx_mode == Allow_32x32)
  463. tx_mode += TRY_READ(m_bit_stream->read_literal(1));
  464. m_tx_mode = static_cast<TXMode>(tx_mode);
  465. }
  466. return {};
  467. }
  468. DecoderErrorOr<void> Parser::tx_mode_probs()
  469. {
  470. auto& tx_probs = m_probability_tables->tx_probs();
  471. for (auto i = 0; i < TX_SIZE_CONTEXTS; i++) {
  472. for (auto j = 0; j < TX_SIZES - 3; j++)
  473. tx_probs[TX_8x8][i][j] = TRY(diff_update_prob(tx_probs[TX_8x8][i][j]));
  474. }
  475. for (auto i = 0; i < TX_SIZE_CONTEXTS; i++) {
  476. for (auto j = 0; j < TX_SIZES - 2; j++)
  477. tx_probs[TX_16x16][i][j] = TRY(diff_update_prob(tx_probs[TX_16x16][i][j]));
  478. }
  479. for (auto i = 0; i < TX_SIZE_CONTEXTS; i++) {
  480. for (auto j = 0; j < TX_SIZES - 1; j++)
  481. tx_probs[TX_32x32][i][j] = TRY(diff_update_prob(tx_probs[TX_32x32][i][j]));
  482. }
  483. return {};
  484. }
  485. DecoderErrorOr<u8> Parser::diff_update_prob(u8 prob)
  486. {
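// The update flag is a boolean coded with probability 252, so most probabilities
// are left untouched; when set, a delta is read and remapped onto the old value.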
  487. auto update_prob = TRY_READ(m_bit_stream->read_bool(252));
  488. if (update_prob) {
  489. auto delta_prob = TRY(decode_term_subexp());
  490. prob = inv_remap_prob(delta_prob, prob);
  491. }
  492. return prob;
  493. }
  494. DecoderErrorOr<u8> Parser::decode_term_subexp()
  495. {
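// The delta is coded sub-exponentially: each escape bit selects a larger bucket
// (4, 4, 5 and finally 7 bits), with the last bucket optionally extended by one bit.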
  496. if (TRY_READ(m_bit_stream->read_literal(1)) == 0)
  497. return TRY_READ(m_bit_stream->read_literal(4));
  498. if (TRY_READ(m_bit_stream->read_literal(1)) == 0)
  499. return TRY_READ(m_bit_stream->read_literal(4)) + 16;
  500. if (TRY_READ(m_bit_stream->read_literal(1)) == 0)
  501. return TRY_READ(m_bit_stream->read_literal(5)) + 32;
  502. auto v = TRY_READ(m_bit_stream->read_literal(7));
  503. if (v < 65)
  504. return v + 64;
  505. return (v << 1u) - 1 + TRY_READ(m_bit_stream->read_literal(1));
  506. }
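// inv_remap_prob() and inv_recenter_nonneg() recenter the decoded delta around the
// previous probability so that small deltas stay close to it, keeping the result
// within the valid range of 1 to 255.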
  507. u8 Parser::inv_remap_prob(u8 delta_prob, u8 prob)
  508. {
  509. u8 m = prob - 1;
  510. auto v = inv_map_table[delta_prob];
  511. if ((m << 1u) <= 255)
  512. return 1 + inv_recenter_nonneg(v, m);
  513. return 255 - inv_recenter_nonneg(v, 254 - m);
  514. }
  515. u8 Parser::inv_recenter_nonneg(u8 v, u8 m)
  516. {
  517. if (v > 2 * m)
  518. return v;
  519. if (v & 1u)
  520. return m - ((v + 1u) >> 1u);
  521. return m + (v >> 1u);
  522. }
  523. DecoderErrorOr<void> Parser::read_coef_probs()
  524. {
  525. m_max_tx_size = tx_mode_to_biggest_tx_size[m_tx_mode];
  526. for (u8 tx_size = 0; tx_size <= m_max_tx_size; tx_size++) {
  527. auto update_probs = TRY_READ(m_bit_stream->read_literal(1));
  528. if (update_probs == 1) {
  529. for (auto i = 0; i < 2; i++) {
  530. for (auto j = 0; j < 2; j++) {
  531. for (auto k = 0; k < 6; k++) {
  532. auto max_l = (k == 0) ? 3 : 6;
  533. for (auto l = 0; l < max_l; l++) {
  534. for (auto m = 0; m < 3; m++) {
  535. auto& prob = m_probability_tables->coef_probs()[tx_size][i][j][k][l][m];
  536. prob = TRY(diff_update_prob(prob));
  537. }
  538. }
  539. }
  540. }
  541. }
  542. }
  543. }
  544. return {};
  545. }
  546. DecoderErrorOr<void> Parser::read_skip_prob()
  547. {
  548. for (auto i = 0; i < SKIP_CONTEXTS; i++)
  549. m_probability_tables->skip_prob()[i] = TRY(diff_update_prob(m_probability_tables->skip_prob()[i]));
  550. return {};
  551. }
  552. DecoderErrorOr<void> Parser::read_inter_mode_probs()
  553. {
  554. for (auto i = 0; i < INTER_MODE_CONTEXTS; i++) {
  555. for (auto j = 0; j < INTER_MODES - 1; j++)
  556. m_probability_tables->inter_mode_probs()[i][j] = TRY(diff_update_prob(m_probability_tables->inter_mode_probs()[i][j]));
  557. }
  558. return {};
  559. }
  560. DecoderErrorOr<void> Parser::read_interp_filter_probs()
  561. {
  562. for (auto i = 0; i < INTERP_FILTER_CONTEXTS; i++) {
  563. for (auto j = 0; j < SWITCHABLE_FILTERS - 1; j++)
  564. m_probability_tables->interp_filter_probs()[i][j] = TRY(diff_update_prob(m_probability_tables->interp_filter_probs()[i][j]));
  565. }
  566. return {};
  567. }
  568. DecoderErrorOr<void> Parser::read_is_inter_probs()
  569. {
  570. for (auto i = 0; i < IS_INTER_CONTEXTS; i++)
  571. m_probability_tables->is_inter_prob()[i] = TRY(diff_update_prob(m_probability_tables->is_inter_prob()[i]));
  572. return {};
  573. }
  574. DecoderErrorOr<void> Parser::frame_reference_mode()
  575. {
  576. auto compound_reference_allowed = false;
  577. for (size_t i = 2; i <= REFS_PER_FRAME; i++) {
  578. if (m_ref_frame_sign_bias[i] != m_ref_frame_sign_bias[1])
  579. compound_reference_allowed = true;
  580. }
  581. if (compound_reference_allowed) {
  582. auto non_single_reference = TRY_READ(m_bit_stream->read_literal(1));
  583. if (non_single_reference == 0) {
  584. m_reference_mode = SingleReference;
  585. } else {
  586. auto reference_select = TRY_READ(m_bit_stream->read_literal(1));
  587. if (reference_select == 0)
  588. m_reference_mode = CompoundReference;
  589. else
  590. m_reference_mode = ReferenceModeSelect;
  591. setup_compound_reference_mode();
  592. }
  593. } else {
  594. m_reference_mode = SingleReference;
  595. }
  596. return {};
  597. }
  598. DecoderErrorOr<void> Parser::frame_reference_mode_probs()
  599. {
  600. if (m_reference_mode == ReferenceModeSelect) {
  601. for (auto i = 0; i < COMP_MODE_CONTEXTS; i++) {
  602. auto& comp_mode_prob = m_probability_tables->comp_mode_prob();
  603. comp_mode_prob[i] = TRY(diff_update_prob(comp_mode_prob[i]));
  604. }
  605. }
  606. if (m_reference_mode != CompoundReference) {
  607. for (auto i = 0; i < REF_CONTEXTS; i++) {
  608. auto& single_ref_prob = m_probability_tables->single_ref_prob();
  609. single_ref_prob[i][0] = TRY(diff_update_prob(single_ref_prob[i][0]));
  610. single_ref_prob[i][1] = TRY(diff_update_prob(single_ref_prob[i][1]));
  611. }
  612. }
  613. if (m_reference_mode != SingleReference) {
  614. for (auto i = 0; i < REF_CONTEXTS; i++) {
  615. auto& comp_ref_prob = m_probability_tables->comp_ref_prob();
  616. comp_ref_prob[i] = TRY(diff_update_prob(comp_ref_prob[i]));
  617. }
  618. }
  619. return {};
  620. }
  621. DecoderErrorOr<void> Parser::read_y_mode_probs()
  622. {
  623. for (auto i = 0; i < BLOCK_SIZE_GROUPS; i++) {
  624. for (auto j = 0; j < INTRA_MODES - 1; j++) {
  625. auto& y_mode_probs = m_probability_tables->y_mode_probs();
  626. y_mode_probs[i][j] = TRY(diff_update_prob(y_mode_probs[i][j]));
  627. }
  628. }
  629. return {};
  630. }
  631. DecoderErrorOr<void> Parser::read_partition_probs()
  632. {
  633. for (auto i = 0; i < PARTITION_CONTEXTS; i++) {
  634. for (auto j = 0; j < PARTITION_TYPES - 1; j++) {
  635. auto& partition_probs = m_probability_tables->partition_probs();
  636. partition_probs[i][j] = TRY(diff_update_prob(partition_probs[i][j]));
  637. }
  638. }
  639. return {};
  640. }
  641. DecoderErrorOr<void> Parser::mv_probs()
  642. {
  643. for (auto j = 0; j < MV_JOINTS - 1; j++) {
  644. auto& mv_joint_probs = m_probability_tables->mv_joint_probs();
  645. mv_joint_probs[j] = TRY(update_mv_prob(mv_joint_probs[j]));
  646. }
  647. for (auto i = 0; i < 2; i++) {
  648. auto& mv_sign_prob = m_probability_tables->mv_sign_prob();
  649. mv_sign_prob[i] = TRY(update_mv_prob(mv_sign_prob[i]));
  650. for (auto j = 0; j < MV_CLASSES - 1; j++) {
  651. auto& mv_class_probs = m_probability_tables->mv_class_probs();
  652. mv_class_probs[i][j] = TRY(update_mv_prob(mv_class_probs[i][j]));
  653. }
  654. auto& mv_class0_bit_prob = m_probability_tables->mv_class0_bit_prob();
  655. mv_class0_bit_prob[i] = TRY(update_mv_prob(mv_class0_bit_prob[i]));
  656. for (auto j = 0; j < MV_OFFSET_BITS; j++) {
  657. auto& mv_bits_prob = m_probability_tables->mv_bits_prob();
  658. mv_bits_prob[i][j] = TRY(update_mv_prob(mv_bits_prob[i][j]));
  659. }
  660. }
  661. for (auto i = 0; i < 2; i++) {
  662. for (auto j = 0; j < CLASS0_SIZE; j++) {
  663. for (auto k = 0; k < MV_FR_SIZE - 1; k++) {
  664. auto& mv_class0_fr_probs = m_probability_tables->mv_class0_fr_probs();
  665. mv_class0_fr_probs[i][j][k] = TRY(update_mv_prob(mv_class0_fr_probs[i][j][k]));
  666. }
  667. }
  668. for (auto k = 0; k < MV_FR_SIZE - 1; k++) {
  669. auto& mv_fr_probs = m_probability_tables->mv_fr_probs();
  670. mv_fr_probs[i][k] = TRY(update_mv_prob(mv_fr_probs[i][k]));
  671. }
  672. }
  673. if (m_allow_high_precision_mv) {
  674. for (auto i = 0; i < 2; i++) {
  675. auto& mv_class0_hp_prob = m_probability_tables->mv_class0_hp_prob();
  676. auto& mv_hp_prob = m_probability_tables->mv_hp_prob();
  677. mv_class0_hp_prob[i] = TRY(update_mv_prob(mv_class0_hp_prob[i]));
  678. mv_hp_prob[i] = TRY(update_mv_prob(mv_hp_prob[i]));
  679. }
  680. }
  681. return {};
  682. }
  683. DecoderErrorOr<u8> Parser::update_mv_prob(u8 prob)
  684. {
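// Motion vector probabilities are updated from a 7-bit value scaled to an odd
// 8-bit probability (value * 2 + 1), gated on the same 252-probability boolean.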
  685. if (TRY_READ(m_bit_stream->read_bool(252))) {
  686. return (TRY_READ(m_bit_stream->read_literal(7)) << 1u) | 1u;
  687. }
  688. return prob;
  689. }
  690. void Parser::setup_compound_reference_mode()
  691. {
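// The two reference frames that share a sign bias become the variable references
// for compound prediction; the remaining one becomes the fixed reference.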
  692. if (m_ref_frame_sign_bias[LastFrame] == m_ref_frame_sign_bias[GoldenFrame]) {
  693. m_comp_fixed_ref = AltRefFrame;
  694. m_comp_var_ref[0] = LastFrame;
  695. m_comp_var_ref[1] = GoldenFrame;
  696. } else if (m_ref_frame_sign_bias[LastFrame] == m_ref_frame_sign_bias[AltRefFrame]) {
  697. m_comp_fixed_ref = GoldenFrame;
  698. m_comp_var_ref[0] = LastFrame;
  699. m_comp_var_ref[1] = AltRefFrame;
  700. } else {
  701. m_comp_fixed_ref = LastFrame;
  702. m_comp_var_ref[0] = GoldenFrame;
  703. m_comp_var_ref[1] = AltRefFrame;
  704. }
  705. }
  706. void Parser::cleanup_tile_allocations()
  707. {
  708. // FIXME: Is this necessary? Data should be truncated and
  709. // overwritten by the next tile.
  710. m_skips.clear_with_capacity();
  711. m_tx_sizes.clear_with_capacity();
  712. m_mi_sizes.clear_with_capacity();
  713. m_y_modes.clear_with_capacity();
  714. m_segment_ids.clear_with_capacity();
  715. m_ref_frames.clear_with_capacity();
  716. m_interp_filters.clear_with_capacity();
  717. m_mvs.clear_with_capacity();
  718. m_sub_mvs.clear_with_capacity();
  719. m_sub_modes.clear_with_capacity();
  720. }
  721. DecoderErrorOr<void> Parser::allocate_tile_data()
  722. {
  723. auto dimensions = m_mi_rows * m_mi_cols;
  724. cleanup_tile_allocations();
  725. DECODER_TRY_ALLOC(m_skips.try_resize_and_keep_capacity(dimensions));
  726. DECODER_TRY_ALLOC(m_tx_sizes.try_resize_and_keep_capacity(dimensions));
  727. DECODER_TRY_ALLOC(m_mi_sizes.try_resize_and_keep_capacity(dimensions));
  728. DECODER_TRY_ALLOC(m_y_modes.try_resize_and_keep_capacity(dimensions));
  729. DECODER_TRY_ALLOC(m_segment_ids.try_resize_and_keep_capacity(dimensions));
  730. DECODER_TRY_ALLOC(m_ref_frames.try_resize_and_keep_capacity(dimensions));
  731. DECODER_TRY_ALLOC(m_interp_filters.try_resize_and_keep_capacity(dimensions));
  732. DECODER_TRY_ALLOC(m_mvs.try_resize_and_keep_capacity(dimensions));
  733. DECODER_TRY_ALLOC(m_sub_mvs.try_resize_and_keep_capacity(dimensions));
  734. DECODER_TRY_ALLOC(m_sub_modes.try_resize_and_keep_capacity(dimensions));
  735. return {};
  736. }
  737. DecoderErrorOr<void> Parser::decode_tiles()
  738. {
  739. auto tile_cols = 1 << m_tile_cols_log2;
  740. auto tile_rows = 1 << m_tile_rows_log2;
  741. TRY(allocate_tile_data());
  742. clear_above_context();
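// Tiles are stored in raster order. Every tile except the last is prefixed with a
// 32-bit size; the last tile spans the remaining bytes of the frame data.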
  743. for (auto tile_row = 0; tile_row < tile_rows; tile_row++) {
  744. for (auto tile_col = 0; tile_col < tile_cols; tile_col++) {
  745. auto last_tile = (tile_row == tile_rows - 1) && (tile_col == tile_cols - 1);
  746. u64 tile_size;
  747. if (last_tile)
  748. tile_size = m_bit_stream->bytes_remaining();
  749. else
  750. tile_size = TRY_READ(m_bit_stream->read_bits(32));
  751. m_mi_row_start = get_tile_offset(tile_row, m_mi_rows, m_tile_rows_log2);
  752. m_mi_row_end = get_tile_offset(tile_row + 1, m_mi_rows, m_tile_rows_log2);
  753. m_mi_col_start = get_tile_offset(tile_col, m_mi_cols, m_tile_cols_log2);
  754. m_mi_col_end = get_tile_offset(tile_col + 1, m_mi_cols, m_tile_cols_log2);
  755. TRY_READ(m_bit_stream->init_bool(tile_size));
  756. TRY(decode_tile());
  757. TRY_READ(m_bit_stream->exit_bool());
  758. }
  759. }
  760. return {};
  761. }
  762. template<typename T>
  763. void Parser::clear_context(Vector<T>& context, size_t size)
  764. {
  765. context.resize_and_keep_capacity(size);
  766. __builtin_memset(context.data(), 0, sizeof(T) * size);
  767. }
  768. template<typename T>
  769. void Parser::clear_context(Vector<Vector<T>>& context, size_t outer_size, size_t inner_size)
  770. {
  771. if (context.size() < outer_size)
  772. context.resize(outer_size);
  773. for (auto& sub_vector : context)
  774. clear_context(sub_vector, inner_size);
  775. }
  776. void Parser::clear_above_context()
  777. {
  778. for (auto i = 0u; i < m_above_nonzero_context.size(); i++)
  779. clear_context(m_above_nonzero_context[i], 2 * m_mi_cols);
  780. clear_context(m_above_seg_pred_context, m_mi_cols);
  781. clear_context(m_above_partition_context, m_sb64_cols * 8);
  782. }
  783. u32 Parser::get_tile_offset(u32 tile_num, u32 mis, u32 tile_size_log2)
  784. {
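// Converts a tile index into an offset in mode-info (8x8) units, rounded to 64x64
// superblock boundaries and clamped to the frame size.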
  785. u32 super_blocks = (mis + 7) >> 3u;
  786. u32 offset = ((tile_num * super_blocks) >> tile_size_log2) << 3u;
  787. return min(offset, mis);
  788. }
  789. DecoderErrorOr<void> Parser::decode_tile()
  790. {
  791. for (auto row = m_mi_row_start; row < m_mi_row_end; row += 8) {
  792. clear_left_context();
  793. for (auto col = m_mi_col_start; col < m_mi_col_end; col += 8) {
  794. TRY(decode_partition(row, col, Block_64x64));
  795. }
  796. }
  797. return {};
  798. }
  799. void Parser::clear_left_context()
  800. {
  801. for (auto i = 0u; i < m_left_nonzero_context.size(); i++)
  802. clear_context(m_left_nonzero_context[i], 2 * m_mi_rows);
  803. clear_context(m_left_seg_pred_context, m_mi_rows);
  804. clear_context(m_left_partition_context, m_sb64_rows * 8);
  805. }
  806. DecoderErrorOr<void> Parser::decode_partition(u32 row, u32 col, BlockSubsize block_subsize)
  807. {
  808. if (row >= m_mi_rows || col >= m_mi_cols)
  809. return {};
  810. m_block_subsize = block_subsize;
  811. m_num_8x8 = num_8x8_blocks_wide_lookup[block_subsize];
  812. auto half_block_8x8 = m_num_8x8 >> 1;
  813. m_has_rows = (row + half_block_8x8) < m_mi_rows;
  814. m_has_cols = (col + half_block_8x8) < m_mi_cols;
  815. m_row = row;
  816. m_col = col;
  817. auto partition = TRY_READ(TreeParser::parse_partition(*m_bit_stream, *m_probability_tables, *m_syntax_element_counter, m_has_rows, m_has_cols, m_block_subsize, m_num_8x8, m_above_partition_context, m_left_partition_context, row, col, m_frame_is_intra));
  818. auto subsize = subsize_lookup[partition][block_subsize];
  819. if (subsize < Block_8x8 || partition == PartitionNone) {
  820. TRY(decode_block(row, col, subsize));
  821. } else if (partition == PartitionHorizontal) {
  822. TRY(decode_block(row, col, subsize));
  823. if (m_has_rows)
  824. TRY(decode_block(row + half_block_8x8, col, subsize));
  825. } else if (partition == PartitionVertical) {
  826. TRY(decode_block(row, col, subsize));
  827. if (m_has_cols)
  828. TRY(decode_block(row, col + half_block_8x8, subsize));
  829. } else {
  830. TRY(decode_partition(row, col, subsize));
  831. TRY(decode_partition(row, col + half_block_8x8, subsize));
  832. TRY(decode_partition(row + half_block_8x8, col, subsize));
  833. TRY(decode_partition(row + half_block_8x8, col + half_block_8x8, subsize));
  834. }
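// Record how this block was partitioned so that blocks decoded later can condition
// their partition probabilities on their above and left neighbors.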
  835. if (block_subsize == Block_8x8 || partition != PartitionSplit) {
  836. auto above_context = 15 >> b_width_log2_lookup[subsize];
  837. auto left_context = 15 >> b_height_log2_lookup[subsize];
  838. for (size_t i = 0; i < m_num_8x8; i++) {
  839. m_above_partition_context[col + i] = above_context;
  840. m_left_partition_context[row + i] = left_context;
  841. }
  842. }
  843. return {};
  844. }
  845. size_t Parser::get_image_index(u32 row, u32 column)
  846. {
  847. VERIFY(row < m_mi_rows && column < m_mi_cols);
  848. return row * m_mi_cols + column;
  849. }
  850. DecoderErrorOr<void> Parser::decode_block(u32 row, u32 col, BlockSubsize subsize)
  851. {
  852. m_mi_row = row;
  853. m_mi_col = col;
  854. m_mi_size = subsize;
  855. m_available_u = row > 0;
  856. m_available_l = col > m_mi_col_start;
  857. TRY(mode_info());
  858. m_eob_total = 0;
  859. TRY(residual());
  860. if (m_is_inter && subsize >= Block_8x8 && m_eob_total == 0)
  861. m_skip = true;
  862. // Spec doesn't specify whether it might index outside the frame here, but it seems that it can. Ensure that we don't
  863. // write out of bounds. This check seems consistent with libvpx.
  864. // See here:
  865. // https://github.com/webmproject/libvpx/blob/705bf9de8c96cfe5301451f1d7e5c90a41c64e5f/vp9/decoder/vp9_decodeframe.c#L917
  866. auto maximum_block_y = min<u32>(num_8x8_blocks_high_lookup[subsize], m_mi_rows - row);
  867. auto maximum_block_x = min<u32>(num_8x8_blocks_wide_lookup[subsize], m_mi_cols - col);
  868. for (size_t y = 0; y < maximum_block_y; y++) {
  869. for (size_t x = 0; x < maximum_block_x; x++) {
  870. auto pos = get_image_index(row + y, col + x);
  871. m_skips[pos] = m_skip;
  872. m_tx_sizes[pos] = m_tx_size;
  873. m_mi_sizes[pos] = m_mi_size;
  874. m_y_modes[pos] = m_y_mode;
  875. m_segment_ids[pos] = m_segment_id;
  876. for (size_t ref_list = 0; ref_list < 2; ref_list++)
  877. m_ref_frames[pos][ref_list] = m_ref_frame[ref_list];
  878. if (m_is_inter) {
  879. m_interp_filters[pos] = m_interp_filter;
  880. for (size_t ref_list = 0; ref_list < 2; ref_list++) {
  881. // FIXME: Can we just store all the sub_mvs and then look up
  882. // the main one by index 3?
  883. m_mvs[pos][ref_list] = m_block_mvs[ref_list][3];
  884. for (size_t b = 0; b < 4; b++)
  885. m_sub_mvs[pos][ref_list][b] = m_block_mvs[ref_list][b];
  886. }
  887. } else {
  888. for (size_t b = 0; b < 4; b++)
  889. m_sub_modes[pos][b] = static_cast<PredictionMode>(m_block_sub_modes[b]);
  890. }
  891. }
  892. }
  893. return {};
  894. }
  895. DecoderErrorOr<void> Parser::mode_info()
  896. {
  897. if (m_frame_is_intra)
  898. TRY(intra_frame_mode_info());
  899. else
  900. TRY(inter_frame_mode_info());
  901. return {};
  902. }
  903. DecoderErrorOr<void> Parser::intra_frame_mode_info()
  904. {
  905. TRY(intra_segment_id());
  906. TRY(read_skip());
  907. TRY(read_tx_size(true));
  908. m_ref_frame[0] = IntraFrame;
  909. m_ref_frame[1] = None;
  910. m_is_inter = false;
  911. // FIXME: This if statement is also present in parse_default_intra_mode. The selection of parameters for
  912. // the probability table lookup should be inlined here.
  913. if (m_mi_size >= Block_8x8) {
  914. // FIXME: This context should be available in the block setup. Make a struct to store the context
  915. // that is needed to call the tree parses and set it in decode_block().
  916. auto above_context = Optional<Array<PredictionMode, 4> const&>();
  917. auto left_context = Optional<Array<PredictionMode, 4> const&>();
  918. if (m_available_u)
  919. above_context = m_sub_modes[get_image_index(m_mi_row - 1, m_mi_col)];
  920. if (m_available_l)
  921. left_context = m_sub_modes[get_image_index(m_mi_row, m_mi_col - 1)];
  922. m_default_intra_mode = TRY_READ(TreeParser::parse_default_intra_mode(*m_bit_stream, *m_probability_tables, m_mi_size, above_context, left_context, m_block_sub_modes, 0, 0));
  923. m_y_mode = m_default_intra_mode;
  924. for (auto& block_sub_mode : m_block_sub_modes)
  925. block_sub_mode = m_y_mode;
  926. } else {
  927. m_num_4x4_w = num_4x4_blocks_wide_lookup[m_mi_size];
  928. m_num_4x4_h = num_4x4_blocks_high_lookup[m_mi_size];
  929. for (auto idy = 0; idy < 2; idy += m_num_4x4_h) {
  930. for (auto idx = 0; idx < 2; idx += m_num_4x4_w) {
  931. // FIXME: See the FIXME above.
  932. auto above_context = Optional<Array<PredictionMode, 4> const&>();
  933. auto left_context = Optional<Array<PredictionMode, 4> const&>();
  934. if (m_available_u)
  935. above_context = m_sub_modes[get_image_index(m_mi_row - 1, m_mi_col)];
  936. if (m_available_l)
  937. left_context = m_sub_modes[get_image_index(m_mi_row, m_mi_col - 1)];
  938. m_default_intra_mode = TRY_READ(TreeParser::parse_default_intra_mode(*m_bit_stream, *m_probability_tables, m_mi_size, above_context, left_context, m_block_sub_modes, idx, idy));
  939. for (auto y = 0; y < m_num_4x4_h; y++) {
  940. for (auto x = 0; x < m_num_4x4_w; x++) {
  941. auto index = (idy + y) * 2 + idx + x;
  942. m_block_sub_modes[index] = m_default_intra_mode;
  943. }
  944. }
  945. }
  946. }
  947. m_y_mode = m_default_intra_mode;
  948. }
  949. m_uv_mode = TRY_READ(TreeParser::parse_default_uv_mode(*m_bit_stream, *m_probability_tables, m_y_mode));
  950. return {};
  951. }
  952. DecoderErrorOr<void> Parser::intra_segment_id()
  953. {
  954. if (m_segmentation_enabled && m_segmentation_update_map)
  955. m_segment_id = TRY_READ(TreeParser::parse_segment_id(*m_bit_stream, m_segmentation_tree_probs));
  956. else
  957. m_segment_id = 0;
  958. return {};
  959. }
  960. DecoderErrorOr<void> Parser::read_skip()
  961. {
  962. if (seg_feature_active(SEG_LVL_SKIP)) {
  963. m_skip = true;
  964. } else {
  965. Optional<bool> above_skip = m_available_u ? m_skips[get_image_index(m_mi_row - 1, m_mi_col)] : Optional<bool>();
  966. Optional<bool> left_skip = m_available_l ? m_skips[get_image_index(m_mi_row, m_mi_col - 1)] : Optional<bool>();
  967. m_skip = TRY_READ(TreeParser::parse_skip(*m_bit_stream, *m_probability_tables, *m_syntax_element_counter, above_skip, left_skip));
  968. }
  969. return {};
  970. }
  971. bool Parser::seg_feature_active(u8 feature)
  972. {
  973. return m_segmentation_enabled && m_feature_enabled[m_segment_id][feature];
  974. }
  975. DecoderErrorOr<void> Parser::read_tx_size(bool allow_select)
  976. {
  977. m_max_tx_size = max_txsize_lookup[m_mi_size];
  978. if (allow_select && m_tx_mode == TXModeSelect && m_mi_size >= Block_8x8)
  979. m_tx_size = TRY_READ(m_tree_parser->parse_tree<TXSize>(SyntaxElementType::TXSize));
  980. else
  981. m_tx_size = min(m_max_tx_size, tx_mode_to_biggest_tx_size[m_tx_mode]);
  982. return {};
  983. }
  984. DecoderErrorOr<void> Parser::inter_frame_mode_info()
  985. {
  986. m_left_ref_frame[0] = m_available_l ? m_ref_frames[get_image_index(m_mi_row, m_mi_col - 1)][0] : IntraFrame;
  987. m_above_ref_frame[0] = m_available_u ? m_ref_frames[get_image_index(m_mi_row - 1, m_mi_col)][0] : IntraFrame;
  988. m_left_ref_frame[1] = m_available_l ? m_ref_frames[get_image_index(m_mi_row, m_mi_col - 1)][1] : None;
  989. m_above_ref_frame[1] = m_available_u ? m_ref_frames[get_image_index(m_mi_row - 1, m_mi_col)][1] : None;
  990. m_left_intra = m_left_ref_frame[0] <= IntraFrame;
  991. m_above_intra = m_above_ref_frame[0] <= IntraFrame;
  992. m_left_single = m_left_ref_frame[1] <= None;
  993. m_above_single = m_above_ref_frame[1] <= None;
  994. TRY(inter_segment_id());
  995. TRY(read_skip());
  996. TRY(read_is_inter());
  997. TRY(read_tx_size(!m_skip || !m_is_inter));
  998. if (m_is_inter) {
  999. TRY(inter_block_mode_info());
  1000. } else {
  1001. TRY(intra_block_mode_info());
  1002. }
  1003. return {};
  1004. }
  1005. DecoderErrorOr<void> Parser::inter_segment_id()
  1006. {
  1007. if (!m_segmentation_enabled) {
  1008. m_segment_id = 0;
  1009. return {};
  1010. }
  1011. auto predicted_segment_id = get_segment_id();
  1012. if (!m_segmentation_update_map) {
  1013. m_segment_id = predicted_segment_id;
  1014. return {};
  1015. }
  1016. if (!m_segmentation_temporal_update) {
  1017. m_segment_id = TRY_READ(TreeParser::parse_segment_id(*m_bit_stream, m_segmentation_tree_probs));
  1018. return {};
  1019. }
  1020. auto seg_id_predicted = TRY_READ(TreeParser::parse_segment_id_predicted(*m_bit_stream, m_segmentation_pred_prob, m_left_seg_pred_context[m_mi_row], m_above_seg_pred_context[m_mi_col]));
  1021. if (seg_id_predicted)
  1022. m_segment_id = predicted_segment_id;
  1023. else
  1024. m_segment_id = TRY_READ(TreeParser::parse_segment_id(*m_bit_stream, m_segmentation_tree_probs));
  1025. for (size_t i = 0; i < num_8x8_blocks_wide_lookup[m_mi_size]; i++) {
  1026. auto index = m_mi_col + i;
  1027. // (7.4.1) AboveSegPredContext[ i ] only needs to be set to 0 for i = 0..MiCols-1.
  1028. if (index < m_above_seg_pred_context.size())
  1029. m_above_seg_pred_context[index] = seg_id_predicted;
  1030. }
  1031. for (size_t i = 0; i < num_8x8_blocks_high_lookup[m_mi_size]; i++) {
  1032. auto index = m_mi_row + i;
  1033. // (7.4.1) LeftSegPredContext[ i ] only needs to be set to 0 for i = 0..MiRows-1.
  1034. if (index < m_left_seg_pred_context.size())
  1035. m_left_seg_pred_context[index] = seg_id_predicted;
  1036. }
  1037. return {};
  1038. }
  1039. u8 Parser::get_segment_id()
  1040. {
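// The predicted segment ID is the minimum of the previous frame's segment IDs over
// the visible part of this block.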
  1041. auto bw = num_8x8_blocks_wide_lookup[m_mi_size];
  1042. auto bh = num_8x8_blocks_high_lookup[m_mi_size];
  1043. auto xmis = min(m_mi_cols - m_mi_col, (u32)bw);
  1044. auto ymis = min(m_mi_rows - m_mi_row, (u32)bh);
  1045. u8 segment = 7;
  1046. for (size_t y = 0; y < ymis; y++) {
  1047. for (size_t x = 0; x < xmis; x++) {
  1048. segment = min(segment, m_prev_segment_ids[(m_mi_row + y) * m_mi_cols + (m_mi_col + x)]);
  1049. }
  1050. }
  1051. return segment;
  1052. }
  1053. DecoderErrorOr<void> Parser::read_is_inter()
  1054. {
  1055. if (seg_feature_active(SEG_LVL_REF_FRAME))
  1056. m_is_inter = m_feature_data[m_segment_id][SEG_LVL_REF_FRAME] != IntraFrame;
  1057. else
  1058. m_is_inter = TRY_READ(m_tree_parser->parse_tree<bool>(SyntaxElementType::IsInter));
  1059. return {};
  1060. }
  1061. DecoderErrorOr<void> Parser::intra_block_mode_info()
  1062. {
  1063. m_ref_frame[0] = IntraFrame;
  1064. m_ref_frame[1] = None;
  1065. if (m_mi_size >= Block_8x8) {
  1066. m_y_mode = TRY_READ(TreeParser::parse_intra_mode(*m_bit_stream, *m_probability_tables, *m_syntax_element_counter, m_mi_size));
  1067. for (auto& block_sub_mode : m_block_sub_modes)
  1068. block_sub_mode = m_y_mode;
  1069. } else {
  1070. m_num_4x4_w = num_4x4_blocks_wide_lookup[m_mi_size];
  1071. m_num_4x4_h = num_4x4_blocks_high_lookup[m_mi_size];
  1072. PredictionMode sub_intra_mode;
  1073. for (auto idy = 0; idy < 2; idy += m_num_4x4_h) {
  1074. for (auto idx = 0; idx < 2; idx += m_num_4x4_w) {
  1075. sub_intra_mode = TRY_READ(TreeParser::parse_sub_intra_mode(*m_bit_stream, *m_probability_tables, *m_syntax_element_counter));
  1076. for (auto y = 0; y < m_num_4x4_h; y++) {
  1077. for (auto x = 0; x < m_num_4x4_w; x++)
  1078. m_block_sub_modes[(idy + y) * 2 + idx + x] = sub_intra_mode;
  1079. }
  1080. }
  1081. }
  1082. m_y_mode = sub_intra_mode;
  1083. }
  1084. m_uv_mode = TRY_READ(TreeParser::parse_uv_mode(*m_bit_stream, *m_probability_tables, *m_syntax_element_counter, m_y_mode));
  1085. return {};
  1086. }
  1087. DecoderErrorOr<void> Parser::inter_block_mode_info()
  1088. {
  1089. TRY(read_ref_frames());
  1090. for (auto j = 0; j < 2; j++) {
  1091. if (m_ref_frame[j] > IntraFrame) {
  1092. find_mv_refs(m_ref_frame[j], -1);
  1093. find_best_ref_mvs(j);
  1094. }
  1095. }
  1096. auto is_compound = m_ref_frame[1] > IntraFrame;
  1097. if (seg_feature_active(SEG_LVL_SKIP)) {
  1098. m_y_mode = PredictionMode::ZeroMv;
  1099. } else if (m_mi_size >= Block_8x8) {
  1100. m_y_mode = TRY_READ(TreeParser::parse_inter_mode(*m_bit_stream, *m_probability_tables, *m_syntax_element_counter, m_mode_context[m_ref_frame[0]]));
  1101. }
  1102. if (m_interpolation_filter == Switchable) {
  1103. Optional<ReferenceFrameType> above_ref_frame = m_available_u ? m_ref_frames[get_image_index(m_mi_row - 1, m_mi_col)][0] : Optional<ReferenceFrameType>();
  1104. Optional<ReferenceFrameType> left_ref_frame = m_available_l ? m_ref_frames[get_image_index(m_mi_row, m_mi_col - 1)][0] : Optional<ReferenceFrameType>();
  1105. Optional<InterpolationFilter> above_interpolation_filter = m_available_u ? m_interp_filters[get_image_index(m_mi_row - 1, m_mi_col)] : Optional<InterpolationFilter>();
  1106. Optional<InterpolationFilter> left_interpolation_filter = m_available_l ? m_interp_filters[get_image_index(m_mi_row, m_mi_col - 1)] : Optional<InterpolationFilter>();
  1107. m_interp_filter = TRY_READ(TreeParser::parse_interpolation_filter(*m_bit_stream, *m_probability_tables, *m_syntax_element_counter, above_ref_frame, left_ref_frame, above_interpolation_filter, left_interpolation_filter));
  1108. } else {
  1109. m_interp_filter = m_interpolation_filter;
  1110. }
  1111. if (m_mi_size < Block_8x8) {
  1112. m_num_4x4_w = num_4x4_blocks_wide_lookup[m_mi_size];
  1113. m_num_4x4_h = num_4x4_blocks_high_lookup[m_mi_size];
  1114. for (auto idy = 0; idy < 2; idy += m_num_4x4_h) {
  1115. for (auto idx = 0; idx < 2; idx += m_num_4x4_w) {
  1116. m_y_mode = TRY_READ(TreeParser::parse_inter_mode(*m_bit_stream, *m_probability_tables, *m_syntax_element_counter, m_mode_context[m_ref_frame[0]]));
  1117. if (m_y_mode == PredictionMode::NearestMv || m_y_mode == PredictionMode::NearMv) {
  1118. for (auto j = 0; j < 1 + is_compound; j++)
  1119. append_sub8x8_mvs(idy * 2 + idx, j);
  1120. }
  1121. TRY(assign_mv(is_compound));
  1122. for (auto y = 0; y < m_num_4x4_h; y++) {
  1123. for (auto x = 0; x < m_num_4x4_w; x++) {
  1124. auto block = (idy + y) * 2 + idx + x;
  1125. for (auto ref_list = 0; ref_list < 1 + is_compound; ref_list++) {
  1126. m_block_mvs[ref_list][block] = m_mv[ref_list];
  1127. }
  1128. }
  1129. }
  1130. }
  1131. }
  1132. return {};
  1133. }
  1134. TRY(assign_mv(is_compound));
  1135. for (auto ref_list = 0; ref_list < 1 + is_compound; ref_list++) {
  1136. for (auto block = 0; block < 4; block++) {
  1137. m_block_mvs[ref_list][block] = m_mv[ref_list];
  1138. }
  1139. }
  1140. return {};
  1141. }
  1142. DecoderErrorOr<void> Parser::read_ref_frames()
  1143. {
  1144. if (seg_feature_active(SEG_LVL_REF_FRAME)) {
  1145. m_ref_frame[0] = static_cast<ReferenceFrameType>(m_feature_data[m_segment_id][SEG_LVL_REF_FRAME]);
  1146. m_ref_frame[1] = None;
  1147. return {};
  1148. }
  1149. ReferenceMode comp_mode;
  1150. if (m_reference_mode == ReferenceModeSelect)
  1151. comp_mode = TRY_READ(m_tree_parser->parse_tree<ReferenceMode>(SyntaxElementType::CompMode));
  1152. else
  1153. comp_mode = m_reference_mode;
  1154. if (comp_mode == CompoundReference) {
  1155. auto idx = m_ref_frame_sign_bias[m_comp_fixed_ref];
  1156. auto comp_ref = TRY_READ(m_tree_parser->parse_tree(SyntaxElementType::CompRef));
  1157. m_ref_frame[idx] = m_comp_fixed_ref;
  1158. m_ref_frame[!idx] = m_comp_var_ref[comp_ref];
  1159. return {};
  1160. }
  1161. auto single_ref_p1 = TRY_READ(m_tree_parser->parse_tree<bool>(SyntaxElementType::SingleRefP1));
  1162. if (single_ref_p1) {
  1163. auto single_ref_p2 = TRY_READ(m_tree_parser->parse_tree<bool>(SyntaxElementType::SingleRefP2));
  1164. m_ref_frame[0] = single_ref_p2 ? AltRefFrame : GoldenFrame;
  1165. } else {
  1166. m_ref_frame[0] = LastFrame;
  1167. }
  1168. m_ref_frame[1] = None;
  1169. return {};
  1170. }
  1171. DecoderErrorOr<void> Parser::assign_mv(bool is_compound)
  1172. {
  1173. m_mv[1] = {};
  1174. for (auto i = 0; i < 1 + is_compound; i++) {
  1175. if (m_y_mode == PredictionMode::NewMv) {
  1176. TRY(read_mv(i));
  1177. } else if (m_y_mode == PredictionMode::NearestMv) {
  1178. m_mv[i] = m_nearest_mv[i];
  1179. } else if (m_y_mode == PredictionMode::NearMv) {
  1180. m_mv[i] = m_near_mv[i];
  1181. } else {
  1182. m_mv[i] = {};
  1183. }
  1184. }
  1185. return {};
  1186. }
DecoderErrorOr<void> Parser::read_mv(u8 ref)
{
    m_use_hp = m_allow_high_precision_mv && use_mv_hp(m_best_mv[ref]);
    MotionVector diff_mv;
    auto mv_joint = TRY_READ(m_tree_parser->parse_tree<MvJoint>(SyntaxElementType::MVJoint));
    if (mv_joint == MvJointHzvnz || mv_joint == MvJointHnzvnz)
        diff_mv.set_row(TRY(read_mv_component(0)));
    if (mv_joint == MvJointHnzvz || mv_joint == MvJointHnzvnz)
        diff_mv.set_column(TRY(read_mv_component(1)));
    // FIXME: We probably don't need to assign MVs to a field, these can just
    //        be returned and assigned where they are requested.
    m_mv[ref] = m_best_mv[ref] + diff_mv;
    return {};
}

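// Reads one MV component (0 = row, 1 = column) as a sign, a magnitude class, and the
// class-specific magnitude bits. As a worked example of the arithmetic below, a class-0
// component with bit = 1, fraction = 2 and high-precision bit = 1 decodes to a magnitude
// of ((1 << 3) | (2 << 1) | 1) + 1 = 14 (in eighth-pel units), negated if the sign is set.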
DecoderErrorOr<i32> Parser::read_mv_component(u8 component)
{
    m_tree_parser->set_mv_component(component);
    auto mv_sign = TRY_READ(m_tree_parser->parse_tree<bool>(SyntaxElementType::MVSign));
    auto mv_class = TRY_READ(m_tree_parser->parse_tree<MvClass>(SyntaxElementType::MVClass));
    u32 mag;
    if (mv_class == MvClass0) {
        u32 mv_class0_bit = TRY_READ(m_tree_parser->parse_tree<bool>(SyntaxElementType::MVClass0Bit));
        u32 mv_class0_fr = TRY_READ(m_tree_parser->parse_mv_class0_fr(mv_class0_bit));
        u32 mv_class0_hp = TRY_READ(m_tree_parser->parse_tree<bool>(SyntaxElementType::MVClass0HP));
        mag = ((mv_class0_bit << 3) | (mv_class0_fr << 1) | mv_class0_hp) + 1;
    } else {
        u32 d = 0;
        for (u8 i = 0; i < mv_class; i++) {
            u32 mv_bit = TRY_READ(m_tree_parser->parse_mv_bit(i));
            d |= mv_bit << i;
        }
        mag = CLASS0_SIZE << (mv_class + 2);
        u32 mv_fr = TRY_READ(m_tree_parser->parse_tree<u8>(SyntaxElementType::MVFR));
        u32 mv_hp = TRY_READ(m_tree_parser->parse_tree<bool>(SyntaxElementType::MVHP));
        mag += ((d << 3) | (mv_fr << 1) | mv_hp) + 1;
    }
    return (mv_sign ? -1 : 1) * static_cast<i32>(mag);
}

Gfx::Point<size_t> Parser::get_decoded_point_for_plane(u32 column, u32 row, u8 plane)
{
    if (plane == 0)
        return { column * 8, row * 8 };
    return { (column * 8) >> m_subsampling_x, (row * 8) >> m_subsampling_y };
}

Gfx::Size<size_t> Parser::get_decoded_size_for_plane(u8 plane)
{
    auto point = get_decoded_point_for_plane(m_mi_cols, m_mi_rows, plane);
    return { point.x(), point.y() };
}

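// Walks every plane of the current block, runs inter or intra prediction, then decodes
// and reconstructs each transform block, recording in the above/left non-zero contexts
// whether any coefficients were present.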
DecoderErrorOr<void> Parser::residual()
{
    auto block_size = m_mi_size < Block_8x8 ? Block_8x8 : static_cast<BlockSubsize>(m_mi_size);
    for (u8 plane = 0; plane < 3; plane++) {
        auto tx_size = (plane > 0) ? get_uv_tx_size() : m_tx_size;
        auto step = 1 << tx_size;
        auto plane_size = get_plane_block_size(block_size, plane);
        auto num_4x4_w = num_4x4_blocks_wide_lookup[plane_size];
        auto num_4x4_h = num_4x4_blocks_high_lookup[plane_size];
        auto sub_x = (plane > 0) ? m_subsampling_x : 0;
        auto sub_y = (plane > 0) ? m_subsampling_y : 0;
        auto base_x = (m_mi_col * 8) >> sub_x;
        auto base_y = (m_mi_row * 8) >> sub_y;
        if (m_is_inter) {
            if (m_mi_size < Block_8x8) {
                for (auto y = 0; y < num_4x4_h; y++) {
                    for (auto x = 0; x < num_4x4_w; x++) {
                        TRY(m_decoder.predict_inter(plane, base_x + (4 * x), base_y + (4 * y), 4, 4, (y * num_4x4_w) + x));
                    }
                }
            } else {
                TRY(m_decoder.predict_inter(plane, base_x, base_y, num_4x4_w * 4, num_4x4_h * 4, 0));
            }
        }
        auto max_x = (m_mi_cols * 8) >> sub_x;
        auto max_y = (m_mi_rows * 8) >> sub_y;
        auto block_index = 0;
        for (auto y = 0; y < num_4x4_h; y += step) {
            for (auto x = 0; x < num_4x4_w; x += step) {
                auto start_x = base_x + (4 * x);
                auto start_y = base_y + (4 * y);
                auto non_zero = false;
                if (start_x < max_x && start_y < max_y) {
                    if (!m_is_inter)
                        TRY(m_decoder.predict_intra(plane, start_x, start_y, m_available_l || x > 0, m_available_u || y > 0, (x + step) < num_4x4_w, tx_size, block_index));
                    if (!m_skip) {
                        non_zero = TRY(tokens(plane, start_x, start_y, tx_size, block_index));
                        TRY(m_decoder.reconstruct(plane, start_x, start_y, tx_size));
                    }
                }
                auto& above_sub_context = m_above_nonzero_context[plane];
                auto above_sub_context_index = start_x >> 2;
                auto above_sub_context_end = min(above_sub_context_index + step, above_sub_context.size());
                for (; above_sub_context_index < above_sub_context_end; above_sub_context_index++)
                    above_sub_context[above_sub_context_index] = non_zero;
                auto& left_sub_context = m_left_nonzero_context[plane];
                auto left_sub_context_index = start_y >> 2;
                auto left_sub_context_end = min(left_sub_context_index + step, left_sub_context.size());
                for (; left_sub_context_index < left_sub_context_end; left_sub_context_index++)
                    left_sub_context[left_sub_context_index] = non_zero;
                block_index++;
            }
        }
    }
    return {};
}

TXSize Parser::get_uv_tx_size()
{
    if (m_mi_size < Block_8x8)
        return TX_4x4;
    return min(m_tx_size, max_txsize_lookup[get_plane_block_size(m_mi_size, 1)]);
}

BlockSubsize Parser::get_plane_block_size(u32 subsize, u8 plane)
{
    auto sub_x = (plane > 0) ? m_subsampling_x : 0;
    auto sub_y = (plane > 0) ? m_subsampling_y : 0;
    return ss_size_lookup[subsize][sub_x][sub_y];
}

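// Decodes the coefficient tokens for one transform block, following the scan order chosen
// by get_scan(). Returns whether any non-zero coefficient was read; positions past the
// end-of-block are cleared to zero.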
DecoderErrorOr<bool> Parser::tokens(size_t plane, u32 start_x, u32 start_y, TXSize tx_size, u32 block_index)
{
    m_tree_parser->set_start_x_and_y(start_x, start_y);
    size_t segment_eob = 16 << (tx_size << 1);
    auto scan = get_scan(plane, tx_size, block_index);
    auto check_eob = true;
    size_t c = 0;
    for (; c < segment_eob; c++) {
        auto pos = scan[c];
        auto band = (tx_size == TX_4x4) ? coefband_4x4[c] : coefband_8x8plus[c];
        m_tree_parser->set_tokens_variables(band, c, plane, tx_size, pos);
        if (check_eob) {
            auto more_coefs = TRY_READ(m_tree_parser->parse_tree<bool>(SyntaxElementType::MoreCoefs));
            if (!more_coefs)
                break;
        }
        auto token = TRY_READ(m_tree_parser->parse_tree<Token>(SyntaxElementType::Token));
        m_token_cache[pos] = energy_class[token];
        if (token == ZeroToken) {
            m_tokens[pos] = 0;
            check_eob = false;
        } else {
            i32 coef = TRY(read_coef(token));
            auto sign_bit = TRY_READ(m_bit_stream->read_literal(1));
            m_tokens[pos] = sign_bit ? -coef : coef;
            check_eob = true;
        }
    }
    auto non_zero = c > 0;
    m_eob_total += non_zero;
    for (size_t i = c; i < segment_eob; i++)
        m_tokens[scan[i]] = 0;
    return non_zero;
}

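// Picks the coefficient scan order for the current transform block. The transform type is
// derived from the prediction mode (falling back to DCT_DCT for chroma, 32x32, and
// lossless/inter 4x4 blocks), and the ADST_DCT/DCT_ADST types select the row/column scans.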
u32 const* Parser::get_scan(size_t plane, TXSize tx_size, u32 block_index)
{
    if (plane > 0 || tx_size == TX_32x32) {
        m_tx_type = DCT_DCT;
    } else if (tx_size == TX_4x4) {
        if (m_lossless || m_is_inter)
            m_tx_type = DCT_DCT;
        else
            m_tx_type = mode_to_txfm_map[to_underlying(m_mi_size < Block_8x8 ? m_block_sub_modes[block_index] : m_y_mode)];
    } else {
        m_tx_type = mode_to_txfm_map[to_underlying(m_y_mode)];
    }
    if (tx_size == TX_4x4) {
        if (m_tx_type == ADST_DCT)
            return row_scan_4x4;
        if (m_tx_type == DCT_ADST)
            return col_scan_4x4;
        return default_scan_4x4;
    }
    if (tx_size == TX_8x8) {
        if (m_tx_type == ADST_DCT)
            return row_scan_8x8;
        if (m_tx_type == DCT_ADST)
            return col_scan_8x8;
        return default_scan_8x8;
    }
    if (tx_size == TX_16x16) {
        if (m_tx_type == ADST_DCT)
            return row_scan_16x16;
        if (m_tx_type == DCT_ADST)
            return col_scan_16x16;
        return default_scan_16x16;
    }
    return default_scan_32x32;
}

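// Expands a coefficient token into its value by adding the extra bits for the token's
// category; DctValCat6 additionally reads high bits when the bit depth exceeds 8.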
DecoderErrorOr<i32> Parser::read_coef(Token token)
{
    auto cat = extra_bits[token][0];
    auto num_extra = extra_bits[token][1];
    u32 coef = extra_bits[token][2];
    if (token == DctValCat6) {
        for (size_t e = 0; e < (u8)(m_bit_depth - 8); e++) {
            auto high_bit = TRY_READ(m_bit_stream->read_bool(255));
            coef += high_bit << (5 + m_bit_depth - e);
        }
    }
    for (size_t e = 0; e < num_extra; e++) {
        auto coef_bit = TRY_READ(m_bit_stream->read_bool(cat_probs[cat][e]));
        coef += coef_bit << (num_extra - 1 - e);
    }
    return coef;
}

bool Parser::is_inside(i32 row, i32 column)
{
    if (row < 0)
        return false;
    if (column < 0)
        return false;
    u32 row_positive = row;
    u32 column_positive = column;
    return row_positive < m_mi_rows && column_positive >= m_mi_col_start && column_positive < m_mi_col_end;
}

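// Appends the current candidate vector to the reference MV list, keeping at most two
// entries and skipping a candidate that duplicates the first entry.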
void Parser::add_mv_ref_list(u8 ref_list)
{
    if (m_ref_mv_count >= 2)
        return;
    if (m_ref_mv_count > 0 && m_candidate_mv[ref_list] == m_ref_list_mv[0])
        return;
    m_ref_list_mv[m_ref_mv_count] = m_candidate_mv[ref_list];
    m_ref_mv_count++;
}

void Parser::get_block_mv(u32 candidate_row, u32 candidate_column, u8 ref_list, bool use_prev)
{
    auto index = get_image_index(candidate_row, candidate_column);
    if (use_prev) {
        m_candidate_mv[ref_list] = m_prev_mvs[index][ref_list];
        m_candidate_frame[ref_list] = m_prev_ref_frames[index][ref_list];
    } else {
        m_candidate_mv[ref_list] = m_mvs[index][ref_list];
        m_candidate_frame[ref_list] = m_ref_frames[index][ref_list];
    }
}

void Parser::if_same_ref_frame_add_mv(u32 candidate_row, u32 candidate_column, ReferenceFrameType ref_frame, bool use_prev)
{
    for (auto ref_list = 0u; ref_list < 2; ref_list++) {
        get_block_mv(candidate_row, candidate_column, ref_list, use_prev);
        if (m_candidate_frame[ref_list] == ref_frame) {
            add_mv_ref_list(ref_list);
            return;
        }
    }
}

void Parser::scale_mv(u8 ref_list, ReferenceFrameType ref_frame)
{
    auto candidate_frame = m_candidate_frame[ref_list];
    if (m_ref_frame_sign_bias[candidate_frame] != m_ref_frame_sign_bias[ref_frame])
        m_candidate_mv[ref_list] *= -1;
}

void Parser::if_diff_ref_frame_add_mv(u32 candidate_row, u32 candidate_column, ReferenceFrameType ref_frame, bool use_prev)
{
    for (auto ref_list = 0u; ref_list < 2; ref_list++)
        get_block_mv(candidate_row, candidate_column, ref_list, use_prev);
    auto mvs_are_same = m_candidate_mv[0] == m_candidate_mv[1];
    if (m_candidate_frame[0] > ReferenceFrameType::IntraFrame && m_candidate_frame[0] != ref_frame) {
        scale_mv(0, ref_frame);
        add_mv_ref_list(0);
    }
    if (m_candidate_frame[1] > ReferenceFrameType::IntraFrame && m_candidate_frame[1] != ref_frame && !mvs_are_same) {
        scale_mv(1, ref_frame);
        add_mv_ref_list(1);
    }
}

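// Clamps a motion vector so that the region it references stays within `border` of the
// frame edges, computed relative to the current block's position and size.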
MotionVector Parser::clamp_mv(MotionVector vector, i32 border)
{
    i32 blocks_high = num_8x8_blocks_high_lookup[m_mi_size];
    // Casts must be done here to prevent subtraction underflow from wrapping the values.
    i32 mb_to_top_edge = -8 * (static_cast<i32>(m_mi_row) * MI_SIZE);
    i32 mb_to_bottom_edge = 8 * ((static_cast<i32>(m_mi_rows) - blocks_high - static_cast<i32>(m_mi_row)) * MI_SIZE);
    i32 blocks_wide = num_8x8_blocks_wide_lookup[m_mi_size];
    i32 mb_to_left_edge = -8 * (static_cast<i32>(m_mi_col) * MI_SIZE);
    i32 mb_to_right_edge = 8 * ((static_cast<i32>(m_mi_cols) - blocks_wide - static_cast<i32>(m_mi_col)) * MI_SIZE);
    return {
        clip_3(mb_to_top_edge - border, mb_to_bottom_edge + border, vector.row()),
        clip_3(mb_to_left_edge - border, mb_to_right_edge + border, vector.column())
    };
}

void Parser::clamp_mv_ref(u8 i)
{
    MotionVector& vector = m_ref_list_mv[i];
    vector = clamp_mv(vector, MV_BORDER);
}

// 6.5.1 Find MV refs syntax
void Parser::find_mv_refs(ReferenceFrameType reference_frame, i32 block)
{
    m_ref_mv_count = 0;
    bool different_ref_found = false;
    u8 context_counter = 0;
    m_ref_list_mv[0] = {};
    m_ref_list_mv[1] = {};
    MotionVector base_coordinates = MotionVector(m_mi_row, m_mi_col);
    for (auto i = 0u; i < 2; i++) {
        auto offset_vector = mv_ref_blocks[m_mi_size][i];
        auto candidate = base_coordinates + offset_vector;
        if (is_inside(candidate.row(), candidate.column())) {
            auto candidate_index = get_image_index(candidate.row(), candidate.column());
            different_ref_found = true;
            context_counter += mode_2_counter[to_underlying(m_y_modes[candidate_index])];
            for (auto ref_list = 0u; ref_list < 2; ref_list++) {
                if (m_ref_frames[candidate_index][ref_list] == reference_frame) {
                    // This section up until add_mv_ref_list() is defined in the spec as get_sub_block_mv().
                    constexpr u8 idx_n_column_to_subblock[4][2] = {
                        { 1, 2 },
                        { 1, 3 },
                        { 3, 2 },
                        { 3, 3 }
                    };
                    auto index = block >= 0 ? idx_n_column_to_subblock[block][offset_vector.column() == 0] : 3;
                    m_candidate_mv[ref_list] = m_sub_mvs[candidate_index][ref_list][index];
                    add_mv_ref_list(ref_list);
                    break;
                }
            }
        }
    }
    for (auto i = 2u; i < MVREF_NEIGHBOURS; i++) {
        MotionVector candidate = base_coordinates + mv_ref_blocks[m_mi_size][i];
        if (is_inside(candidate.row(), candidate.column())) {
            different_ref_found = true;
            if_same_ref_frame_add_mv(candidate.row(), candidate.column(), reference_frame, false);
        }
    }
    if (m_use_prev_frame_mvs)
        if_same_ref_frame_add_mv(m_mi_row, m_mi_col, reference_frame, true);
    if (different_ref_found) {
        for (auto i = 0u; i < MVREF_NEIGHBOURS; i++) {
            MotionVector candidate = base_coordinates + mv_ref_blocks[m_mi_size][i];
            if (is_inside(candidate.row(), candidate.column()))
                if_diff_ref_frame_add_mv(candidate.row(), candidate.column(), reference_frame, false);
        }
    }
    if (m_use_prev_frame_mvs)
        if_diff_ref_frame_add_mv(m_mi_row, m_mi_col, reference_frame, true);
    m_mode_context[reference_frame] = counter_to_context[context_counter];
    for (auto i = 0u; i < MAX_MV_REF_CANDIDATES; i++)
        clamp_mv_ref(i);
}

bool Parser::use_mv_hp(MotionVector const& vector)
{
    return (abs(vector.row()) >> 3) < COMPANDED_MVREF_THRESH && (abs(vector.column()) >> 3) < COMPANDED_MVREF_THRESH;
}

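// Lowers the precision of each reference MV candidate when high-precision vectors are not
// allowed or usable (making odd components even), clamps them to the frame, and stores the
// results as the nearest/near/best vectors for the given reference list.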
void Parser::find_best_ref_mvs(u8 ref_list)
{
    for (auto i = 0u; i < MAX_MV_REF_CANDIDATES; i++) {
        auto delta = m_ref_list_mv[i];
        auto delta_row = delta.row();
        auto delta_column = delta.column();
        if (!m_allow_high_precision_mv || !use_mv_hp(delta)) {
            if (delta_row & 1)
                delta_row += delta_row > 0 ? -1 : 1;
            if (delta_column & 1)
                delta_column += delta_column > 0 ? -1 : 1;
        }
        delta = { delta_row, delta_column };
        m_ref_list_mv[i] = clamp_mv(delta, (BORDERINPIXELS - INTERP_EXTEND) << 3);
    }
    m_nearest_mv[ref_list] = m_ref_list_mv[0];
    m_near_mv[ref_list] = m_ref_list_mv[1];
    m_best_mv[ref_list] = m_ref_list_mv[0];
}

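// Builds the nearest/near vectors for one sub-8x8 block by combining the MV refs found for
// the whole block with the vectors already assigned to earlier sub-blocks, skipping
// duplicates of the first entry and zero-filling if fewer than two candidates remain.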
void Parser::append_sub8x8_mvs(i32 block, u8 ref_list)
{
    MotionVector sub_8x8_mvs[2];
    find_mv_refs(m_ref_frame[ref_list], block);
    auto destination_index = 0;
    if (block == 0) {
        for (auto i = 0u; i < 2; i++)
            sub_8x8_mvs[destination_index++] = m_ref_list_mv[i];
    } else if (block <= 2) {
        sub_8x8_mvs[destination_index++] = m_block_mvs[ref_list][0];
    } else {
        sub_8x8_mvs[destination_index++] = m_block_mvs[ref_list][2];
        for (auto index = 1; index >= 0 && destination_index < 2; index--) {
            auto block_vector = m_block_mvs[ref_list][index];
            if (block_vector != sub_8x8_mvs[0])
                sub_8x8_mvs[destination_index++] = block_vector;
        }
    }
    for (auto n = 0u; n < 2 && destination_index < 2; n++) {
        auto ref_list_vector = m_ref_list_mv[n];
        if (ref_list_vector != sub_8x8_mvs[0])
            sub_8x8_mvs[destination_index++] = ref_list_vector;
    }
    if (destination_index < 2)
        sub_8x8_mvs[destination_index++] = {};
    m_nearest_mv[ref_list] = sub_8x8_mvs[0];
    m_near_mv[ref_list] = sub_8x8_mvs[1];
}

void Parser::dump_info()
{
    outln("Frame dimensions: {}x{}", m_frame_size.width(), m_frame_size.height());
    outln("Render dimensions: {}x{}", m_render_size.width(), m_render_size.height());
    outln("Bit depth: {}", m_bit_depth);
    outln("Show frame: {}", m_show_frame);
}

}