// AtomicsObject.cpp (15 KB)
  1. /*
  2. * Copyright (c) 2021, Tim Flynn <trflynn89@pm.me>
  3. *
  4. * SPDX-License-Identifier: BSD-2-Clause
  5. */
  6. #include <AK/Atomic.h>
  7. #include <AK/ByteBuffer.h>
  8. #include <AK/Endian.h>
  9. #include <LibJS/Runtime/AtomicsObject.h>
  10. #include <LibJS/Runtime/GlobalObject.h>
  11. #include <LibJS/Runtime/TypedArray.h>
  12. #include <LibJS/Runtime/Value.h>
  13. namespace JS {
  14. // 25.4.2.1 ValidateIntegerTypedArray ( typedArray [ , waitable ] ), https://tc39.es/ecma262/#sec-validateintegertypedarray
  15. static ThrowCompletionOr<ArrayBuffer*> validate_integer_typed_array(GlobalObject& global_object, TypedArrayBase& typed_array, bool waitable = false)
  16. {
  17. auto& vm = global_object.vm();
  18. // 1. If waitable is not present, set waitable to false.
  19. // 2. Perform ? ValidateTypedArray(typedArray).
  20. TRY(validate_typed_array(global_object, typed_array));
  21. // 3. Let buffer be typedArray.[[ViewedArrayBuffer]].
  22. auto* buffer = typed_array.viewed_array_buffer();
  23. // 4. Let typeName be typedArray.[[TypedArrayName]].
  24. auto type_name = typed_array.element_name();
  25. // 5. Let type be the Element Type value in Table 72 for typeName.
  26. // 6. If waitable is true, then
  27. if (waitable) {
  28. // a. If typeName is not "Int32Array" or "BigInt64Array", throw a TypeError exception.
  29. if ((type_name != "Int32Array"sv) && (type_name != "BigInt64Array"sv))
  30. return vm.throw_completion<TypeError>(global_object, ErrorType::TypedArrayTypeIsNot, type_name, "Int32 or BigInt64"sv);
  31. }
  32. // 7. Else,
  33. else {
  34. // a. If ! IsUnclampedIntegerElementType(type) is false and ! IsBigIntElementType(type) is false, throw a TypeError exception.
  35. if (!typed_array.is_unclamped_integer_element_type() && !typed_array.is_bigint_element_type())
  36. return vm.throw_completion<TypeError>(global_object, ErrorType::TypedArrayTypeIsNot, type_name, "an unclamped integer or BigInt"sv);
  37. }
  38. // 8. Return buffer.
  39. return buffer;
  40. }
  41. // 25.4.2.2 ValidateAtomicAccess ( typedArray, requestIndex ), https://tc39.es/ecma262/#sec-validateatomicaccess
  42. static Optional<size_t> validate_atomic_access(GlobalObject& global_object, TypedArrayBase& typed_array, Value request_index)
  43. {
  44. auto& vm = global_object.vm();
  45. auto access_index = request_index.to_index(global_object);
  46. if (vm.exception())
  47. return {};
  48. if (access_index >= typed_array.array_length()) {
  49. vm.throw_exception<RangeError>(global_object, ErrorType::IndexOutOfRange, access_index, typed_array.array_length());
  50. return {};
  51. }
  52. return access_index * typed_array.element_size() + typed_array.byte_offset();
  53. }
// 25.4.2.11 AtomicReadModifyWrite ( typedArray, index, value, op ), https://tc39.es/ecma262/#sec-atomicreadmodifywrite
// Validates the typed array and index, coerces `value` to the array's content type,
// then applies `operation` to the element at the resolved byte index, returning the
// element's previous value (via get_modify_set_value_in_buffer).
static Value atomic_read_modify_write(GlobalObject& global_object, TypedArrayBase& typed_array, Value index, Value value, ReadWriteModifyFunction operation)
{
    auto& vm = global_object.vm();

    // 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
    TRY_OR_DISCARD(validate_integer_typed_array(global_object, typed_array));

    // 2. Let byteIndexInBuffer be ? ValidateAtomicAccess(typedArray, index).
    // An empty Optional means an exception has already been thrown on the VM.
    auto byte_index = validate_atomic_access(global_object, typed_array, index);
    if (!byte_index.has_value())
        return {};

    // 3-4. Coerce the operand according to the array's content type (BigInt vs. Number).
    Value value_to_set;
    if (typed_array.content_type() == TypedArrayBase::ContentType::BigInt) {
        value_to_set = value.to_bigint(global_object);
        if (vm.exception())
            return {};
    } else {
        value_to_set = Value(value.to_integer_or_infinity(global_object));
        if (vm.exception())
            return {};
    }

    // 5. The coercion above can run arbitrary user code (valueOf/toString), which may
    //    detach the buffer — so re-check for detachment before touching memory.
    if (typed_array.viewed_array_buffer()->is_detached()) {
        vm.throw_exception<TypeError>(global_object, ErrorType::DetachedArrayBuffer);
        return {};
    }

    // 6-9. Perform the read-modify-write on the underlying buffer.
    return typed_array.get_modify_set_value_in_buffer(*byte_index, value_to_set, move(operation));
}
// Shared driver for Atomics.add/and/exchange/or/sub/xor: pulls the index and value
// arguments off the VM, wraps the type-agnostic atomic `operation` so it works on raw
// byte buffers reinterpreted as element type T, and delegates to AtomicReadModifyWrite.
template<typename T, typename AtomicFunction>
static Value perform_atomic_operation(GlobalObject& global_object, TypedArrayBase& typed_array, AtomicFunction&& operation)
{
    auto& vm = global_object.vm();
    auto index = vm.argument(1);
    auto value = vm.argument(2);

    // The wrapper receives the element's raw bytes (x_bytes) and the operand's raw
    // bytes (y_bytes); it applies the atomic operation in place and hands back x_bytes.
    auto operation_wrapper = [&, operation = forward<AtomicFunction>(operation)](ByteBuffer x_bytes, ByteBuffer y_bytes) -> ByteBuffer {
        if constexpr (IsFloatingPoint<T>) {
            // Float element types are rejected by ValidateIntegerTypedArray, so this
            // instantiation can never be reached at runtime.
            VERIFY_NOT_REACHED();
        } else {
            // ClampedU8 is not a machine integer type, so operate on plain u8 instead.
            using U = Conditional<IsSame<ClampedU8, T>, u8, T>;

            auto* x = reinterpret_cast<U*>(x_bytes.data());
            auto* y = reinterpret_cast<U*>(y_bytes.data());
            operation(x, *y);

            return x_bytes;
        }
    };

    return atomic_read_modify_write(global_object, typed_array, index, value, move(operation_wrapper));
}
// The Atomics namespace object is an ordinary object whose prototype is %Object.prototype%.
AtomicsObject::AtomicsObject(GlobalObject& global_object)
    : Object(*global_object.object_prototype())
{
}
// Installs the Atomics functions and the @@toStringTag property on this object.
void AtomicsObject::initialize(GlobalObject& global_object)
{
    Object::initialize(global_object);
    auto& vm = this->vm();

    // Per spec, the Atomics functions are writable and configurable but not enumerable.
    u8 attr = Attribute::Writable | Attribute::Configurable;
    define_native_function(vm.names.add, add, 3, attr);
    define_native_function(vm.names.and_, and_, 3, attr);
    define_native_function(vm.names.compareExchange, compare_exchange, 4, attr);
    define_native_function(vm.names.exchange, exchange, 3, attr);
    define_native_function(vm.names.isLockFree, is_lock_free, 1, attr);
    define_native_function(vm.names.load, load, 2, attr);
    define_native_function(vm.names.or_, or_, 3, attr);
    define_native_function(vm.names.store, store, 3, attr);
    define_native_function(vm.names.sub, sub, 3, attr);
    define_native_function(vm.names.xor_, xor_, 3, attr);

    // 25.4.15 Atomics [ @@toStringTag ], https://tc39.es/ecma262/#sec-atomics-@@tostringtag
    define_direct_property(*vm.well_known_symbol_to_string_tag(), js_string(global_object.heap(), "Atomics"), Attribute::Configurable);
}
// 25.4.3 Atomics.add ( typedArray, index, value ), https://tc39.es/ecma262/#sec-atomics.add
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::add)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    auto atomic_add = [](auto* storage, auto value) { return AK::atomic_fetch_add(storage, value); };

    // Dispatch to perform_atomic_operation instantiated with the array's element type.
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    if (is<ClassName>(typed_array))                                                 \
        return perform_atomic_operation<Type>(global_object, *typed_array, move(atomic_add));
    JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE

    VERIFY_NOT_REACHED();
}
// 25.4.4 Atomics.and ( typedArray, index, value ), https://tc39.es/ecma262/#sec-atomics.and
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::and_)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    auto atomic_and = [](auto* storage, auto value) { return AK::atomic_fetch_and(storage, value); };

    // Dispatch to perform_atomic_operation instantiated with the array's element type.
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    if (is<ClassName>(typed_array))                                                 \
        return perform_atomic_operation<Type>(global_object, *typed_array, move(atomic_and));
    JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE

    VERIFY_NOT_REACHED();
}
// Per-element-type implementation of Atomics.compareExchange: coerces the expected and
// replacement arguments, serializes them to raw bytes of element type T, and performs
// an atomic compare-and-swap against the typed array's backing buffer. Returns the
// value stored at the index before the operation.
template<typename T>
static Value atomic_compare_exchange_impl(GlobalObject& global_object, TypedArrayBase& typed_array)
{
    auto& vm = global_object.vm();

    // 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
    TRY_OR_DISCARD(validate_integer_typed_array(global_object, typed_array));

    // 2. Let byteIndexInBuffer be ? ValidateAtomicAccess(typedArray, index).
    auto indexed_position = validate_atomic_access(global_object, typed_array, vm.argument(1));
    if (!indexed_position.has_value())
        return {};

    // 3-6. Coerce expectedValue and replacementValue to the array's content type.
    Value expected;
    Value replacement;
    if (typed_array.content_type() == TypedArrayBase::ContentType::BigInt) {
        expected = vm.argument(2).to_bigint(global_object);
        if (vm.exception())
            return {};
        replacement = vm.argument(3).to_bigint(global_object);
        if (vm.exception())
            return {};
    } else {
        expected = Value(vm.argument(2).to_integer_or_infinity(global_object));
        if (vm.exception())
            return {};
        replacement = Value(vm.argument(3).to_integer_or_infinity(global_object));
        if (vm.exception())
            return {};
    }

    // 7. The coercions above may run user code that detaches the buffer; re-check.
    if (typed_array.viewed_array_buffer()->is_detached()) {
        vm.throw_exception<TypeError>(global_object, ErrorType::DetachedArrayBuffer);
        return {};
    }

    // 8-11. Serialize both operands in the host's native byte order, because the
    //       compare-exchange below operates directly on native memory.
    constexpr bool is_little_endian = __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__;
    auto& block = typed_array.viewed_array_buffer()->buffer();
    auto expected_bytes = numeric_to_raw_bytes<T>(global_object, expected, is_little_endian);
    auto replacement_bytes = numeric_to_raw_bytes<T>(global_object, replacement, is_little_endian);

    // FIXME: Implement SharedArrayBuffer case.
    // Snapshot the element's bytes before the CAS so the old value can be returned.
    // NOTE(review): the snapshot and the CAS are two separate steps, not one atomic
    // operation — acceptable while SharedArrayBuffer (concurrent mutators) is unimplemented.
    auto raw_bytes_read = block.slice(*indexed_position, sizeof(T));
    if constexpr (IsFloatingPoint<T>) {
        // Float element types are rejected by ValidateIntegerTypedArray above.
        VERIFY_NOT_REACHED();
    } else {
        // ClampedU8 is not a machine integer type, so operate on plain u8 instead.
        using U = Conditional<IsSame<ClampedU8, T>, u8, T>;

        auto* v = reinterpret_cast<U*>(block.span().slice(*indexed_position).data());
        auto* e = reinterpret_cast<U*>(expected_bytes.data());
        auto* r = reinterpret_cast<U*>(replacement_bytes.data());

        // Whether the exchange succeeded is irrelevant: the old value is returned either way.
        (void)AK::atomic_compare_exchange_strong(v, *e, *r);
    }

    // 12. Return RawBytesToNumeric(type, rawBytesRead, isLittleEndian).
    return raw_bytes_to_numeric<T>(global_object, raw_bytes_read, is_little_endian);
}
// 25.4.5 Atomics.compareExchange ( typedArray, index, expectedValue, replacementValue ), https://tc39.es/ecma262/#sec-atomics.compareexchange
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::compare_exchange)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    // Dispatch to the implementation instantiated with the array's element type.
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    if (is<ClassName>(typed_array))                                                 \
        return atomic_compare_exchange_impl<Type>(global_object, *typed_array);
    JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE

    VERIFY_NOT_REACHED();
}
// 25.4.6 Atomics.exchange ( typedArray, index, value ), https://tc39.es/ecma262/#sec-atomics.exchange
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::exchange)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    auto atomic_exchange = [](auto* storage, auto value) { return AK::atomic_exchange(storage, value); };

    // Dispatch to perform_atomic_operation instantiated with the array's element type.
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    if (is<ClassName>(typed_array))                                                 \
        return perform_atomic_operation<Type>(global_object, *typed_array, move(atomic_exchange));
    JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE

    VERIFY_NOT_REACHED();
}
  220. // 25.4.7 Atomics.isLockFree ( size ), https://tc39.es/ecma262/#sec-atomics.islockfree
  221. JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::is_lock_free)
  222. {
  223. auto size = vm.argument(0).to_integer_or_infinity(global_object);
  224. if (vm.exception())
  225. return {};
  226. if (size == 1)
  227. return Value(AK::atomic_is_lock_free<u8>());
  228. if (size == 2)
  229. return Value(AK::atomic_is_lock_free<u16>());
  230. if (size == 4)
  231. return Value(true);
  232. if (size == 8)
  233. return Value(AK::atomic_is_lock_free<u64>());
  234. return Value(false);
  235. }
// 25.4.8 Atomics.load ( typedArray, index ), https://tc39.es/ecma262/#sec-atomics.load
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::load)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    // 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
    TRY_OR_DISCARD(validate_integer_typed_array(global_object, *typed_array));

    // 2. Let byteIndexInBuffer be ? ValidateAtomicAccess(typedArray, index).
    auto indexed_position = validate_atomic_access(global_object, *typed_array, vm.argument(1));
    if (!indexed_position.has_value())
        return {};

    // 3-4. ToIndex inside ValidateAtomicAccess may run user code that detaches the
    //      buffer, so re-check before reading.
    if (typed_array->viewed_array_buffer()->is_detached()) {
        vm.throw_exception<TypeError>(global_object, ErrorType::DetachedArrayBuffer);
        return {};
    }

    // 5-6. Perform a seq-cst read of the element and return it.
    return typed_array->get_value_from_buffer(*indexed_position, ArrayBuffer::Order::SeqCst, true);
}
// 25.4.9 Atomics.or ( typedArray, index, value ), https://tc39.es/ecma262/#sec-atomics.or
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::or_)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    auto atomic_or = [](auto* storage, auto value) { return AK::atomic_fetch_or(storage, value); };

    // Dispatch to perform_atomic_operation instantiated with the array's element type.
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    if (is<ClassName>(typed_array))                                                 \
        return perform_atomic_operation<Type>(global_object, *typed_array, move(atomic_or));
    JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE

    VERIFY_NOT_REACHED();
}
// 25.4.10 Atomics.store ( typedArray, index, value ), https://tc39.es/ecma262/#sec-atomics.store
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::store)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    // 1. Let buffer be ? ValidateIntegerTypedArray(typedArray).
    TRY_OR_DISCARD(validate_integer_typed_array(global_object, *typed_array));

    // 2. Let byteIndexInBuffer be ? ValidateAtomicAccess(typedArray, index).
    auto indexed_position = validate_atomic_access(global_object, *typed_array, vm.argument(1));
    if (!indexed_position.has_value())
        return {};

    // 3-4. Coerce the value according to the array's content type (BigInt vs. Number).
    auto value = vm.argument(2);
    Value value_to_set;
    if (typed_array->content_type() == TypedArrayBase::ContentType::BigInt) {
        value_to_set = value.to_bigint(global_object);
        if (vm.exception())
            return {};
    } else {
        value_to_set = Value(value.to_integer_or_infinity(global_object));
        if (vm.exception())
            return {};
    }

    // 5. The coercion may run user code that detaches the buffer; re-check before writing.
    if (typed_array->viewed_array_buffer()->is_detached()) {
        vm.throw_exception<TypeError>(global_object, ErrorType::DetachedArrayBuffer);
        return {};
    }

    // 6-9. Perform a seq-cst write, then return the coerced value (not the raw argument).
    typed_array->set_value_in_buffer(*indexed_position, value_to_set, ArrayBuffer::Order::SeqCst, true);
    return value_to_set;
}
// 25.4.11 Atomics.sub ( typedArray, index, value ), https://tc39.es/ecma262/#sec-atomics.sub
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::sub)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    auto atomic_sub = [](auto* storage, auto value) { return AK::atomic_fetch_sub(storage, value); };

    // Dispatch to perform_atomic_operation instantiated with the array's element type.
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    if (is<ClassName>(typed_array))                                                 \
        return perform_atomic_operation<Type>(global_object, *typed_array, move(atomic_sub));
    JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE

    VERIFY_NOT_REACHED();
}
// 25.4.14 Atomics.xor ( typedArray, index, value ), https://tc39.es/ecma262/#sec-atomics.xor
JS_DEFINE_NATIVE_FUNCTION(AtomicsObject::xor_)
{
    // typed_array_from throws and returns null when argument 0 is not a typed array.
    auto* typed_array = typed_array_from(global_object, vm.argument(0));
    if (!typed_array)
        return {};

    auto atomic_xor = [](auto* storage, auto value) { return AK::atomic_fetch_xor(storage, value); };

    // Dispatch to perform_atomic_operation instantiated with the array's element type.
#define __JS_ENUMERATE(ClassName, snake_name, PrototypeName, ConstructorName, Type) \
    if (is<ClassName>(typed_array))                                                 \
        return perform_atomic_operation<Type>(global_object, *typed_array, move(atomic_xor));
    JS_ENUMERATE_TYPED_ARRAYS
#undef __JS_ENUMERATE

    VERIFY_NOT_REACHED();
}
  322. }