/*
 * Copyright (c) 2021, Ali Mohammad Pur
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#pragma once

#include <AK/Array.h>
#include <AK/BitCast.h>
#include <AK/StdLibExtras.h>
#include <AK/TypeList.h>

namespace AK::Detail {

template<typename T, typename IndexType, IndexType InitialIndex, typename... InTypes>
struct VariantIndexOf {
    static_assert(DependentFalse<T, IndexType, InTypes...>, "Invalid VariantIndex instantiated");
};

template<typename T, typename IndexType, IndexType InitialIndex, typename InType, typename... RestOfInTypes>
struct VariantIndexOf<T, IndexType, InitialIndex, InType, RestOfInTypes...> {
    consteval IndexType operator()()
    {
        if constexpr (IsSame<T, InType>)
            return InitialIndex;
        else
            return VariantIndexOf<T, IndexType, InitialIndex + 1, RestOfInTypes...> {}();
    }
};

template<typename T, typename IndexType, IndexType InitialIndex>
struct VariantIndexOf<T, IndexType, InitialIndex> {
    consteval IndexType operator()() { return InitialIndex; }
};

template<typename T, typename IndexType, typename... Ts>
consteval IndexType index_of()
{
    return VariantIndexOf<T, IndexType, 0, Ts...> {}();
}

template<typename IndexType, IndexType InitialIndex, typename... Ts>
struct Variant;

template<typename IndexType, IndexType InitialIndex, typename F, typename... Ts>
struct Variant<IndexType, InitialIndex, F, Ts...> {
    static constexpr auto current_index = VariantIndexOf<F, IndexType, InitialIndex, F, Ts...> {}();

    ALWAYS_INLINE static void delete_(IndexType id, void* data)
    {
        if (id == current_index)
            bit_cast<F*>(data)->~F();
        else
            Variant<IndexType, InitialIndex + 1, Ts...>::delete_(id, data);
    }

    ALWAYS_INLINE static void move_(IndexType old_id, void* old_data, void* new_data)
    {
        if (old_id == current_index)
            new (new_data) F(move(*bit_cast<F*>(old_data)));
        else
            Variant<IndexType, InitialIndex + 1, Ts...>::move_(old_id, old_data, new_data);
    }

    ALWAYS_INLINE static void copy_(IndexType old_id, void const* old_data, void* new_data)
    {
        if (old_id == current_index)
            new (new_data) F(*bit_cast<F const*>(old_data));
        else
            Variant<IndexType, InitialIndex + 1, Ts...>::copy_(old_id, old_data, new_data);
    }
};

template<typename IndexType, IndexType InitialIndex>
struct Variant<IndexType, InitialIndex> {
    ALWAYS_INLINE static void delete_(IndexType, void*) { }
    ALWAYS_INLINE static void move_(IndexType, void*, void*) { }
    ALWAYS_INLINE static void copy_(IndexType, void const*, void*) { }
};
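
// The helpers above map a type to its index at compile time (index_of) and
// dispatch destruction/move/copy on the runtime index by walking the pack one
// alternative at a time until the empty base case is reached.
// Illustrative compile-time sketch of the index mapping; these asserts were
// added for clarity and are not part of the original interface:
static_assert(index_of<int, u8, int, float, double>() == 0);
static_assert(index_of<double, u8, int, float, double>() == 2);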
template<typename IndexType, typename... Ts>
struct VisitImpl {
    template<typename ReturnType, typename T, size_t I, typename Fn>
    static constexpr bool has_explicitly_named_overload()
    {
        // If we're not allowed to make a member function pointer and call it directly (without explicitly resolving it),
        // we have a templated function on our hands (or a function overload set).
        // In such cases, we don't have an explicitly named overload, and we would have to select it.
        return requires { (declval<Fn>().*(&Fn::operator()))(declval<T>()); };
    }

    template<typename ReturnType, typename T, typename Visitor, auto... Is>
    static constexpr bool should_invoke_const_overload(IndexSequence<Is...>)
    {
        // Scan over all the different visitor functions; if none of them are suitable for calling with `T const&`, avoid calling that first.
        return ((has_explicitly_named_overload<ReturnType, T, Is, typename Visitor::Types::template Type<Is>>()) || ...);
    }

    template<typename Self, typename Visitor, IndexType CurrentIndex = 0>
    ALWAYS_INLINE static constexpr decltype(auto) visit(Self& self, IndexType id, void const* data, Visitor&& visitor)
    requires(CurrentIndex < sizeof...(Ts))
    {
        using T = typename TypeList<Ts...>::template Type<CurrentIndex>;

        if (id == CurrentIndex) {
            // Check if Visitor::operator() is an explicitly typed function (as opposed to a templated function);
            // if so, try to call that with `T const&` first before copying the Variant's const-ness.
            // This emulates normal C++ call semantics, where templated functions are considered last, after all non-templated overloads
            // have been checked and found to be unusable.
            using ReturnType = decltype(visitor(*bit_cast<T*>(data)));
            if constexpr (should_invoke_const_overload<ReturnType, T, Visitor>(MakeIndexSequence<Visitor::Types::size>()))
                return visitor(*bit_cast<AddConst<T>*>(data));

            return visitor(*bit_cast<CopyConst<Self, T>*>(data));
        }

        if constexpr ((CurrentIndex + 1) < sizeof...(Ts))
            return visit<Self, Visitor, CurrentIndex + 1>(self, id, data, forward<Visitor>(visitor));
        else
            VERIFY_NOT_REACHED();
    }
};

struct VariantNoClearTag {
    explicit VariantNoClearTag() = default;
};
struct VariantConstructTag {
    explicit VariantConstructTag() = default;
};

template<typename T, typename Base>
struct VariantConstructors {
    ALWAYS_INLINE VariantConstructors(T&& t)
    requires(requires { T(move(t)); })
    {
        internal_cast().clear_without_destruction();
        internal_cast().set(move(t), VariantNoClearTag {});
    }

    ALWAYS_INLINE VariantConstructors(T const& t)
    requires(requires { T(t); })
    {
        internal_cast().clear_without_destruction();
        internal_cast().set(t, VariantNoClearTag {});
    }

    ALWAYS_INLINE VariantConstructors() = default;

private:
    [[nodiscard]] ALWAYS_INLINE Base& internal_cast()
    {
        // Warning: Internal type shenanigans - VariantConstructors<T, Base> <- Base,
        //          not the other way around, so be _really_ careful not to cause issues.
        return *static_cast<Base*>(this);
    }
};

// Type list deduplication
// Since this is a big template mess, each template is commented with how and why it works.
struct ParameterPackTag {
};

// ParameterPack<Ts...> is just a way to pass around the type parameter pack Ts.
template<typename... Ts>
struct ParameterPack : ParameterPackTag {
};

// Blank<T> is a unique replacement for T, if T is a duplicate type.
template<typename T>
struct Blank {
};

template<typename A, typename P>
inline constexpr bool IsTypeInPack = false;

// IsTypeInPack<T, ParameterPack<Ts...>> will just return whether 'T' exists in 'Ts'.
template<typename T, typename... Ts>
inline constexpr bool IsTypeInPack<T, ParameterPack<Ts...>> = (IsSame<T, Ts> || ...);

// Replaces T with Blank<T> if it exists in any of the packs Qs.
template<typename T, typename... Qs>
using BlankIfDuplicate = Conditional<(IsTypeInPack<T, Qs> || ...), Blank<T>, T>;

template<unsigned I, typename...>
struct InheritFromUniqueEntries;

// InheritFromUniqueEntries will inherit from both Qs and Ts, but only scan entries going *forwards*;
// that is to say, if it's scanning from index I in Qs, it won't scan for duplicates for entries before I,
// as those have already been checked before.
// This makes sure that the search is linear in time (like the 'merge' step of merge sort).
template<unsigned I, typename... Ts, unsigned... Js, typename... Qs>
struct InheritFromUniqueEntries<I, ParameterPack<Ts...>, IndexSequence<Js...>, Qs...>
    : public BlankIfDuplicate<Ts, Conditional<Js <= I, ParameterPackTag, Qs>...>... {

    using BlankIfDuplicate<Ts, Conditional<Js <= I, ParameterPackTag, Qs>...>::BlankIfDuplicate...;
};

template<typename...>
struct InheritFromPacks;

// InheritFromPacks will attempt to 'merge' the pack 'Ps' with *itself*, but skip the duplicate entries
// (via InheritFromUniqueEntries).
template<unsigned... Is, typename... Ps>
struct InheritFromPacks<IndexSequence<Is...>, Ps...>
    : public InheritFromUniqueEntries<Is, Ps, IndexSequence<Is...>, Ps...>... {

    using InheritFromUniqueEntries<Is, Ps, IndexSequence<Is...>, Ps...>::InheritFromUniqueEntries...;
};

// Just a nice wrapper around InheritFromPacks, which will wrap any parameter packs in ParameterPack (unless it already is one).
template<typename... Ps>
using MergeAndDeduplicatePacks = InheritFromPacks<MakeIndexSequence<sizeof...(Ps)>, Conditional<IsBaseOf<ParameterPackTag, Ps>, Ps, ParameterPack<Ps>>...>;
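
// Illustrative compile-time sketch of the deduplication primitives above: a type is
// "in" a ParameterPack exactly when it matches one of its entries, and
// MergeAndDeduplicatePacks uses that to blank out repeated constructor entries.
// These asserts were added for clarity and are not part of the original interface:
static_assert(IsTypeInPack<int, ParameterPack<float, int, double>>);
static_assert(!IsTypeInPack<char, ParameterPack<float, int, double>>);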
}

namespace AK {

struct Empty {
};

template<typename T>
concept NotLvalueReference = !IsLvalueReference<T>;

template<NotLvalueReference... Ts>
struct Variant
    : public Detail::MergeAndDeduplicatePacks<Detail::VariantConstructors<Ts, Variant<Ts...>>...> {
private:
    using IndexType = Conditional<(sizeof...(Ts) < 255), u8, size_t>; // Note: size+1 reserved for internal value checks
    static constexpr IndexType invalid_index = sizeof...(Ts);

    template<typename T>
    static constexpr IndexType index_of() { return Detail::index_of<T, IndexType, Ts...>(); }

public:
    template<typename T>
    static constexpr bool can_contain()
    {
        return index_of<T>() != invalid_index;
    }

    template<typename... NewTs>
    Variant(Variant<NewTs...>&& old)
    requires((can_contain<NewTs>() && ...))
        : Variant(move(old).template downcast<Ts...>())
    {
    }

    template<typename... NewTs>
    Variant(Variant<NewTs...> const& old)
    requires((can_contain<NewTs>() && ...))
        : Variant(old.template downcast<Ts...>())
    {
    }

    template<NotLvalueReference... NewTs>
    friend struct Variant;

    Variant()
    requires(!can_contain<Empty>())
    = delete;
    Variant()
    requires(can_contain<Empty>())
        : Variant(Empty())
    {
    }

#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
    Variant(Variant const&)
    requires(!(IsCopyConstructible<Ts> && ...))
    = delete;
    Variant(Variant const&) = default;

    Variant(Variant&&)
    requires(!(IsMoveConstructible<Ts> && ...))
    = delete;
    Variant(Variant&&) = default;

    ~Variant()
    requires(!(IsDestructible<Ts> && ...))
    = delete;
    ~Variant() = default;

    Variant& operator=(Variant const&)
    requires(!(IsCopyConstructible<Ts> && ...) || !(IsDestructible<Ts> && ...))
    = delete;
    Variant& operator=(Variant const&) = default;

    Variant& operator=(Variant&&)
    requires(!(IsMoveConstructible<Ts> && ...) || !(IsDestructible<Ts> && ...))
    = delete;
    Variant& operator=(Variant&&) = default;
#endif

    ALWAYS_INLINE Variant(Variant const& old)
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
    requires(!(IsTriviallyCopyConstructible<Ts> && ...))
#endif
        : Detail::MergeAndDeduplicatePacks<Detail::VariantConstructors<Ts, Variant<Ts...>>...>()
        , m_data {}
        , m_index(old.m_index)
    {
        Helper::copy_(old.m_index, old.m_data, m_data);
    }

    // Note: A moved-from variant emulates the state of the object it contains,
    //       so if a variant containing an int is moved from, it will still contain that int,
    //       and if a variant with a nontrivial move ctor is moved from, it may or may not be valid,
    //       but it will still contain the "moved-from" state of the object it previously contained.
    ALWAYS_INLINE Variant(Variant&& old)
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
    requires(!(IsTriviallyMoveConstructible<Ts> && ...))
#endif
        : Detail::MergeAndDeduplicatePacks<Detail::VariantConstructors<Ts, Variant<Ts...>>...>()
        , m_index(old.m_index)
    {
        Helper::move_(old.m_index, old.m_data, m_data);
    }

    ALWAYS_INLINE ~Variant()
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
    requires(!(IsTriviallyDestructible<Ts> && ...))
#endif
    {
        Helper::delete_(m_index, m_data);
    }

    ALWAYS_INLINE Variant& operator=(Variant const& other)
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
    requires(!(IsTriviallyCopyConstructible<Ts> && ...) || !(IsTriviallyDestructible<Ts> && ...))
#endif
    {
        if (this != &other) {
            if constexpr (!(IsTriviallyDestructible<Ts> && ...)) {
                Helper::delete_(m_index, m_data);
            }
            m_index = other.m_index;
            Helper::copy_(other.m_index, other.m_data, m_data);
        }
        return *this;
    }

    ALWAYS_INLINE Variant& operator=(Variant&& other)
#ifdef AK_HAS_CONDITIONALLY_TRIVIAL
    requires(!(IsTriviallyMoveConstructible<Ts> && ...) || !(IsTriviallyDestructible<Ts> && ...))
#endif
    {
        if (this != &other) {
            if constexpr (!(IsTriviallyDestructible<Ts> && ...)) {
                Helper::delete_(m_index, m_data);
            }
            m_index = other.m_index;
            Helper::move_(other.m_index, other.m_data, m_data);
        }
        return *this;
    }
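
    // Illustrative usage sketch (added for clarity; the calling code below is
    // hypothetical and not part of this header):
    //
    //     Variant<int, double> value { 42 };  // converting constructor inherited from VariantConstructors
    //     bool is_int = value.has<int>();     // true
    //     value.set(3.14);                    // destroys the int, then constructs the double in place
    //     if (auto* number = value.get_pointer<double>())
    //         *number *= 2.0;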
    using Detail::MergeAndDeduplicatePacks<Detail::VariantConstructors<Ts, Variant<Ts...>>...>::MergeAndDeduplicatePacks;

    template<typename T, typename StrippedT = RemoveCVReference<T>>
    void set(T&& t)
    requires(can_contain<StrippedT>() && requires { StrippedT(forward<T>(t)); })
    {
        constexpr auto new_index = index_of<StrippedT>();
        Helper::delete_(m_index, m_data);
        new (m_data) StrippedT(forward<T>(t));
        m_index = new_index;
    }

    template<typename T, typename StrippedT = RemoveCVReference<T>>
    void set(T&& t, Detail::VariantNoClearTag)
    requires(can_contain<StrippedT>() && requires { StrippedT(forward<T>(t)); })
    {
        constexpr auto new_index = index_of<StrippedT>();
        new (m_data) StrippedT(forward<T>(t));
        m_index = new_index;
    }

    template<typename T>
    T* get_pointer()
    requires(can_contain<T>())
    {
        if (index_of<T>() == m_index)
            return bit_cast<T*>(&m_data);
        return nullptr;
    }

    template<typename T>
    T& get()
    requires(can_contain<T>())
    {
        VERIFY(has<T>());
        return *bit_cast<T*>(&m_data);
    }

    template<typename T>
    T const* get_pointer() const
    requires(can_contain<T>())
    {
        if (index_of<T>() == m_index)
            return bit_cast<T const*>(&m_data);
        return nullptr;
    }

    template<typename T>
    T const& get() const
    requires(can_contain<T>())
    {
        VERIFY(has<T>());
        return *bit_cast<T const*>(&m_data);
    }

    template<typename T>
    [[nodiscard]] bool has() const
    requires(can_contain<T>())
    {
        return index_of<T>() == m_index;
    }

    template<typename... Fs>
    ALWAYS_INLINE decltype(auto) visit(Fs&&... functions)
    {
        Visitor<Fs...> visitor { forward<Fs>(functions)... };
        return VisitHelper::visit(*this, m_index, m_data, move(visitor));
    }

    template<typename... Fs>
    ALWAYS_INLINE decltype(auto) visit(Fs&&... functions) const
    {
        Visitor<Fs...> visitor { forward<Fs>(functions)... };
        return VisitHelper::visit(*this, m_index, m_data, move(visitor));
    }

    template<typename... NewTs>
    decltype(auto) downcast() &&
    {
        if constexpr (sizeof...(NewTs) == 1 && (IsSpecializationOf<NewTs, Variant> && ...)) {
            return move(*this).template downcast_variant(TypeWrapper<NewTs...> {});
        } else {
            Variant<NewTs...> instance { Variant<NewTs...>::invalid_index, Detail::VariantConstructTag {} };
            visit([&](auto& value) {
                if constexpr (Variant<NewTs...>::template can_contain<RemoveCVReference<decltype(value)>>())
                    instance.set(move(value), Detail::VariantNoClearTag {});
            });
            VERIFY(instance.m_index != instance.invalid_index);
            return instance;
        }
    }

    template<typename... NewTs>
    decltype(auto) downcast() const&
    {
        if constexpr (sizeof...(NewTs) == 1 && (IsSpecializationOf<NewTs, Variant> && ...)) {
            return (*this).template downcast_variant(TypeWrapper<NewTs...> {});
        } else {
            Variant<NewTs...> instance { Variant<NewTs...>::invalid_index, Detail::VariantConstructTag {} };
            visit([&](auto const& value) {
                if constexpr (Variant<NewTs...>::template can_contain<RemoveCVReference<decltype(value)>>())
                    instance.set(value, Detail::VariantNoClearTag {});
            });
            VERIFY(instance.m_index != instance.invalid_index);
            return instance;
        }
    }

    auto index() const { return m_index; }

private:
    template<typename... NewTs>
    Variant<NewTs...> downcast_variant(TypeWrapper<Variant<NewTs...>>) &&
    {
        return move(*this).template downcast<NewTs...>();
    }

    template<typename... NewTs>
    Variant<NewTs...> downcast_variant(TypeWrapper<Variant<NewTs...>>) const&
    {
        return (*this).template downcast<NewTs...>();
    }

    static constexpr auto data_size = Detail::integer_sequence_generate_array<size_t>(0, IntegerSequence<size_t, sizeof(Ts)...>()).max();
    static constexpr auto data_alignment = Detail::integer_sequence_generate_array<size_t>(0, IntegerSequence<size_t, alignof(Ts)...>()).max();
    using Helper = Detail::Variant<IndexType, 0, Ts...>;
    using VisitHelper = Detail::VisitImpl<IndexType, Ts...>;

    template<typename T_, typename U_>
    friend struct Detail::VariantConstructors;

    explicit Variant(IndexType index, Detail::VariantConstructTag)
        : Detail::MergeAndDeduplicatePacks<Detail::VariantConstructors<Ts, Variant<Ts...>>...>()
        , m_index(index)
    {
    }

    ALWAYS_INLINE void clear_without_destruction()
    {
        __builtin_memset(m_data, 0, data_size);
        m_index = invalid_index;
    }
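
    // The Visitor struct below folds all passed lambdas into one object whose
    // operator() overloads form a single overload set, which is what makes calls
    // like the following work (illustrative sketch added for clarity; the calling
    // code is hypothetical and not part of this header):
    //
    //     Variant<int, double> value { 42 };
    //     value.visit(
    //         [](int i) { dbgln("int: {}", i); },
    //         [](double d) { dbgln("double: {}", d); });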
    template<typename... Fs>
    struct Visitor : Fs... {
        using Types = TypeList<Fs...>;

        Visitor(Fs&&... args)
            : Fs(forward<Fs>(args))...
        {
        }

        using Fs::operator()...;
    };

    // Note: Make sure not to default-initialize!
    //       VariantConstructors::VariantConstructors(T) will set this to the correct value,
    //       so default-constructing it to anything would leave the first initialization with that value instead of the correct one.
    alignas(data_alignment) u8 m_data[data_size];
    IndexType m_index;
};

}

#if USING_AK_GLOBALLY
using AK::Empty;
using AK::Variant;
#endif