  using host_mirror_space =
      typename ViewTraits<Key, Device, void, void>::host_mirror_space;
  // key types
  using declared_key_type = Key;
  using key_type = std::remove_const_t<declared_key_type>;
  using const_key_type = std::add_const_t<key_type>;
  // value types
  using declared_value_type = Value;
  using value_type = std::remove_const_t<declared_value_type>;
  using const_value_type = std::add_const_t<value_type>;
  using device_type = Device;
  using execution_space = typename Device::execution_space;
  using hasher_type = Hasher;
  using equal_to_type = EqualTo;
  using size_type = uint32_t;

  // related map specializations
  using declared_map_type =
      UnorderedMap<declared_key_type, declared_value_type, device_type,
                   hasher_type, equal_to_type>;
  using insertable_map_type = UnorderedMap<key_type, value_type, device_type,
                                           hasher_type, equal_to_type>;
  using const_map_type = UnorderedMap<const_key_type, const_value_type,
                                      device_type, hasher_type, equal_to_type>;
  // A map whose value_type is void behaves as a set of keys.
  static const bool is_set = std::is_void<value_type>::value;
  static const bool has_const_key =
      std::is_same<const_key_type, declared_key_type>::value;
  static const bool has_const_value =
      is_set || std::is_same<const_value_type, declared_value_type>::value;

  static const bool is_insertable_map =
      !has_const_key && (is_set || !has_const_value);
  static const bool is_modifiable_map = has_const_key && !has_const_value;
  static const bool is_const_map = has_const_key && has_const_value;
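  // Illustrative sketch (not part of the original header): how these traits
  // classify a specialization; `map_t` is a hypothetical alias.
  //
  //   using map_t = UnorderedMap<uint32_t, double, Kokkos::DefaultExecutionSpace>;
  //   static_assert(map_t::is_insertable_map, "non-const key and value");
  //   static_assert(map_t::const_map_type::is_const_map, "const key and value");
  //   static_assert(map_t::is_set == false, "value_type is not void");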
  using insert_result = UnorderedMapInsertResult;

  using HostMirror =
      UnorderedMap<Key, Value, host_mirror_space, Hasher, EqualTo>;

  using histogram_type = Impl::UnorderedMapHistogram<const_map_type>;

  enum : size_type { invalid_index = ~static_cast<size_type>(0) };

  // When used as a set, a dummy int stands in for the value type.
  using impl_value_type = std::conditional_t<is_set, int, declared_value_type>;
  using key_type_view = std::conditional_t<
      is_insertable_map, View<key_type *, device_type>,
      View<const key_type *, device_type, MemoryTraits<RandomAccess>>>;
  using value_type_view = std::conditional_t<
      is_insertable_map || is_modifiable_map,
      View<impl_value_type *, device_type>,
      View<const impl_value_type *, device_type, MemoryTraits<RandomAccess>>>;
  using size_type_view = std::conditional_t<
      is_insertable_map, View<size_type *, device_type>,
      View<const size_type *, device_type, MemoryTraits<RandomAccess>>>;
  using bitset_type = std::conditional_t<is_insertable_map, Bitset<Device>,
                                         ConstBitset<Device>>;
  // Indices of the per-map flags packed into m_scalars.
  enum { modified_idx = 0, erasable_idx = 1, failed_insert_idx = 2 };
  enum { num_scalars = 3 };
  using scalars_view = View<int[num_scalars], LayoutLeft, device_type>;
  UnorderedMap(size_type capacity_hint = 0, hasher_type hasher = hasher_type(),
               equal_to_type equal_to = equal_to_type())
      : m_bounded_insert(true),
        m_hasher(hasher),
        m_equal_to(equal_to),
        m_size(0),
        m_available_indexes(calculate_capacity(capacity_hint)),
        m_hash_lists(view_alloc(WithoutInitializing, "UnorderedMap hash list"),
                     Impl::find_hash_size(capacity())),
        m_next_index(view_alloc(WithoutInitializing, "UnorderedMap next index"),
                     capacity() + 1),
        m_keys("UnorderedMap keys", capacity()),
        m_values("UnorderedMap values", (is_set ? 0 : capacity())),
        m_scalars("UnorderedMap scalars") {
    if (!is_insertable_map) {
      Kokkos::Impl::throw_runtime_exception(
          "Cannot construct a non-insertable (i.e. const key_type) "
          "unordered_map");
    }

    Kokkos::deep_copy(m_hash_lists, invalid_index);
    Kokkos::deep_copy(m_next_index, invalid_index);
  }
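  // Usage sketch (illustrative): the map cannot grow during a parallel
  // phase, so size it up front via the capacity hint.
  //
  //   UnorderedMap<uint32_t, double, Kokkos::DefaultExecutionSpace> map(10000);
  //   // map.capacity() >= 10000, rounded up by calculate_capacity()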
  void reset_failed_insert_flag() { reset_flag(failed_insert_idx); }
  histogram_type get_histogram() { return histogram_type(*this); }
  void clear() {
    m_bounded_insert = true;

    if (capacity() == 0) return;

    m_available_indexes.clear();

    Kokkos::deep_copy(m_hash_lists, invalid_index);
    Kokkos::deep_copy(m_next_index, invalid_index);
    {
      const key_type tmp = key_type();
      Kokkos::deep_copy(m_keys, tmp);
    }
    Kokkos::deep_copy(m_scalars, 0);
    m_size = 0;
  }
  KOKKOS_INLINE_FUNCTION
  constexpr bool is_allocated() const {
    return (m_keys.is_allocated() && (is_set || m_values.is_allocated()) &&
            m_scalars.is_allocated());
  }
  bool rehash(size_type requested_capacity, bool bounded_insert) {
    if (!is_insertable_map) return false;

    const size_type new_capacity = calculate_capacity(requested_capacity);
    insertable_map_type tmp(new_capacity, m_hasher, m_equal_to);

    if (size()) {
      tmp.m_bounded_insert = false;
      Impl::UnorderedMapRehash<insertable_map_type> f(tmp, *this);
      f.apply();
    }
    tmp.m_bounded_insert = bounded_insert;

    *this = tmp;
    return true;
  }
  size_type size() const {
    if (capacity() == 0u) return 0u;
    if (modified()) {
      m_size = m_available_indexes.count();
      reset_flag(modified_idx);
    }
    return m_size;
  }
  bool erasable() const {
    return is_insertable_map ? get_flag(erasable_idx) : false;
  }
  bool begin_erase() {
    bool result = !erasable();
    if (is_insertable_map && result) {
      execution_space().fence(
          "Kokkos::UnorderedMap::begin_erase: fence before setting erasable "
          "flag");
      set_flag(erasable_idx);
    }
    return result;
  }
  bool end_erase() {
    bool result = erasable();
    if (is_insertable_map && result) {
      execution_space().fence(
          "Kokkos::UnorderedMap::end_erase: fence before erasing");
      Impl::UnorderedMapErase<declared_map_type> f(*this);
      f.apply();
      execution_space().fence(
          "Kokkos::UnorderedMap::end_erase: fence after erasing");
      reset_flag(erasable_idx);
    }
    return result;
  }
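  // Illustrative host-side protocol (not from the original source): erasure
  // is deferred, so device-side erase() calls must be bracketed by
  // begin_erase()/end_erase(); `map` and `keys` are assumed names.
  //
  //   map.begin_erase();  // host: mark the map erasable
  //   Kokkos::parallel_for(n, KOKKOS_LAMBDA(int i) { map.erase(keys(i)); });
  //   map.end_erase();    // host: actually remove the marked entries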
  KOKKOS_FORCEINLINE_FUNCTION
  size_type capacity() const { return m_available_indexes.size(); }
  KOKKOS_INLINE_FUNCTION
  size_type hash_capacity() const { return m_hash_lists.extent(0); }
  KOKKOS_INLINE_FUNCTION
  insert_result insert(key_type const &k,
                       impl_value_type const &v = impl_value_type()) const {
    insert_result result;
    if (!is_insertable_map || capacity() == 0u ||
        m_scalars((int)erasable_idx)) {
      return result;
    }

    if (!m_scalars((int)modified_idx)) {
      m_scalars((int)modified_idx) = true;
    }
    // ... (elided: hash-list lookup sets curr_ptr, index_hint, and new_index;
    // bounded_find_attempts caps the probes counted in find_attempts)
    const size_type max_attempts =
        (m_bounded_insert &&
         (bounded_find_attempts < m_available_indexes.max_hint()))
            ? bounded_find_attempts
            : m_available_indexes.max_hint();

    bool not_done = true;
    while (not_done) {
      // Walk the hash list for this key. The list is append-only during the
      // insert phase, so loads must observe other threads' updates.
#ifdef KOKKOS_ENABLE_SYCL
      size_type curr = Kokkos::atomic_load(curr_ptr);
#else
      size_type curr = volatile_load(curr_ptr);
#endif

      KOKKOS_NONTEMPORAL_PREFETCH_LOAD(
          &m_keys[curr != invalid_index ? curr : 0]);
      while (curr != invalid_index &&
             !m_equal_to(volatile_load(&m_keys[curr]), k)) {
        result.increment_list_position();
        index_hint = curr;
        curr_ptr = &m_next_index[curr];
#ifdef KOKKOS_ENABLE_SYCL
        curr = Kokkos::atomic_load(curr_ptr);
#else
        curr = volatile_load(curr_ptr);
#endif
        KOKKOS_NONTEMPORAL_PREFETCH_LOAD(
            &m_keys[curr != invalid_index ? curr : 0]);
      }
      // The key is already present: return its index.
      if (curr != invalid_index) {
        const bool free_existing = new_index != invalid_index;
        if (free_existing) {
          // Release the unused entry claimed on an earlier pass.
          if (!m_available_indexes.reset(new_index)) {
            KOKKOS_IMPL_DO_NOT_USE_PRINTF("Unable to free existing\n");
          }
        }
        result.set_existing(curr, free_existing);
        not_done = false;
      }
      // Key not present: claim an unused entry and publish key and value
      // before the entry is linked into the list.
      else if (m_available_indexes.set(index_hint)) {
        new_index = index_hint;
        KOKKOS_NONTEMPORAL_PREFETCH_STORE(&m_keys[new_index]);
#ifdef KOKKOS_ENABLE_SYCL
        Kokkos::atomic_store(&m_keys[new_index], k);
#else
        m_keys[new_index] = k;
#endif

        if (!is_set) {
          KOKKOS_NONTEMPORAL_PREFETCH_STORE(&m_values[new_index]);
#ifdef KOKKOS_ENABLE_SYCL
          Kokkos::atomic_store(&m_values[new_index], v);
#else
          m_values[new_index] = v;
#endif
        }

#ifndef KOKKOS_ENABLE_SYCL
        // Do not proceed until the key and value are visible in global memory.
        memory_fence();
#endif
      }
      // ... (elided: free-slot search via the bitset and the atomic
      // compare-exchange that appends new_index to the hash list)
    }

    return result;
  }
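  // Usage sketch (illustrative): concurrent insertion from a kernel; `map`,
  // `keys`, and `values` are assumed device-resident names.
  //
  //   Kokkos::parallel_for(n, KOKKOS_LAMBDA(int i) {
  //     auto r = map.insert(keys(i), values(i));
  //     (void)r.success();  // r.existing()/r.failed() are also queryable
  //   });
  //   if (map.failed_insert()) { /* host: rehash() larger and retry */ }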
  KOKKOS_INLINE_FUNCTION
  bool erase(key_type const &k) const {
    bool result = false;

    if (is_insertable_map && 0u < capacity() && m_scalars((int)erasable_idx)) {
      if (!m_scalars((int)modified_idx)) {
        m_scalars((int)modified_idx) = true;
      }

      size_type index = find(k);
      if (valid_at(index)) {
        m_available_indexes.reset(index);
        result = true;
      }
    }

    return result;
  }
  KOKKOS_INLINE_FUNCTION
  size_type find(const key_type &k) const {
    size_type curr = 0u < capacity()
                         ? m_hash_lists(m_hasher(k) % m_hash_lists.extent(0))
                         : invalid_index;

    KOKKOS_NONTEMPORAL_PREFETCH_LOAD(&m_keys[curr != invalid_index ? curr : 0]);
    while (curr != invalid_index && !m_equal_to(m_keys[curr], k)) {
      curr = m_next_index[curr];
      KOKKOS_NONTEMPORAL_PREFETCH_LOAD(
          &m_keys[curr != invalid_index ? curr : 0]);
    }

    return curr;
  }
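  // Usage sketch (illustrative): device-side lookup; `map`, `keys`, and
  // `out` are assumed names.
  //
  //   Kokkos::parallel_for(n, KOKKOS_LAMBDA(int i) {
  //     const uint32_t idx = map.find(keys(i));
  //     if (map.valid_at(idx)) out(i) = map.value_at(idx);
  //   });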
  KOKKOS_INLINE_FUNCTION
  bool exists(const key_type &k) const { return valid_at(find(k)); }
  template <typename Dummy = value_type>
  KOKKOS_FORCEINLINE_FUNCTION std::enable_if_t<
      !std::is_void<Dummy>::value,  // !is_set
      std::conditional_t<has_const_value, impl_value_type, impl_value_type &>>
  value_at(size_type i) const {
    KOKKOS_EXPECTS(i < capacity());
    return m_values[i];
  }
  KOKKOS_FORCEINLINE_FUNCTION
  key_type key_at(size_type i) const {
    KOKKOS_EXPECTS(i < capacity());
    return m_keys[i];
  }
  KOKKOS_FORCEINLINE_FUNCTION
  bool valid_at(size_type i) const { return m_available_indexes.test(i); }
  template <typename SKey, typename SValue>
  UnorderedMap(
      UnorderedMap<SKey, SValue, Device, Hasher, EqualTo> const &src,
      std::enable_if_t<
          Impl::UnorderedMapCanAssign<declared_key_type, declared_value_type,
                                      SKey, SValue>::value,
          int> = 0)
      : m_bounded_insert(src.m_bounded_insert),
        m_hasher(src.m_hasher),
        m_equal_to(src.m_equal_to),
        m_size(src.m_size),
        m_available_indexes(src.m_available_indexes),
        m_hash_lists(src.m_hash_lists),
        m_next_index(src.m_next_index),
        m_keys(src.m_keys),
        m_values(src.m_values),
        m_scalars(src.m_scalars) {}
  template <typename SKey, typename SValue>
  std::enable_if_t<
      Impl::UnorderedMapCanAssign<declared_key_type, declared_value_type, SKey,
                                  SValue>::value,
      declared_map_type &>
  operator=(UnorderedMap<SKey, SValue, Device, Hasher, EqualTo> const &src) {
    m_bounded_insert = src.m_bounded_insert;
    m_hasher = src.m_hasher;
    m_equal_to = src.m_equal_to;
    m_size = src.m_size;
    m_available_indexes = src.m_available_indexes;
    m_hash_lists = src.m_hash_lists;
    m_next_index = src.m_next_index;
    m_keys = src.m_keys;
    m_values = src.m_values;
    m_scalars = src.m_scalars;
    return *this;
  }
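  // Note (illustrative): assignment is a shallow copy; afterwards both maps
  // alias the same Views and bitset rather than owning separate storage.
  //
  //   UnorderedMap<uint32_t, double, Kokkos::DefaultExecutionSpace> a(1000), b;
  //   b = a;  // b shares a's keys/values/index storage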
  template <typename SKey, typename SValue, typename SDevice>
  std::enable_if_t<std::is_same<std::remove_const_t<SKey>, key_type>::value &&
                   std::is_same<std::remove_const_t<SValue>, value_type>::value>
  create_copy_view(
      UnorderedMap<SKey, SValue, SDevice, Hasher, EqualTo> const &src) {
    if (m_hash_lists.data() != src.m_hash_lists.data()) {
      insertable_map_type tmp;

      tmp.m_bounded_insert = src.m_bounded_insert;
      tmp.m_hasher = src.m_hasher;
      tmp.m_equal_to = src.m_equal_to;
      tmp.m_size = src.size();
      tmp.m_available_indexes = bitset_type(src.capacity());
      tmp.m_hash_lists = size_type_view(
          view_alloc(WithoutInitializing, "UnorderedMap hash list"),
          src.m_hash_lists.extent(0));
      tmp.m_next_index = size_type_view(
          view_alloc(WithoutInitializing, "UnorderedMap next index"),
          src.m_next_index.extent(0));
      tmp.m_keys =
          key_type_view(view_alloc(WithoutInitializing, "UnorderedMap keys"),
                        src.m_keys.extent(0));
      tmp.m_values = value_type_view(
          view_alloc(WithoutInitializing, "UnorderedMap values"),
          src.m_values.extent(0));
      tmp.m_scalars = scalars_view("UnorderedMap scalars");

      Kokkos::deep_copy(tmp.m_available_indexes, src.m_available_indexes);

      using raw_deep_copy =
          Kokkos::Impl::DeepCopy<typename device_type::memory_space,
                                 typename SDevice::memory_space>;

      raw_deep_copy(tmp.m_hash_lists.data(), src.m_hash_lists.data(),
                    sizeof(size_type) * src.m_hash_lists.extent(0));
      raw_deep_copy(tmp.m_next_index.data(), src.m_next_index.data(),
                    sizeof(size_type) * src.m_next_index.extent(0));
      raw_deep_copy(tmp.m_keys.data(), src.m_keys.data(),
                    sizeof(key_type) * src.m_keys.extent(0));
      if (!is_set) {
        raw_deep_copy(tmp.m_values.data(), src.m_values.data(),
                      sizeof(impl_value_type) * src.m_values.extent(0));
      }
      raw_deep_copy(tmp.m_scalars.data(), src.m_scalars.data(),
                    sizeof(int) * num_scalars);

      Kokkos::fence(
          "Kokkos::UnorderedMap::create_copy_view: fence after copy to tmp");

      *this = tmp;
    }
  }
  bool modified() const { return get_flag(modified_idx); }
  void set_flag(int flag) const {
    using raw_deep_copy =
        Kokkos::Impl::DeepCopy<typename device_type::memory_space,
                               Kokkos::HostSpace>;
    const int true_ = true;
    raw_deep_copy(m_scalars.data() + flag, &true_, sizeof(int));
    Kokkos::fence(
        "Kokkos::UnorderedMap::set_flag: fence after copying flag from "
        "HostSpace");
  }
  void reset_flag(int flag) const {
    using raw_deep_copy =
        Kokkos::Impl::DeepCopy<typename device_type::memory_space,
                               Kokkos::HostSpace>;
    const int false_ = false;
    raw_deep_copy(m_scalars.data() + flag, &false_, sizeof(int));
    Kokkos::fence(
        "Kokkos::UnorderedMap::reset_flag: fence after copying flag from "
        "HostSpace");
  }
  bool get_flag(int flag) const {
    using raw_deep_copy =
        Kokkos::Impl::DeepCopy<Kokkos::HostSpace,
                               typename device_type::memory_space>;
    int result = false;
    raw_deep_copy(&result, m_scalars.data() + flag, sizeof(int));
    Kokkos::fence(
        "Kokkos::UnorderedMap::get_flag: fence after copy to return value in "
        "HostSpace");
    return result;
  }
  static uint32_t calculate_capacity(uint32_t capacity_hint) {
    // Increase the hint by roughly 16% and round up to a multiple of 128.
    return capacity_hint
               ? ((static_cast<uint32_t>(7ull * capacity_hint / 6u) + 127u) /
                  128u) *
                     128u
               : 128u;
  }
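  // Worked example (illustrative): a hint of 1000 becomes
  // 7 * 1000 / 6 = 1166, then (1166 + 127) / 128 * 128 = 1280;
  // a hint of 0 yields the 128-entry minimum.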
  bool m_bounded_insert;
  hasher_type m_hasher;
  equal_to_type m_equal_to;
  mutable size_type m_size;
  bitset_type m_available_indexes;
  size_type_view m_hash_lists;
  size_type_view m_next_index;
  key_type_view m_keys;
  value_type_view m_values;
  scalars_view m_scalars;
  template <typename KKey, typename VValue, typename DDevice, typename HHash,
            typename EEqualTo>
  friend class UnorderedMap;
  template <typename UMap>
  friend struct Impl::UnorderedMapErase;

  template <typename UMap>
  friend struct Impl::UnorderedMapHistogram;

  template <typename UMap>
  friend struct Impl::UnorderedMapPrint;