
AK: Replace C-style casts

Authored by Sam Atkins on 2023-03-07 14:28:21 +00:00, committed by Andreas Kling
parent 7d6908d9a5
commit 067d0689c5
14 changed files with 49 additions and 49 deletions

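The changes below are mechanical: each C-style cast is replaced by the named C++ cast that states its intent. As a rough rule of thumb (illustrated with a standalone sketch, not code from this commit), static_cast covers value conversions and void*-to-typed-pointer conversions, while reinterpret_cast covers pointer/integer round-trips and conversions between unrelated pointer types.

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    int main()
    {
        void* raw = std::malloc(16);
        auto* bytes = static_cast<unsigned char*>(raw);          // void* -> typed pointer: static_cast suffices
        auto as_int = static_cast<int>(3.9);                     // value conversion: static_cast
        auto address = reinterpret_cast<std::uintptr_t>(bytes);  // pointer <-> integer: reinterpret_cast
        auto* words = reinterpret_cast<std::size_t*>(bytes);     // unrelated pointer types: reinterpret_cast
        (void)as_int;
        (void)address;
        (void)words;
        std::free(raw);
        return 0;
    }
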
View File

@@ -30,7 +30,7 @@ public:
         if (!data)
             return Error::from_errno(ENOMEM);
-        auto bitmap = Bitmap { (u8*)data, size, true };
+        auto bitmap = Bitmap { static_cast<u8*>(data), size, true };
         bitmap.fill(default_value);
         return bitmap;
     }

View File

@@ -64,19 +64,19 @@ public:
                 count += popcount(byte);
             }
             if (++first < last) {
-                size_t const* ptr_large = (size_t const*)(((FlatPtr)first + sizeof(size_t) - 1) & ~(sizeof(size_t) - 1));
-                if ((u8 const*)ptr_large > last)
-                    ptr_large = (size_t const*)last;
-                while (first < (u8 const*)ptr_large) {
+                size_t const* ptr_large = reinterpret_cast<size_t const*>((reinterpret_cast<FlatPtr>(first) + sizeof(size_t) - 1) & ~(sizeof(size_t) - 1));
+                if (reinterpret_cast<u8 const*>(ptr_large) > last)
+                    ptr_large = reinterpret_cast<size_t const*>(last);
+                while (first < reinterpret_cast<u8 const*>(ptr_large)) {
                     count += popcount(*first);
                     first++;
                 }
-                size_t const* last_large = (size_t const*)((FlatPtr)last & ~(sizeof(size_t) - 1));
+                size_t const* last_large = reinterpret_cast<size_t const*>(reinterpret_cast<FlatPtr>(last) & ~(sizeof(size_t) - 1));
                 while (ptr_large < last_large) {
                     count += popcount(*ptr_large);
                     ptr_large++;
                 }
-                for (first = (u8 const*)ptr_large; first < last; first++)
+                for (first = reinterpret_cast<u8 const*>(ptr_large); first < last; first++)
                     count += popcount(*first);
             }
         }
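
The expression rewritten in this hunk, ((FlatPtr)first + sizeof(size_t) - 1) & ~(sizeof(size_t) - 1), rounds the byte pointer up to the next size_t boundary so the middle of the range can be counted a whole word at a time. A standalone sketch of that align-up arithmetic (align_up is an illustrative name, not an AK function):

    #include <cassert>
    #include <cstdint>

    // Round an address up to the next multiple of a power-of-two alignment.
    constexpr std::uintptr_t align_up(std::uintptr_t addr, std::uintptr_t alignment)
    {
        return (addr + alignment - 1) & ~(alignment - 1);
    }

    static_assert(align_up(0x1001, 8) == 0x1008);
    static_assert(align_up(0x1008, 8) == 0x1008); // already aligned: unchanged

    int main()
    {
        assert(align_up(13, 8) == 16);
        return 0;
    }
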
@@ -100,12 +100,12 @@ public:
             // We will use hint as what it is: a hint. Because we try to
             // scan over entire 32 bit words, we may start searching before
             // the hint!
-            size_t const* ptr_large = (size_t const*)((FlatPtr)&m_data[hint / 8] & ~(sizeof(size_t) - 1));
-            if ((u8 const*)ptr_large < &m_data[0]) {
+            size_t const* ptr_large = reinterpret_cast<size_t const*>(reinterpret_cast<FlatPtr>(&m_data[hint / 8]) & ~(sizeof(size_t) - 1));
+            if (reinterpret_cast<u8 const*>(ptr_large) < &m_data[0]) {
                 ptr_large++;
                 // m_data isn't aligned, check first bytes
-                size_t start_ptr_large = (u8 const*)ptr_large - &m_data[0];
+                size_t start_ptr_large = reinterpret_cast<u8 const*>(ptr_large) - &m_data[0];
                 size_t i = 0;
                 u8 byte = VALUE ? 0x00 : 0xff;
                 while (i < start_ptr_large && m_data[i] == byte)
@@ -120,14 +120,14 @@ public:
             }
             size_t val_large = VALUE ? 0x0 : NumericLimits<size_t>::max();
-            size_t const* end_large = (size_t const*)((FlatPtr)end & ~(sizeof(size_t) - 1));
+            size_t const* end_large = reinterpret_cast<size_t const*>(reinterpret_cast<FlatPtr>(end) & ~(sizeof(size_t) - 1));
             while (ptr_large < end_large && *ptr_large == val_large)
                 ptr_large++;
             if (ptr_large == end_large) {
                 // We didn't find anything, check the remaining few bytes (if any)
                 u8 byte = VALUE ? 0x00 : 0xff;
-                size_t i = (u8 const*)ptr_large - &m_data[0];
+                size_t i = reinterpret_cast<u8 const*>(ptr_large) - &m_data[0];
                 size_t byte_count = m_size / 8;
                 VERIFY(i <= byte_count);
                 while (i < byte_count && m_data[i] == byte)
@@ -137,7 +137,7 @@ public:
                     return {}; // We already checked from the beginning
                 // Try scanning before the hint
-                end = (u8 const*)((FlatPtr)&m_data[hint / 8] & ~(sizeof(size_t) - 1));
+                end = reinterpret_cast<u8 const*>(reinterpret_cast<FlatPtr>(&m_data[hint / 8]) & ~(sizeof(size_t) - 1));
                 hint = 0;
                 continue;
             }
@@ -154,7 +154,7 @@ public:
             if constexpr (!VALUE)
                 val_large = ~val_large;
             VERIFY(val_large != 0);
-            return ((u8 const*)ptr_large - &m_data[0]) * 8 + bit_scan_forward(val_large) - 1;
+            return (reinterpret_cast<u8 const*>(ptr_large) - &m_data[0]) * 8 + bit_scan_forward(val_large) - 1;
         }
     }
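
This scan compares whole size_t words against the all-zeros/all-ones pattern and only drops to bit granularity once a word differs; the "- 1" in the return suggests bit_scan_forward gives a 1-based position, like the POSIX ffs family. A rough standalone equivalent of the idea using the GCC/Clang builtin __builtin_ffsll (a sketch, not AK's implementation):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Find the index of the first set bit in an array of 64-bit words,
    // skipping all-zero words a whole word at a time. Returns -1 if none is set.
    long first_set_bit(std::uint64_t const* words, std::size_t count)
    {
        for (std::size_t i = 0; i < count; ++i) {
            if (words[i] == 0)
                continue; // nothing set in these 64 bits, move on
            // __builtin_ffsll returns the 1-based position of the lowest set bit.
            return static_cast<long>(i) * 64 + __builtin_ffsll(static_cast<long long>(words[i])) - 1;
        }
        return -1;
    }

    int main()
    {
        std::uint64_t words[] = { 0, 0, 0x10 }; // bit 4 of word 2 => global bit index 132
        std::printf("%ld\n", first_set_bit(words, 3)); // prints 132
        return 0;
    }
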
@@ -207,7 +207,7 @@ public:
         size_t bit_size = 8 * sizeof(size_t);
-        size_t* bitmap = (size_t*)m_data;
+        size_t* bitmap = reinterpret_cast<size_t*>(m_data);
         // Calculating the start offset.
         size_t start_bucket_index = from / bit_size;

View File

@@ -65,7 +65,7 @@ public:
         for_each_chunk([&](auto chunk) {
             if (!cache_filled) {
                 cache_filled = true;
-                ((ChunkHeader*)chunk)->next_chunk = 0;
+                (reinterpret_cast<ChunkHeader*>(chunk))->next_chunk = 0;
                 chunk = s_unused_allocation_cache.exchange(chunk);
                 if (!chunk)
                     return;
@@ -86,7 +86,7 @@ protected:
     {
         auto head_chunk = m_head_chunk;
         while (head_chunk) {
-            auto& chunk_header = *(ChunkHeader const*)head_chunk;
+            auto& chunk_header = *reinterpret_cast<ChunkHeader const*>(head_chunk);
             VERIFY(chunk_header.magic == chunk_magic);
             if (head_chunk == m_current_chunk)
                 VERIFY(chunk_header.next_chunk == 0);
@@ -100,7 +100,7 @@ protected:
     {
         // dbgln("Allocated {} entries in previous chunk and have {} unusable bytes", m_allocations_in_previous_chunk, m_chunk_size - m_byte_offset_into_current_chunk);
         // m_allocations_in_previous_chunk = 0;
-        void* new_chunk = (void*)s_unused_allocation_cache.exchange(0);
+        void* new_chunk = reinterpret_cast<void*>(s_unused_allocation_cache.exchange(0));
         if (!new_chunk) {
             if constexpr (use_mmap) {
 #ifdef AK_OS_SERENITY
@@ -117,24 +117,24 @@ protected:
             }
         }
-        auto& new_header = *(ChunkHeader*)new_chunk;
+        auto& new_header = *reinterpret_cast<ChunkHeader*>(new_chunk);
         new_header.magic = chunk_magic;
         new_header.next_chunk = 0;
         m_byte_offset_into_current_chunk = sizeof(ChunkHeader);
         if (!m_head_chunk) {
             VERIFY(!m_current_chunk);
-            m_head_chunk = (FlatPtr)new_chunk;
-            m_current_chunk = (FlatPtr)new_chunk;
+            m_head_chunk = reinterpret_cast<FlatPtr>(new_chunk);
+            m_current_chunk = reinterpret_cast<FlatPtr>(new_chunk);
             return true;
         }
         VERIFY(m_current_chunk);
-        auto& old_header = *(ChunkHeader*)m_current_chunk;
+        auto& old_header = *reinterpret_cast<ChunkHeader*>(m_current_chunk);
         VERIFY(old_header.magic == chunk_magic);
         VERIFY(old_header.next_chunk == 0);
-        old_header.next_chunk = (FlatPtr)new_chunk;
-        m_current_chunk = (FlatPtr)new_chunk;
+        old_header.next_chunk = reinterpret_cast<FlatPtr>(new_chunk);
+        m_current_chunk = reinterpret_cast<FlatPtr>(new_chunk);
         return true;
     }

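The hunks above pass a retired chunk back and forth through s_unused_allocation_cache with an atomic exchange, so one chunk can be reused without locking. A simplified single-slot cache in the same spirit (std::atomic stands in for AK's Atomic; acquire_chunk and release_chunk are illustrative names):

    #include <atomic>
    #include <cstdlib>

    static std::atomic<void*> s_cached_chunk { nullptr };

    // Reuse the cached chunk if one is available, otherwise allocate a fresh one.
    void* acquire_chunk(std::size_t chunk_size)
    {
        if (void* cached = s_cached_chunk.exchange(nullptr))
            return cached;              // claimed the cached chunk atomically
        return std::malloc(chunk_size); // cache was empty
    }

    // Offer a chunk back; if the slot was already occupied, free the displaced one.
    void release_chunk(void* chunk)
    {
        if (void* displaced = s_cached_chunk.exchange(chunk))
            std::free(displaced);
    }

    int main()
    {
        void* chunk = acquire_chunk(4096);
        release_chunk(chunk);                        // parked for the next acquire_chunk()
        std::free(s_cached_chunk.exchange(nullptr)); // final cleanup
        return 0;
    }
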
View File

@@ -296,7 +296,7 @@ private:
         // This is most noticable in Lagom, where kmalloc_good_size is just a no-op.
         new_capacity = max(new_capacity, (capacity() * 3) / 2);
         new_capacity = kmalloc_good_size(new_capacity);
-        auto* new_buffer = (u8*)kmalloc(new_capacity);
+        auto* new_buffer = static_cast<u8*>(kmalloc(new_capacity));
         if (!new_buffer)
             return Error::from_errno(ENOMEM);

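The growth path above takes the larger of the requested capacity and 1.5x the current one, then lets kmalloc_good_size round up to a size the allocator can serve without slack (a no-op on Lagom, as the comment notes). A small sketch of that sizing rule, with a stand-in rounding function since kmalloc_good_size is platform-specific:

    #include <algorithm>
    #include <cstddef>

    // Stand-in for kmalloc_good_size: round up to the next multiple of 16.
    constexpr std::size_t good_size(std::size_t n) { return (n + 15) & ~static_cast<std::size_t>(15); }

    constexpr std::size_t next_capacity(std::size_t current, std::size_t requested)
    {
        std::size_t new_capacity = std::max(requested, current * 3 / 2); // grow by at least 1.5x
        return good_size(new_capacity);
    }

    static_assert(next_capacity(64, 70) == 96);   // the 1.5x growth wins and is already a "good" size
    static_assert(next_capacity(64, 200) == 208); // the requested size wins and gets rounded up
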
View File

@@ -276,7 +276,7 @@ public:
     {
         if (buffer.is_empty())
             return empty();
-        return DeprecatedString((char const*)buffer.data(), buffer.size(), should_chomp);
+        return DeprecatedString(reinterpret_cast<char const*>(buffer.data()), buffer.size(), should_chomp);
     }
     [[nodiscard]] static DeprecatedString vformatted(StringView fmtstr, TypeErasedFormatParams&);

View File

@@ -188,7 +188,7 @@ template<typename T>
 struct Traits<NonnullOwnPtr<T>> : public GenericTraits<NonnullOwnPtr<T>> {
     using PeekType = T*;
     using ConstPeekType = T const*;
-    static unsigned hash(NonnullOwnPtr<T> const& p) { return ptr_hash((FlatPtr)p.ptr()); }
+    static unsigned hash(NonnullOwnPtr<T> const& p) { return ptr_hash(p.ptr()); }
     static bool equals(NonnullOwnPtr<T> const& a, NonnullOwnPtr<T> const& b) { return a.ptr() == b.ptr(); }
 };

View File

@@ -134,8 +134,8 @@ inline void swap(T& a, U& b)
 {
     if (&a == &b)
         return;
-    U tmp = move((U&)a);
-    a = (T &&) move(b);
+    U tmp = move(static_cast<U&>(a));
+    a = static_cast<T&&>(move(b));
     b = move(tmp);
 }

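The swap above moves through a temporary, and the casts let it work even when T and U are different but mutually assignable types. A standalone single-type version showing the same move-through-a-temporary pattern:

    #include <cassert>
    #include <string>
    #include <utility>

    // Same move-through-a-temporary pattern as the AK helper, for a single type.
    template<typename T>
    void basic_swap(T& a, T& b)
    {
        if (&a == &b)
            return; // avoid a pointless (and potentially unsafe) self-move-assignment
        T tmp = std::move(a);
        a = std::move(b);
        b = std::move(tmp);
    }

    int main()
    {
        std::string x = "hello";
        std::string y = "world";
        basic_swap(x, y);
        assert(x == "world" && y == "hello");
        return 0;
    }
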
View File

@@ -14,7 +14,7 @@ constexpr u32 string_hash(char const* characters, size_t length, u32 seed = 0)
 {
     u32 hash = seed;
     for (size_t i = 0; i < length; ++i) {
-        hash += (u32)characters[i];
+        hash += static_cast<u32>(characters[i]);
         hash += (hash << 10);
         hash ^= (hash >> 6);
     }

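The loop shown here is the per-byte mixing step of a Jenkins-style one-at-a-time hash; because string_hash is constexpr, the hash of a literal can be computed at compile time. A standalone sketch of the same hash family (the three finalization rounds are conventional for it and fall outside the hunk shown):

    #include <cstddef>
    #include <cstdint>

    // One-at-a-time style hash mirroring the mixing loop in the hunk above.
    constexpr std::uint32_t oaat_hash(char const* characters, std::size_t length, std::uint32_t seed = 0)
    {
        std::uint32_t hash = seed;
        for (std::size_t i = 0; i < length; ++i) {
            hash += static_cast<std::uint32_t>(characters[i]);
            hash += hash << 10;
            hash ^= hash >> 6;
        }
        // Conventional finalization rounds for this hash family (not part of the hunk above).
        hash += hash << 3;
        hash ^= hash >> 11;
        hash += hash << 15;
        return hash;
    }

    static_assert(oaat_hash("", 0) == 0);                                // empty input with seed 0 stays 0
    static_assert(oaat_hash("serenity", 8) == oaat_hash("serenity", 8)); // deterministic, evaluated at compile time
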
View File

@@ -26,13 +26,13 @@ public:
         , m_length(length)
     {
         if (!is_constant_evaluated())
-            VERIFY(!Checked<uintptr_t>::addition_would_overflow((uintptr_t)characters, length));
+            VERIFY(!Checked<uintptr_t>::addition_would_overflow(reinterpret_cast<uintptr_t>(characters), length));
     }
     ALWAYS_INLINE StringView(unsigned char const* characters, size_t length)
-        : m_characters((char const*)characters)
+        : m_characters(reinterpret_cast<char const*>(characters))
         , m_length(length)
     {
-        VERIFY(!Checked<uintptr_t>::addition_would_overflow((uintptr_t)characters, length));
+        VERIFY(!Checked<uintptr_t>::addition_would_overflow(reinterpret_cast<uintptr_t>(characters), length));
     }
     ALWAYS_INLINE StringView(ReadonlyBytes bytes)
         : m_characters(reinterpret_cast<char const*>(bytes.data()))

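The VERIFY above rejects a characters/length pair whose sum would wrap around the address space, since a wrapped end pointer would compare less than the start. A minimal standalone version of that overflow test (not AK's Checked<T> implementation):

    #include <cassert>
    #include <cstdint>
    #include <limits>

    // True if a + b would wrap around the uintptr_t range.
    constexpr bool addition_would_overflow(std::uintptr_t a, std::uintptr_t b)
    {
        return b > std::numeric_limits<std::uintptr_t>::max() - a;
    }

    int main()
    {
        char const* text = "hello";
        assert(!addition_would_overflow(reinterpret_cast<std::uintptr_t>(text), 5));
        assert(addition_would_overflow(std::numeric_limits<std::uintptr_t>::max(), 1));
        return 0;
    }
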
View File

@@ -59,7 +59,7 @@ struct Traits<T> : public GenericTraits<T> {
 template<typename T>
 requires(IsPointer<T> && !Detail::IsPointerOfType<char, T>) struct Traits<T> : public GenericTraits<T> {
-    static unsigned hash(T p) { return ptr_hash((FlatPtr)p); }
+    static unsigned hash(T p) { return ptr_hash(p); }
     static constexpr bool is_trivial() { return true; }
 };

View File

@@ -97,9 +97,9 @@ static constexpr FlatPtr explode_byte(u8 b)
     return value << 56 | value << 48 | value << 40 | value << 32 | value << 24 | value << 16 | value << 8 | value;
 }
-static_assert(explode_byte(0xff) == (FlatPtr)0xffffffffffffffffull);
-static_assert(explode_byte(0x80) == (FlatPtr)0x8080808080808080ull);
-static_assert(explode_byte(0x7f) == (FlatPtr)0x7f7f7f7f7f7f7f7full);
+static_assert(explode_byte(0xff) == static_cast<FlatPtr>(0xffffffffffffffffull));
+static_assert(explode_byte(0x80) == static_cast<FlatPtr>(0x8080808080808080ull));
+static_assert(explode_byte(0x7f) == static_cast<FlatPtr>(0x7f7f7f7f7f7f7f7full));
 static_assert(explode_byte(0) == 0);
 constexpr size_t align_up_to(const size_t value, const size_t alignment)

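explode_byte replicates one byte into every byte of a machine word, and constants like repeated 0x01 and 0x80 are the usual ingredients of word-at-a-time byte scanning. As one classic illustration of how such constants get used (a generic bit-twiddling sketch, not necessarily how AK applies them), a zero byte anywhere in a word can be detected without a per-byte loop:

    #include <cstdint>

    constexpr std::uint64_t explode_byte64(std::uint8_t b)
    {
        std::uint64_t value = b;
        return value << 56 | value << 48 | value << 40 | value << 32 | value << 24 | value << 16 | value << 8 | value;
    }

    // Classic "has a zero byte" trick: nonzero iff some byte of v is 0x00.
    constexpr bool word_has_zero_byte(std::uint64_t v)
    {
        return ((v - explode_byte64(0x01)) & ~v & explode_byte64(0x80)) != 0;
    }

    static_assert(word_has_zero_byte(0x1122330044556677ull));
    static_assert(!word_has_zero_byte(0x1111111111111111ull));
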
View File

@@ -16,22 +16,22 @@ template<typename Callback>
 [[nodiscard]] constexpr int code_point_to_utf8(u32 code_point, Callback callback)
 {
     if (code_point <= 0x7f) {
-        callback((char)code_point);
+        callback(static_cast<char>(code_point));
         return 1;
     } else if (code_point <= 0x07ff) {
-        callback((char)(((code_point >> 6) & 0x1f) | 0xc0));
-        callback((char)(((code_point >> 0) & 0x3f) | 0x80));
+        callback(static_cast<char>(((code_point >> 6) & 0x1f) | 0xc0));
+        callback(static_cast<char>(((code_point >> 0) & 0x3f) | 0x80));
         return 2;
     } else if (code_point <= 0xffff) {
-        callback((char)(((code_point >> 12) & 0x0f) | 0xe0));
-        callback((char)(((code_point >> 6) & 0x3f) | 0x80));
-        callback((char)(((code_point >> 0) & 0x3f) | 0x80));
+        callback(static_cast<char>(((code_point >> 12) & 0x0f) | 0xe0));
+        callback(static_cast<char>(((code_point >> 6) & 0x3f) | 0x80));
+        callback(static_cast<char>(((code_point >> 0) & 0x3f) | 0x80));
         return 3;
     } else if (code_point <= 0x10ffff) {
-        callback((char)(((code_point >> 18) & 0x07) | 0xf0));
-        callback((char)(((code_point >> 12) & 0x3f) | 0x80));
-        callback((char)(((code_point >> 6) & 0x3f) | 0x80));
-        callback((char)(((code_point >> 0) & 0x3f) | 0x80));
+        callback(static_cast<char>(((code_point >> 18) & 0x07) | 0xf0));
+        callback(static_cast<char>(((code_point >> 12) & 0x3f) | 0x80));
+        callback(static_cast<char>(((code_point >> 6) & 0x3f) | 0x80));
+        callback(static_cast<char>(((code_point >> 0) & 0x3f) | 0x80));
         return 4;
     }
     return -1;

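The encoder above picks the 1-4 byte form from the code point's magnitude and emits 6-bit groups as continuation bytes through the callback, returning the sequence length (or -1 beyond U+10FFFF). A quick worked check of the 3-byte branch for U+20AC (the euro sign), whose UTF-8 encoding is E2 82 AC:

    #include <cassert>
    #include <cstdint>

    int main()
    {
        // The 3-byte branch above, worked through for U+20AC (EURO SIGN).
        std::uint32_t code_point = 0x20AC;
        char byte0 = static_cast<char>(((code_point >> 12) & 0x0f) | 0xe0); // 0xE2
        char byte1 = static_cast<char>(((code_point >> 6) & 0x3f) | 0x80);  // 0x82
        char byte2 = static_cast<char>(((code_point >> 0) & 0x3f) | 0x80);  // 0xAC
        assert(static_cast<unsigned char>(byte0) == 0xE2);
        assert(static_cast<unsigned char>(byte1) == 0x82);
        assert(static_cast<unsigned char>(byte2) == 0xAC);
        return 0;
    }
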
View File

@@ -68,7 +68,7 @@ inline Userspace<T> static_ptr_cast(Userspace<U> const& ptr)
 #else
     auto casted_ptr = static_cast<T>(ptr.ptr());
 #endif
-    return Userspace<T>((FlatPtr)casted_ptr);
+    return Userspace<T>(reinterpret_cast<FlatPtr>(casted_ptr));
 }
 }

View File

@@ -158,7 +158,7 @@ public:
 private:
     friend class Utf8CodePointIterator;
-    u8 const* begin_ptr() const { return (u8 const*)m_string.characters_without_null_termination(); }
+    u8 const* begin_ptr() const { return reinterpret_cast<u8 const*>(m_string.characters_without_null_termination()); }
     u8 const* end_ptr() const { return begin_ptr() + m_string.length(); }
     size_t calculate_length() const;