Skip to content

Commit

Permalink
Deprecate SharedMemoryRegion memory(), replace it with data() and spans
Browse files Browse the repository at this point in the history
The accessor returns an unbounded void* which is dangerous to use.
Callers should instead use GetMemoryAsSpan() or GetMemoryAs(). To ease
this we introduce `uint8_t* data()` which allows SharedMemoryMappings
to convert implicitly or explicitly to base::span<uint8_t>.

We enable the unsafe-buffer-usage warning in the shared memory unit
tests, which mostly required changing tests to use span APIs instead of
working with the unbounded pointer accessor. The span APIs return the
same pointer but with a length attached.

Other code is changed to span(mapping), GetMemoryAsSpan() or
GetMemoryAs(). These require the types being pulled out of shared
memory are trivially copyable. However a couple classes used in this
way in devices were _not_ trivially copyable. This can cause UB.
These classes wanted to be trivially copyable but could not be
because of the out-of-line ctor requirements of the chromium clang
plugin. So we template these and use a type alias to avoid rewriting
1000 LOC with useless template arguments. This works around the clang
plugin for now.

[email protected]

Bug: 40284755, 355003174
Change-Id: Id7fd4c9bcf0b86b8109134c18740453bb24aa5de
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/5734584
Reviewed-by: James Cook <[email protected]>
Reviewed-by: Łukasz Anforowicz <[email protected]>
Reviewed-by: Khushal Sagar <[email protected]>
Reviewed-by: Daniel Cheng <[email protected]>
Reviewed-by: Alex Moshchuk <[email protected]>
Reviewed-by: Avi Drissman <[email protected]>
Owners-Override: Daniel Cheng <[email protected]>
Commit-Queue: danakj <[email protected]>
Reviewed-by: Reilly Grant <[email protected]>
Code-Coverage: [email protected] <[email protected]>
Reviewed-by: Jonathan Ross <[email protected]>
Cr-Commit-Position: refs/heads/main@{#1333755}
  • Loading branch information
danakj authored and Chromium LUCI CQ committed Jul 26, 2024
1 parent c93a7d8 commit 5a9a4a9
Show file tree
Hide file tree
Showing 64 changed files with 423 additions and 338 deletions.
4 changes: 2 additions & 2 deletions ash/webui/camera_app_ui/camera_app_helper_impl.cc
Original file line number Diff line number Diff line change
Expand Up @@ -411,7 +411,7 @@ void CameraAppHelperImpl::ScanDocumentCorners(
std::move(callback).Run({});
return;
}
memcpy(memory.mapping.memory(), jpeg_data.data(), jpeg_data.size());
base::span(memory.mapping).copy_from(jpeg_data);

// Since |this| owns |document_scanner_service|, and the callback will be
// posted to other sequence with weak pointer of |document_scanner_service|.
Expand Down Expand Up @@ -441,7 +441,7 @@ void CameraAppHelperImpl::ConvertToDocument(
std::move(callback).Run({});
return;
}
memcpy(memory.mapping.memory(), jpeg_data.data(), jpeg_data.size());
base::span(memory.mapping).copy_from(jpeg_data);

// Since |this| owns |document_scanner_service|, and the callback will be
// posted to other sequence with weak pointer of |document_scanner_service|.
Expand Down
16 changes: 10 additions & 6 deletions base/android/library_loader/library_prefetcher_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -29,18 +29,22 @@ TEST(NativeLibraryPrefetcherTest, DISABLED_TestPercentageOfResidentCode) {
ASSERT_TRUE(shared_region.IsValid());
auto mapping = shared_region.Map();
ASSERT_TRUE(mapping.IsValid());
void* address = mapping.memory();
size_t start = reinterpret_cast<size_t>(address);
size_t end = start + length;
// SAFETY: There's no public way to get a span of the full mapped memory size.
// The `mapped_size()` is larger than `size()` but is the actual size of the
// shared memory backing.
span<uint8_t> memory =
UNSAFE_BUFFERS(base::span(mapping.data(), mapping.mapped_size()));
auto start = reinterpret_cast<uintptr_t>(&*memory.begin());
auto end = reinterpret_cast<uintptr_t>(&*memory.end());

// Remove everything.
ASSERT_EQ(0, madvise(address, length, MADV_DONTNEED));
ASSERT_EQ(0, madvise(memory.data(), memory.size(), MADV_DONTNEED));
EXPECT_EQ(0, NativeLibraryPrefetcher::PercentageOfResidentCode(start, end));

// Get everything back.
ASSERT_EQ(0, mlock(address, length));
ASSERT_EQ(0, mlock(memory.data(), memory.size()));
EXPECT_EQ(100, NativeLibraryPrefetcher::PercentageOfResidentCode(start, end));
munlock(address, length);
munlock(memory.data(), memory.size());
}
#endif // !defined(ADDRESS_SANITIZER)

Expand Down
2 changes: 1 addition & 1 deletion base/memory/discardable_shared_memory.cc
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ struct SharedState {
SharedState* SharedStateFromSharedMemory(
const WritableSharedMemoryMapping& shared_memory) {
DCHECK(shared_memory.IsValid());
return static_cast<SharedState*>(shared_memory.memory());
return shared_memory.GetMemoryAs<SharedState>();
}

// Round up |size| to a multiple of page size.
Expand Down
14 changes: 10 additions & 4 deletions base/memory/platform_shared_memory_region_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -308,11 +308,17 @@ TEST_F(PlatformSharedMemoryRegionTest, MappingProtectionSetCorrectly) {
ASSERT_TRUE(region.ConvertToReadOnly());
WritableSharedMemoryMapping ro_mapping = MapForTesting(&region);
ASSERT_TRUE(ro_mapping.IsValid());
CheckReadOnlyMapProtection(ro_mapping.memory());
CheckReadOnlyMapProtection(ro_mapping.data());

EXPECT_FALSE(TryToRestoreWritablePermissions(ro_mapping.memory(),
ro_mapping.mapped_size()));
CheckReadOnlyMapProtection(ro_mapping.memory());
// SAFETY: There's no public way to get a span of the full mapped memory size.
// The `mapped_size()` is larger than `size()` but is the actual size of the
// shared memory backing.
auto full_map_mem =
UNSAFE_BUFFERS(span(ro_mapping.data(), ro_mapping.mapped_size()));
EXPECT_FALSE(TryToRestoreWritablePermissions(full_map_mem.data(),
full_map_mem.size()));

CheckReadOnlyMapProtection(ro_mapping.data());
}

// Tests that platform handle permissions are checked correctly.
Expand Down
2 changes: 1 addition & 1 deletion base/memory/read_only_shared_memory_region.cc
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ MappedReadOnlyRegion ReadOnlySharedMemoryRegion::Create(
WritableSharedMemoryMapping mapping(result.value(), size, handle.GetGUID(),
mapper);
#if BUILDFLAG(IS_MAC)
handle.ConvertToReadOnly(mapping.memory());
handle.ConvertToReadOnly(mapping.data());
#else
handle.ConvertToReadOnly();
#endif // BUILDFLAG(IS_MAC)
Expand Down
25 changes: 23 additions & 2 deletions base/memory/shared_memory_mapping.h
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ class BASE_EXPORT SharedMemoryMapping {
size_t size,
const UnguessableToken& guid,
SharedMemoryMapper* mapper);

// Returns a span over the full mapped memory.
span<uint8_t> mapped_memory() const { return mapped_span_; }

Expand Down Expand Up @@ -107,7 +108,17 @@ class BASE_EXPORT ReadOnlySharedMemoryMapping : public SharedMemoryMapping {

// Returns the base address of the read-only mapping. Returns nullptr for
// invalid instances.
const void* memory() const { return mapped_memory().data(); }
//
// Use `span(mapping)` to make a span of `uint8_t`, `GetMemoryAs<T>()` to
// access the memory as a single `T` or `GetMemoryAsSpan<T>()` to access it as
// an array of `T`.
const uint8_t* data() const { return mapped_memory().data(); }

// TODO(crbug.com/355451178): Deprecated. Use `span(mapping)` to make a span
// of `uint8_t`, `GetMemoryAs<T>()` to access the memory as a single `T` or
// `GetMemoryAsSpan<T>()` to access it as an array of `T`, or `data()` for an
// unbounded pointer.
const void* memory() const { return data(); }

// Returns a pointer to a page-aligned const T if the mapping is valid and
// large enough to contain a T, or nullptr otherwise.
Expand Down Expand Up @@ -187,7 +198,17 @@ class BASE_EXPORT WritableSharedMemoryMapping : public SharedMemoryMapping {

// Returns the base address of the writable mapping. Returns nullptr for
// invalid instances.
void* memory() const { return mapped_memory().data(); }
//
// Use `span(mapping)` to make a span of `uint8_t`, `GetMemoryAs<T>()` to
// access the memory as a single `T` or `GetMemoryAsSpan<T>()` to access it as
// an array of `T`.
uint8_t* data() const { return mapped_memory().data(); }

// TODO(crbug.com/355451178): Deprecated. Use `span(mapping)` to make a span
// of `uint8_t`, `GetMemoryAs<T>()` to access the memory as a single `T`, or
// `GetMemoryAsSpan<T>()` to access it as an array of `T` or `data()` for an
// unbounded pointer.
void* memory() const { return data(); }

// Returns a pointer to a page-aligned T if the mapping is valid and large
// enough to contain a T, or nullptr otherwise.
Expand Down
93 changes: 49 additions & 44 deletions base/memory/shared_memory_region_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifdef UNSAFE_BUFFERS_BUILD
// TODO(crbug.com/40284755): Remove this and spanify to fix the errors.
#pragma allow_unsafe_buffers
#endif

#include <algorithm>
#include <utility>

#include "base/containers/span.h"
#include "base/memory/platform_shared_memory_region.h"
#include "base/memory/read_only_shared_memory_region.h"
#include "base/memory/unsafe_shared_memory_region.h"
Expand All @@ -22,14 +19,12 @@ namespace base {

const size_t kRegionSize = 1024;

bool IsMemoryFilledWithByte(const void* memory, size_t size, char byte) {
const char* start_ptr = static_cast<const char*>(memory);
const char* end_ptr = start_ptr + size;
for (const char* ptr = start_ptr; ptr < end_ptr; ++ptr) {
if (*ptr != byte)
bool IsMemoryFilledWithByte(span<const uint8_t> memory, uint8_t byte) {
for (uint8_t c : memory) {
if (c != byte) {
return false;
}
}

return true;
}

Expand All @@ -41,8 +36,10 @@ class SharedMemoryRegionTest : public ::testing::Test {
CreateMappedRegion<SharedMemoryRegionType>(kRegionSize);
ASSERT_TRUE(region_.IsValid());
ASSERT_TRUE(rw_mapping_.IsValid());
memset(rw_mapping_.memory(), 'G', kRegionSize);
EXPECT_TRUE(IsMemoryFilledWithByte(rw_mapping_.memory(), kRegionSize, 'G'));
span<uint8_t> mapped = base::span(rw_mapping_);
EXPECT_EQ(mapped.size(), kRegionSize);
std::ranges::fill(mapped, uint8_t{'G'});
EXPECT_TRUE(IsMemoryFilledWithByte(mapped, 'G'));
}

protected:
Expand Down Expand Up @@ -72,51 +69,46 @@ TYPED_TEST(SharedMemoryRegionTest, MoveRegion) {
// Check that moved region maps correctly.
typename TypeParam::MappingType mapping = moved_region.Map();
ASSERT_TRUE(mapping.IsValid());
EXPECT_NE(this->rw_mapping_.memory(), mapping.memory());
EXPECT_EQ(memcmp(this->rw_mapping_.memory(), mapping.memory(), kRegionSize),
0);
EXPECT_NE(this->rw_mapping_.data(), mapping.data());
EXPECT_EQ(base::span(this->rw_mapping_), base::span(mapping));

// Verify that the second mapping reflects changes in the first.
memset(this->rw_mapping_.memory(), '#', kRegionSize);
EXPECT_EQ(memcmp(this->rw_mapping_.memory(), mapping.memory(), kRegionSize),
0);
std::ranges::fill(base::span(this->rw_mapping_), '#');
EXPECT_EQ(base::span(this->rw_mapping_), base::span(mapping));
}

TYPED_TEST(SharedMemoryRegionTest, MappingValidAfterClose) {
// Check the mapping is still valid after the region is closed.
this->region_ = TypeParam();
EXPECT_FALSE(this->region_.IsValid());
ASSERT_TRUE(this->rw_mapping_.IsValid());
EXPECT_TRUE(
IsMemoryFilledWithByte(this->rw_mapping_.memory(), kRegionSize, 'G'));
EXPECT_TRUE(IsMemoryFilledWithByte(base::span(this->rw_mapping_), 'G'));
}

TYPED_TEST(SharedMemoryRegionTest, MapTwice) {
// The second mapping is either writable or read-only.
typename TypeParam::MappingType mapping = this->region_.Map();
ASSERT_TRUE(mapping.IsValid());
EXPECT_NE(this->rw_mapping_.memory(), mapping.memory());
EXPECT_EQ(memcmp(this->rw_mapping_.memory(), mapping.memory(), kRegionSize),
0);
EXPECT_NE(this->rw_mapping_.data(), mapping.data());
EXPECT_EQ(base::span(this->rw_mapping_), base::span(mapping));

// Verify that the second mapping reflects changes in the first.
memset(this->rw_mapping_.memory(), '#', kRegionSize);
EXPECT_EQ(memcmp(this->rw_mapping_.memory(), mapping.memory(), kRegionSize),
0);
std::ranges::fill(base::span(this->rw_mapping_), '#');
EXPECT_EQ(base::span(this->rw_mapping_), base::span(mapping));

// Close the region and unmap the first memory segment, verify the second
// still has the right data.
this->region_ = TypeParam();
this->rw_mapping_ = WritableSharedMemoryMapping();
EXPECT_TRUE(IsMemoryFilledWithByte(mapping.memory(), kRegionSize, '#'));
EXPECT_TRUE(IsMemoryFilledWithByte(base::span(mapping), '#'));
}

TYPED_TEST(SharedMemoryRegionTest, MapUnmapMap) {
this->rw_mapping_ = WritableSharedMemoryMapping();

typename TypeParam::MappingType mapping = this->region_.Map();
ASSERT_TRUE(mapping.IsValid());
EXPECT_TRUE(IsMemoryFilledWithByte(mapping.memory(), kRegionSize, 'G'));
EXPECT_TRUE(IsMemoryFilledWithByte(base::span(mapping), 'G'));
}

TYPED_TEST(SharedMemoryRegionTest, SerializeAndDeserialize) {
Expand All @@ -128,20 +120,19 @@ TYPED_TEST(SharedMemoryRegionTest, SerializeAndDeserialize) {
EXPECT_FALSE(this->region_.IsValid());
typename TypeParam::MappingType mapping = region.Map();
ASSERT_TRUE(mapping.IsValid());
EXPECT_TRUE(IsMemoryFilledWithByte(mapping.memory(), kRegionSize, 'G'));
EXPECT_TRUE(IsMemoryFilledWithByte(base::span(mapping), 'G'));

// Verify that the second mapping reflects changes in the first.
memset(this->rw_mapping_.memory(), '#', kRegionSize);
EXPECT_EQ(memcmp(this->rw_mapping_.memory(), mapping.memory(), kRegionSize),
0);
std::ranges::fill(base::span(this->rw_mapping_), '#');
EXPECT_EQ(base::span(this->rw_mapping_), base::span(mapping));
}

// Map() will return addresses which are aligned to the platform page size, this
// varies from platform to platform though. Since we'd like to advertise a
// minimum alignment that callers can count on, test for it here.
TYPED_TEST(SharedMemoryRegionTest, MapMinimumAlignment) {
EXPECT_EQ(0U,
reinterpret_cast<uintptr_t>(this->rw_mapping_.memory()) &
reinterpret_cast<uintptr_t>(this->rw_mapping_.data()) &
(subtle::PlatformSharedMemoryRegion::kMapMinimumAlignment - 1));
}

Expand All @@ -165,10 +156,10 @@ TYPED_TEST(SharedMemoryRegionTest, MapAt) {
auto [region, rw_mapping] = CreateMappedRegion<TypeParam>(kDataSize);
ASSERT_TRUE(region.IsValid());
ASSERT_TRUE(rw_mapping.IsValid());
uint32_t* ptr = static_cast<uint32_t*>(rw_mapping.memory());
auto map = rw_mapping.template GetMemoryAsSpan<uint32_t>();

for (size_t i = 0; i < kCount; ++i)
ptr[i] = i;
map[i] = i;

rw_mapping = WritableSharedMemoryMapping();

Expand All @@ -179,9 +170,9 @@ TYPED_TEST(SharedMemoryRegionTest, MapAt) {
ASSERT_TRUE(mapping.IsValid());

size_t int_offset = bytes_offset / sizeof(uint32_t);
const uint32_t* ptr2 = static_cast<const uint32_t*>(mapping.memory());
auto map2 = mapping.template GetMemoryAsSpan<uint32_t>();
for (size_t i = int_offset; i < kCount; ++i) {
EXPECT_EQ(ptr2[i - int_offset], i);
EXPECT_EQ(map2[i - int_offset], i);
}
}
}
Expand Down Expand Up @@ -211,9 +202,9 @@ TYPED_TEST(DuplicatableSharedMemoryRegionTest, Duplicate) {
EXPECT_EQ(this->region_.GetGUID(), dup_region.GetGUID());
typename TypeParam::MappingType mapping = dup_region.Map();
ASSERT_TRUE(mapping.IsValid());
EXPECT_NE(this->rw_mapping_.memory(), mapping.memory());
EXPECT_NE(this->rw_mapping_.data(), mapping.data());
EXPECT_EQ(this->rw_mapping_.guid(), mapping.guid());
EXPECT_TRUE(IsMemoryFilledWithByte(mapping.memory(), kRegionSize, 'G'));
EXPECT_TRUE(IsMemoryFilledWithByte(base::span(mapping), 'G'));
}

class ReadOnlySharedMemoryRegionTest : public ::testing::Test {
Expand Down Expand Up @@ -260,8 +251,15 @@ TEST_F(ReadOnlySharedMemoryRegionTest,
ASSERT_TRUE(region.IsValid());
ReadOnlySharedMemoryMapping mapping = region.Map();
ASSERT_TRUE(mapping.IsValid());
void* memory_ptr = const_cast<void*>(mapping.memory());
EXPECT_DEATH_IF_SUPPORTED(memset(memory_ptr, 'G', kRegionSize), "");
base::span<const uint8_t> mem(mapping);
base::span<uint8_t> mut_mem =
// SAFETY: The data() and size() from `mem` produce a valid span as they
// come from another span of the same type (modulo const). Const-casting
// is not Undefined Behaviour here as it's not pointing to a const object.
// We're testing that we crash if writing to a ReadOnly shared memory
// backing.
UNSAFE_BUFFERS(base::span(const_cast<uint8_t*>(mem.data()), mem.size()));
EXPECT_DEATH_IF_SUPPORTED(std::ranges::fill(mut_mem, 'G'), "");
}

TEST_F(ReadOnlySharedMemoryRegionTest,
Expand All @@ -270,8 +268,15 @@ TEST_F(ReadOnlySharedMemoryRegionTest,
ASSERT_TRUE(region.IsValid());
ReadOnlySharedMemoryMapping mapping = region.Map();
ASSERT_TRUE(mapping.IsValid());
void* memory_ptr = const_cast<void*>(mapping.memory());
EXPECT_DEATH_IF_SUPPORTED(memset(memory_ptr, 'G', kRegionSize), "");
base::span<const uint8_t> mem(mapping);
base::span<uint8_t> mut_mem =
// SAFETY: The data() and size() from `mem` produce a valid span as they
// come from another span of the same type (modulo const). Const-casting
// is not Undefined Behaviour here as it's not pointing to a const object.
// We're testing that we crash if writing to a ReadOnly shared memory
// backing.
UNSAFE_BUFFERS(base::span(const_cast<uint8_t*>(mem.data()), mem.size()));
EXPECT_DEATH_IF_SUPPORTED(std::ranges::fill(mut_mem, 'G'), "");
}

} // namespace base
Loading

0 comments on commit 5a9a4a9

Please sign in to comment.