From 8fde834fae463f3ee26fe09b0b00a7177c7a1b1e Mon Sep 17 00:00:00 2001 From: Mark Rowe Date: Sun, 24 Nov 2024 16:54:16 -0800 Subject: [PATCH 1/4] [SharedCache] Split view-specific state into a separate struct The existing view-specific state was stored in several global unordered maps. Many of these were accessed without locking, including `viewSpecificMutexes`, which is racy in the face of multiple threads. View-specific state is stored in a new heap-allocated `ViewSpecificState` struct that is reference counted via `std::shared_ptr`. A static map holds a `std::weak_ptr` to each view-specific state, keyed by session id. `SharedCache` retrieves its view-specific state during its constructor. Since `ViewSpecificState` is reference counted it will naturally be deallocated when the last `SharedCache` instance that references it goes away. Its corresponding entry will remain in the static map, though since it only holds a `std::weak_ptr` rather than any state it will not use much memory. The next time view-specific state is retrieved any expired entries will be removed from the map. 
--- view/sharedcache/core/SharedCache.cpp | 168 ++++++++++++++------------ view/sharedcache/core/SharedCache.h | 3 + 2 files changed, 95 insertions(+), 76 deletions(-) diff --git a/view/sharedcache/core/SharedCache.cpp b/view/sharedcache/core/SharedCache.cpp index a7c7ffb01..c0e320bd8 100644 --- a/view/sharedcache/core/SharedCache.cpp +++ b/view/sharedcache/core/SharedCache.cpp @@ -27,6 +27,8 @@ #include "SharedCache.h" #include "ObjC.h" #include +#include +#include #include #include #include @@ -75,19 +77,51 @@ struct ViewStateCacheStore { std::unordered_map>>> m_symbolInfos; }; -static std::recursive_mutex viewStateMutex; -static std::unordered_map viewStateCache; +struct SharedCache::ViewSpecificState { + std::mutex typeLibraryMutex; + std::mutex viewOperationsThatInfluenceMetadataMutex; -std::mutex progressMutex; -std::unordered_map progressMap; + std::atomic progress; -struct ViewSpecificMutexes { - std::mutex viewOperationsThatInfluenceMetadataMutex; - std::mutex typeLibraryLookupAndApplicationMutex; + std::mutex stateMutex; + std::optional state; }; -static std::unordered_map viewSpecificMutexes; +std::shared_ptr ViewSpecificStateForId(uint64_t viewIdentifier, bool insertIfNeeded = true) { + static std::mutex viewSpecificStateMutex; + static std::unordered_map> viewSpecificState; + + std::lock_guard lock(viewSpecificStateMutex); + + if (auto it = viewSpecificState.find(viewIdentifier); it != viewSpecificState.end()) { + if (auto statePtr = it->second.lock()) { + return statePtr; + } + } + + if (!insertIfNeeded) { + return nullptr; + } + + auto statePtr = std::make_shared(); + viewSpecificState[viewIdentifier] = statePtr; + + // Prune entries for any views that are no longer in use. 
+ for (auto it = viewSpecificState.begin(); it != viewSpecificState.end(); ) { + if (it->second.expired()) { + it = viewSpecificState.erase(it); + } else { + ++it; + } + } + + return statePtr; +} + +std::shared_ptr ViewSpecificStateForView(Ref view) { + return ViewSpecificStateForId(view->GetFile()->GetSessionId()); +} std::string base_name(std::string const& path) { @@ -217,9 +251,7 @@ void SharedCache::PerformInitialLoad() auto path = m_dscView->GetFile()->GetOriginalFilename(); auto baseFile = MMappedFileAccessor::Open(m_dscView, m_dscView->GetFile()->GetSessionId(), path)->lock(); - progressMutex.lock(); - progressMap[m_dscView->GetFile()->GetSessionId()] = LoadProgressLoadingCaches; - progressMutex.unlock(); + m_viewSpecificState->progress = LoadProgressLoadingCaches; m_baseFilePath = path; @@ -729,9 +761,8 @@ void SharedCache::PerformInitialLoad() } } baseFile.reset(); - progressMutex.lock(); - progressMap[m_dscView->GetFile()->GetSessionId()] = LoadProgressLoadingImages; - progressMutex.unlock(); + + m_viewSpecificState->progress = LoadProgressLoadingImages; // We have set up enough metadata to map VM now. 
@@ -948,9 +979,7 @@ void SharedCache::PerformInitialLoad() m_logger->LogDebug("Finished initial load of Shared Cache"); - progressMutex.lock(); - progressMap[m_dscView->GetFile()->GetSessionId()] = LoadProgressFinished; - progressMutex.unlock(); + m_viewSpecificState->progress = LoadProgressFinished; } std::shared_ptr SharedCache::GetVMMap(bool mapPages) @@ -979,10 +1008,10 @@ void SharedCache::DeserializeFromRawView() { if (m_dscView->QueryMetadata(SharedCacheMetadataTag)) { - std::unique_lock viewStateCacheLock(viewStateMutex); - if (viewStateCache.find(m_dscView->GetFile()->GetSessionId()) != viewStateCache.end()) + std::lock_guard lock(m_viewSpecificState->stateMutex); + if (m_viewSpecificState->state.has_value()) { - auto c = viewStateCache[m_dscView->GetFile()->GetSessionId()]; + const auto& c = *m_viewSpecificState->state; m_imageStarts = c.m_imageStarts; m_cacheFormat = c.m_cacheFormat; m_backingCaches = c.m_backingCaches; @@ -1351,7 +1380,7 @@ void SharedCache::ParseAndApplySlideInfoForFile(std::shared_ptr dscView) : m_dscView(dscView) +SharedCache::SharedCache(BinaryNinja::Ref dscView) : m_dscView(dscView), m_viewSpecificState(ViewSpecificStateForView(dscView)) { if (dscView->GetTypeName() != VIEW_NAME) { @@ -1365,53 +1394,44 @@ SharedCache::SharedCache(BinaryNinja::Ref dscView) : m_ DeserializeFromRawView(); if (!m_metadataValid) return; - if (m_viewState == DSCViewStateUnloaded) + if (m_viewState != DSCViewStateUnloaded) { + m_viewSpecificState->progress = LoadProgressFinished; + return; + } + + std::unique_lock lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); + try { + PerformInitialLoad(); + } + catch (...) { - if (m_viewState == DSCViewStateUnloaded) - { - std::unique_lock lock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].viewOperationsThatInfluenceMetadataMutex); - try { - PerformInitialLoad(); - } - catch (...) 
- { - m_logger->LogError("Failed to perform initial load of Shared Cache"); - } + m_logger->LogError("Failed to perform initial load of Shared Cache"); + } - auto settings = m_dscView->GetLoadSettings(VIEW_NAME); - bool autoLoadLibsystem = true; - if (settings && settings->Contains("loader.dsc.autoLoadLibSystem")) - { - autoLoadLibsystem = settings->Get("loader.dsc.autoLoadLibSystem", m_dscView); - } - if (autoLoadLibsystem) + auto settings = m_dscView->GetLoadSettings(VIEW_NAME); + bool autoLoadLibsystem = true; + if (settings && settings->Contains("loader.dsc.autoLoadLibSystem")) + { + autoLoadLibsystem = settings->Get("loader.dsc.autoLoadLibSystem", m_dscView); + } + if (autoLoadLibsystem) + { + for (const auto& [_, header] : m_headers) + { + if (header.installName.find("libsystem_c.dylib") != std::string::npos) { - for (const auto& [_, header] : m_headers) - { - if (header.installName.find("libsystem_c.dylib") != std::string::npos) - { - lock.unlock(); - m_logger->LogInfo("Loading core libsystem_c.dylib library"); - LoadImageWithInstallName(header.installName); - lock.lock(); - break; - } - } + lock.unlock(); + m_logger->LogInfo("Loading core libsystem_c.dylib library"); + LoadImageWithInstallName(header.installName); + break; } - m_viewState = DSCViewStateLoaded; - SaveToDSCView(); } } - else - { - progressMutex.lock(); - progressMap[m_dscView->GetFile()->GetSessionId()] = LoadProgressFinished; - progressMutex.unlock(); - } + m_viewState = DSCViewStateLoaded; + SaveToDSCView(); } SharedCache::~SharedCache() { - std::unique_lock lock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].viewOperationsThatInfluenceMetadataMutex); sharedCacheReferences--; } @@ -1524,7 +1544,7 @@ bool SharedCache::LoadImageContainingAddress(uint64_t address) bool SharedCache::LoadSectionAtAddress(uint64_t address) { - std::unique_lock lock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].viewOperationsThatInfluenceMetadataMutex); + std::unique_lock 
lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); DeserializeFromRawView(); auto vm = GetVMMap(); if (!vm) @@ -1717,7 +1737,7 @@ bool SharedCache::LoadImageWithInstallName(std::string installName) { auto settings = m_dscView->GetLoadSettings(VIEW_NAME); - std::unique_lock lock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].viewOperationsThatInfluenceMetadataMutex); + std::unique_lock lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); DeserializeFromRawView(); m_logger->LogInfo("Loading image %s", installName.c_str()); @@ -1785,7 +1805,7 @@ bool SharedCache::LoadImageWithInstallName(std::string installName) return false; } - std::unique_lock typelibLock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].typeLibraryLookupAndApplicationMutex); + std::unique_lock typelibLock(m_viewSpecificState->typeLibraryMutex); auto typeLib = m_dscView->GetTypeLibrary(header.installName); if (!typeLib) @@ -2792,7 +2812,7 @@ std::vector SharedCache::GetAvailableImages() std::vector>> SharedCache::LoadAllSymbolsAndWait() { - std::unique_lock initialLoadBlock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].viewOperationsThatInfluenceMetadataMutex); + std::unique_lock initialLoadBlock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); std::vector>> symbols; for (const auto& img : m_images) @@ -2885,7 +2905,7 @@ void SharedCache::FindSymbolAtAddrAndApplyToAddr(uint64_t symbolLocation, uint64 } auto exportList = SharedCache::ParseExportTrie(mapping, *header); std::vector>> exportMapping; - std::unique_lock lock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].typeLibraryLookupAndApplicationMutex); + std::unique_lock lock(m_viewSpecificState->typeLibraryMutex); auto typeLib = m_dscView->GetTypeLibrary(header->installName); if (!typeLib) { @@ -2932,7 +2952,7 @@ void SharedCache::FindSymbolAtAddrAndApplyToAddr(uint64_t symbolLocation, uint64 } } { - std::unique_lock 
_lock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].viewOperationsThatInfluenceMetadataMutex); + std::lock_guard lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); m_exportInfos[header->textBase] = exportMapping; } m_dscView->EndBulkModifySymbols(); @@ -2948,7 +2968,7 @@ bool SharedCache::SaveToDSCView() auto data = AsMetadata(); m_dscView->StoreMetadata(SharedCacheMetadataTag, data); m_dscView->GetParentView()->GetParentView()->StoreMetadata(SharedCacheMetadataTag, data); - std::unique_lock viewStateCacheLock(viewStateMutex); + std::lock_guard lock(m_viewSpecificState->stateMutex); ViewStateCacheStore c; c.m_imageStarts = m_imageStarts; c.m_cacheFormat = m_cacheFormat; @@ -2963,7 +2983,7 @@ bool SharedCache::SaveToDSCView() c.m_baseFilePath = m_baseFilePath; c.m_exportInfos = m_exportInfos; c.m_symbolInfos = m_symbolInfos; - viewStateCache[m_dscView->GetFile()->GetSessionId()] = c; + m_viewSpecificState->state = std::move(c); m_metadataValid = true; @@ -2973,7 +2993,7 @@ bool SharedCache::SaveToDSCView() } std::vector SharedCache::GetMappedRegions() const { - std::unique_lock lock(viewSpecificMutexes[m_dscView->GetFile()->GetSessionId()].viewOperationsThatInfluenceMetadataMutex); + std::unique_lock lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); return m_regionsMappedIntoMemory; } @@ -3287,15 +3307,11 @@ extern "C" BNDSCViewLoadProgress BNDSCViewGetLoadProgress(uint64_t sessionID) { - progressMutex.lock(); - if (progressMap.find(sessionID) == progressMap.end()) - { - progressMutex.unlock(); - return LoadProgressNotStarted; + if (auto viewSpecificState = ViewSpecificStateForId(sessionID, false)) { + return viewSpecificState->progress; } - auto progress = progressMap[sessionID]; - progressMutex.unlock(); - return progress; + + return LoadProgressNotStarted; } uint64_t BNDSCViewFastGetBackingCacheCount(BNBinaryView* data) diff --git a/view/sharedcache/core/SharedCache.h b/view/sharedcache/core/SharedCache.h index 
81c026203..c8dc2d447 100644 --- a/view/sharedcache/core/SharedCache.h +++ b/view/sharedcache/core/SharedCache.h @@ -1108,6 +1108,8 @@ namespace SharedCacheCore { m_metadataValid = true; } + struct ViewSpecificState; + private: Ref m_logger; /* VIEW STATE BEGIN -- SERIALIZE ALL OF THIS AND STORE IT IN RAW VIEW */ @@ -1140,6 +1142,7 @@ namespace SharedCacheCore { std::vector m_nonImageRegions; /* VIEWSTATE END -- NOTHING PAST THIS IS SERIALIZED */ + std::shared_ptr m_viewSpecificState; /* API VIEW START */ BinaryNinja::Ref m_dscView; From 0199e4d7f92c6b356d5d638d81fa3b5cdef4fd54 Mon Sep 17 00:00:00 2001 From: Mark Rowe Date: Sun, 24 Nov 2024 16:55:39 -0800 Subject: [PATCH 2/4] [SharedCache] Cache type libraries in the view-specific state They're surprisingly expensive to look up. --- view/sharedcache/core/SharedCache.cpp | 49 ++++++++++++--------------- view/sharedcache/core/SharedCache.h | 3 ++ 2 files changed, 25 insertions(+), 27 deletions(-) diff --git a/view/sharedcache/core/SharedCache.cpp b/view/sharedcache/core/SharedCache.cpp index c0e320bd8..cb6a697b6 100644 --- a/view/sharedcache/core/SharedCache.cpp +++ b/view/sharedcache/core/SharedCache.cpp @@ -79,6 +79,8 @@ struct ViewStateCacheStore { struct SharedCache::ViewSpecificState { std::mutex typeLibraryMutex; + std::unordered_map> typeLibraries; + std::mutex viewOperationsThatInfluenceMetadataMutex; std::atomic progress; @@ -1805,21 +1807,7 @@ bool SharedCache::LoadImageWithInstallName(std::string installName) return false; } - std::unique_lock typelibLock(m_viewSpecificState->typeLibraryMutex); - auto typeLib = m_dscView->GetTypeLibrary(header.installName); - - if (!typeLib) - { - auto typeLibs = m_dscView->GetDefaultPlatform()->GetTypeLibrariesByName(header.installName); - if (!typeLibs.empty()) - { - typeLib = typeLibs[0]; - m_dscView->AddTypeLibrary(typeLib); - m_logger->LogInfo("shared-cache: adding type library for '%s': %s (%s)", - targetImage->installName.c_str(), typeLib->GetName().c_str(), 
typeLib->GetGuid().c_str()); - } - } - typelibLock.unlock(); + auto typeLib = TypeLibraryForImage(header.installName); SaveToDSCView(); @@ -2864,6 +2852,24 @@ std::string SharedCache::SerializedImageHeaderForName(std::string name) return ""; } +Ref SharedCache::TypeLibraryForImage(const std::string& installName) { + std::lock_guard lock(m_viewSpecificState->typeLibraryMutex); + if (auto it = m_viewSpecificState->typeLibraries.find(installName); it != m_viewSpecificState->typeLibraries.end()) { + return it->second; + } + + auto typeLib = m_dscView->GetTypeLibrary(installName); + if (!typeLib) { + auto typeLibs = m_dscView->GetDefaultPlatform()->GetTypeLibrariesByName(installName); + if (!typeLibs.empty()) { + typeLib = typeLibs[0]; + m_dscView->AddTypeLibrary(typeLib); + } + } + + m_viewSpecificState->typeLibraries[installName] = typeLib; + return typeLib; +} void SharedCache::FindSymbolAtAddrAndApplyToAddr(uint64_t symbolLocation, uint64_t targetLocation, bool triggerReanalysis) { @@ -2905,18 +2911,7 @@ void SharedCache::FindSymbolAtAddrAndApplyToAddr(uint64_t symbolLocation, uint64 } auto exportList = SharedCache::ParseExportTrie(mapping, *header); std::vector>> exportMapping; - std::unique_lock lock(m_viewSpecificState->typeLibraryMutex); - auto typeLib = m_dscView->GetTypeLibrary(header->installName); - if (!typeLib) - { - auto typeLibs = m_dscView->GetDefaultPlatform()->GetTypeLibrariesByName(header->installName); - if (!typeLibs.empty()) - { - typeLib = typeLibs[0]; - m_dscView->AddTypeLibrary(typeLib); - } - } - lock.unlock(); + auto typeLib = TypeLibraryForImage(header->installName); id = m_dscView->BeginUndoActions(); m_dscView->BeginBulkModifySymbols(); for (const auto& sym : exportList) diff --git a/view/sharedcache/core/SharedCache.h b/view/sharedcache/core/SharedCache.h index c8dc2d447..979bde863 100644 --- a/view/sharedcache/core/SharedCache.h +++ b/view/sharedcache/core/SharedCache.h @@ -1191,6 +1191,7 @@ namespace SharedCacheCore { explicit 
SharedCache(BinaryNinja::Ref rawView); ~SharedCache() override; +private: std::optional LoadHeaderForAddress( std::shared_ptr vm, uint64_t address, std::string installName); void InitializeHeader( @@ -1199,6 +1200,8 @@ namespace SharedCacheCore { const std::string& currentText, size_t cursor, uint32_t endGuard); std::vector> ParseExportTrie( std::shared_ptr linkeditFile, SharedCacheMachOHeader header); + + Ref TypeLibraryForImage(const std::string& installName); }; From e42eb50d88d60b50ac7e474fd9d3d05e0c0799ae Mon Sep 17 00:00:00 2001 From: WeiN76LQh Date: Tue, 26 Nov 2024 14:57:03 +0000 Subject: [PATCH 3/4] [SharedCache] Reduce lock contention in `SharedCache::LoadSectionAtAddress` When loading a number of sections, for instance when an image is loaded, many of the analysis threads bottleneck in `SharedCache::LoadSectionAtAddress` trying to acquire the `viewOperationsThatInfluenceMetadataMutex` lock. This is a very expensive lock to try and acquire as it is used in a lot of places and held for long durations. This commit improves performance in `SharedCache::LoadSectionAtAddress` by not acquiring the `viewOperationsThatInfluenceMetadataMutex` lock until absolutely necessary. I.e. when something actually needs to be loaded. Often what happens is many threads try to load the same sections but end up queuing to do this one at a time. The commit adds per memory region locking so that threads only block waiting for the memory region they require to be loaded. In most cases though, if the region is already loaded they won't wait at all because no lock is required to determine if this is the case. 
--- view/sharedcache/core/SharedCache.cpp | 165 ++++++++++++++++++++++++-- 1 file changed, 152 insertions(+), 13 deletions(-) diff --git a/view/sharedcache/core/SharedCache.cpp b/view/sharedcache/core/SharedCache.cpp index cb6a697b6..fe7694738 100644 --- a/view/sharedcache/core/SharedCache.cpp +++ b/view/sharedcache/core/SharedCache.cpp @@ -87,6 +87,9 @@ struct SharedCache::ViewSpecificState { std::mutex stateMutex; std::optional state; + + std::mutex memoryRegionLoadingMutexesMutex; + std::unordered_map memoryRegionLoadingMutexes; }; @@ -1546,17 +1549,6 @@ bool SharedCache::LoadImageContainingAddress(uint64_t address) bool SharedCache::LoadSectionAtAddress(uint64_t address) { - std::unique_lock lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); - DeserializeFromRawView(); - auto vm = GetVMMap(); - if (!vm) - { - m_logger->LogError("Failed to map VM pages for Shared Cache."); - return false; - } - - SharedCacheMachOHeader targetHeader; - CacheImage* targetImage = nullptr; MemoryRegion* targetSegment = nullptr; for (auto& image : m_images) @@ -1565,8 +1557,6 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) { if (region.start <= address && region.start + region.size > address) { - targetHeader = m_headers[image.headerLocation]; - targetImage = ℑ targetSegment = ®ion; break; } @@ -1584,6 +1574,46 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) { return true; } + + // The region appears not to be loaded. Acquire the loading lock, re-check + // that it hasn't been loaded and if it still hasn't then actually load it. 
+ std::unique_lock memoryRegionLoadingLockslock(ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexesMutex); + auto& memoryRegionLoadingMutex = ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexes[stubIsland.start]; + // Now the specific memory region's loading mutex has been retrieved, this one can be dropped + memoryRegionLoadingLockslock.unlock(); + // Hold this lock until loading of the region is done + std::unique_lock memoryRegionLoadingLock(memoryRegionLoadingMutex); + + // Check the latest state to see if the memory region has been loaded while acquiring the lock + { + auto viewSpecificState = ViewSpecificStateForView(m_dscView); + std::unique_lock viewStateCacheLock(viewSpecificState->stateMutex); + + for (auto& cacheStubIsland : viewSpecificState->state->m_stubIslandRegions) + { + if (cacheStubIsland.start <= address && cacheStubIsland.start + cacheStubIsland.size > address) + { + if (cacheStubIsland.loaded) + { + return true; + } + stubIsland = cacheStubIsland; + break; + } + } + } + + // Still not loaded, so load it below + std::unique_lock lock(ViewSpecificStateForView(m_dscView)->viewOperationsThatInfluenceMetadataMutex); + DeserializeFromRawView(); + + auto vm = GetVMMap(); + if (!vm) + { + m_logger->LogError("Failed to map VM pages for Shared Cache."); + return false; + } + m_logger->LogInfo("Loading stub island %s @ 0x%llx", stubIsland.prettyName.c_str(), stubIsland.start); auto targetFile = vm->MappingAtAddress(stubIsland.start).first.fileAccessor->lock(); ParseAndApplySlideInfoForFile(targetFile); @@ -1624,6 +1654,46 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) { return true; } + + // The region appears not to be loaded. Acquire the loading lock, re-check + // that it hasn't been loaded and if it still hasn't then actually load it. 
+ std::unique_lock memoryRegionLoadingLockslock(ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexesMutex); + auto& memoryRegionLoadingMutex = ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexes[dyldData.start]; + // Now the specific memory region's loading mutex has been retrieved, this one can be dropped + memoryRegionLoadingLockslock.unlock(); + // Hold this lock until loading of the region is done + std::unique_lock memoryRegionLoadingLock(memoryRegionLoadingMutex); + + // Check the latest state to see if the memory region has been loaded while acquiring the lock + { + auto viewSpecificState = ViewSpecificStateForView(m_dscView); + std::unique_lock viewStateCacheLock(viewSpecificState->stateMutex); + + for (auto& cacheDyldData : viewSpecificState->state->m_dyldDataRegions) + { + if (cacheDyldData.start <= address && cacheDyldData.start + cacheDyldData.size > address) + { + if (cacheDyldData.loaded) + { + return true; + } + dyldData = cacheDyldData; + break; + } + } + } + + // Still not loaded, so load it below + std::unique_lock lock(ViewSpecificStateForView(m_dscView)->viewOperationsThatInfluenceMetadataMutex); + DeserializeFromRawView(); + + auto vm = GetVMMap(); + if (!vm) + { + m_logger->LogError("Failed to map VM pages for Shared Cache."); + return false; + } + m_logger->LogInfo("Loading dyld data %s", dyldData.prettyName.c_str()); auto targetFile = vm->MappingAtAddress(dyldData.start).first.fileAccessor->lock(); ParseAndApplySlideInfoForFile(targetFile); @@ -1663,6 +1733,46 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) { return true; } + + // The region appears not to be loaded. Acquire the loading lock, re-check + // that it hasn't been loaded and if it still hasn't then actually load it. 
+ std::unique_lock memoryRegionLoadingLockslock(ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexesMutex); + auto& memoryRegionLoadingMutex = ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexes[region.start]; + // Now the specific memory region's loading mutex has been retrieved, this one can be dropped + memoryRegionLoadingLockslock.unlock(); + // Hold this lock until loading of the region is done + std::unique_lock memoryRegionLoadingLock(memoryRegionLoadingMutex); + + // Check the latest state to see if the memory region has been loaded while acquiring the lock + { + auto viewSpecificState = ViewSpecificStateForView(m_dscView); + std::unique_lock viewStateCacheLock(viewSpecificState->stateMutex); + + for (auto& cacheRegion : viewSpecificState->state->m_nonImageRegions) + { + if (cacheRegion.start <= address && cacheRegion.start + cacheRegion.size > address) + { + if (cacheRegion.loaded) + { + return true; + } + region = cacheRegion; + break; + } + } + } + + // Still not loaded, so load it below + std::unique_lock lock(ViewSpecificStateForView(m_dscView)->viewOperationsThatInfluenceMetadataMutex); + DeserializeFromRawView(); + + auto vm = GetVMMap(); + if (!vm) + { + m_logger->LogError("Failed to map VM pages for Shared Cache."); + return false; + } + m_logger->LogInfo("Loading non-image region %s", region.prettyName.c_str()); auto targetFile = vm->MappingAtAddress(region.start).first.fileAccessor->lock(); ParseAndApplySlideInfoForFile(targetFile); @@ -1697,6 +1807,35 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) return false; } + std::unique_lock lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); + DeserializeFromRawView(); + auto vm = GetVMMap(); + if (!vm) + { + m_logger->LogError("Failed to map VM pages for Shared Cache."); + return false; + } + + SharedCacheMachOHeader targetHeader; + CacheImage* targetImage = nullptr; + targetSegment = nullptr; + + for (auto& image : m_images) + { + for (auto& region : 
image.regions) + { + if (region.start <= address && region.start + region.size > address) + { + targetHeader = m_headers[image.headerLocation]; + targetImage = ℑ + targetSegment = ®ion; + break; + } + } + if (targetSegment) + break; + } + auto id = m_dscView->BeginUndoActions(); auto rawViewEnd = m_dscView->GetParentView()->GetEnd(); auto reader = VMReader(vm); From 58bf2b588e2d1dcdadb7ffe554b5401261b30429 Mon Sep 17 00:00:00 2001 From: WeiN76LQh Date: Tue, 26 Nov 2024 16:24:53 +0000 Subject: [PATCH 4/4] [SharedCache] Access view specific state directly rather than by a lookup This is just correcting a silly mistake --- view/sharedcache/core/SharedCache.cpp | 30 +++++++++++++-------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/view/sharedcache/core/SharedCache.cpp b/view/sharedcache/core/SharedCache.cpp index fe7694738..fb5ebeec6 100644 --- a/view/sharedcache/core/SharedCache.cpp +++ b/view/sharedcache/core/SharedCache.cpp @@ -1577,8 +1577,8 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) // The region appears not to be loaded. Acquire the loading lock, re-check // that it hasn't been loaded and if it still hasn't then actually load it. 
- std::unique_lock memoryRegionLoadingLockslock(ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexesMutex); - auto& memoryRegionLoadingMutex = ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexes[stubIsland.start]; + std::unique_lock memoryRegionLoadingLockslock(m_viewSpecificState->memoryRegionLoadingMutexesMutex); + auto& memoryRegionLoadingMutex = m_viewSpecificState->memoryRegionLoadingMutexes[stubIsland.start]; // Now the specific memory region's loading mutex has been retrieved, this one can be dropped memoryRegionLoadingLockslock.unlock(); // Hold this lock until loading of the region is done @@ -1586,10 +1586,9 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) // Check the latest state to see if the memory region has been loaded while acquiring the lock { - auto viewSpecificState = ViewSpecificStateForView(m_dscView); - std::unique_lock viewStateCacheLock(viewSpecificState->stateMutex); + std::unique_lock viewStateCacheLock(m_viewSpecificState->stateMutex); - for (auto& cacheStubIsland : viewSpecificState->state->m_stubIslandRegions) + for (auto& cacheStubIsland : m_viewSpecificState->state->m_stubIslandRegions) { if (cacheStubIsland.start <= address && cacheStubIsland.start + cacheStubIsland.size > address) { @@ -1604,7 +1603,7 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) } // Still not loaded, so load it below - std::unique_lock lock(ViewSpecificStateForView(m_dscView)->viewOperationsThatInfluenceMetadataMutex); + std::unique_lock lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); DeserializeFromRawView(); auto vm = GetVMMap(); @@ -1657,8 +1656,8 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) // The region appears not to be loaded. Acquire the loading lock, re-check // that it hasn't been loaded and if it still hasn't then actually load it. 
- std::unique_lock memoryRegionLoadingLockslock(ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexesMutex); - auto& memoryRegionLoadingMutex = ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexes[dyldData.start]; + std::unique_lock memoryRegionLoadingLockslock(m_viewSpecificState->memoryRegionLoadingMutexesMutex); + auto& memoryRegionLoadingMutex = m_viewSpecificState->memoryRegionLoadingMutexes[dyldData.start]; // Now the specific memory region's loading mutex has been retrieved, this one can be dropped memoryRegionLoadingLockslock.unlock(); // Hold this lock until loading of the region is done @@ -1666,10 +1665,9 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) // Check the latest state to see if the memory region has been loaded while acquiring the lock { - auto viewSpecificState = ViewSpecificStateForView(m_dscView); - std::unique_lock viewStateCacheLock(viewSpecificState->stateMutex); + std::unique_lock viewStateCacheLock(m_viewSpecificState->stateMutex); - for (auto& cacheDyldData : viewSpecificState->state->m_dyldDataRegions) + for (auto& cacheDyldData : m_viewSpecificState->state->m_dyldDataRegions) { if (cacheDyldData.start <= address && cacheDyldData.start + cacheDyldData.size > address) { @@ -1684,7 +1682,7 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) } // Still not loaded, so load it below - std::unique_lock lock(ViewSpecificStateForView(m_dscView)->viewOperationsThatInfluenceMetadataMutex); + std::unique_lock lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); DeserializeFromRawView(); auto vm = GetVMMap(); @@ -1736,8 +1734,8 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) // The region appears not to be loaded. Acquire the loading lock, re-check // that it hasn't been loaded and if it still hasn't then actually load it. 
- std::unique_lock memoryRegionLoadingLockslock(ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexesMutex); - auto& memoryRegionLoadingMutex = ViewSpecificStateForView(m_dscView)->memoryRegionLoadingMutexes[region.start]; + std::unique_lock memoryRegionLoadingLockslock(m_viewSpecificState->memoryRegionLoadingMutexesMutex); + auto& memoryRegionLoadingMutex = m_viewSpecificState->memoryRegionLoadingMutexes[region.start]; // Now the specific memory region's loading mutex has been retrieved, this one can be dropped memoryRegionLoadingLockslock.unlock(); // Hold this lock until loading of the region is done @@ -1745,7 +1743,7 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) // Check the latest state to see if the memory region has been loaded while acquiring the lock { - auto viewSpecificState = ViewSpecificStateForView(m_dscView); + auto viewSpecificState = m_viewSpecificState; std::unique_lock viewStateCacheLock(viewSpecificState->stateMutex); for (auto& cacheRegion : viewSpecificState->state->m_nonImageRegions) @@ -1763,7 +1761,7 @@ bool SharedCache::LoadSectionAtAddress(uint64_t address) } // Still not loaded, so load it below - std::unique_lock lock(ViewSpecificStateForView(m_dscView)->viewOperationsThatInfluenceMetadataMutex); + std::unique_lock lock(m_viewSpecificState->viewOperationsThatInfluenceMetadataMutex); DeserializeFromRawView(); auto vm = GetVMMap();