diff --git a/src/server/async/socket_server.cc b/src/server/async/socket_server.cc
index 9e17b6521..3c6b40ec0 100644
--- a/src/server/async/socket_server.cc
+++ b/src/server/async/socket_server.cc
@@ -574,7 +574,7 @@ bool SocketConnection::doGetBuffers(const json& root) {
   RESPONSE_ON_ERROR(bulk_store_->GetUnsafe(ids, unsafe, objects));
   VINEYARD_CHECK_OK(bulk_store_->MarkAsCold(ids, objects));
   for (size_t i = 0; i < objects.size(); ++i) {
-    if (objects[i]->pointer == nullptr) {
+    if (objects[i]->pointer == nullptr && objects[i]->data_size > 0) {
       VINEYARD_CHECK_OK(
           bulk_store_->ReloadColdObject(ids[i], objects[i], false));
       VINEYARD_CHECK_OK(bulk_store_->MarkAsCold(ids[i], objects[i]));
@@ -802,7 +802,7 @@ bool SocketConnection::doGetRemoteBuffers(const json& root) {
   RESPONSE_ON_ERROR(bulk_store_->GetUnsafe(ids, unsafe, objects));
   VINEYARD_CHECK_OK(bulk_store_->MarkAsCold(ids, objects));
   for (size_t i = 0; i < objects.size(); ++i) {
-    if (objects[i]->pointer == nullptr) {
+    if (objects[i]->pointer == nullptr && objects[i]->data_size > 0) {
       VINEYARD_CHECK_OK(
           bulk_store_->ReloadColdObject(ids[i], objects[i], false));
       VINEYARD_CHECK_OK(bulk_store_->MarkAsCold(ids[i], objects[i]));
diff --git a/src/server/memory/usage.h b/src/server/memory/usage.h
index ecbff4d98..e36e7c408 100644
--- a/src/server/memory/usage.h
+++ b/src/server/memory/usage.h
@@ -396,6 +396,7 @@ class ColdObjectTracker
     lru_map_t map_;
     lru_list_t list_;
     ska::flat_hash_map<ID, std::shared_ptr<P>> spilled_obj_;
+    friend class ColdObjectTracker;
   };
 
  public:
@@ -556,7 +557,8 @@ class ColdObjectTracker
    */
   uint8_t* AllocateMemoryWithSpill(const size_t size, int* fd,
                                    int64_t* map_size, ptrdiff_t* offset) {
-    std::lock_guard locked(allocate_memory_mu_);
+    // std::lock_guard locked(allocate_memory_mu_);
+    std::lock_guard locked(cold_obj_lru_.mu_);
     uint8_t* pointer = nullptr;
     std::cout << "Thread " << std::this_thread::get_id()
               << " before AllocateMemoryWithSpill;" << "size:" << size
@@ -574,7 +576,7 @@
     // 2. memory usage is above upper bound
     if (pointer == nullptr ||
         BulkAllocator::Allocated() >= self().mem_spill_upper_bound_) {
-      //std::unique_lock locked(spill_mu_);
+      // std::unique_lock locked(spill_mu_);
       int64_t min_spill_size = 0;
       if (pointer == nullptr) {