HBASE-28893 RefCnt Leak error when closing a HalfStoreFileReader (#6329)
Signed-off-by: Istvan Toth <[email protected]>
wchevreuil authored Oct 1, 2024
1 parent 958318f · commit 24c7a3f
Showing 2 changed files with 35 additions and 3 deletions.
@@ -376,6 +376,7 @@ public void close(boolean evictOnClose) throws IOException {
           reference, referred, top, numEvictedReferred, numEvictedReference);
         });
       }
+      s.close();
       reader.close(false);
     } else {
       reader.close(evictOnClose);
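The whole fix is the single added line above: `s` — evidently a ref-counted resource opened earlier in this close path — is now closed before the reader itself, so the blocks it pinned are released. HBase's `RefCnt` is built on Netty's `ResourceLeakDetector`, which is what the new test below hooks into. As background, here is a minimal, self-contained sketch of that leak-tracking mechanism. It is an illustration only: it uses plain Netty (`io.netty.util`) rather than HBase's shaded `org.apache.hbase.thirdparty` copy, and `ToyScanner`/`LeakDemo` are hypothetical names, not HBase classes.

import io.netty.util.ResourceLeakDetector;
import io.netty.util.ResourceLeakDetectorFactory;
import io.netty.util.ResourceLeakTracker;

// Hypothetical stand-in for a ref-counted scanner; not an HBase class.
final class ToyScanner {
  private static final ResourceLeakDetector<ToyScanner> DETECTOR =
    ResourceLeakDetectorFactory.instance().newResourceLeakDetector(ToyScanner.class);

  // Start tracking this instance; the tracker must be closed on release.
  private final ResourceLeakTracker<ToyScanner> tracker = DETECTOR.track(this);

  void close() {
    if (tracker != null) { // track() may return null below PARANOID level
      tracker.close(this); // marks the resource as properly released
    }
  }
}

public final class LeakDemo {
  public static void main(String[] args) throws Exception {
    // PARANOID samples every allocation, making detection deterministic.
    ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.PARANOID);

    new ToyScanner();         // never closed: becomes a leak once GC'd
    new ToyScanner().close(); // closed properly: never reported

    System.gc();
    Thread.sleep(1000);       // give GC time to reclaim the unclosed instance

    // The detector only reports leaks when it is next touched; this
    // allocation triggers a "LEAK: ..." error log for the first instance.
    new ToyScanner().close();
  }
}

The same pattern appears in the second changed file, the test TestHalfStoreFileReader, whose diff follows: PARANOID level, a forced GC plus a pause, and a listener that counts reports.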
@@ -17,13 +17,16 @@
  */
 package org.apache.hadoop.hbase.io;
 
+import static org.apache.hadoop.hbase.io.ByteBuffAllocator.BUFFER_SIZE_KEY;
+import static org.apache.hadoop.hbase.io.hfile.CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -36,13 +39,16 @@
 import org.apache.hadoop.hbase.HBaseTestingUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
 import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
+import org.apache.hadoop.hbase.nio.RefCnt;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
 import org.apache.hadoop.hbase.testclassification.IOTests;
@@ -54,6 +60,8 @@
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.hbase.thirdparty.io.netty.util.ResourceLeakDetector;
+
 @Category({ IOTests.class, SmallTests.class })
 public class TestHalfStoreFileReader {
 
@@ -84,7 +92,8 @@ public static void tearDownAfterClass() throws Exception {
    * top of the file while we are at it.
    */
   @Test
-  public void testHalfScanAndReseek() throws IOException {
+  public void testHalfScanAndReseek() throws Exception {
+    ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.PARANOID);
     Configuration conf = TEST_UTIL.getConfiguration();
     FileSystem fs = FileSystem.get(conf);
     String root_dir = TEST_UTIL.getDataTestDir().toString();
@@ -93,8 +102,25 @@ public void testHalfScanAndReseek() throws IOException {
     Path splitAPath = new Path(new Path(root_dir, "splita"), "CF");
     Path splitBPath = new Path(new Path(root_dir, "splitb"), "CF");
     Path filePath = StoreFileWriter.getUniqueFile(fs, parentPath);
+    String ioEngineName = "file:" + TEST_UTIL.getDataTestDir() + "/bucketNoRecycler.cache";
+    BucketCache bucketCache = new BucketCache(ioEngineName, 32 * 1024 * 1024, 1024,
+      new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 96 * 1024 }, 1, 1, null);
+    conf.setBoolean(CACHE_BLOCKS_ON_WRITE_KEY, true);
+    conf.setInt(BUFFER_SIZE_KEY, 1024);
+    ByteBuffAllocator allocator = ByteBuffAllocator.create(conf, true);
+
+    final AtomicInteger counter = new AtomicInteger();
+    RefCnt.detector.setLeakListener(new ResourceLeakDetector.LeakListener() {
+      @Override
+      public void onLeak(String s, String s1) {
+        counter.incrementAndGet();
+      }
+    });
+
+    ColumnFamilyDescriptorBuilder cfBuilder =
+      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("CF"));
+    CacheConfig cacheConf = new CacheConfig(conf, cfBuilder.build(), bucketCache, allocator);
 
-    CacheConfig cacheConf = new CacheConfig(conf);
     HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
     HFile.Writer w =
       HFile.getWriterFactory(conf, cacheConf).withPath(fs, filePath).withFileContext(meta).create();
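Worth noting about the setup above: the stock `new CacheConfig(conf)` it replaces would not exercise reference counting at all. Backing the cache with a file-mode `BucketCache` and a pooled `ByteBuffAllocator`, and enabling cache-on-write, appears intended to make the blocks the half-file reader touches ref-counted buffers, so that an unclosed scanner pins them and the leak detector has something to catch; the small 1 KB buffer and block sizes spread the file across several blocks.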
@@ -122,10 +148,12 @@ public void testHalfScanAndReseek() throws IOException {
     doTestOfScanAndReseek(splitFileB, fs, top, cacheConf);
 
     r.close();
+
+    assertEquals(0, counter.get());
   }
 
   private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom, CacheConfig cacheConf)
-    throws IOException {
+    throws Exception {
     Path referencePath = StoreFileInfo.getReferredToFile(p);
     FSDataInputStreamWrapper in = new FSDataInputStreamWrapper(fs, referencePath, false, 0);
     FileStatus status = fs.getFileStatus(referencePath);
@@ -158,6 +186,9 @@ private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom, CacheConfig cacheConf)
     }
 
     halfreader.close(true);
+
+    System.gc();
+    Thread.sleep(1000);
   }
 
   // Tests the scanner on an HFile that is backed by HalfStoreFiles
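A note on the test mechanics: Netty's leak detector only notices a leak after the garbage collector reclaims an unclosed tracked object, hence the `System.gc()` and one-second sleep after `halfreader.close(true)`. The `LeakListener` installed on `RefCnt.detector` increments `counter` for every report, and the final `assertEquals(0, counter.get())` fails if any block was left unreleased. With the `s.close()` fix in the first file reverted, that assertion should trip, which makes this test a regression guard for HBASE-28893.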
