Skip to content

Commit

Permalink
feat: only close writers on flush
Browse files Browse the repository at this point in the history
  • Loading branch information
gintarasm committed Apr 2, 2024
1 parent 79df424 commit d762d9f
Showing 1 changed file with 16 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ public class AsyncBigQuerySinkWriter<A> extends AsyncSinkWriter<Rows<A>, StreamR

private final Executor appendExecutor;

// Writers retired by recreateStreamWriter; they are drained and closed in
// flush(), off the append hot path. ArrayDeque is not thread-safe —
// NOTE(review): assumes recreateStreamWriter and flush run on the same
// (mailbox) thread; confirm against the sink's threading model.
protected transient Queue<StreamWriter> writersToClose = new ArrayDeque<>();
// Active writer per stream name; concurrent map because entries may be
// replaced (recreateStreamWriter's replaceAll) while lookups are in flight.
// NOTE(review): both fields are transient yet initialized inline — after
// Java deserialization they would be null; presumably the writer is never
// Java-serialized or is rebuilt elsewhere — verify.
protected transient Map<String, StreamWriter> streamMap = new ConcurrentHashMap<>();

public AsyncBigQuerySinkWriter(ExecutorProvider executorProvider, AsyncClientProvider clientProvider, ElementConverter<Rows<A>, StreamRequest> elementConverter, Sink.InitContext context, AsyncSinkWriterConfiguration configuration, Collection<BufferedRequestState<StreamRequest>> bufferedRequestStates) {
Expand Down Expand Up @@ -94,11 +95,7 @@ protected final void recreateStreamWriter(String traceId, String streamName, Str
streamMap.replaceAll((key, writer) -> {
var newWriter = writer;
if (writer.getWriterId().equals(writerId)) {
try {
writer.close();
} catch (Exception e) {
logger.trace("Trace-id {} Could not close writer for {}", traceId, streamName);
}
writersToClose.add(writer);
newWriter = this.clientProvider.getWriter(streamName, table);
registerInflightMetric(newWriter);
}
Expand Down Expand Up @@ -229,6 +226,20 @@ protected long getSizeInBytes(StreamRequest StreamRequest) {
return StreamRequest.getData().getSerializedSize();
}

@Override
public void flush(boolean flush) throws InterruptedException {
    // Let the base class flush buffered requests first; only afterwards is it
    // safe to close writers that were swapped out by recreateStreamWriter.
    super.flush(flush);

    // Drain the retirement queue: poll() returns null when empty, so a single
    // call per iteration replaces the original peek()+poll() pair.
    StreamWriter writer;
    while ((writer = writersToClose.poll()) != null) {
        try {
            writer.close();
        } catch (Exception e) {
            if (e instanceof InterruptedException) {
                // Best-effort close must not swallow interruption — restore
                // the flag so callers awaiting this flush can observe it.
                Thread.currentThread().interrupt();
            }
            // Closing is best-effort; log and keep draining the queue.
            logger.error("Could not close writer", e);
        }
    }
}

@Override
public void close() {
logger.info("Closing BigQuery write stream");
Expand Down

0 comments on commit d762d9f

Please sign in to comment.