Ver 0.1 : improve performance with buffer dispatching #2

Open
wants to merge 32 commits into base: ver-0.1
Commits (32)
079ab97
Buffer multiple raw insertion requests [DRAFT]
smarchint May 24, 2018
1d9b60c
resolve compilation errors
smarchint May 24, 2018
0751e94
remove unnecessary code
smarchint May 25, 2018
e0063a4
handle SIGINT/TERM and refactor
smarchint May 25, 2018
a9bf88d
fix timer dispatch
smarchint May 27, 2018
4b35f61
minor changes [remove comments]
smarchint May 27, 2018
8ac7fb2
minor changes
smarchint May 28, 2018
03242a9
fix NullPointerException at Ctrl+C
smarchint May 28, 2018
ec2811d
refactor
smarchint May 29, 2018
e69597e
comment debug code
smarchint May 29, 2018
d0f82c1
remove test code
smarchint May 30, 2018
a350d69
fix : close rabbitmq channel on shutdown
smarchint May 30, 2018
55985aa
remove unwanted code
smarchint May 30, 2018
5ff0680
remove comments
smarchint May 30, 2018
39df85b
fix : remnant messages at client termination
smarchint May 31, 2018
f35d456
decouple msg recv and processor
smarchint Jun 6, 2018
2ccffe6
add bounded queue
smarchint Jun 6, 2018
22df065
remove comments
smarchint Jun 6, 2018
6a9dc26
minor change
smarchint Jun 7, 2018
6091fc1
add try catch in dispatch event
smarchint Jun 7, 2018
9535152
catch exceptions
smarchint Jun 13, 2018
dca185e
minor change
smarchint Jun 13, 2018
d0f4046
fix batch insertions with partial failures and refactor
smarchint Jun 13, 2018
5d64ab3
add info logging on shutdown
smarchint Jun 13, 2018
2411773
minor changes
smarchint Jun 14, 2018
3359436
add eventbus for scaling bigquery dispatcher and make rabbit consumer…
smarchint Jun 19, 2018
48e845d
refactor and remove unwanted code
smarchint Jun 19, 2018
6816ead
add sub-optimal prefetch count with manual ack [multiple] [DRAFT]
smarchint Jun 20, 2018
bd615cf
remove comments and add debug mode for the app
smarchint Jun 20, 2018
6e1b036
remove comment
smarchint Jun 20, 2018
0db40be
remove manual termination of connection
smarchint Jun 20, 2018
3c5d36c
make consumer blocking with QOS
smarchint Jun 21, 2018
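Taken together, these commits replace per-message BigQuery inserts with a buffered path: raw insert requests from RabbitMQ are accumulated per dataset/table inside BatchInsertionControl and flushed either when a table's buffer holds a full batch or when a periodic "dispatch.buffer.all" event fires. A minimal bootstrap sketch of how the new pieces could be wired together is below; the class name, thread-pool sizes, batch/capacity values, and timer period are illustrative assumptions, not code from this PR.

// Hypothetical wiring sketch (not part of this PR): batch size, capacity factor,
// executor sizes and the 30s flush period are assumed values for illustration.
package com.coverfox.bitserv;

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import com.google.common.eventbus.AsyncEventBus;

public class BitservBootstrapSketch {
  public static void main(String[] args) {
    // singleton buffer control: batches of 500 requests, table buffers bounded at 4 * 500
    BatchInsertionControl control = BatchInsertionControl.getInstance(500, 4);
    AsyncEventBus eventbus = new AsyncEventBus(Executors.newFixedThreadPool(2));
    ActionHandler.initStaticDependecies(control, eventbus);

    // periodic flush so partially filled table buffers do not sit forever
    ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor();
    timer.scheduleAtFixedRate(
        () -> ActionHandler.dispatchEvent("dispatch.buffer.all", "TIMER"),
        30, 30, TimeUnit.SECONDS);

    // a RabbitMQ delivery callback then only needs to do:
    //   new ActionHandler(rawJsonMessage).handle();
  }
}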
5 changes: 5 additions & 0 deletions pom.xml
@@ -47,6 +47,11 @@
</build>

<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>25.1-jre</version>
</dependency>
<dependency>
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-bigquery</artifactId>
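The only new dependency is Guava 25.1-jre, pulled in for com.google.common.eventbus.AsyncEventBus, which ActionHandler holds as an optional asynchronous flush path (the eventbus.post(new BufferDispatchEvent()) call further down is still commented out). A hedged sketch of what the subscriber side could look like is below; BufferDispatchEvent and the subscriber class are assumed names whose definitions are not part of this diff.

// Hedged subscriber sketch (assumed classes, not in this PR's diff): an AsyncEventBus
// delivers BufferDispatchEvent on its own executor and delegates to the same code path
// ActionHandler already uses synchronously.
package com.coverfox.bitserv;

import java.util.concurrent.Executors;

import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.Subscribe;

class BufferDispatchEvent {}  // marker event, referenced only in a commented-out post() below

class BufferDispatchSubscriber {
  @Subscribe
  public void onBufferDispatch(BufferDispatchEvent event) {
    ActionHandler.dispatchEvent("dispatch.buffer.batch", "EVENTBUS");
  }
}

class EventBusWiringSketch {
  static AsyncEventBus newBus() {
    AsyncEventBus bus = new AsyncEventBus(Executors.newFixedThreadPool(2));
    bus.register(new BufferDispatchSubscriber());
    return bus;
  }
}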
38 changes: 36 additions & 2 deletions src/main/java/com/coverfox/bitserv/ActionHandler.java
@@ -5,14 +5,39 @@
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import com.google.common.eventbus.AsyncEventBus;

public class ActionHandler {
private static final Logger logger = LogManager.getLogger(ActionHandler.class);
private JSONObject message;
private static BatchInsertionControl insertionControl; // remove this from action_handler
private static AsyncEventBus eventbus;

public ActionHandler(String message) {
this.message = new JSONObject(message);
}
public static void initStaticDependecies(BatchInsertionControl controlInstance, AsyncEventBus e){
insertionControl = controlInstance;
eventbus = e;
}

public static void dispatchEvent(String event,String source){
switch(event) {
case "dispatch.buffer.all":
if(insertionControl != null ){
BigQueryOps.dispatchBatchInsertionsBasedOnSize(insertionControl);
}
break;
case "dispatch.buffer.batch":
if(insertionControl != null){
BigQueryOps.dispatchSingleBatchInsertion(insertionControl);
}
break;
default:
logger.error("iEvent: [" + event + "] not found");
break;
}
}

public void handle() {
String target = this.message.getString("target");
@@ -44,7 +69,16 @@ public void handle() {
new BigQueryOps(this.message.getJSONObject("data")).updateTable();
break;
case "insert":
new BigQueryOps(this.message.getJSONObject("data")).insertAll();
try{
Integer bufferIndicator = insertionControl.buffer(this.message.getJSONObject("data"));
MetricAnalyser.buffering();
if(insertionControl.dispatchReady(bufferIndicator)) {
ActionHandler.dispatchEvent("dispatch.buffer.batch","DISPATCHER"); // synchronous execution
// eventbus.post(new BufferDispatchEvent()); // asynchronous execution
}
}catch(Exception e){
logger.error("Dispatch Error : " + e.toString());
}
break;
case "delete":
new BigQueryOps(this.message.getJSONObject("data")).deleteTable();
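With the insert branch above, the consumer thread no longer issues a BigQuery call per message; it only parses, buffers, and occasionally triggers a synchronous batch dispatch. A hedged sketch of the RabbitMQ consumer side feeding this handler is below; the host, queue name, prefetch value, and manual-ack placement are assumptions for illustration (the PR's own consumer changes live in a file not shown in this excerpt).

// Hedged consumer sketch using the standard com.rabbitmq.client API; broker host,
// queue name, prefetch count and ack placement are assumptions, not this PR's code.
package com.coverfox.bitserv;

import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.DefaultConsumer;
import com.rabbitmq.client.Envelope;

public class ConsumerWiringSketch {
  public static void main(String[] args) throws Exception {
    ConnectionFactory factory = new ConnectionFactory();
    factory.setHost("localhost");                      // assumed broker location
    Connection connection = factory.newConnection();
    Channel channel = connection.createChannel();

    channel.basicQos(200);                             // bounded prefetch, value assumed
    channel.basicConsume("bitserv.insert", false,      // assumed queue name, manual ack
        new DefaultConsumer(channel) {
          @Override
          public void handleDelivery(String consumerTag, Envelope envelope,
              AMQP.BasicProperties properties, byte[] body) throws java.io.IOException {
            // parse + buffer; a full batch triggers dispatch.buffer.batch synchronously
            new ActionHandler(new String(body, "UTF-8")).handle();
            channel.basicAck(envelope.getDeliveryTag(), false);
          }
        });
  }
}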
109 changes: 109 additions & 0 deletions src/main/java/com/coverfox/bitserv/BatchInsertionControl.java
@@ -0,0 +1,109 @@
package com.coverfox.bitserv;

import org.json.JSONObject;
import java.util.HashMap;
import java.util.Map;
import java.util.ArrayList;
import java.util.LinkedList;

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;


class Buffer{
// {dataset : {table : [ requestString ] } }
private HashMap<String, HashMap<String,BlockingQueue<JSONObject>>> buffer;
public Integer totalEventsDispatched = 0;
private Integer capacity; // table level bound

public Buffer(Integer capacity){
this.buffer = new HashMap<>();
this.capacity = capacity;
}
public String toString(){
HashMap<String,Integer> temp = new HashMap<String,Integer>();
for(String dataset : this.buffer.keySet()){
for(String table : this.buffer.get(dataset).keySet() ){
temp.put(table,this.buffer.get(dataset).get(table).size());
}
}
return temp.toString();
}
public Integer add(String dataset, String table, JSONObject request){
Map<String, BlockingQueue<JSONObject>> datasetBuffer;
if(!this.buffer.containsKey(dataset)){
this.buffer.put(dataset,new HashMap<String,BlockingQueue<JSONObject>>());
}
datasetBuffer = this.buffer.get(dataset);
if(!datasetBuffer.containsKey(table)){
datasetBuffer.put(table,new ArrayBlockingQueue<JSONObject>(this.capacity));
}
try {
datasetBuffer.get(table).put(request);
}catch(InterruptedException e){
e.printStackTrace();
}
this.totalEventsDispatched += 1;
return datasetBuffer.get(table).size();
}
public HashMap<String, HashMap<String,BlockingQueue<JSONObject>>> getCachedRequests(){
return this.buffer;
}
// public boolean dispatchReady(Integer bufferSize){
// for (String dataset : this.buffer.keySet()) {
// for (String table : this.buffer.get(dataset).keySet()){
// if(this.buffer.get(dataset).get(table).size() == this.capacity){
// return true;
// }
// }
// }
// return false;
// }
}

// singleton
public class BatchInsertionControl{
private Integer bufferSize;// in messages
private Integer batchSize;// in messages
private Buffer buffer;
private static BatchInsertionControl instance = null;
public static BatchInsertionControl getInstance(Integer batchSize, Integer capacityFactor){
if (instance == null) {
instance = new BatchInsertionControl(batchSize,capacityFactor);
}
return instance;
}
private BatchInsertionControl(Integer batchSize, Integer capacityFactor){
this.bufferSize = capacityFactor * batchSize;
this.buffer = new Buffer(this.bufferSize);
this.batchSize = batchSize;
}
public Integer getBufferSize(){
return this.bufferSize;
}
public Integer getBatchSize(){
return this.batchSize;
}
public Buffer getBuffer(){
return this.buffer;
}
public String toString(){
return this.buffer.toString();
}
public boolean dispatchReady(Integer bufferIndicator){
if(bufferIndicator > 0 && bufferIndicator % this.batchSize == 0){
return true;
}
return false;
}
public Integer buffer(JSONObject data){
String dataset = data.getJSONObject("schema").getString("dataset");
String table = data.getJSONObject("schema").getString("name");
return this.buffer.add(dataset, table, data);
}
public HashMap<String, HashMap<String,BlockingQueue<JSONObject>>> getBufferedRequests(){
return this.buffer.getCachedRequests();
}
}
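In short, BatchInsertionControl keeps one bounded ArrayBlockingQueue per dataset/table pair: buffer() returns the depth of the queue it just appended to, and dispatchReady() is true whenever that depth is a non-zero multiple of the batch size. A small hedged usage sketch with assumed sizes:

// Hedged usage sketch of the contract above; batch size, capacity factor, and the
// dataset/table names are assumed values chosen to keep the output short.
package com.coverfox.bitserv;

import org.json.JSONObject;

class BatchControlUsageSketch {
  public static void main(String[] args) {
    // batchSize = 3, capacityFactor = 4 => each table queue is bounded at 12 requests
    BatchInsertionControl control = BatchInsertionControl.getInstance(3, 4);

    JSONObject request = new JSONObject()
        .put("schema", new JSONObject()
            .put("dataset", "analytics")
            .put("name", "visits")
            .put("rows", new org.json.JSONArray()));

    for (int i = 1; i <= 3; i++) {
      Integer depth = control.buffer(request);    // queue depth after this put
      System.out.println("depth=" + depth + " ready=" + control.dispatchReady(depth));
      // prints ready=false, false, true: the third request completes a batch
    }
  }
}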


133 changes: 122 additions & 11 deletions src/main/java/com/coverfox/bitserv/BigQueryOps.java
@@ -5,6 +5,9 @@
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -28,7 +31,6 @@
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;


public class BigQueryOps {

private static final Logger logger = LogManager.getLogger(BigQueryOps.class);
@@ -105,31 +107,140 @@ public Table createTable() {
}
return table;
}

/*
* For each table, raw requests (mostly one) are converted to BigQuery request objects
*/
public InsertAllRequest prepareBigQueryInsertRequest(JSONObject data) {
JSONObject jTableSchema = data.getJSONObject("schema");
String datasetName = jTableSchema.getString("dataset");
String tableName = jTableSchema.getString("name");
TableId tableId = TableId.of(datasetName, tableName);

Iterator<?> jRows = jTableSchema.getJSONArray("rows").iterator();
JSONObject jRow = null;
String insertId = null;
InsertAllRequest.Builder rowBuilder = InsertAllRequest.newBuilder(tableId);

while (jRows.hasNext()) {
jRow = (JSONObject) jRows.next();
insertId = jRow.getString("insertId");
Map<String, Object> row = jsonToMap(jRow.getJSONObject("json"));
rowBuilder.addRow(insertId, row);
}

return rowBuilder.build();
}
/*
* For each table, accumulated raw insert requests are converted to BigQuery request objects
*/
public static InsertAllRequest prepareBigQueryInsertRequestFromBuffer(ArrayList<JSONObject> bufferedRequests) {
JSONObject jTableSchema = bufferedRequests.get(0).getJSONObject("schema");
String datasetName = jTableSchema.getString("dataset");
String tableName = jTableSchema.getString("name");
TableId tableId = TableId.of(datasetName, tableName);
InsertAllRequest.Builder rowBuilder = InsertAllRequest.newBuilder(tableId);
JSONObject jRow = null;
String insertId = null;
for(JSONObject bufferedRequest : bufferedRequests){
Iterator<?> jRows = bufferedRequest.getJSONObject("schema").getJSONArray("rows").iterator();;
while (jRows.hasNext()) {
jRow = (JSONObject) jRows.next();
insertId = jRow.getString("insertId");
Map<String, Object> row = jsonToMap(jRow.getJSONObject("json"));
rowBuilder.addRow(insertId, row);
}
}
rowBuilder.setSkipInvalidRows(true);
return rowBuilder.build();
}
public static ArrayList<InsertAllResponse> makeInsertApiCall(InsertAllRequest request,ArrayList<InsertAllResponse> responses){
try{
InsertAllResponse response = bigquery.insertAll(request);
if (response.hasErrors()) {
logger.error("Error inserting data: " + response);
MetricAnalyser.BigqueryError();
}else {
logger.info("Inserted : " + response);
MetricAnalyser.networkCall();
}
responses.add(response);
}catch(BigQueryException e){
MetricAnalyser.BigqueryFailure();
logger.error("[INSERT_TABLE_ERROR]: " + e);
try{
InsertAllResponse response = bigquery.insertAll(request);
if (response.hasErrors()) {
logger.error("Error inserting data: " + response);
MetricAnalyser.BigqueryError(2);
}else {
logger.info("Inserted : " + response);
MetricAnalyser.networkCall(2);
}
responses.add(response);
}catch(BigQueryException ex){
logger.error("[INSERT_TABLE_ERROR]: " + ex);
MetricAnalyser.BigqueryFailure(2);
}
}
return responses;
}
public static ArrayList<InsertAllResponse> dispatchBatchInsertionsBasedOnSize(BatchInsertionControl insertionControl){
HashMap<String, HashMap<String,BlockingQueue<JSONObject>>> bufferedRequests = insertionControl.getBufferedRequests();
ArrayList<InsertAllResponse> responses = new ArrayList<>();
for (String dataset : bufferedRequests.keySet()) {
for (String table : bufferedRequests.get(dataset).keySet()){
Integer readyBufferedRequests = bufferedRequests.get(dataset).get(table).size();
Integer tableLevelBatchSize = insertionControl.getBatchSize();
Integer batches = readyBufferedRequests/tableLevelBatchSize;
for (int i=0; i< batches + 1 ; i++ ) {
ArrayList<JSONObject> rawInsertRequests = multipop(bufferedRequests.get(dataset).get(table),tableLevelBatchSize);
if(!rawInsertRequests.isEmpty()){
MetricAnalyser.dispatchCall(rawInsertRequests.size(), dataset+'|'+table );
InsertAllRequest bqInsertRequests = prepareBigQueryInsertRequestFromBuffer(rawInsertRequests);
responses = makeInsertApiCall(bqInsertRequests,responses);
}
}
}
}
return responses;
}
public static ArrayList<InsertAllResponse> dispatchSingleBatchInsertion(BatchInsertionControl insertionControl){
HashMap<String, HashMap<String,BlockingQueue<JSONObject>>> bufferedRequests = insertionControl.getBufferedRequests();
ArrayList<InsertAllResponse> responses = new ArrayList<>();
for (String dataset : bufferedRequests.keySet()) {
for (String table : bufferedRequests.get(dataset).keySet()){
Integer tableLevelBatchSize = insertionControl.getBatchSize();
if(bufferedRequests.get(dataset).get(table).size() < tableLevelBatchSize) return null;
ArrayList<JSONObject> rawInsertRequests = multipop(bufferedRequests.get(dataset).get(table),tableLevelBatchSize);//bufferedRequests.get(dataset).get(table);
if(!rawInsertRequests.isEmpty()){
MetricAnalyser.dispatchCall(rawInsertRequests.size(), dataset+'|'+table);
InsertAllRequest bqInsertRequests = prepareBigQueryInsertRequestFromBuffer(rawInsertRequests);
responses = makeInsertApiCall(bqInsertRequests,responses);
}
}
}
return responses;
}
private static ArrayList<JSONObject> multipop(BlockingQueue<JSONObject> requestList, Integer size){
BlockingQueue<JSONObject> q = requestList;
ArrayList<JSONObject> poppedRequests = new ArrayList<>();
Integer count = 0;
while(count < size && q.peek() != null){
try{
poppedRequests.add(q.take());
}catch(InterruptedException e){
e.printStackTrace();
}
count++;
}
return poppedRequests;
}
/*
* Single Api call for each raw event from rabbitmq
*/
public InsertAllResponse insertNoBffer() {
InsertAllRequest request = this.prepareBigQueryInsertRequest(this.data);
InsertAllResponse response = bigquery.insertAll(request);
if (response.hasErrors()) {
logger.error("Error inserting data: " + response);
}else {
logger.info("Inserted : " + response);
}
return response;
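dispatchBatchInsertionsBasedOnSize drains each table's queue in multipop() chunks of batchSize, builds one InsertAllRequest per chunk with skipInvalidRows enabled, and retries a failed insertAll call once before giving up. The loop bound of batches + 1 exists to pick up the partial remainder; a hedged standalone sketch of that arithmetic (no BigQuery client involved) is below, with an assumed batch size and queue depth.

// Standalone sketch of the chunking arithmetic used above: with batchSize = 500 and
// 1240 buffered requests, (1240 / 500) + 1 = 3 iterations drain chunks of 500, 500, 240.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

class MultipopArithmeticSketch {
  public static void main(String[] args) throws InterruptedException {
    int batchSize = 500;                                   // assumed batch size
    BlockingQueue<Integer> tableQueue = new ArrayBlockingQueue<>(2000);
    for (int i = 0; i < 1240; i++) tableQueue.put(i);      // assumed queue depth

    int batches = tableQueue.size() / batchSize;           // 2 full batches
    for (int b = 0; b < batches + 1; b++) {                // +1 picks up the remainder
      List<Integer> chunk = new ArrayList<>();
      while (chunk.size() < batchSize && tableQueue.peek() != null) {
        chunk.add(tableQueue.take());                      // same drain loop as multipop()
      }
      if (!chunk.isEmpty()) {
        System.out.println("would insert " + chunk.size() + " rows"); // 500, 500, 240
      }
    }
  }
}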