implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * Returns the map of metadata keys and their corresponding data types that can be consumed by
+ * HBase sink for writing.
+ *
+ * Note: every supported writable metadata key must be registered here manually.
+ */
+ public static Map<String, DataType> list() {
+ Map<String, DataType> metadataMap = new HashMap<>();
+ metadataMap.put(TimestampMetadata.KEY, TimestampMetadata.DATA_TYPE);
+ return Collections.unmodifiableMap(metadataMap);
+ }
+
+ public abstract T read(RowData row);
+
+ /** Timestamp metadata for HBase. */
+ public static class TimestampMetadata extends WritableMetadata<Long> {
+
+ public static final String KEY = "timestamp";
+ public static final DataType DATA_TYPE =
+ DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).nullable();
+
+ private final int pos;
+
+ public TimestampMetadata(int pos) {
+ this.pos = pos;
+ }
+
+ @Override
+ public Long read(RowData row) {
+ if (pos < 0) {
+ return HConstants.LATEST_TIMESTAMP;
+ }
+ if (row.isNullAt(pos)) {
+ throw new IllegalArgumentException(
+ String.format("Writable metadata '%s' can not accept null value", KEY));
+ }
+ return row.getTimestamp(pos, 3).getMillisecond();
+ }
+
+ public static TimestampMetadata of(List<String> metadataKeys, DataType physicalDataType) {
+ int pos = metadataKeys.indexOf(TimestampMetadata.KEY);
+ if (pos < 0) {
+ return new TimestampMetadata(-1);
+ }
+ return new TimestampMetadata(
+ pos + physicalDataType.getLogicalType().getChildren().size());
+ }
+ }
+}
diff --git a/flink-connector-hbase-base/src/main/java/org/apache/flink/connector/hbase/util/HBaseSerde.java b/flink-connector-hbase-base/src/main/java/org/apache/flink/connector/hbase/util/HBaseSerde.java
index 458b25d2..d381033a 100644
--- a/flink-connector-hbase-base/src/main/java/org/apache/flink/connector/hbase/util/HBaseSerde.java
+++ b/flink-connector-hbase-base/src/main/java/org/apache/flink/connector/hbase/util/HBaseSerde.java
@@ -135,7 +135,7 @@ public HBaseSerde(
*
* @return The appropriate instance of Put for this use case.
*/
- public @Nullable Put createPutMutation(RowData row) {
+ public @Nullable Put createPutMutation(RowData row, long timestamp) {
checkArgument(keyEncoder != null, "row key is not set.");
byte[] rowkey = keyEncoder.encode(row, rowkeyIndex);
if (rowkey.length == 0) {
@@ -143,7 +143,7 @@ public HBaseSerde(
return null;
}
// upsert
- Put put = new Put(rowkey);
+ Put put = new Put(rowkey, timestamp);
for (int i = 0; i < fieldLength; i++) {
if (i != rowkeyIndex) {
int f = i > rowkeyIndex ? i - 1 : i;
@@ -172,7 +172,7 @@ public HBaseSerde(
*
* @return The appropriate instance of Delete for this use case.
*/
- public @Nullable Delete createDeleteMutation(RowData row) {
+ public @Nullable Delete createDeleteMutation(RowData row, long timestamp) {
checkArgument(keyEncoder != null, "row key is not set.");
byte[] rowkey = keyEncoder.encode(row, rowkeyIndex);
if (rowkey.length == 0) {
@@ -180,7 +180,7 @@ public HBaseSerde(
return null;
}
// delete
- Delete delete = new Delete(rowkey);
+ Delete delete = new Delete(rowkey, timestamp);
for (int i = 0; i < fieldLength; i++) {
if (i != rowkeyIndex) {
int f = i > rowkeyIndex ? i - 1 : i;
diff --git a/flink-connector-hbase-base/src/test/java/org/apache/flink/connector/hbase/util/HBaseSerdeTest.java b/flink-connector-hbase-base/src/test/java/org/apache/flink/connector/hbase/util/HBaseSerdeTest.java
index e370809d..85de7c8b 100644
--- a/flink-connector-hbase-base/src/test/java/org/apache/flink/connector/hbase/util/HBaseSerdeTest.java
+++ b/flink-connector-hbase-base/src/test/java/org/apache/flink/connector/hbase/util/HBaseSerdeTest.java
@@ -25,6 +25,7 @@
import org.apache.flink.table.types.DataType;
import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
@@ -105,7 +106,7 @@ void convertToReusedRowTest() {
@Test
public void writeIgnoreNullValueTest() {
HBaseSerde serde = createHBaseSerde(false);
- Put m1 = serde.createPutMutation(prepareRowData());
+ Put m1 = serde.createPutMutation(prepareRowData(), HConstants.LATEST_TIMESTAMP);
assert m1 != null;
assertThat(m1.getRow()).isNotEmpty();
assertThat(m1.get(FAMILY1.getBytes(), F1COL1.getBytes())).isNotEmpty();
@@ -116,7 +117,9 @@ public void writeIgnoreNullValueTest() {
assertThat(m1.get(FAMILY3.getBytes(), F3COL3.getBytes())).isNotEmpty();
HBaseSerde writeIgnoreNullValueSerde = createHBaseSerde(true);
- Put m2 = writeIgnoreNullValueSerde.createPutMutation(prepareRowData());
+ Put m2 =
+ writeIgnoreNullValueSerde.createPutMutation(
+ prepareRowData(), HConstants.LATEST_TIMESTAMP);
assert m2 != null;
assertThat(m2.getRow()).isNotEmpty();
assertThat(m2.get(FAMILY1.getBytes(), F1COL1.getBytes())).isEmpty();