[improvement] Make 2PC commit or abort retry count and interval configurable.
CodeCooker17 committed Sep 7, 2023
1 parent 11f4976 commit d7ffc97
Showing 3 changed files with 22 additions and 6 deletions.
ConfigurationOptions.java
@@ -106,4 +106,16 @@ public interface ConfigurationOptions {
     String DORIS_SINK_STREAMING_PASSTHROUGH = "doris.sink.streaming.passthrough";
     boolean DORIS_SINK_STREAMING_PASSTHROUGH_DEFAULT = false;
 
+    /**
+     * Interval in milliseconds between transaction commit or abort retry attempts.
+     */
+    String DORIS_SINK_TXN_INTERVAL_MS = "doris.sink.txn.interval.ms";
+    int DORIS_SINK_TXN_INTERVAL_MS_DEFAULT = 1000;
+
+    /**
+     * Number of retry attempts for transaction commit or abort.
+     */
+    String DORIS_SINK_TXN_RETRIES = "doris.sink.txn.retries";
+    int DORIS_SINK_TXN_RETRIES_DEFAULT = 3;
+
 }
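For reference, the new keys are ordinary sink options and can be set next to the existing Doris sink configuration. A minimal usage sketch in Scala follows; the table identifier, FE address, and credentials are placeholders, and the 2PC flag key is assumed from DORIS_SINK_ENABLE_2PC to be "doris.sink.enable.2pc" — only the two txn keys come from this commit:

// Hypothetical batch write using the new retry options.
// df: org.apache.spark.sql.DataFrame assumed to be defined elsewhere.
df.write
  .format("doris")
  .option("doris.table.identifier", "example_db.example_table") // placeholder
  .option("doris.fenodes", "fe_host:8030")                      // placeholder
  .option("user", "root")                                       // placeholder
  .option("password", "")                                       // placeholder
  .option("doris.sink.enable.2pc", "true")                      // 2PC must be on for commit/abort handling
  .option("doris.sink.txn.interval.ms", "2000")                 // new: wait 2 s between commit/abort retries
  .option("doris.sink.txn.retries", "5")                        // new: retry commit/abort up to 5 times
  .save()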
DorisTransactionListener.scala
@@ -28,7 +28,7 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.util.{Failure, Success}
 
-class DorisTransactionListener(preCommittedTxnAcc: CollectionAccumulator[Int], dorisStreamLoad: DorisStreamLoad)
+class DorisTransactionListener(preCommittedTxnAcc: CollectionAccumulator[Int], dorisStreamLoad: DorisStreamLoad, sinkTxnIntervalMs: Int, sinkTxnRetries: Int)
   extends SparkListener {
 
   val logger: Logger = LoggerFactory.getLogger(classOf[DorisTransactionListener])
@@ -45,7 +45,7 @@ class DorisTransactionListener
     }
     logger.info("job run succeed, start committing transactions")
     txnIds.foreach(txnId =>
-      Utils.retry(3, Duration.ofSeconds(1), logger) {
+      Utils.retry(sinkTxnRetries, Duration.ofMillis(sinkTxnIntervalMs), logger) {
        dorisStreamLoad.commit(txnId)
      } match {
        case Success(_) =>
@@ -66,7 +66,7 @@ class DorisTransactionListener
     }
     logger.info("job run failed, start aborting transactions")
     txnIds.foreach(txnId =>
-      Utils.retry(3, Duration.ofSeconds(1), logger) {
+      Utils.retry(sinkTxnRetries, Duration.ofMillis(sinkTxnIntervalMs), logger) {
        dorisStreamLoad.abort(txnId)
      } match {
        case Success(_) =>
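Utils.retry is the connector's existing helper; this commit only changes the values fed into it, replacing the hard-coded 3 attempts and 1-second pause. As a rough illustration of the semantics the two new parameters control (retry count and sleep between attempts), a standalone sketch — not the connector's actual implementation, which may differ in details such as exception filtering:

// Illustrative only: retries a block up to `retries` more times on failure,
// sleeping `interval` between attempts, and returns the last result as a Try.
import java.time.Duration
import org.slf4j.Logger
import scala.util.{Failure, Success, Try}

def retrySketch[T](retries: Int, interval: Duration, logger: Logger)(block: => T): Try[T] =
  Try(block) match {
    case s @ Success(_) => s
    case Failure(e) if retries > 0 =>
      logger.warn(s"attempt failed, $retries retries left, sleeping ${interval.toMillis} ms", e)
      Thread.sleep(interval.toMillis)
      retrySketch(retries - 1, interval, logger)(block)
    case f @ Failure(_) => f
  }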
DorisWriter.scala
@@ -51,6 +51,10 @@ class DorisWriter(settings: SparkSettings) extends Serializable {
 
   private val enable2PC: Boolean = settings.getBooleanProperty(ConfigurationOptions.DORIS_SINK_ENABLE_2PC,
     ConfigurationOptions.DORIS_SINK_ENABLE_2PC_DEFAULT);
+  private val sinkTxnIntervalMs: Int = settings.getIntegerProperty(ConfigurationOptions.DORIS_SINK_TXN_INTERVAL_MS,
+    ConfigurationOptions.DORIS_SINK_TXN_INTERVAL_MS_DEFAULT)
+  private val sinkTxnRetries: Integer = settings.getIntegerProperty(ConfigurationOptions.DORIS_SINK_TXN_RETRIES,
+    ConfigurationOptions.DORIS_SINK_TXN_RETRIES_DEFAULT)
 
   private val dorisStreamLoader: DorisStreamLoad = CachedDorisStreamLoadClient.getOrCreate(settings)
 
@@ -59,7 +63,7 @@ class DorisWriter(settings: SparkSettings) extends Serializable {
     val sc = dataFrame.sqlContext.sparkContext
     val preCommittedTxnAcc = sc.collectionAccumulator[Int]("preCommittedTxnAcc")
     if (enable2PC) {
-      sc.addSparkListener(new DorisTransactionListener(preCommittedTxnAcc, dorisStreamLoader))
+      sc.addSparkListener(new DorisTransactionListener(preCommittedTxnAcc, dorisStreamLoader, sinkTxnIntervalMs, sinkTxnRetries))
     }
 
     var resultRdd = dataFrame.rdd
@@ -98,7 +102,7 @@ class DorisWriter(settings: SparkSettings) extends Serializable {
     val sc = dataFrame.sqlContext.sparkContext
     val preCommittedTxnAcc = sc.collectionAccumulator[Int]("preCommittedTxnAcc")
     if (enable2PC) {
-      sc.addSparkListener(new DorisTransactionListener(preCommittedTxnAcc, dorisStreamLoader))
+      sc.addSparkListener(new DorisTransactionListener(preCommittedTxnAcc, dorisStreamLoader, sinkTxnIntervalMs, sinkTxnRetries))
     }
 
     var resultRdd = dataFrame.queryExecution.toRdd
@@ -153,7 +157,7 @@ class DorisWriter(settings: SparkSettings) extends Serializable {
     }
     val abortFailedTxnIds = mutable.Buffer[Int]()
     acc.value.asScala.foreach(txnId => {
-      Utils.retry[Unit, Exception](3, Duration.ofSeconds(1), logger) {
+      Utils.retry[Unit, Exception](sinkTxnRetries, Duration.ofMillis(sinkTxnIntervalMs), logger) {
        dorisStreamLoader.abort(txnId)
      } match {
        case Success(_) =>
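One practical consequence of making these values configurable: the worst-case time spent waiting between attempts for a single transaction is roughly the product of the two settings, excluding the time each commit/abort call itself takes. A small sketch, assuming one sleep of the configured interval after each failed attempt:

// Rough upper bound on sleep time spent retrying one commit/abort
// (RPC time not included; exact behavior depends on Utils.retry).
def maxRetryBackoffMs(sinkTxnRetries: Int, sinkTxnIntervalMs: Int): Long =
  sinkTxnRetries.toLong * sinkTxnIntervalMs

maxRetryBackoffMs(3, 1000) // defaults from this commit: ~3 seconds
maxRetryBackoffMs(5, 2000) // the example values shown earlier: ~10 seconds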
