Commit f6d5fa7

scala lint
1 parent 99e7497 commit f6d5fa7

2 files changed: +25 −46 lines changed

sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientRetriesSuite.scala

Lines changed: 12 additions & 29 deletions
@@ -56,8 +56,7 @@ class SparkConnectClientRetriesSuite
   private def createTestExceptionWithDetails(
       msg: String,
       code: Status.Code = Status.Code.INTERNAL,
-      retryDelay: FiniteDuration = FiniteDuration(0, "s")
-  ): StatusRuntimeException = {
+      retryDelay: FiniteDuration = FiniteDuration(0, "s")): StatusRuntimeException = {
     // In grpc-java, RetryDelay should be specified as seconds: Long + nanos: Int
     val seconds = retryDelay.toSeconds
     val nanos = (retryDelay - FiniteDuration(seconds, "s")).toNanos.toInt
@@ -83,8 +82,7 @@ class SparkConnectClientRetriesSuite
   private def assertLongSequencesAlmostEqual(
       first: Seq[Long],
       second: Seq[Long],
-      delta: Long
-  ): Unit = {
+      delta: Long): Unit = {
     assert(first.length == second.length, "Lists have different lengths.")
     for ((a, b) <- first.zip(second)) {
       assert(math.abs(a - b) <= delta, s"Elements $a and $b differ by more than $delta.")
@@ -199,10 +197,8 @@ class SparkConnectClientRetriesSuite
   }
   test("DefaultPolicy retries exceptions with RetryInfo") {
     // Error contains RetryInfo with retry_delay set to 0
-    val dummyFn = new DummyFn(
-      createTestExceptionWithDetails(msg = "Some error message"),
-      numFails = 100
-    )
+    val dummyFn =
+      new DummyFn(createTestExceptionWithDetails(msg = "Some error message"), numFails = 100)
     val retryPolicies = RetryPolicy.defaultPolicies()
     val retryHandler = new GrpcRetryHandler(retryPolicies, sleep = _ => {})
     assertThrows[RetriesExceeded] {
@@ -218,12 +214,8 @@ class SparkConnectClientRetriesSuite
     val st = new SleepTimeTracker()
     val retryDelay = FiniteDuration(5, "min")
     val dummyFn = new DummyFn(
-      createTestExceptionWithDetails(
-        msg = "Some error message",
-        retryDelay = retryDelay
-      ),
-      numFails = 100
-    )
+      createTestExceptionWithDetails(msg = "Some error message", retryDelay = retryDelay),
+      numFails = 100)
     val retryPolicies = RetryPolicy.defaultPolicies()
     val retryHandler = new GrpcRetryHandler(retryPolicies, sleep = st.sleep)
 
@@ -243,12 +235,8 @@ class SparkConnectClientRetriesSuite
     val st = new SleepTimeTracker()
     val retryDelay = FiniteDuration(5, "d")
     val dummyFn = new DummyFn(
-      createTestExceptionWithDetails(
-        msg = "Some error message",
-        retryDelay = retryDelay
-      ),
-      numFails = 100
-    )
+      createTestExceptionWithDetails(msg = "Some error message", retryDelay = retryDelay),
+      numFails = 100)
     val retryPolicies = RetryPolicy.defaultPolicies()
     val retryHandler = new GrpcRetryHandler(retryPolicies, sleep = st.sleep)
 
@@ -271,14 +259,10 @@ class SparkConnectClientRetriesSuite
       List.fill(2)(
         createTestExceptionWithDetails(
           msg = "Some error message",
-          retryDelay = retryDelay
-        )
-      ) ++ List.fill(3)(
+          retryDelay = retryDelay)) ++ List.fill(3)(
         createTestExceptionWithDetails(
           msg = "Some error message",
-          code = Status.Code.UNAVAILABLE
-        )
-      )
+          code = Status.Code.UNAVAILABLE))
     ).iterator
 
     retryHandler.retry({
@@ -290,9 +274,8 @@ class SparkConnectClientRetriesSuite
 
     // Should be retried by DefaultPolicy
     val policy = retryPolicies.find(_.name == "DefaultPolicy").get
-    val expectedSleeps = List.fill(2)(retryDelay.toMillis) ++ List.tabulate(3)(
-      i => policy.initialBackoff.toMillis * math.pow(policy.backoffMultiplier, i + 2).toLong
-    )
+    val expectedSleeps = List.fill(2)(retryDelay.toMillis) ++ List.tabulate(3)(i =>
+      policy.initialBackoff.toMillis * math.pow(policy.backoffMultiplier, i + 2).toLong)
     assertLongSequencesAlmostEqual(st.times, expectedSleeps, delta = policy.jitter.toMillis)
   }
 }
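
The helper reformatted above packs a RetryInfo detail into a StatusRuntimeException, and, as its inline comment notes, grpc-java represents the retry delay as whole seconds (Long) plus a nanosecond remainder (Int). The following is a minimal sketch of how such an error can be built with the standard grpc-java and protobuf APIs; it is not the suite's actual helper, and the name errorWithRetryInfo is made up for illustration.

// Minimal sketch, assuming grpc-java and protobuf-java are on the classpath;
// not the suite's createTestExceptionWithDetails helper.
import scala.concurrent.duration.FiniteDuration

import com.google.protobuf.{Any => ProtoAny, Duration => ProtoDuration}
import com.google.rpc.{Code, RetryInfo, Status => RpcStatus}
import io.grpc.StatusRuntimeException
import io.grpc.protobuf.StatusProto

object RetryInfoErrors {
  // Hypothetical helper: builds an INTERNAL error carrying RetryInfo.retry_delay.
  def errorWithRetryInfo(msg: String, retryDelay: FiniteDuration): StatusRuntimeException = {
    // grpc-java wants the delay split into whole seconds (Long) plus leftover nanos (Int).
    val seconds = retryDelay.toSeconds
    val nanos = (retryDelay - FiniteDuration(seconds, "s")).toNanos.toInt
    val retryInfo = RetryInfo
      .newBuilder()
      .setRetryDelay(ProtoDuration.newBuilder().setSeconds(seconds).setNanos(nanos))
      .build()
    val status = RpcStatus
      .newBuilder()
      .setCode(Code.INTERNAL_VALUE)
      .setMessage(msg)
      .addDetails(ProtoAny.pack(retryInfo))
      .build()
    StatusProto.toStatusRuntimeException(status)
  }
}

The final hunk of the suite then checks exactly this behaviour: two sleeps equal to the server-provided retry_delay, followed by the client's exponential backoff of initialBackoff * backoffMultiplier^(i + 2), within the policy's jitter.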

sql/connect/common/src/main/scala/org/apache/spark/sql/connect/client/RetryPolicy.scala

Lines changed: 13 additions & 17 deletions
@@ -47,16 +47,16 @@ import org.apache.spark.internal.Logging
  * @param name
  *   Name of the policy.
  * @param recognizeServerRetryDelay
- *   Per gRPC standard, the server can send error messages that contain `RetryInfo` message
- *   with `retry_delay` field indicating that the client should wait for at least `retry_delay`
- *   amount of time before retrying again, see:
+ *   Per gRPC standard, the server can send error messages that contain `RetryInfo` message with
+ *   `retry_delay` field indicating that the client should wait for at least `retry_delay` amount
+ *   of time before retrying again, see:
  *   https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L91
  *
- *   If this flag is set to true, RetryPolicy will use `RetryInfo.retry_delay` field
- *   in the backoff computation. Server's `retry_delay` can override client's `maxBackoff`.
+ *   If this flag is set to true, RetryPolicy will use `RetryInfo.retry_delay` field in the backoff
+ *   computation. Server's `retry_delay` can override client's `maxBackoff`.
  *
- *   This flag does not change which errors are retried, only how the backoff is computed.
- *   `DefaultPolicy` additionally has a rule for retrying any error that contains `RetryInfo`.
+ *   This flag does not change which errors are retried, only how the backoff is computed.
+ *   `DefaultPolicy` additionally has a rule for retrying any error that contains `RetryInfo`.
  * @param maxServerRetryDelay
  *   Limit for the server-provided `retry_delay`.
  */
@@ -178,22 +178,18 @@ object RetryPolicy extends Logging {
     e match {
       case e: StatusRuntimeException =>
         Option(StatusProto.fromThrowable(e))
-          .flatMap(
-            status =>
-              status.getDetailsList.asScala
-                .find(_.is(classOf[RetryInfo]))
-                .map(_.unpack(classOf[RetryInfo]))
-          )
+          .flatMap(status =>
+            status.getDetailsList.asScala
+              .find(_.is(classOf[RetryInfo]))
+              .map(_.unpack(classOf[RetryInfo])))
       case _ => None
     }
   }
 
   private def extractRetryDelay(e: Throwable): Option[FiniteDuration] = {
     extractRetryInfo(e)
       .flatMap(retryInfo => Option(retryInfo.getRetryDelay))
-      .map(
-        retryDelay =>
-          FiniteDuration(retryDelay.getSeconds, "s") + FiniteDuration(retryDelay.getNanos, "ns")
-      )
+      .map(retryDelay =>
+        FiniteDuration(retryDelay.getSeconds, "s") + FiniteDuration(retryDelay.getNanos, "ns"))
   }
 }
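
The reflowed scaladoc above describes the behaviour behind these helpers: with recognizeServerRetryDelay set, the server's RetryInfo.retry_delay feeds into the backoff computation, may exceed the client's maxBackoff, and is itself capped by maxServerRetryDelay. A rough sketch of that rule follows; it is not Spark's actual RetryPolicy code, and the helper and parameter names are illustrative only.

// Rough sketch of folding a server-provided retry_delay into the backoff,
// per the scaladoc above; not the real RetryPolicy internals.
import scala.concurrent.duration._

object ServerRetryDelaySketch {
  def nextWait(
      clientBackoff: FiniteDuration, // exponential backoff computed by the client
      maxBackoff: FiniteDuration, // client-side cap on its own backoff
      serverRetryDelay: Option[FiniteDuration], // RetryInfo.retry_delay, if present
      maxServerRetryDelay: FiniteDuration): FiniteDuration = {
    val base = clientBackoff.min(maxBackoff)
    serverRetryDelay match {
      // Honor the server's delay even past maxBackoff, but never past maxServerRetryDelay.
      case Some(delay) => base.max(delay.min(maxServerRetryDelay))
      case None => base
    }
  }
}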
