Skip to content

Commit 9953c95

Browse files
committed
[SPARK-52674][SQL] Clean up the usage of deprecated APIs related to RandomStringUtils
## What changes were proposed in this pull request? This pr aims to clean up the usage of deprecated APIs related to `RandomStringUtils`, the relevant modifications were made with reference to: - https://github.com/apache/commons-lang/blob/29ccc7665f3bc5d84155a3092ab2209a053324e6/src/main/java/org/apache/commons/lang3/RandomStringUtils.java#L113-L128 ![image](https://github.com/user-attachments/assets/3005f7d6-1d70-4be9-9b0f-c66f00ce3cd7) - https://github.com/apache/commons-lang/blob/29ccc7665f3bc5d84155a3092ab2209a053324e6/src/main/java/org/apache/commons/lang3/RandomStringUtils.java#L394-L409 ![image](https://github.com/user-attachments/assets/7bb1d559-290b-4cc6-81d2-3ebd324294ff) - https://github.com/apache/commons-lang/blob/29ccc7665f3bc5d84155a3092ab2209a053324e6/src/main/java/org/apache/commons/lang3/RandomStringUtils.java#L411-L427 ![image](https://github.com/user-attachments/assets/70a97170-0bda-490d-9d81-9af326d52715) ### Why are the changes needed? Clean up deprecated APIs usage. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? - Pass GitHub Actions ### Was this patch authored or co-authored using generative AI tooling? No Closes #51362 from LuciferYang/cleanup-RandomStringUtils-deprecated. Authored-by: yangjie01 <yangjie01@baidu.com> Signed-off-by: yangjie01 <yangjie01@baidu.com>
1 parent 3b15ee8 commit 9953c95

File tree

4 files changed

+9
-7
lines changed

4 files changed

+9
-7
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/ConstantColumnVectorBenchmark.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -268,7 +268,7 @@ object ConstantColumnVectorBenchmark extends BenchmarkBase {
268268

269269
Seq(1, 5, 10, 15, 20, 30).foreach { length =>
270270
val builder = new UTF8StringBuilder()
271-
builder.append(RandomStringUtils.random(length))
271+
builder.append(RandomStringUtils.secure.next(length))
272272
val row = InternalRow(builder.build())
273273
testWrite(valuesPerIteration, batchSize, StringType, row)
274274
}
@@ -281,7 +281,7 @@ object ConstantColumnVectorBenchmark extends BenchmarkBase {
281281

282282
Seq(1, 5, 10, 15, 20, 30).foreach { length =>
283283
val builder = new UTF8StringBuilder()
284-
builder.append(RandomStringUtils.random(length))
284+
builder.append(RandomStringUtils.secure.next(length))
285285
val row = InternalRow(builder.build())
286286
testRead(valuesPerIteration, batchSize, StringType, row)
287287
}
@@ -293,7 +293,7 @@ object ConstantColumnVectorBenchmark extends BenchmarkBase {
293293

294294
Seq(1, 5, 10, 15, 20, 30).foreach { length =>
295295
val builder = new UTF8StringBuilder()
296-
builder.append(RandomStringUtils.random(length))
296+
builder.append(RandomStringUtils.secure.next(length))
297297
val row = InternalRow(builder.build())
298298
testWriteAndRead(valuesPerIteration, batchSize, StringType, row)
299299
}

sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/compression/CompressionSchemeBenchmark.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,7 @@ object CompressionSchemeBenchmark extends BenchmarkBase with AllCompressionSchem
222222
val testData = allocateLocal(count * (4 + strLen))
223223

224224
val g = {
225-
val dataTable = (0 until tableSize).map(_ => RandomStringUtils.randomAlphabetic(strLen))
225+
val dataTable = (0 until tableSize).map(_ => RandomStringUtils.secure.nextAlphabetic(strLen))
226226
val rng = genHigherSkewData()
227227
() => dataTable(rng().toInt % tableSize)
228228
}

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetDeltaLengthByteArrayEncodingSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,7 @@ class ParquetDeltaLengthByteArrayEncodingSuite
135135
if (randomEmpty.nextInt() % 11 != 0) {
136136
maxLen = 0;
137137
}
138-
samples(i) = RandomStringUtils.randomAlphanumeric(0, maxLen)
138+
samples(i) = RandomStringUtils.secure.nextAlphanumeric(0, maxLen)
139139
}
140140
samples
141141
}

sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -104,12 +104,14 @@ class StreamingQuerySuite extends StreamTest with BeforeAndAfter with Logging wi
104104
var cpDir: String = null
105105

106106
def startQuery(restart: Boolean): StreamingQuery = {
107-
if (cpDir == null || !restart) cpDir = s"$dir/${RandomStringUtils.randomAlphabetic(10)}"
107+
if (cpDir == null || !restart) {
108+
cpDir = s"$dir/${RandomStringUtils.secure.nextAlphabetic(10)}"
109+
}
108110
MemoryStream[Int].toDS().groupBy().count()
109111
.writeStream
110112
.format("memory")
111113
.outputMode("complete")
112-
.queryName(s"name${RandomStringUtils.randomAlphabetic(10)}")
114+
.queryName(s"name${RandomStringUtils.secure.nextAlphabetic(10)}")
113115
.option("checkpointLocation", cpDir)
114116
.start()
115117
}

0 commit comments

Comments
 (0)