Skip to content

[SPARK-52318] Refactor SparkConnectError to simplify case names #177

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Sources/SparkConnect/DataFrame.swift
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ public actor DataFrame: Sendable {
// SQLSTATE: 0A000
// [UNSUPPORTED_CONNECT_FEATURE.RDD]
// Feature is not supported in Spark Connect: Resilient Distributed Datasets (RDDs).
throw SparkConnectError.UnsupportedOperationException
throw SparkConnectError.UnsupportedOperation
}

/// Return an array of column name strings
Expand Down
10 changes: 5 additions & 5 deletions Sources/SparkConnect/Extension.swift
Original file line number Diff line number Diff line change
Expand Up @@ -213,7 +213,7 @@ extension YearMonthInterval {
case 0: "year"
case 1: "month"
default:
throw SparkConnectError.InvalidTypeException
throw SparkConnectError.InvalidType
}
}

Expand All @@ -225,7 +225,7 @@ extension YearMonthInterval {
} else if startFieldName < endFieldName {
"interval \(startFieldName) to \(endFieldName)"
} else {
throw SparkConnectError.InvalidTypeException
throw SparkConnectError.InvalidType
}
return interval
}
Expand All @@ -239,7 +239,7 @@ extension DayTimeInterval {
case 2: "minute"
case 3: "second"
default:
throw SparkConnectError.InvalidTypeException
throw SparkConnectError.InvalidType
}
}

Expand All @@ -251,7 +251,7 @@ extension DayTimeInterval {
} else if startFieldName < endFieldName {
"interval \(startFieldName) to \(endFieldName)"
} else {
throw SparkConnectError.InvalidTypeException
throw SparkConnectError.InvalidType
}
return interval
}
Expand Down Expand Up @@ -325,7 +325,7 @@ extension DataType {
case .unparsed:
self.unparsed.dataTypeString
default:
throw SparkConnectError.InvalidTypeException
throw SparkConnectError.InvalidType
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion Sources/SparkConnect/MergeIntoWriter.swift
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,7 @@ public actor MergeIntoWriter {
&& self.mergeIntoTableCommand.notMatchedActions.count == 0
&& self.mergeIntoTableCommand.notMatchedBySourceActions.count == 0
{
throw SparkConnectError.InvalidArgumentException
throw SparkConnectError.InvalidArgument
}
self.mergeIntoTableCommand.sourceTablePlan = await (self.df.getPlan() as! Plan).root
self.mergeIntoTableCommand.withSchemaEvolution = self.schemaEvolution
Expand Down
4 changes: 2 additions & 2 deletions Sources/SparkConnect/ProtoUtils.swift
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,10 @@ public enum ProtoUtils {
// because the Spark Connect job tag is also used as part of SparkContext job tag.
// See SparkContext.throwIfInvalidTag and ExecuteHolderSessionTag
if tag.isEmpty {
throw SparkConnectError.InvalidArgumentException
throw SparkConnectError.InvalidArgument
}
if tag.contains(SPARK_JOB_TAGS_SEP) {
throw SparkConnectError.InvalidArgumentException
throw SparkConnectError.InvalidArgument
}
}
}
2 changes: 1 addition & 1 deletion Sources/SparkConnect/Row.swift
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ public struct Row: Sendable, Equatable {

public func get(_ i: Int) throws -> Sendable {
if i < 0 || i >= self.length {
throw SparkConnectError.InvalidArgumentException
throw SparkConnectError.InvalidArgument
}
return values[i]
}
Expand Down
4 changes: 2 additions & 2 deletions Sources/SparkConnect/SparkConnectClient.swift
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ public actor SparkConnectClient {
try await withGPRC { client in
// To prevent server-side `INVALID_HANDLE.FORMAT (SQLSTATE: HY000)` exception.
if UUID(uuidString: sessionID) == nil {
throw SparkConnectError.InvalidSessionIDException
throw SparkConnectError.InvalidSessionID
}

self.sessionID = sessionID
Expand Down Expand Up @@ -787,7 +787,7 @@ public actor SparkConnectClient {
} catch let error as RPCError where error.code == .internalError {
switch error.message {
case let m where m.contains("UNSUPPORTED_DATATYPE") || m.contains("INVALID_IDENTIFIER"):
throw SparkConnectError.InvalidTypeException
throw SparkConnectError.InvalidType
default:
throw error
}
Expand Down
8 changes: 4 additions & 4 deletions Sources/SparkConnect/SparkConnectError.swift
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@

/// An enum for ``SparkConnect`` package errors
public enum SparkConnectError: Error {
case UnsupportedOperationException
case InvalidArgumentException
case InvalidSessionIDException
case InvalidTypeException
case InvalidArgument
case InvalidSessionID
case InvalidType
case UnsupportedOperation
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I sorted these too.

}
2 changes: 1 addition & 1 deletion Sources/SparkConnect/SparkFileUtils.swift
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ public enum SparkFileUtils {
if fileManager.fileExists(atPath: url.path) {
try fileManager.removeItem(at: url)
} else {
throw SparkConnectError.InvalidArgumentException
throw SparkConnectError.InvalidArgument
}
}
}
2 changes: 1 addition & 1 deletion Sources/SparkConnect/SparkSession.swift
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ public actor SparkSession {
// SQLSTATE: 0A000
// [UNSUPPORTED_CONNECT_FEATURE.SESSION_SPARK_CONTEXT]
// Feature is not supported in Spark Connect: Access to the SparkContext.
throw SparkConnectError.UnsupportedOperationException
throw SparkConnectError.UnsupportedOperation
}
}

Expand Down
4 changes: 2 additions & 2 deletions Sources/SparkConnect/StreamingQueryManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ public actor StreamingQueryManager {
let response = try await self.sparkSession.client.executeStreamingQueryManagerCommand(command)
let query = response.first!.streamingQueryManagerCommandResult.query
guard query.hasID else {
throw SparkConnectError.InvalidArgumentException
throw SparkConnectError.InvalidArgument
}
return StreamingQuery(
UUID(uuidString: query.id.id)!,
Expand All @@ -154,7 +154,7 @@ public actor StreamingQueryManager {
var awaitAnyTerminationCommand = StreamingQueryManagerCommand.AwaitAnyTerminationCommand()
if let timeoutMs {
guard timeoutMs > 0 else {
throw SparkConnectError.InvalidArgumentException
throw SparkConnectError.InvalidArgument
}
awaitAnyTerminationCommand.timeoutMs = timeoutMs
}
Expand Down
4 changes: 2 additions & 2 deletions Tests/SparkConnectTests/DataFrameReaderTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -103,10 +103,10 @@ struct DataFrameReaderTests {
@Test
func invalidSchema() async throws {
let spark = try await SparkSession.builder.getOrCreate()
await #expect(throws: SparkConnectError.InvalidTypeException) {
await #expect(throws: SparkConnectError.InvalidType) {
try await spark.read.schema("invalid-name SHORT")
}
await #expect(throws: SparkConnectError.InvalidTypeException) {
await #expect(throws: SparkConnectError.InvalidType) {
try await spark.read.schema("age UNKNOWN_TYPE")
}
await spark.stop()
Expand Down
2 changes: 1 addition & 1 deletion Tests/SparkConnectTests/DataFrameTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ struct DataFrameTests {
@Test
func rdd() async throws {
let spark = try await SparkSession.builder.getOrCreate()
await #expect(throws: SparkConnectError.UnsupportedOperationException) {
await #expect(throws: SparkConnectError.UnsupportedOperation) {
try await spark.range(1).rdd()
}
await spark.stop()
Expand Down
4 changes: 2 additions & 2 deletions Tests/SparkConnectTests/RowTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ struct RowTests {
func empty() {
#expect(Row.empty.size == 0)
#expect(Row.empty.length == 0)
#expect(throws: SparkConnectError.InvalidArgumentException) {
#expect(throws: SparkConnectError.InvalidArgument) {
try Row.empty.get(0)
}
}
Expand Down Expand Up @@ -57,7 +57,7 @@ struct RowTests {
#expect(try row.get(2) as! String == "a")
#expect(try row.get(3) as! Bool == true)
#expect(try row.get(4) as! Decimal == Decimal(1.2))
#expect(throws: SparkConnectError.InvalidArgumentException) {
#expect(throws: SparkConnectError.InvalidArgument) {
try Row.empty.get(-1)
}
}
Expand Down
2 changes: 1 addition & 1 deletion Tests/SparkConnectTests/SparkConnectClientTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ struct SparkConnectClientTests {
@Test
func connectWithInvalidUUID() async throws {
let client = SparkConnectClient(remote: TEST_REMOTE)
try await #require(throws: SparkConnectError.InvalidSessionIDException) {
try await #require(throws: SparkConnectError.InvalidSessionID) {
try await client.connect("not-a-uuid-format")
}
await client.stop()
Expand Down
6 changes: 3 additions & 3 deletions Tests/SparkConnectTests/SparkSessionTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ struct SparkSessionTests {
func sparkContext() async throws {
await SparkSession.builder.clear()
let spark = try await SparkSession.builder.getOrCreate()
await #expect(throws: SparkConnectError.UnsupportedOperationException) {
await #expect(throws: SparkConnectError.UnsupportedOperation) {
try await spark.sparkContext
}
await spark.stop()
Expand Down Expand Up @@ -186,10 +186,10 @@ struct SparkSessionTests {
func invalidTags() async throws {
await SparkSession.builder.clear()
let spark = try await SparkSession.builder.getOrCreate()
await #expect(throws: SparkConnectError.InvalidArgumentException) {
await #expect(throws: SparkConnectError.InvalidArgument) {
try await spark.addTag("")
}
await #expect(throws: SparkConnectError.InvalidArgumentException) {
await #expect(throws: SparkConnectError.InvalidArgument) {
try await spark.addTag(",")
}
await spark.stop()
Expand Down
6 changes: 3 additions & 3 deletions Tests/SparkConnectTests/StreamingQueryManagerTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,10 @@ struct StreamingQueryManagerTests {
@Test
func get() async throws {
let spark = try await SparkSession.builder.getOrCreate()
await #expect(throws: SparkConnectError.InvalidArgumentException) {
await #expect(throws: SparkConnectError.InvalidArgument) {
try await spark.streams.get(UUID())
}
await #expect(throws: SparkConnectError.InvalidArgumentException) {
await #expect(throws: SparkConnectError.InvalidArgument) {
try await spark.streams.get(UUID().uuidString)
}
await spark.stop()
Expand All @@ -48,7 +48,7 @@ struct StreamingQueryManagerTests {
func awaitAnyTermination() async throws {
let spark = try await SparkSession.builder.getOrCreate()
try await spark.streams.awaitAnyTermination(1)
await #expect(throws: SparkConnectError.InvalidArgumentException) {
await #expect(throws: SparkConnectError.InvalidArgument) {
try await spark.streams.awaitAnyTermination(-1)
}
await spark.stop()
Expand Down
Loading