Skip to content

Commit d350dbf

Browse files
aakash-db authored and sryza committed
fix 2
1 parent 41a0600 commit d350dbf

File tree

5 files changed

+6
-9
lines changed

5 files changed

+6
-9
lines changed

sql/pipelines/src/main/scala/org/apache/spark/sql/pipelines/graph/FlowAnalysis.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
2424
import org.apache.spark.sql.catalyst.analysis.{CTESubstitution, UnresolvedRelation}
2525
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
2626
import org.apache.spark.sql.classic.{DataFrame, Dataset, DataStreamReader, SparkSession}
27-
import org.apache.spark.sql.pipelines.{AnalysisWarning, Language}
27+
import org.apache.spark.sql.pipelines.AnalysisWarning
2828
import org.apache.spark.sql.pipelines.graph.GraphIdentifierManager.{ExternalDatasetIdentifier, InternalDatasetIdentifier}
2929
import org.apache.spark.sql.pipelines.util.{BatchReadOptions, InputReadOptions, StreamingReadOptions}
3030

sql/pipelines/src/main/scala/org/apache/spark/sql/pipelines/graph/GraphErrors.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,8 @@ object GraphErrors {
5858
}
5959

6060
/**
61-
* Throws when a table path is unresolved, i.e. the table identifier does not exist in the catalog.
61+
* Throws when a table path is unresolved, i.e. the table identifier
62+
* does not exist in the catalog.
6263
*
6364
* @param identifier the unresolved table identifier
6465
*/

sql/pipelines/src/main/scala/org/apache/spark/sql/pipelines/graph/QueryOrigin.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,6 @@ case class QueryOrigin(
5858
fileName = other.fileName.orElse(fileName),
5959
sqlText = other.sqlText.orElse(sqlText),
6060
line = other.line.orElse(line),
61-
cellNumber = other.cellNumber.orElse(cellNumber),
6261
startPosition = other.startPosition.orElse(startPosition),
6362
objectType = other.objectType.orElse(objectType),
6463
objectName = other.objectName.orElse(objectName)

sql/pipelines/src/main/scala/org/apache/spark/sql/pipelines/util/InputReadInfo.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
1818
package org.apache.spark.sql.pipelines.util
1919

2020
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
21-
import org.apache.spark.sql.pipelines.Language
2221
import org.apache.spark.sql.pipelines.util.StreamingReadOptions.EmptyUserOptions
2322

2423
/**

sql/pipelines/src/test/scala/org/apache/spark/sql/pipelines/utils/PipelineTest.scala

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,17 +17,15 @@
1717

1818
package org.apache.spark.sql.pipelines.utils
1919

20-
import java.io.{BufferedReader, File, FileNotFoundException, InputStreamReader}
21-
import java.nio.file.{Files, Paths}
20+
import java.io.{BufferedReader, FileNotFoundException, InputStreamReader}
21+
import java.nio.file.Files
2222

2323
import scala.collection.mutable.ArrayBuffer
24-
import scala.jdk.CollectionConverters._
2524
import scala.util.{Failure, Try}
2625
import scala.util.control.NonFatal
2726

2827
import org.scalactic.source
2928
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Tag}
30-
import org.scalatest.exceptions.TestFailedDueToTimeoutException
3129
import org.scalatest.matchers.should.Matchers
3230

3331
import org.apache.spark.{SparkConf, SparkFunSuite}
@@ -61,7 +59,7 @@ abstract class PipelineTest
6159
* all spark sessions created in tests.
6260
*/
6361
protected def sparkConf: SparkConf = {
64-
var conf = new SparkConf()
62+
new SparkConf()
6563
.set("spark.sql.shuffle.partitions", "2")
6664
.set("spark.sql.session.timeZone", "UTC")
6765
}

0 commit comments

Comments (0)