Skip to content

Commit

Permalink
Clean up unused variables and imports.
Browse files Browse the repository at this point in the history
Signed-off-by: Pascal Spörri <psp@zurich.ibm.com>
  • Loading branch information
pspoerri committed Jun 27, 2023
1 parent 344a3b6 commit 7393636
Show file tree
Hide file tree
Showing 4 changed files with 0 additions and 11 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,6 @@ class S3ShuffleDispatcher extends Logging {
val alwaysCreateIndex: Boolean = conf.getBoolean("spark.shuffle.s3.alwaysCreateIndex", defaultValue = false)
val useBlockManager: Boolean = conf.getBoolean("spark.shuffle.s3.useBlockManager", defaultValue = true)
val forceBatchFetch: Boolean = conf.getBoolean("spark.shuffle.s3.forceBatchFetch", defaultValue = false)
val allowSerializedShuffle: Boolean = conf.getBoolean("spark.shuffle.s3.allowSerializedShuffle", defaultValue = true)
val forceBypassMergeSort: Boolean = conf.getBoolean("spark.shuffle.s3.forceBypassMergeSort", defaultValue = false)
val sortShuffleCloneRecords: Boolean = conf.getBoolean("spark.shuffle.s3.sort.cloneRecords", defaultValue = false)

val appDir = f"/${startTime}-${appId}/"
val fs: FileSystem = FileSystem.get(URI.create(rootDir), {
Expand All @@ -49,9 +46,6 @@ class S3ShuffleDispatcher extends Logging {
logInfo(s"- spark.shuffle.s3.alwaysCreateIndex=${alwaysCreateIndex}")
logInfo(s"- spark.shuffle.s3.useBlockManager=${useBlockManager}")
logInfo(s"- spark.shuffle.s3.forceBatchFetch=${forceBatchFetch}")
logInfo(s"- spark.shuffle.s3.allowSerializedShuffle=${allowSerializedShuffle}")
logInfo(s"- spark.shuffle.s3.forceBypassMergeSort=${forceBypassMergeSort}")
logInfo(s"- spark.shuffle.s3.sort.cloneRecords=${sortShuffleCloneRecords}")

def removeRoot(): Boolean = {
Range(0, 10).map(idx => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,9 @@ import org.apache.spark._
import org.apache.spark.internal.Logging
import org.apache.spark.shuffle._
import org.apache.spark.shuffle.api.ShuffleExecutorComponents
import org.apache.spark.shuffle.helper.S3ShuffleHelper.dispatcher
import org.apache.spark.shuffle.helper.{S3ShuffleDispatcher, S3ShuffleHelper}
import org.apache.spark.storage.S3ShuffleReader
import org.apache.spark.util.collection.OpenHashSet

import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.concurrent.ExecutionContext.Implicits.global
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -252,5 +252,4 @@ class S3ShuffleManagerTest {
.set("spark.local.dir", "./spark-temp") // Configure the working dir.
.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.S3ShuffleManager")
.set("spark.shuffle.sort.io.plugin.class", "org.apache.spark.shuffle.S3ShuffleDataIO")
.set("spark.shuffle.s3.forceBypassMergeSort", "false")
}
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,6 @@ class S3SortShuffleTest {
.set("spark.local.dir", "./spark-temp") // Configure the working dir.
.set("spark.shuffle.sort.io.plugin.class", "org.apache.spark.shuffle.S3ShuffleDataIO")
.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.S3ShuffleManager")
.set("spark.shuffle.s3.forceBypassMergeSort", "false")
.set("spark.shuffle.s3.cleanup", "false") // Avoid issues with cleanup.

def fakeTaskContext(env: SparkEnv): TaskContext = {
Expand Down

0 comments on commit 7393636

Please sign in to comment.