Instance Constructors
-
new
MPICommandBuilder(log: Logger, gpuMachines: Array[String], hdfsPath: Option[(String, String, String)], fileInputPath: String, username: String)
Value Members
-
final
def
!=(arg0: Any): Boolean
-
final
def
##(): Int
-
final
def
==(arg0: Any): Boolean
-
def
appendOverrideConfig(t: Seq[String]): MPICommandBuilder.this.type
-
val
argName: String
-
val
arguments: Seq[String]
-
final
def
asInstanceOf[T0]: T0
-
def
clone(): AnyRef
-
val
command: String
-
var
dataFormat: String
-
def
finalize(): Unit
-
final
def
getClass(): Class[_]
-
def
hashCode(): Int
-
val
identityKeyException: String
-
def
insertBaseConfig(t: String): MPICommandBuilder.this.type
-
final
def
isInstanceOf[T0]: Boolean
-
var
modelName: String
-
var
modelOutputDir: String
-
def
nodeConfig: Map[String, Int]
-
def
nodeConfigToFile(nodeConfig: Map[String, Int]): String
-
final
def
notify(): Unit
-
final
def
notifyAll(): Unit
-
var
outputDir: String
-
def
runCommand(): Unit
-
def
setDataFormat(d: String): MPICommandBuilder.this.type
-
def
setModelName(p: String): MPICommandBuilder.this.type
-
def
setModelOutputDir(p: String): MPICommandBuilder.this.type
-
def
setOutputDir(p: String): MPICommandBuilder.this.type
-
def
setSparkSession(p: SparkSession): MPICommandBuilder.this.type
-
def
setWorkingDir(p: String): MPICommandBuilder.this.type
-
var
sparkSession: SparkSession
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
-
def
toString(): String
-
final
def
wait(): Unit
-
final
def
wait(arg0: Long, arg1: Int): Unit
-
final
def
wait(arg0: Long): Unit
-
var
workingDir: URI