原始碼中的設計模式之模板設計模式
阿新 • • 發佈:2021-04-03
原始碼中的設計模式之模板設計模式
該類的繼承關係如下:
父類SparkSubmitOptionParser
抽掉不必要的程式碼
/**
 * Template-method parser for spark-submit style command lines.
 *
 * The final {@link #parse(List)} method fixes the parsing flow; subclasses
 * customize behavior by overriding the protected hooks {@code handle},
 * {@code handleUnknown} and {@code handleExtraArgs}, whose default
 * implementations throw {@link UnsupportedOperationException}.
 *
 * NOTE(review): the fields {@code opts} and {@code switches} referenced below
 * are the option/switch name tables of the full class; they were elided from
 * this excerpt.
 */
class SparkSubmitOptionParser {
/**
 * Parses the given argument list, dispatching each recognized option to
 * {@code handle}, each recognized switch to {@code handle} with a null value,
 * and the first unrecognized argument to {@code handleUnknown}. Parsing stops
 * when any hook returns false; everything after the stopping point is passed
 * to {@code handleExtraArgs}.
 *
 * @param args the raw command-line arguments
 * @throws IllegalArgumentException if an option that requires a value is the
 *         last argument and carries no value
 */
protected final void parse(List<String> args) {
// Matches "--name=value" so both "--name value" and "--name=value" forms work.
Pattern eqSeparatedOpt = Pattern.compile("(--[^=]+)=(.+)");
// Declared outside the loop: its final value marks where extra args begin.
int idx = 0;
for (idx = 0; idx < args.size(); idx++) {
String arg = args.get(idx);
String value = null;
Matcher m = eqSeparatedOpt.matcher(arg);
if (m.matches()) {
// Split "--name=value" into the option name and its inline value.
arg = m.group(1);
value = m.group(2);
}
// Look for options with a value.
String name = findCliOption(arg, opts);
if (name != null) {
if ( value == null) {
// No inline "=value"; the value must be the next argument.
if (idx == args.size() - 1) {
throw new IllegalArgumentException(
String.format("Missing argument for option '%s'.", arg));
}
idx++;
value = args.get(idx);
}
// A false return from the hook stops parsing here.
if (!handle(name, value)) {
break ;
}
continue;
}
// Look for a switch (an option that takes no value).
name = findCliOption(arg, switches);
if (name != null) {
if (!handle(name, null)) {
break;
}
continue;
}
// Not an option or switch: let the subclass decide whether to keep parsing.
if (!handleUnknown(arg)) {
break;
}
}
// If a hook stopped parsing early, skip past the argument that stopped it so
// it is not included in the extra args.
if (idx < args.size()) {
idx++;
}
// Hand every remaining argument to the subclass.
handleExtraArgs(args.subList(idx, args.size()));
}
/**
 * Hook: callback for a parsed option with its value. Return false to stop parsing.
 * Default implementation throws; subclasses are expected to override.
 */
protected boolean handle(String opt, String value) {
throw new UnsupportedOperationException();
}
/**
 * Hook: callback for an argument that matched no known option. Return false to
 * stop parsing. Default implementation throws; subclasses are expected to override.
 */
protected boolean handleUnknown(String opt) {
throw new UnsupportedOperationException();
}
/**
 * Hook: callback for all arguments remaining after parsing stopped.
 * Default implementation throws; subclasses are expected to override.
 */
protected void handleExtraArgs(List<String> extra) {
throw new UnsupportedOperationException();
}
/**
 * Searches the name table for {@code name}; each row holds the aliases of one
 * option, with the canonical spelling first.
 *
 * @return the canonical name (first entry of the matching row), or null if absent
 */
private String findCliOption(String name, String[][] available) {
for (String[] candidates : available) {
for (String candidate : candidates) {
if (candidate.equals(name)) {
return candidates[0];
}
}
}
return null;
}
}
parse 方法被 final 修飾,不許子類覆寫,其程式碼執行流程已經固定,只留下 handle()、handleUnknown()、handleExtraArgs() 三個受保護的方法(預設實作丟出 UnsupportedOperationException)供繼承它的子類實現。所謂模板設計模式,強調的就是把程式碼的執行流程模板化;至於具體執行什麼樣的邏輯,則交給子類來實現細節。
繼承它的子類
private[spark] abstract class SparkSubmitArgumentsParser extends SparkSubmitOptionParser
實現抽象方法的子類
刪去不必要的方法與變數內容
/**
 * Concrete subclass that supplies the template-method hooks: it parses the
 * spark-submit command line at construction time (via the inherited final
 * `parse`) and stores each recognized value into the corresponding field.
 *
 * NOTE(review): the option-name constants (NAME, MASTER, ...) and the fields
 * they populate are declared in parts of the class elided from this excerpt.
 */
private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, String] = sys.env)
extends SparkSubmitArgumentsParser {
// Set parameters from command line arguments.
// Runs in the constructor body: the inherited parse() calls back into the
// overridden handle/handleUnknown/handleExtraArgs below.
try {
parse(args.asJava)
} catch {
case e: IllegalArgumentException =>
SparkSubmit.printErrorAndExit(e.getMessage())
}
/**
 * Fill in values by parsing user options.
 *
 * Each recognized option name stores its value into the matching field;
 * some options also validate the value or exit with an error. Always
 * returns true, so parsing never stops early on a known option.
 */
override protected def handle(opt: String, value: String): Boolean = {
opt match {
case NAME =>
name = value
case MASTER =>
master = value
case CLASS =>
mainClass = value
case DEPLOY_MODE =>
// Only two deploy modes are accepted; anything else is a fatal usage error.
if (value != "client" && value != "cluster") {
SparkSubmit.printErrorAndExit("--deploy-mode must be either \"client\" or \"cluster\"")
}
deployMode = value
case NUM_EXECUTORS =>
numExecutors = value
case TOTAL_EXECUTOR_CORES =>
totalExecutorCores = value
case EXECUTOR_CORES =>
executorCores = value
case EXECUTOR_MEMORY =>
executorMemory = value
case DRIVER_MEMORY =>
driverMemory = value
case DRIVER_CORES =>
driverCores = value
case DRIVER_CLASS_PATH =>
driverExtraClassPath = value
case DRIVER_JAVA_OPTIONS =>
driverExtraJavaOptions = value
case DRIVER_LIBRARY_PATH =>
driverExtraLibraryPath = value
case PROPERTIES_FILE =>
propertiesFile = value
case KILL_SUBMISSION =>
submissionToKill = value
// KILL and STATUS are mutually exclusive actions: the first one seen wins,
// a second one is a fatal usage error.
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $KILL.")
}
action = KILL
case STATUS =>
submissionToRequestStatusFor = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $REQUEST_STATUS.")
}
action = REQUEST_STATUS
case SUPERVISE =>
supervise = true
case QUEUE =>
queue = value
// The following path-like options are normalized to URI form.
case FILES =>
files = Utils.resolveURIs(value)
case PY_FILES =>
pyFiles = Utils.resolveURIs(value)
case ARCHIVES =>
archives = Utils.resolveURIs(value)
case JARS =>
jars = Utils.resolveURIs(value)
case PACKAGES =>
packages = value
case PACKAGES_EXCLUDE =>
packagesExclusions = value
case REPOSITORIES =>
repositories = value
case CONF =>
// "--conf key=value": split and record in the spark properties map.
val (confName, confValue) = SparkSubmit.parseSparkConfProperty(value)
sparkProperties(confName) = confValue
case PROXY_USER =>
proxyUser = value
case PRINCIPAL =>
principal = value
case KEYTAB =>
keytab = value
case HELP =>
printUsageAndExit(0)
case VERBOSE =>
verbose = true
case VERSION =>
SparkSubmit.printVersionAndExit()
case USAGE_ERROR =>
printUsageAndExit(1)
case _ =>
// Should be unreachable: parse() only calls handle() with known names.
throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
}
true
}
/**
 * Handle unrecognized command line options.
 *
 * The first unrecognized option is treated as the "primary resource". Everything else is
 * treated as application arguments.
 *
 * Returns false, which makes the inherited parse() stop and route the rest of
 * the command line to handleExtraArgs.
 */
override protected def handleUnknown(opt: String): Boolean = {
if (opt.startsWith("-")) {
SparkSubmit.printErrorAndExit(s"Unrecognized option '$opt'.")
}
primaryResource =
if (!SparkSubmit.isShell(opt) && !SparkSubmit.isInternal(opt)) {
Utils.resolveURI(opt).toString
} else {
opt
}
isPython = SparkSubmit.isPython(opt)
isR = SparkSubmit.isR(opt)
false
}
/** Everything after the primary resource becomes application arguments. */
override protected def handleExtraArgs(extra: JList[String]): Unit = {
childArgs ++= extra.asScala
}
}
子類呼叫繼承自父類的 parse 方法時,就會執行到自己重寫的 handle()、handleUnknown()、handleExtraArgs() 方法。