Paul Zhang
2 years ago
committed by
GitHub
11 changed files with 641 additions and 439 deletions
@ -0,0 +1,103 @@ |
|||||||
|
/* |
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||||
|
* contributor license agreements. See the NOTICE file distributed with |
||||||
|
* this work for additional information regarding copyright ownership. |
||||||
|
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||||
|
* (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software |
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, |
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||||
|
* See the License for the specific language governing permissions and |
||||||
|
* limitations under the License. |
||||||
|
*/ |
||||||
|
|
||||||
|
package org.apache.dolphinscheduler.plugin.task.flink; |
||||||
|
|
||||||
|
import org.apache.commons.lang3.SystemUtils; |
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; |
||||||
|
import org.apache.dolphinscheduler.spi.utils.StringUtils; |
||||||
|
import org.slf4j.Logger; |
||||||
|
import org.slf4j.LoggerFactory; |
||||||
|
|
||||||
|
import java.io.File; |
||||||
|
import java.io.IOException; |
||||||
|
import java.nio.charset.StandardCharsets; |
||||||
|
import java.nio.file.Files; |
||||||
|
import java.nio.file.Path; |
||||||
|
import java.nio.file.StandardOpenOption; |
||||||
|
import java.nio.file.attribute.FileAttribute; |
||||||
|
import java.nio.file.attribute.PosixFilePermission; |
||||||
|
import java.nio.file.attribute.PosixFilePermissions; |
||||||
|
import java.util.Set; |
||||||
|
|
||||||
|
import static org.apache.dolphinscheduler.plugin.task.api.TaskConstants.RWXR_XR_X; |
||||||
|
|
||||||
|
public class FileUtils { |
||||||
|
private static final Logger LOGGER = LoggerFactory.getLogger(FileUtils.class); |
||||||
|
private FileUtils() {} |
||||||
|
|
||||||
|
public static String getInitScriptFilePath(TaskExecutionContext taskExecutionContext) { |
||||||
|
return String.format("%s/%s_init.sql", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId()); |
||||||
|
} |
||||||
|
|
||||||
|
public static String getScriptFilePath(TaskExecutionContext taskExecutionContext) { |
||||||
|
return String.format("%s/%s_node.sql", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId()); |
||||||
|
} |
||||||
|
|
||||||
|
public static void generateScriptFile(TaskExecutionContext taskExecutionContext, FlinkParameters flinkParameters) { |
||||||
|
String initScriptFilePath = FileUtils.getInitScriptFilePath(taskExecutionContext); |
||||||
|
String scriptFilePath = FileUtils.getScriptFilePath(taskExecutionContext); |
||||||
|
String initOptionsString = StringUtils.join( |
||||||
|
FlinkArgsUtils.buildInitOptionsForSql(flinkParameters), |
||||||
|
FlinkConstants.FLINK_SQL_NEWLINE |
||||||
|
).concat(FlinkConstants.FLINK_SQL_NEWLINE); |
||||||
|
writeScriptFile(initScriptFilePath, initOptionsString + flinkParameters.getInitScript()); |
||||||
|
writeScriptFile(scriptFilePath, flinkParameters.getRawScript()); |
||||||
|
} |
||||||
|
|
||||||
|
private static void writeScriptFile(String scriptFileFullPath, String script) { |
||||||
|
File scriptFile = new File(scriptFileFullPath); |
||||||
|
Path path = scriptFile.toPath(); |
||||||
|
if (Files.exists(path)) { |
||||||
|
try { |
||||||
|
Files.delete(path); |
||||||
|
} catch (IOException e) { |
||||||
|
throw new RuntimeException(String.format("Flink Script file exists in path: %s before creation and cannot be deleted", path), e); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
Set<PosixFilePermission> perms = PosixFilePermissions.fromString(RWXR_XR_X); |
||||||
|
FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms); |
||||||
|
try { |
||||||
|
if (SystemUtils.IS_OS_WINDOWS) { |
||||||
|
Files.createFile(path); |
||||||
|
} else { |
||||||
|
if (!scriptFile.getParentFile().exists()) { |
||||||
|
scriptFile.getParentFile().mkdirs(); |
||||||
|
} |
||||||
|
Files.createFile(path, attr); |
||||||
|
} |
||||||
|
|
||||||
|
if (StringUtils.isNotEmpty(script)) { |
||||||
|
String replacedScript = script.replaceAll("\\r\\n", "\n"); |
||||||
|
FileUtils.writeStringToFile(scriptFile, replacedScript, StandardOpenOption.APPEND); |
||||||
|
} |
||||||
|
} catch (IOException e) { |
||||||
|
throw new RuntimeException("Generate flink SQL script error", e); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
private static void writeStringToFile(File file, String content, StandardOpenOption standardOpenOption) { |
||||||
|
try { |
||||||
|
LOGGER.info("Writing content: " + content); |
||||||
|
LOGGER.info("To file: " + file.getAbsolutePath()); |
||||||
|
Files.write(file.getAbsoluteFile().toPath(), content.getBytes(StandardCharsets.UTF_8), standardOpenOption); |
||||||
|
} catch(IOException e) { |
||||||
|
throw new RuntimeException("Error writing file: " + file.getAbsoluteFile(), e); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
@ -0,0 +1,271 @@ |
|||||||
|
/* |
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||||
|
* contributor license agreements. See the NOTICE file distributed with |
||||||
|
* this work for additional information regarding copyright ownership. |
||||||
|
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||||
|
* (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software |
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, |
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||||
|
* See the License for the specific language governing permissions and |
||||||
|
* limitations under the License. |
||||||
|
*/ |
||||||
|
|
||||||
|
package org.apache.dolphinscheduler.plugin.task.flink; |
||||||
|
|
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; |
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.model.Property; |
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; |
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.parser.ParamUtils; |
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.parser.ParameterUtils; |
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.utils.ArgsUtils; |
||||||
|
import org.apache.dolphinscheduler.spi.utils.StringUtils; |
||||||
|
import org.slf4j.Logger; |
||||||
|
import org.slf4j.LoggerFactory; |
||||||
|
|
||||||
|
import java.util.ArrayList; |
||||||
|
import java.util.List; |
||||||
|
import java.util.Map; |
||||||
|
import java.util.Optional; |
||||||
|
|
||||||
|
/**
 * Builds the flink / sql-client command lines and the SQL init options from
 * {@link FlinkParameters}. Argument ORDER is significant and is asserted
 * verbatim by FlinkArgsUtilsTest — do not reorder the add() calls.
 */
public class FlinkArgsUtils {

    private FlinkArgsUtils() {
        throw new IllegalStateException("Utility class");
    }

    // NOTE(review): LOCAL_DEPLOY_MODE is currently unused in this class — candidate for removal.
    private static final String LOCAL_DEPLOY_MODE = "local";
    // Flink version markers as selected in the UI; compared with equals(), not parsed.
    private static final String FLINK_VERSION_BEFORE_1_10 = "<1.10";
    private static final String FLINK_VERSION_AFTER_OR_EQUALS_1_12 = ">=1.12";
    private static final String FLINK_VERSION_AFTER_OR_EQUALS_1_13 = ">=1.13";
    /**
     * default flink deploy mode
     */
    public static final FlinkDeployMode DEFAULT_DEPLOY_MODE = FlinkDeployMode.CLUSTER;

    /**
     * build flink command line
     *
     * @param taskExecutionContext task execution context (script paths, prepared params)
     * @param param flink parameters
     * @return argument list (SQL program type dispatches to sql-client, everything else to flink run)
     */
    public static List<String> buildCommandLine(TaskExecutionContext taskExecutionContext, FlinkParameters param) {
        switch (param.getProgramType()) {
            case SQL:
                return buildCommandLineForSql(taskExecutionContext, param);
            default:
                return buildCommandLineForOthers(taskExecutionContext, param);
        }
    }

    /**
     * build flink command line for SQL:
     * {@code sql-client.sh -i <init file> -f <script file> [others]}
     *
     * @return argument list
     */
    private static List<String> buildCommandLineForSql(TaskExecutionContext taskExecutionContext, FlinkParameters flinkParameters) {
        List<String> args = new ArrayList<>();

        args.add(FlinkConstants.FLINK_SQL_COMMAND);

        // -i : init script generated by FileUtils.generateScriptFile
        String initScriptFilePath = FileUtils.getInitScriptFilePath(taskExecutionContext);
        args.add(FlinkConstants.FLINK_SQL_INIT_FILE);
        args.add(initScriptFilePath);

        // -f : main SQL script
        String scriptFilePath = FileUtils.getScriptFilePath(taskExecutionContext);
        args.add(FlinkConstants.FLINK_SQL_SCRIPT_FILE);
        args.add(scriptFilePath);

        // raw user-supplied extra options, appended as a single token
        String others = flinkParameters.getOthers();
        if (StringUtils.isNotEmpty(others)) {
            args.add(others);
        }
        return args;
    }

    /**
     * Builds the "set key=value" lines written into the SQL init file.
     * Returns one option per list element; caller joins them with FLINK_SQL_NEWLINE.
     */
    public static List<String> buildInitOptionsForSql(FlinkParameters flinkParameters) {
        List<String> initOptions = new ArrayList<>();

        FlinkDeployMode deployMode = Optional.ofNullable(flinkParameters.getDeployMode()).orElse(FlinkDeployMode.CLUSTER);

        /**
         * Currently flink sql on yarn only supports yarn-per-job mode.
         * NOTE(review): CLUSTER intentionally maps to execution.target=local here (the unit
         * test asserts this); all other modes get yarn-per-job plus the yarn resource options.
         * Confirm this mapping is still the intended behavior before changing it.
         */
        if (FlinkDeployMode.CLUSTER == deployMode) {
            // execution.target
            initOptions.add(String.format(FlinkConstants.FLINK_FORMAT_EXECUTION_TARGET, "local"));
        } else {
            // execution.target
            initOptions.add(String.format(FlinkConstants.FLINK_FORMAT_EXECUTION_TARGET, FlinkConstants.FLINK_YARN_PER_JOB));

            // taskmanager.numberOfTaskSlots (only when explicitly set > 0)
            int slot = flinkParameters.getSlot();
            if (slot > 0) {
                initOptions.add(String.format(FlinkConstants.FLINK_FORMAT_TASKMANAGER_NUMBEROFTASKSLOTS, slot));
            }

            // yarn.application.name (escaped to survive shell quoting)
            String appName = flinkParameters.getAppName();
            if (StringUtils.isNotEmpty(appName)) {
                initOptions.add(String.format(FlinkConstants.FLINK_FORMAT_YARN_APPLICATION_NAME, ArgsUtils.escape(appName)));
            }

            // jobmanager.memory.process.size
            String jobManagerMemory = flinkParameters.getJobManagerMemory();
            if (StringUtils.isNotEmpty(jobManagerMemory)) {
                initOptions.add(String.format(FlinkConstants.FLINK_FORMAT_JOBMANAGER_MEMORY_PROCESS_SIZE, jobManagerMemory));
            }

            // taskmanager.memory.process.size
            String taskManagerMemory = flinkParameters.getTaskManagerMemory();
            if (StringUtils.isNotEmpty(taskManagerMemory)) {
                initOptions.add(String.format(FlinkConstants.FLINK_FORMAT_TASKMANAGER_MEMORY_PROCESS_SIZE, taskManagerMemory));
            }

            // yarn.application.queue — skipped when the user already put a queue flag in "others"
            String others = flinkParameters.getOthers();
            if (StringUtils.isEmpty(others) || !others.contains(FlinkConstants.FLINK_QUEUE)) {
                String queue = flinkParameters.getQueue();
                if (StringUtils.isNotEmpty(queue)) {
                    initOptions.add(String.format(FlinkConstants.FLINK_FORMAT_YARN_APPLICATION_QUEUE, queue));
                }
            }
        }

        // parallelism.default — applies to every deploy mode
        int parallelism = flinkParameters.getParallelism();
        if (parallelism > 0) {
            initOptions.add(String.format(FlinkConstants.FLINK_FORMAT_PARALLELISM_DEFAULT, parallelism));
        }

        return initOptions;
    }

    /**
     * Builds the {@code flink run} / {@code flink run-application} command line for
     * JAVA/SCALA/PYTHON programs. The two switch blocks keep the original argument
     * order: run-mode flags first, then yarn resource flags, then -p/-sae/others,
     * then main class / jar / main args.
     */
    private static List<String> buildCommandLineForOthers(TaskExecutionContext taskExecutionContext, FlinkParameters flinkParameters) {
        List<String> args = new ArrayList<>();

        args.add(FlinkConstants.FLINK_COMMAND);
        FlinkDeployMode deployMode = Optional.ofNullable(flinkParameters.getDeployMode()).orElse(DEFAULT_DEPLOY_MODE);
        String flinkVersion = flinkParameters.getFlinkVersion();
        // build run command: -t yarn-per-job for flink >= 1.12, legacy -m yarn-cluster otherwise
        switch (deployMode) {
            case CLUSTER:
                if (FLINK_VERSION_AFTER_OR_EQUALS_1_12.equals(flinkVersion) || FLINK_VERSION_AFTER_OR_EQUALS_1_13.equals(flinkVersion)) {
                    args.add(FlinkConstants.FLINK_RUN); //run
                    args.add(FlinkConstants.FLINK_EXECUTION_TARGET); //-t
                    args.add(FlinkConstants.FLINK_YARN_PER_JOB); //yarn-per-job
                } else {
                    args.add(FlinkConstants.FLINK_RUN); //run
                    args.add(FlinkConstants.FLINK_RUN_MODE); //-m
                    args.add(FlinkConstants.FLINK_YARN_CLUSTER); //yarn-cluster
                }
                break;
            case APPLICATION:
                args.add(FlinkConstants.FLINK_RUN_APPLICATION); //run-application
                args.add(FlinkConstants.FLINK_EXECUTION_TARGET); //-t
                args.add(FlinkConstants.FLINK_YARN_APPLICATION); //yarn-application
                break;
            case LOCAL:
                args.add(FlinkConstants.FLINK_RUN); //run
                break;
        }

        String others = flinkParameters.getOthers();

        // build args: yarn resource options apply to CLUSTER and APPLICATION only
        switch (deployMode) {
            case CLUSTER:
            case APPLICATION:
                int slot = flinkParameters.getSlot();
                if (slot > 0) {
                    args.add(FlinkConstants.FLINK_YARN_SLOT);
                    args.add(String.format("%d", slot)); //-ys
                }

                String appName = flinkParameters.getAppName();
                if (StringUtils.isNotEmpty(appName)) { //-ynm
                    args.add(FlinkConstants.FLINK_APP_NAME);
                    args.add(ArgsUtils.escape(appName));
                }

                // judge flink version, the parameter -yn has removed from flink 1.10
                if (flinkVersion == null || FLINK_VERSION_BEFORE_1_10.equals(flinkVersion)) {
                    int taskManager = flinkParameters.getTaskManager();
                    if (taskManager > 0) { //-yn
                        args.add(FlinkConstants.FLINK_TASK_MANAGE);
                        args.add(String.format("%d", taskManager));
                    }
                }
                String jobManagerMemory = flinkParameters.getJobManagerMemory();
                if (StringUtils.isNotEmpty(jobManagerMemory)) {
                    args.add(FlinkConstants.FLINK_JOB_MANAGE_MEM);
                    args.add(jobManagerMemory); //-yjm
                }

                String taskManagerMemory = flinkParameters.getTaskManagerMemory();
                if (StringUtils.isNotEmpty(taskManagerMemory)) { // -ytm
                    args.add(FlinkConstants.FLINK_TASK_MANAGE_MEM);
                    args.add(taskManagerMemory);
                }

                // queue flag added only when not already present in "others"
                if (StringUtils.isEmpty(others) || !others.contains(FlinkConstants.FLINK_QUEUE)) {
                    String queue = flinkParameters.getQueue();
                    if (StringUtils.isNotEmpty(queue)) { // -yqu
                        args.add(FlinkConstants.FLINK_QUEUE);
                        args.add(queue);
                    }
                }
                break;
            case LOCAL:
                break;
        }

        int parallelism = flinkParameters.getParallelism();
        if (parallelism > 0) {
            args.add(FlinkConstants.FLINK_PARALLELISM);
            args.add(String.format("%d", parallelism)); // -p
        }

        // If the job is submitted in attached mode, perform a best-effort cluster shutdown when the CLI is terminated abruptly
        // The task status will be synchronized with the cluster job status
        args.add(FlinkConstants.FLINK_SHUTDOWN_ON_ATTACHED_EXIT); // -sae

        // user-supplied passthrough flags, e.g. -s -yqu -yat -yD -D
        if (StringUtils.isNotEmpty(others)) {
            args.add(others);
        }

        // -c <mainClass> applies to JVM programs only (PYTHON has no main class)
        ProgramType programType = flinkParameters.getProgramType();
        String mainClass = flinkParameters.getMainClass();
        if (programType != null && programType != ProgramType.PYTHON && StringUtils.isNotEmpty(mainClass)) {
            args.add(FlinkConstants.FLINK_MAIN_CLASS); //-c
            args.add(flinkParameters.getMainClass()); //main class
        }

        ResourceInfo mainJar = flinkParameters.getMainJar();
        if (mainJar != null) {
            // -py precedes the resource path for Python programs
            if(ProgramType.PYTHON == programType) {
                args.add(FlinkConstants.FLINK_PYTHON);
            }
            args.add(mainJar.getRes());
        }

        // main args go last, with DolphinScheduler parameter placeholders substituted
        String mainArgs = flinkParameters.getMainArgs();
        if (StringUtils.isNotEmpty(mainArgs)) {
            Map<String, Property> paramsMap = taskExecutionContext.getPrepareParamsMap();
            args.add(ParameterUtils.convertParameterPlaceholders(mainArgs, ParamUtils.convert(paramsMap)));
        }

        return args;
    }
}
@ -0,0 +1,32 @@ |
|||||||
|
/* |
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||||
|
* contributor license agreements. See the NOTICE file distributed with |
||||||
|
* this work for additional information regarding copyright ownership. |
||||||
|
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||||
|
* (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software |
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, |
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||||
|
* See the License for the specific language governing permissions and |
||||||
|
* limitations under the License. |
||||||
|
*/ |
||||||
|
|
||||||
|
package org.apache.dolphinscheduler.plugin.task.flink; |
||||||
|
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty; |
||||||
|
|
||||||
|
/**
 * Flink deploy mode as selected in the task definition. The @JsonProperty values
 * ("local"/"cluster"/"application") are the wire names used in the stored task
 * parameter JSON — keep them stable.
 */
public enum FlinkDeployMode {
    // run inside the local JVM / mini cluster
    @JsonProperty("local")
    LOCAL,
    // legacy yarn session/per-job submission (flink run -m yarn-cluster / -t yarn-per-job)
    @JsonProperty("cluster")
    CLUSTER,
    // yarn application mode (flink run-application -t yarn-application)
    @JsonProperty("application")
    APPLICATION
}
@ -0,0 +1,132 @@ |
|||||||
|
/* |
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||||
|
* contributor license agreements. See the NOTICE file distributed with |
||||||
|
* this work for additional information regarding copyright ownership. |
||||||
|
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||||
|
* (the "License"); you may not use this file except in compliance with |
||||||
|
* the License. You may obtain a copy of the License at |
||||||
|
* |
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
* |
||||||
|
* Unless required by applicable law or agreed to in writing, software |
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS, |
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||||
|
* See the License for the specific language governing permissions and |
||||||
|
* limitations under the License. |
||||||
|
*/ |
||||||
|
|
||||||
|
package org.apache.dolphinscheduler.plugin.task.flink; |
||||||
|
|
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; |
||||||
|
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; |
||||||
|
import org.junit.Assert; |
||||||
|
import org.junit.Test; |
||||||
|
|
||||||
|
import java.util.List; |
||||||
|
|
||||||
|
public class FlinkArgsUtilsTest { |
||||||
|
|
||||||
|
private String joinStringListWithSpace(List<String> stringList) { |
||||||
|
return String.join(" ", stringList); |
||||||
|
} |
||||||
|
|
||||||
|
private FlinkParameters buildTestFlinkParametersWithDeployMode(FlinkDeployMode flinkDeployMode) { |
||||||
|
FlinkParameters flinkParameters = new FlinkParameters(); |
||||||
|
flinkParameters.setProgramType(ProgramType.SCALA); |
||||||
|
flinkParameters.setDeployMode(flinkDeployMode); |
||||||
|
flinkParameters.setParallelism(4); |
||||||
|
ResourceInfo resourceInfo = new ResourceInfo(); |
||||||
|
resourceInfo.setId(1); |
||||||
|
resourceInfo.setResourceName("job"); |
||||||
|
resourceInfo.setRes("/opt/job.jar"); |
||||||
|
flinkParameters.setMainJar(resourceInfo); |
||||||
|
flinkParameters.setMainClass("org.example.Main"); |
||||||
|
flinkParameters.setSlot(4); |
||||||
|
flinkParameters.setAppName("demo-app-name"); |
||||||
|
flinkParameters.setJobManagerMemory("1024m"); |
||||||
|
flinkParameters.setTaskManagerMemory("1024m"); |
||||||
|
|
||||||
|
return flinkParameters; |
||||||
|
} |
||||||
|
private TaskExecutionContext buildTestTaskExecutionContext() { |
||||||
|
TaskExecutionContext taskExecutionContext = new TaskExecutionContext(); |
||||||
|
taskExecutionContext.setTaskAppId("app-id"); |
||||||
|
taskExecutionContext.setExecutePath("/tmp/execution"); |
||||||
|
return taskExecutionContext; |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
public void testRunJarInApplicationMode() throws Exception { |
||||||
|
FlinkParameters flinkParameters = buildTestFlinkParametersWithDeployMode(FlinkDeployMode.APPLICATION); |
||||||
|
List<String> commandLine = FlinkArgsUtils.buildCommandLine(buildTestTaskExecutionContext(), flinkParameters); |
||||||
|
|
||||||
|
Assert.assertEquals( |
||||||
|
"flink run-application -t yarn-application -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", |
||||||
|
joinStringListWithSpace(commandLine)); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
public void testRunJarInClusterMode() throws Exception { |
||||||
|
FlinkParameters flinkParameters = buildTestFlinkParametersWithDeployMode(FlinkDeployMode.CLUSTER); |
||||||
|
flinkParameters.setFlinkVersion("1.11"); |
||||||
|
List<String> commandLine1 = FlinkArgsUtils.buildCommandLine(buildTestTaskExecutionContext(), flinkParameters); |
||||||
|
|
||||||
|
Assert.assertEquals( |
||||||
|
"flink run -m yarn-cluster -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", |
||||||
|
joinStringListWithSpace(commandLine1)); |
||||||
|
|
||||||
|
flinkParameters.setFlinkVersion("<1.10"); |
||||||
|
List<String> commandLine2 = FlinkArgsUtils.buildCommandLine(buildTestTaskExecutionContext(), flinkParameters); |
||||||
|
|
||||||
|
Assert.assertEquals( |
||||||
|
"flink run -m yarn-cluster -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", |
||||||
|
joinStringListWithSpace(commandLine2)); |
||||||
|
|
||||||
|
flinkParameters.setFlinkVersion(">=1.12"); |
||||||
|
List<String> commandLine3 = FlinkArgsUtils.buildCommandLine(buildTestTaskExecutionContext(), flinkParameters); |
||||||
|
|
||||||
|
Assert.assertEquals( |
||||||
|
"flink run -t yarn-per-job -ys 4 -ynm demo-app-name -yjm 1024m -ytm 1024m -p 4 -sae -c org.example.Main /opt/job.jar", |
||||||
|
joinStringListWithSpace(commandLine3)); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
public void testRunJarInLocalMode() throws Exception { |
||||||
|
FlinkParameters flinkParameters = buildTestFlinkParametersWithDeployMode(FlinkDeployMode.LOCAL); |
||||||
|
List<String> commandLine = FlinkArgsUtils.buildCommandLine(buildTestTaskExecutionContext(), flinkParameters); |
||||||
|
|
||||||
|
Assert.assertEquals( |
||||||
|
"flink run -p 4 -sae -c org.example.Main /opt/job.jar", |
||||||
|
joinStringListWithSpace(commandLine)); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
public void testRunSql() throws Exception { |
||||||
|
FlinkParameters flinkParameters = buildTestFlinkParametersWithDeployMode(FlinkDeployMode.CLUSTER); |
||||||
|
flinkParameters.setProgramType(ProgramType.SQL); |
||||||
|
List<String> commandLine = FlinkArgsUtils.buildCommandLine(buildTestTaskExecutionContext(), flinkParameters); |
||||||
|
|
||||||
|
Assert.assertEquals("sql-client.sh -i /tmp/execution/app-id_init.sql -f /tmp/execution/app-id_node.sql", |
||||||
|
joinStringListWithSpace(commandLine)); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
public void testInitOptionsInClusterMode() throws Exception { |
||||||
|
List<String> initOptions = FlinkArgsUtils.buildInitOptionsForSql(buildTestFlinkParametersWithDeployMode(FlinkDeployMode.CLUSTER)); |
||||||
|
Assert.assertEquals(2, initOptions.size()); |
||||||
|
Assert.assertTrue(initOptions.contains("set execution.target=local")); |
||||||
|
Assert.assertTrue(initOptions.contains("set parallelism.default=4")); |
||||||
|
} |
||||||
|
|
||||||
|
@Test |
||||||
|
public void testInitOptionsInApplicationMode() throws Exception { |
||||||
|
List<String> initOptions = FlinkArgsUtils.buildInitOptionsForSql(buildTestFlinkParametersWithDeployMode(FlinkDeployMode.APPLICATION)); |
||||||
|
Assert.assertEquals(6, initOptions.size()); |
||||||
|
Assert.assertTrue(initOptions.contains("set execution.target=yarn-per-job")); |
||||||
|
Assert.assertTrue(initOptions.contains("set taskmanager.numberOfTaskSlots=4")); |
||||||
|
Assert.assertTrue(initOptions.contains("set yarn.application.name=demo-app-name")); |
||||||
|
Assert.assertTrue(initOptions.contains("set jobmanager.memory.process.size=1024m")); |
||||||
|
Assert.assertTrue(initOptions.contains("set taskmanager.memory.process.size=1024m")); |
||||||
|
Assert.assertTrue(initOptions.contains("set parallelism.default=4")); |
||||||
|
} |
||||||
|
} |
@ -1,116 +0,0 @@ |
|||||||
/* |
|
||||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
|
||||||
* contributor license agreements. See the NOTICE file distributed with |
|
||||||
* this work for additional information regarding copyright ownership. |
|
||||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
|
||||||
* (the "License"); you may not use this file except in compliance with |
|
||||||
* the License. You may obtain a copy of the License at |
|
||||||
* |
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
* |
|
||||||
* Unless required by applicable law or agreed to in writing, software |
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS, |
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
|
||||||
* See the License for the specific language governing permissions and |
|
||||||
* limitations under the License. |
|
||||||
*/ |
|
||||||
|
|
||||||
package org.apache.dolphinscheduler.plugin.task.flink; |
|
||||||
|
|
||||||
import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext; |
|
||||||
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo; |
|
||||||
import org.apache.dolphinscheduler.spi.utils.JSONUtils; |
|
||||||
|
|
||||||
import org.junit.Assert; |
|
||||||
import org.junit.Test; |
|
||||||
import org.junit.runner.RunWith; |
|
||||||
import org.powermock.api.mockito.PowerMockito; |
|
||||||
import org.powermock.core.classloader.annotations.PowerMockIgnore; |
|
||||||
import org.powermock.core.classloader.annotations.PrepareForTest; |
|
||||||
import org.powermock.modules.junit4.PowerMockRunner; |
|
||||||
|
|
||||||
import java.util.Collections; |
|
||||||
|
|
||||||
import static org.powermock.api.mockito.PowerMockito.spy; |
|
||||||
import static org.powermock.api.mockito.PowerMockito.when; |
|
||||||
|
|
||||||
@RunWith(PowerMockRunner.class)
@PrepareForTest({
        JSONUtils.class
})
@PowerMockIgnore({"javax.*"})
/**
 * PowerMock-based tests for FlinkTask.buildCommand(): the task parameters are fed in
 * as JSON through a mocked TaskExecutionContext and the resulting command string is
 * asserted verbatim.
 */
public class FlinkTaskTest {

    @Test
    public void testBuildCommand() {
        String parameters = buildFlinkParameters();
        // only getTaskParams()/getQueue() are exercised by init()/buildCommand() here
        TaskExecutionContext taskExecutionContext = PowerMockito.mock(TaskExecutionContext.class);
        when(taskExecutionContext.getTaskParams()).thenReturn(parameters);
        when(taskExecutionContext.getQueue()).thenReturn("default");
        FlinkTask flinkTask = spy(new FlinkTask(taskExecutionContext));
        flinkTask.init();
        // expected legacy (-m yarn-cluster) submission for flink ">=1.10"
        Assert.assertEquals(
                "flink run " +
                        "-m yarn-cluster " +
                        "-ys 1 " +
                        "-ynm TopSpeedWindowing " +
                        "-yjm 1G " +
                        "-ytm 1G " +
                        "-yqu default " +
                        "-p 2 -sae " +
                        "-c org.apache.flink.streaming.examples.windowing.TopSpeedWindowing " +
                        "TopSpeedWindowing.jar", flinkTask.buildCommand());
    }

    @Test
    public void testBuildCommandWithFlinkSql() {
        String parameters = buildFlinkParametersWithFlinkSql();
        TaskExecutionContext taskExecutionContext = PowerMockito.mock(TaskExecutionContext.class);
        when(taskExecutionContext.getTaskParams()).thenReturn(parameters);
        // execute path + task app id determine the generated -i/-f script paths
        when(taskExecutionContext.getExecutePath()).thenReturn("/tmp");
        when(taskExecutionContext.getTaskAppId()).thenReturn("4483");
        FlinkTask flinkTask = spy(new FlinkTask(taskExecutionContext));
        flinkTask.init();
        Assert.assertEquals("sql-client.sh -i /tmp/4483_init.sql -f /tmp/4483_node.sql", flinkTask.buildCommand());
    }

    /** JSON fixture for a JAVA jar task submitted in cluster mode. */
    private String buildFlinkParameters() {
        ResourceInfo resource = new ResourceInfo();
        resource.setId(2);
        resource.setResourceName("/TopSpeedWindowing.jar");
        resource.setRes("TopSpeedWindowing.jar");

        FlinkParameters parameters = new FlinkParameters();
        parameters.setLocalParams(Collections.emptyList());
        parameters.setResourceList(Collections.emptyList());
        parameters.setProgramType(ProgramType.JAVA);
        parameters.setMainClass("org.apache.flink.streaming.examples.windowing.TopSpeedWindowing");
        parameters.setMainJar(resource);
        parameters.setDeployMode("cluster");
        parameters.setAppName("TopSpeedWindowing");
        parameters.setFlinkVersion(">=1.10");
        parameters.setJobManagerMemory("1G");
        parameters.setTaskManagerMemory("1G");
        parameters.setSlot(1);
        parameters.setTaskManager(2);
        parameters.setParallelism(2);
        return JSONUtils.toJsonString(parameters);
    }

    /** JSON fixture for a SQL task. */
    private String buildFlinkParametersWithFlinkSql() {
        FlinkParameters parameters = new FlinkParameters();
        parameters.setLocalParams(Collections.emptyList());
        parameters.setInitScript("set sql-client.execution.result-mode=tableau;");
        // NOTE(review): "selcet" looks like a typo in this fixture SQL; the content is never
        // parsed by buildCommand(), so it is harmless here — verify before "fixing" it.
        parameters.setRawScript("selcet 11111;");
        parameters.setProgramType(ProgramType.SQL);
        parameters.setMainClass("");
        parameters.setDeployMode("cluster");
        parameters.setAppName("FlinkSQL");
        parameters.setOthers("");
        parameters.setJobManagerMemory("1G");
        parameters.setTaskManagerMemory("1G");
        parameters.setParallelism(1);
        parameters.setFlinkVersion(">=1.10");
        return JSONUtils.toJsonString(parameters);
    }
}
|
Loading…
Reference in new issue