[refactor] flink sql

* Refactor DolphinScheduler Flink SQL task
* Update docs/docs/en/guide/task/flink.md
* Update docs/docs/zh/guide/task/flink.md
* Prettier front-end code

Co-authored-by: Jiajie Zhong <zhongjiajie955@gmail.com>
Co-authored-by: devosend <devosend@gmail.com>
Dannila authored 3 years ago, committed via GitHub.
16 changed files with 580 additions and 187 deletions.
New image file added (973 KiB).
@@ -1,136 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.flink;

import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
import org.apache.dolphinscheduler.plugin.task.api.utils.ArgsUtils;
import org.apache.dolphinscheduler.spi.utils.StringUtils;

import java.util.ArrayList;
import java.util.List;

/**
 * flink args utils
 */
public class FlinkArgsUtils {

    private FlinkArgsUtils() {
        throw new IllegalStateException("Utility class");
    }

    private static final String LOCAL_DEPLOY_MODE = "local";
    private static final String FLINK_VERSION_BEFORE_1_10 = "<1.10";

    /**
     * build args
     *
     * @param param flink parameters
     * @return argument list
     */
    public static List<String> buildArgs(FlinkParameters param) {
        List<String> args = new ArrayList<>();

        String deployMode = "cluster";
        String tmpDeployMode = param.getDeployMode();
        if (StringUtils.isNotEmpty(tmpDeployMode)) {
            deployMode = tmpDeployMode;
        }
        String others = param.getOthers();
        if (!LOCAL_DEPLOY_MODE.equals(deployMode)) {
            args.add(FlinkConstants.FLINK_RUN_MODE); // -m
            args.add(FlinkConstants.FLINK_YARN_CLUSTER); // yarn-cluster

            int slot = param.getSlot();
            if (slot > 0) {
                args.add(FlinkConstants.FLINK_YARN_SLOT);
                args.add(String.format("%d", slot)); // -ys
            }

            String appName = param.getAppName();
            if (StringUtils.isNotEmpty(appName)) { // -ynm
                args.add(FlinkConstants.FLINK_APP_NAME);
                args.add(ArgsUtils.escape(appName));
            }

            // Check the Flink version: the -yn parameter was removed in Flink 1.10
            String flinkVersion = param.getFlinkVersion();
            if (flinkVersion == null || FLINK_VERSION_BEFORE_1_10.equals(flinkVersion)) {
                int taskManager = param.getTaskManager();
                if (taskManager > 0) { // -yn
                    args.add(FlinkConstants.FLINK_TASK_MANAGE);
                    args.add(String.format("%d", taskManager));
                }
            }
            String jobManagerMemory = param.getJobManagerMemory();
            if (StringUtils.isNotEmpty(jobManagerMemory)) {
                args.add(FlinkConstants.FLINK_JOB_MANAGE_MEM);
                args.add(jobManagerMemory); // -yjm
            }

            String taskManagerMemory = param.getTaskManagerMemory();
            if (StringUtils.isNotEmpty(taskManagerMemory)) { // -ytm
                args.add(FlinkConstants.FLINK_TASK_MANAGE_MEM);
                args.add(taskManagerMemory);
            }

            if (StringUtils.isEmpty(others) || !others.contains(FlinkConstants.FLINK_QUEUE)) {
                String queue = param.getQueue();
                if (StringUtils.isNotEmpty(queue)) { // -yqu
                    args.add(FlinkConstants.FLINK_QUEUE);
                    args.add(queue);
                }
            }
        }

        int parallelism = param.getParallelism();
        if (parallelism > 0) {
            args.add(FlinkConstants.FLINK_PARALLELISM);
            args.add(String.format("%d", parallelism)); // -p
        }

        // If the job is submitted in attached mode, perform a best-effort cluster shutdown when the CLI is terminated abruptly.
        // The task status will be synchronized with the cluster job status.
        args.add(FlinkConstants.FLINK_SHUTDOWN_ON_ATTACHED_EXIT); // -sae

        // -s -yqu -yat -yD -D
        if (StringUtils.isNotEmpty(others)) {
            args.add(others);
        }

        ProgramType programType = param.getProgramType();
        String mainClass = param.getMainClass();
        if (programType != null && programType != ProgramType.PYTHON && StringUtils.isNotEmpty(mainClass)) {
            args.add(FlinkConstants.FLINK_MAIN_CLASS); // -c
            args.add(param.getMainClass()); // main class
        }

        ResourceInfo mainJar = param.getMainJar();
        if (mainJar != null) {
            args.add(mainJar.getRes());
        }

        String mainArgs = param.getMainArgs();
        if (StringUtils.isNotEmpty(mainArgs)) {
            args.add(mainArgs);
        }

        return args;
    }

}
@@ -0,0 +1,118 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.flink;

import org.apache.commons.lang.StringUtils;

import org.apache.dolphinscheduler.plugin.task.api.TaskExecutionContext;
import org.apache.dolphinscheduler.plugin.task.api.model.ResourceInfo;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import java.util.Collections;

import static org.powermock.api.mockito.PowerMockito.spy;
import static org.powermock.api.mockito.PowerMockito.when;

@RunWith(PowerMockRunner.class)
@PrepareForTest({
        JSONUtils.class
})
@PowerMockIgnore({"javax.*"})
public class FlinkTaskTest {

    @Test
    public void testBuildCommand() {
        String parameters = buildFlinkParameters();
        TaskExecutionContext taskExecutionContext = PowerMockito.mock(TaskExecutionContext.class);
        when(taskExecutionContext.getTaskParams()).thenReturn(parameters);
        when(taskExecutionContext.getQueue()).thenReturn("default");
        FlinkTask flinkTask = spy(new FlinkTask(taskExecutionContext));
        flinkTask.init();
        Assert.assertEquals(
                "flink run " +
                        "-m yarn-cluster " +
                        "-ys 1 " +
                        "-ynm TopSpeedWindowing " +
                        "-yjm 1G " +
                        "-ytm 1G " +
                        "-yqu default " +
                        "-p 2 -sae " +
                        "-c org.apache.flink.streaming.examples.windowing.TopSpeedWindowing " +
                        "TopSpeedWindowing.jar", flinkTask.buildCommand());
    }

    @Test
    public void testBuildCommandWithFlinkSql() {
        String parameters = buildFlinkParametersWithFlinkSql();
        TaskExecutionContext taskExecutionContext = PowerMockito.mock(TaskExecutionContext.class);
        when(taskExecutionContext.getTaskParams()).thenReturn(parameters);
        when(taskExecutionContext.getExecutePath()).thenReturn("/tmp");
        when(taskExecutionContext.getTaskAppId()).thenReturn("4483");
        FlinkTask flinkTask = spy(new FlinkTask(taskExecutionContext));
        flinkTask.init();
        Assert.assertEquals("sql-client.sh -i /tmp/4483_init.sql -f /tmp/4483_node.sql", flinkTask.buildCommand());
    }

    private String buildFlinkParameters() {
        ResourceInfo resource = new ResourceInfo();
        resource.setId(2);
        resource.setResourceName("/TopSpeedWindowing.jar");
        resource.setRes("TopSpeedWindowing.jar");

        FlinkParameters parameters = new FlinkParameters();
        parameters.setLocalParams(Collections.emptyList());
        parameters.setResourceList(Collections.emptyList());
        parameters.setProgramType(ProgramType.JAVA);
        parameters.setMainClass("org.apache.flink.streaming.examples.windowing.TopSpeedWindowing");
        parameters.setMainJar(resource);
        parameters.setDeployMode("cluster");
        parameters.setAppName("TopSpeedWindowing");
        parameters.setFlinkVersion(">=1.10");
        parameters.setJobManagerMemory("1G");
        parameters.setTaskManagerMemory("1G");
        parameters.setSlot(1);
        parameters.setTaskManager(2);
        parameters.setParallelism(2);
        return JSONUtils.toJsonString(parameters);
    }

    private String buildFlinkParametersWithFlinkSql() {
        FlinkParameters parameters = new FlinkParameters();
        parameters.setLocalParams(Collections.emptyList());
        parameters.setInitScript("set sql-client.execution.result-mode=tableau;");
        parameters.setRawScript("select 11111;");
        parameters.setProgramType(ProgramType.SQL);
        parameters.setMainClass(StringUtils.EMPTY);
        parameters.setDeployMode("cluster");
        parameters.setAppName("FlinkSQL");
        parameters.setOthers(StringUtils.EMPTY);
        parameters.setJobManagerMemory("1G");
        parameters.setTaskManagerMemory("1G");
        parameters.setParallelism(1);
        parameters.setFlinkVersion(">=1.10");
        return JSONUtils.toJsonString(parameters);
    }
}