diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java index 35822dc615..010cfc1a5d 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java @@ -47,8 +47,7 @@ import java.util.Objects; import javax.crypto.Mac; import javax.crypto.spec.SecretKeySpec; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** *

@@ -56,9 +55,9 @@ import org.slf4j.LoggerFactory; * https://open.dingtalk.com/document/robots/customize-robot-security-settings *

*/ +@Slf4j public final class DingTalkSender { - private static final Logger logger = LoggerFactory.getLogger(DingTalkSender.class); private final String url; private final String keyword; private final String secret; @@ -127,13 +126,13 @@ public final class DingTalkSender { if (null == result) { alertResult.setMessage("send ding talk msg error"); - logger.info("send ding talk msg error,ding talk server resp is null"); + log.info("send ding talk msg error,ding talk server resp is null"); return alertResult; } DingTalkSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, DingTalkSendMsgResponse.class); if (null == sendMsgResponse) { alertResult.setMessage("send ding talk msg fail"); - logger.info("send ding talk msg error,resp error"); + log.info("send ding talk msg error,resp error"); return alertResult; } if (sendMsgResponse.errcode == 0) { @@ -142,7 +141,7 @@ public final class DingTalkSender { return alertResult; } alertResult.setMessage(String.format("alert send ding talk msg error : %s", sendMsgResponse.getErrmsg())); - logger.info("alert send ding talk msg error : {}", sendMsgResponse.getErrmsg()); + log.info("alert send ding talk msg error : {}", sendMsgResponse.getErrmsg()); return alertResult; } @@ -159,7 +158,7 @@ public final class DingTalkSender { String resp = sendMsg(title, content); return checkSendDingTalkSendMsgResult(resp); } catch (Exception e) { - logger.info("send ding talk alert msg exception : {}", e.getMessage()); + log.info("send ding talk alert msg exception : {}", e.getMessage()); alertResult = new AlertResult(); alertResult.setStatus("false"); alertResult.setMessage("send ding talk alert fail."); @@ -193,7 +192,7 @@ public final class DingTalkSender { } finally { response.close(); } - logger.info("Ding Talk send msg :{}, resp: {}", msg, resp); + log.info("Ding Talk send msg :{}, resp: {}", msg, resp); return resp; } finally { httpClient.close(); @@ -320,7 +319,7 @@ public final class DingTalkSender { byte[] signData = 
mac.doFinal(stringToSign.getBytes("UTF-8")); sign = URLEncoder.encode(new String(Base64.encodeBase64(signData)), "UTF-8"); } catch (Exception e) { - logger.error("generate sign error, message:{}", e); + log.error("generate sign error, message:{}", e); } return url + "&timestamp=" + timestamp + "&sign=" + sign; } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java index 663d251ea3..5728461ae6 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java @@ -24,13 +24,11 @@ import org.apache.dolphinscheduler.alert.api.AlertResult; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public final class EmailAlertChannel implements AlertChannel { - private static final Logger logger = LoggerFactory.getLogger(EmailAlertChannel.class); - @Override public AlertResult process(AlertInfo info) { @@ -48,18 +46,18 @@ public final class EmailAlertChannel implements AlertChannel { alertResult = new AlertResult(); alertResult.setStatus("false"); alertResult.setMessage("alert send error."); - logger.info("alert send error : {}", alertResult.getMessage()); + log.info("alert send error : {}", alertResult.getMessage()); return alertResult; } flag = Boolean.parseBoolean(String.valueOf(alertResult.getStatus())); if (flag) { - logger.info("alert send success"); + log.info("alert send success"); alertResult.setMessage("email send success."); } 
else { alertResult.setMessage("alert send error."); - logger.info("alert send error : {}", alertResult.getMessage()); + log.info("alert send error : {}", alertResult.getMessage()); } return alertResult; diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java index 5d76408b03..83784b1b95 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java @@ -35,14 +35,12 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public final class ExcelUtils { private static final int XLSX_WINDOW_ROW = 10000; - private static final Logger logger = LoggerFactory.getLogger(ExcelUtils.class); - private ExcelUtils() { throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); } @@ -57,14 +55,14 @@ public final class ExcelUtils { public static void genExcelFile(String content, String title, String xlsFilePath) { File file = new File(xlsFilePath); if (!file.exists() && !file.mkdirs()) { - logger.error("Create xlsx directory error, path:{}", xlsFilePath); + log.error("Create xlsx directory error, path:{}", xlsFilePath); throw new AlertEmailException("Create xlsx directory error"); } List itemsList = JSONUtils.toList(content, LinkedHashMap.class); if (CollectionUtils.isEmpty(itemsList)) { - logger.error("itemsList is null"); + log.error("itemsList is null"); throw new 
AlertEmailException("itemsList is null"); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java index 29c9724f58..218b1cf16f 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java @@ -54,15 +54,13 @@ import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; import javax.mail.internet.MimeUtility; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.sun.mail.smtp.SMTPProvider; +@Slf4j public final class MailSender { - private static final Logger logger = LoggerFactory.getLogger(MailSender.class); - private final List receivers; private final List receiverCcs; private final String mailProtocol = "SMTP"; @@ -390,12 +388,12 @@ public final class MailSender { public void deleteFile(File file) { if (file.exists()) { if (file.delete()) { - logger.info("delete success: {}", file.getAbsolutePath()); + log.info("delete success: {}", file.getAbsolutePath()); } else { - logger.info("delete fail: {}", file.getAbsolutePath()); + log.info("delete fail: {}", file.getAbsolutePath()); } } else { - logger.info("file not exists: {}", file.getAbsolutePath()); + log.info("file not exists: {}", file.getAbsolutePath()); } } @@ -403,7 +401,7 @@ public final class MailSender { * handle exception */ private void handleException(AlertResult alertResult, Exception e) { - logger.error("Send email to {} failed", receivers, e); + log.error("Send email to {} failed", 
receivers, e); alertResult.setMessage("Send email to {" + String.join(",", receivers) + "} failed," + e.toString()); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java index d53b1a9867..1c364dde4b 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java @@ -32,8 +32,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.boot.configurationprocessor.json.JSONArray; import org.springframework.boot.configurationprocessor.json.JSONException; import org.springframework.boot.configurationprocessor.json.JSONTokener; @@ -42,10 +42,9 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; +@Slf4j public class DefaultHTMLTemplate implements AlertTemplate { - public static final Logger logger = LoggerFactory.getLogger(DefaultHTMLTemplate.class); - @Override public String getMessageFromTemplate(String content, ShowType showType, boolean showAll) { @@ -128,7 +127,7 @@ public class DefaultHTMLTemplate implements AlertTemplate { content = JSONUtils.toJsonString(Collections.singletonList(jsonNodes)); } } catch (JSONException e) { - logger.error("alert content is null"); + log.error("alert content is null"); } 
ArrayNode list = JSONUtils.parseArray(content); StringBuilder contents = new StringBuilder(100); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java index a56e2e88db..14a1d63ff0 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java @@ -36,14 +36,13 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.annotation.JsonProperty; +@Slf4j public final class FeiShuSender { - private static final Logger logger = LoggerFactory.getLogger(FeiShuSender.class); private final String url; private final Boolean enableProxy; @@ -84,14 +83,14 @@ public final class FeiShuSender { if (org.apache.commons.lang3.StringUtils.isBlank(result)) { alertResult.setMessage("send fei shu msg error"); - logger.info("send fei shu msg error,fei shu server resp is null"); + log.info("send fei shu msg error,fei shu server resp is null"); return alertResult; } FeiShuSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, FeiShuSendMsgResponse.class); if (null == sendMsgResponse) { alertResult.setMessage("send fei shu msg fail"); - logger.info("send fei shu msg error,resp error"); + log.info("send fei shu msg error,resp error"); return alertResult; } if (sendMsgResponse.statusCode == 0) { @@ -100,7 +99,7 @@ public final class FeiShuSender { return alertResult; } 
alertResult.setMessage(String.format("alert send fei shu msg error : %s", sendMsgResponse.getStatusMessage())); - logger.info("alert send fei shu msg error : {} ,Extra : {} ", sendMsgResponse.getStatusMessage(), + log.info("alert send fei shu msg error : {} ,Extra : {} ", sendMsgResponse.getStatusMessage(), sendMsgResponse.getExtra()); return alertResult; } @@ -134,7 +133,7 @@ public final class FeiShuSender { String resp = sendMsg(alertData); return checkSendFeiShuSendMsgResult(resp); } catch (Exception e) { - logger.info("send fei shu alert msg exception : {}", e.getMessage()); + log.info("send fei shu alert msg exception : {}", e.getMessage()); alertResult = new AlertResult(); alertResult.setStatus("false"); alertResult.setMessage("send fei shu alert fail."); @@ -157,7 +156,7 @@ public final class FeiShuSender { int statusCode = response.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { - logger.error("send feishu message error, return http status code: {} ", statusCode); + log.error("send feishu message error, return http status code: {} ", statusCode); } String resp; try { @@ -167,7 +166,7 @@ public final class FeiShuSender { } finally { response.close(); } - logger.info("Fei Shu send title :{} ,content :{}, resp: {}", alertData.getTitle(), alertData.getContent(), + log.info("Fei Shu send title :{} ,content :{}, resp: {}", alertData.getTitle(), alertData.getContent(), resp); return resp; } finally { diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java index cb2170ec05..c46f8cded5 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java @@ -39,14 +39,13 @@ import java.net.URL; import java.util.HashMap; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.databind.node.ObjectNode; +@Slf4j public final class HttpSender { - private static final Logger logger = LoggerFactory.getLogger(HttpSender.class); private static final String URL_SPLICE_CHAR = "?"; /** * request type post @@ -96,7 +95,7 @@ public final class HttpSender { alertResult.setStatus("true"); alertResult.setMessage(resp); } catch (Exception e) { - logger.error("send http alert msg exception : {}", e.getMessage()); + log.error("send http alert msg exception : {}", e.getMessage()); alertResult.setStatus("false"); alertResult.setMessage("send http request alert fail."); } @@ -170,7 +169,7 @@ public final class HttpSender { StringEntity entity = new StringEntity(JSONUtils.toJsonString(objectNode), DEFAULT_CHARSET); ((HttpPost) httpRequest).setEntity(entity); } catch (Exception e) { - logger.error("send http alert msg exception : {}", e.getMessage()); + log.error("send http alert msg exception : {}", e.getMessage()); } } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java index a328d16ab9..fb586cd255 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java @@ -34,14 +34,13 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import org.slf4j.Logger; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Preconditions; +@Slf4j public final class PagerDutySender { - private static final Logger log = org.slf4j.LoggerFactory.getLogger(PagerDutySender.class); - private final String integrationKey; public PagerDutySender(Map config) { diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java index 6ea9a8db92..b629ff5030 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java @@ -19,13 +19,11 @@ package org.apache.dolphinscheduler.plugin.alert.script; import java.io.IOException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public final class ProcessUtils { - private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); - private ProcessUtils() { throw new UnsupportedOperationException("This is a utility class and cannot be instantiated"); } @@ -50,7 +48,7 @@ public final class ProcessUtils { errorStreamGobbler.start(); return process.waitFor(); } catch (IOException | InterruptedException e) { - logger.error("execute alert script error {}", e.getMessage()); + log.error("execute 
alert script error {}", e.getMessage()); Thread.currentThread().interrupt(); } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java index 531290ff85..cc44d6fad3 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java @@ -24,12 +24,11 @@ import org.apache.commons.lang3.StringUtils; import java.io.File; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public final class ScriptSender { - private static final Logger logger = LoggerFactory.getLogger(ScriptSender.class); private static final String ALERT_TITLE_OPTION = " -t "; private static final String ALERT_CONTENT_OPTION = " -c "; private static final String ALERT_USER_PARAMS_OPTION = " -p "; @@ -57,7 +56,7 @@ public final class ScriptSender { // If it is another type of alarm script can be added here, such as python alertResult.setStatus("false"); - logger.error("script type error: {}", scriptType); + log.error("script type error: {}", scriptType); alertResult.setMessage("script type error : " + scriptType); return alertResult; } @@ -73,30 +72,30 @@ public final class ScriptSender { File shellScriptFile = new File(scriptPath); // validate existence if (!shellScriptFile.exists()) { - logger.error("shell script not exist : {}", scriptPath); + log.error("shell script not exist : {}", scriptPath); alertResult.setMessage("shell script not exist : " + scriptPath); return 
alertResult; } // validate is file if (!shellScriptFile.isFile()) { - logger.error("shell script is not a file : {}", scriptPath); + log.error("shell script is not a file : {}", scriptPath); alertResult.setMessage("shell script is not a file : " + scriptPath); return alertResult; } // avoid command injection (RCE vulnerability) if (userParams.contains("'")) { - logger.error("shell script illegal user params : {}", userParams); + log.error("shell script illegal user params : {}", userParams); alertResult.setMessage("shell script illegal user params : " + userParams); return alertResult; } if (title.contains("'")) { - logger.error("shell script illegal title : {}", title); + log.error("shell script illegal title : {}", title); alertResult.setMessage("shell script illegal title : " + title); return alertResult; } if (content.contains("'")) { - logger.error("shell script illegal content : {}", content); + log.error("shell script illegal content : {}", content); alertResult.setMessage("shell script illegal content : " + content); return alertResult; } @@ -111,7 +110,7 @@ public final class ScriptSender { return alertResult; } alertResult.setMessage("send script alert msg error,exitCode is " + exitCode); - logger.info("send script alert msg error,exitCode is {}", exitCode); + log.info("send script alert msg error,exitCode is {}", exitCode); return alertResult; } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java index e2765a8a40..1ba2fd6692 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java +++ 
b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java @@ -22,13 +22,11 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public final class StreamGobbler extends Thread { - private static final Logger logger = LoggerFactory.getLogger(StreamGobbler.class); - private final InputStream inputStream; StreamGobbler(InputStream inputStream) { @@ -48,16 +46,16 @@ public final class StreamGobbler extends Thread { output.append(System.getProperty("line.separator")); } if (output.length() > 0) { - logger.info("out put msg is{}", output); + log.info("out put msg is{}", output); } } catch (IOException e) { - logger.error("I/O error occurs {}", e.getMessage()); + log.error("I/O error occurs {}", e.getMessage()); } finally { try { inputBufferReader.close(); inputStreamReader.close(); } catch (IOException e) { - logger.error("I/O error occurs {}", e.getMessage()); + log.error("I/O error occurs {}", e.getMessage()); } } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java index 5a0e7caf65..aeb1782452 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java @@ -38,15 +38,13 @@ import java.util.Map.Entry; import java.util.Objects; import java.util.stream.Collectors; 
-import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Preconditions; +@Slf4j public final class SlackSender { - private static final Logger logger = LoggerFactory.getLogger(SlackSender.class); - private final String webHookUrl; private final String botName; @@ -86,7 +84,7 @@ public final class SlackSender { HttpEntity entity = response.getEntity(); return EntityUtils.toString(entity, "UTF-8"); } catch (Exception e) { - logger.error("Send message to slack error.", e); + log.error("Send message to slack error.", e); return "System Exception"; } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java index c3b66c66ca..cc85c8ea21 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java @@ -43,15 +43,13 @@ import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.annotation.JsonProperty; +@Slf4j public final class TelegramSender { - private static final Logger logger = LoggerFactory.getLogger(TelegramSender.class); - private static final String BOT_TOKEN_REGEX = "{botToken}"; private final String chatId; @@ -104,7 +102,7 @@ public final class TelegramSender { String resp = sendInvoke(alertData.getTitle(), alertData.getContent()); result = 
parseRespToResult(resp); } catch (Exception e) { - logger.warn("send telegram alert msg exception : {}", e.getMessage()); + log.warn("send telegram alert msg exception : {}", e.getMessage()); result = new AlertResult(); result.setStatus("false"); result.setMessage(String.format("send telegram alert fail. %s", e.getMessage())); @@ -159,7 +157,7 @@ public final class TelegramSender { } finally { response.close(); } - logger.info("Telegram send title :{},content : {}, resp: {}", title, content, resp); + log.info("Telegram send title :{},content : {}, resp: {}", title, content, resp); return resp; } finally { httpClient.close(); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java index 9b64fbdc96..2d8349e9a8 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java @@ -34,14 +34,13 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import org.slf4j.Logger; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Preconditions; +@Slf4j public final class WebexTeamsSender { - private static final Logger log = org.slf4j.LoggerFactory.getLogger(WebexTeamsSender.class); - private final String botAccessToken; private final String roomId; private final String toPersonId; diff --git 
a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java index 7e4e4f1719..67360aec9c 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java @@ -44,12 +44,11 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public final class WeChatSender { - private static final Logger logger = LoggerFactory.getLogger(WeChatSender.class); private static final String MUST_NOT_NULL = " must not null"; private static final String ALERT_STATUS = "false"; private static final String AGENT_ID_REG_EXP = "{agentId}"; @@ -93,7 +92,7 @@ public final class WeChatSender { } finally { response.close(); } - logger.info("Enterprise WeChat send [{}], param:{}, resp:{}", + log.info("Enterprise WeChat send [{}], param:{}, resp:{}", url, data, resp); return resp; } @@ -110,7 +109,7 @@ public final class WeChatSender { if (StringUtils.isNotEmpty(content)) { List mapItemsList = JSONUtils.toList(content, LinkedHashMap.class); if (null == mapItemsList || mapItemsList.isEmpty()) { - logger.error("itemsList is null"); + log.error("itemsList is null"); throw new RuntimeException("itemsList is null"); } @@ -175,13 +174,13 @@ public final class WeChatSender { if (null == result) { alertResult.setMessage("we chat send fail"); - logger.info("send we chat msg error,resp is null"); + log.info("send we chat msg error,resp is 
null"); return alertResult; } WeChatSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, WeChatSendMsgResponse.class); if (null == sendMsgResponse) { alertResult.setMessage("we chat send fail"); - logger.info("send we chat msg error,resp error"); + log.info("send we chat msg error,resp error"); return alertResult; } if (sendMsgResponse.errcode == 0) { @@ -229,7 +228,7 @@ public final class WeChatSender { try { return checkWeChatSendMsgResult(post(enterpriseWeChatPushUrlReplace, msgJson)); } catch (Exception e) { - logger.info("send we chat alert msg exception : {}", e.getMessage()); + log.info("send we chat alert msg exception : {}", e.getMessage()); alertResult = new AlertResult(); alertResult.setMessage("send we chat alert fail"); alertResult.setStatus(ALERT_STATUS); @@ -250,7 +249,7 @@ public final class WeChatSender { try { return get(weChatTokenUrlReplace); } catch (IOException e) { - logger.info("we chat alert get token error{}", e.getMessage()); + log.info("we chat alert get token error{}", e.getMessage()); } return null; } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java index 78ff13d987..047df6e3b0 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java @@ -37,17 +37,16 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.boot.context.event.ApplicationReadyEvent; import org.springframework.context.event.EventListener; import org.springframework.stereotype.Component; @Component 
+@Slf4j public final class AlertPluginManager { - private static final Logger logger = LoggerFactory.getLogger(AlertPluginManager.class); - private final PluginDao pluginDao; public AlertPluginManager(PluginDao pluginDao) { @@ -79,11 +78,11 @@ public final class AlertPluginManager { String name = entry.getKey(); AlertChannelFactory factory = entry.getValue(); - logger.info("Registering alert plugin: {} - {}", name, factory.getClass()); + log.info("Registering alert plugin: {} - {}", name, factory.getClass()); final AlertChannel alertChannel = factory.create(); - logger.info("Registered alert plugin: {} - {}", name, factory.getClass()); + log.info("Registered alert plugin: {} - {}", name, factory.getClass()); final List params = new ArrayList<>(factory.params()); params.add(0, warningTypeParams); diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java index 930d9180be..a034e9586a 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java @@ -26,17 +26,16 @@ import org.apache.dolphinscheduler.remote.command.alert.AlertSendResponseCommand import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.remote.utils.JsonSerializer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Component; import io.netty.channel.Channel; @Component +@Slf4j public final class AlertRequestProcessor implements NettyRequestProcessor { - private static final Logger logger = LoggerFactory.getLogger(AlertRequestProcessor.class); - private 
final AlertSenderService alertSenderService; public AlertRequestProcessor(AlertSenderService alertSenderService) { @@ -51,7 +50,7 @@ public final class AlertRequestProcessor implements NettyRequestProcessor { AlertSendRequestCommand alertSendRequestCommand = JsonSerializer.deserialize( command.getBody(), AlertSendRequestCommand.class); - logger.info("Received command : {}", alertSendRequestCommand); + log.info("Received command : {}", alertSendRequestCommand); AlertSendResponseCommand alertSendResponseCommand = alertSenderService.syncHandler( alertSendRequestCommand.getGroupId(), diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSenderService.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSenderService.java index 786a5d2a1c..8309ae9ae3 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSenderService.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSenderService.java @@ -49,17 +49,16 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Service; import com.google.common.collect.Lists; @Service +@Slf4j public final class AlertSenderService extends Thread { - private static final Logger logger = LoggerFactory.getLogger(AlertSenderService.class); - private final AlertDao alertDao; private final AlertPluginManager alertPluginManager; private final AlertConfig alertConfig; @@ -78,23 +77,23 @@ public final class AlertSenderService extends Thread { @Override public void run() { - logger.info("Alert sender thread started"); + log.info("Alert sender thread started"); while (!ServerLifeCycleManager.isStopped()) { try { List alerts = 
alertDao.listPendingAlerts(); if (CollectionUtils.isEmpty(alerts)) { - logger.debug("There is not waiting alerts"); + log.debug("There is not waiting alerts"); continue; } AlertServerMetrics.registerPendingAlertGauge(alerts::size); this.send(alerts); } catch (Exception e) { - logger.error("Alert sender thread meet an exception", e); + log.error("Alert sender thread meet an exception", e); } finally { ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS * 5L); } } - logger.info("Alert sender thread stopped"); + log.info("Alert sender thread stopped"); } public void send(List alerts) { @@ -104,7 +103,7 @@ public final class AlertSenderService extends Thread { int alertGroupId = Optional.ofNullable(alert.getAlertGroupId()).orElse(0); List alertInstanceList = alertDao.listInstanceByAlertGroupId(alertGroupId); if (CollectionUtils.isEmpty(alertInstanceList)) { - logger.error("send alert msg fail,no bind plugin instance."); + log.error("send alert msg fail,no bind plugin instance."); List alertResults = Lists.newArrayList(new AlertResult("false", "no bind plugin instance")); alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, JSONUtils.toJsonString(alertResults), alertId); @@ -184,7 +183,7 @@ public final class AlertSenderService extends Thread { alertSendResponseResult.setSuccess(false); alertSendResponseResult.setMessage(message); sendResponseResults.add(alertSendResponseResult); - logger.error("Alert GroupId {} send error : not found alert instance", alertGroupId); + log.error("Alert GroupId {} send error : not found alert instance", alertGroupId); return new AlertSendResponseCommand(false, sendResponseResults); } @@ -216,7 +215,7 @@ public final class AlertSenderService extends Thread { String message = String.format("Alert Plugin %s send error: the channel doesn't exist, pluginDefineId: %s", pluginInstanceName, pluginDefineId); - logger.error("Alert Plugin {} send error : not found plugin {}", pluginInstanceName, pluginDefineId); + log.error("Alert Plugin {} send error : 
not found plugin {}", pluginInstanceName, pluginDefineId); return new AlertResult("false", message); } AlertChannel alertChannel = alertChannelOptional.get(); @@ -232,7 +231,7 @@ public final class AlertSenderService extends Thread { if (warningType == null) { String message = String.format("Alert Plugin %s send error : plugin warnType is null", pluginInstanceName); - logger.error("Alert Plugin {} send error : plugin warnType is null", pluginInstanceName); + log.error("Alert Plugin {} send error : plugin warnType is null", pluginInstanceName); return new AlertResult("false", message); } @@ -258,7 +257,7 @@ public final class AlertSenderService extends Thread { String message = String.format( "Alert Plugin %s send ignore warning type not match: plugin warning type is %s, alert data warning type is %s", pluginInstanceName, warningType.getCode(), alertData.getWarnType()); - logger.info( + log.info( "Alert Plugin {} send ignore warning type not match: plugin warning type is {}, alert data warning type is {}", pluginInstanceName, warningType.getCode(), alertData.getWarnType()); return new AlertResult("false", message); @@ -292,11 +291,11 @@ public final class AlertSenderService extends Thread { } return alertResult; } catch (InterruptedException e) { - logger.error("send alert error alert data id :{},", alertData.getId(), e); + log.error("send alert error alert data id :{},", alertData.getId(), e); Thread.currentThread().interrupt(); return new AlertResult("false", e.getMessage()); } catch (Exception e) { - logger.error("send alert error alert data id :{},", alertData.getId(), e); + log.error("send alert error alert data id :{},", alertData.getId(), e); return new AlertResult("false", e.getMessage()); } } diff --git a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java index 
64d5addebb..5c6f57508b 100644 --- a/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java +++ b/dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java @@ -29,8 +29,8 @@ import java.io.Closeable; import javax.annotation.PreDestroy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.boot.context.event.ApplicationReadyEvent; @@ -39,10 +39,9 @@ import org.springframework.context.event.EventListener; @SpringBootApplication @ComponentScan("org.apache.dolphinscheduler") +@Slf4j public class AlertServer implements Closeable { - private static final Logger logger = LoggerFactory.getLogger(AlertServer.class); - private final PluginDao pluginDao; private final AlertSenderService alertSenderService; private final AlertRequestProcessor alertRequestProcessor; @@ -66,12 +65,12 @@ public class AlertServer implements Closeable { @EventListener public void run(ApplicationReadyEvent readyEvent) { - logger.info("Alert server is staring ..."); + log.info("Alert server is staring ..."); checkTable(); startServer(); alertSenderService.start(); - logger.info("Alert server is started ..."); + log.info("Alert server is started ..."); } @Override @@ -91,26 +90,26 @@ public class AlertServer implements Closeable { // set stop signal is true // execute only once if (!ServerLifeCycleManager.toStopped()) { - logger.warn("AlterServer is already stopped"); + log.warn("AlterServer is already stopped"); return; } - logger.info("Alert server is stopping, cause: {}", cause); + log.info("Alert server is stopping, cause: {}", cause); // thread sleep 3 seconds for thread quietly stop ThreadUtils.sleep(Constants.SERVER_CLOSE_WAIT_TIME.toMillis()); // close 
this.nettyRemotingServer.close(); - logger.info("Alter server stopped, cause: {}", cause); + log.info("Alter server stopped, cause: {}", cause); } catch (Exception e) { - logger.error("Alert server stop failed, cause: {}", cause, e); + log.error("Alert server stop failed, cause: {}", cause, e); } } protected void checkTable() { if (!pluginDao.checkPluginDefineTableExist()) { - logger.error("Plugin Define Table t_ds_plugin_define Not Exist . Please Create it First !"); + log.error("Plugin Define Table t_ds_plugin_define Not Exist . Please Create it First !"); System.exit(1); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java index 7824dc2a5d..2bb9b7cf51 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java @@ -28,8 +28,8 @@ import org.apache.dolphinscheduler.spi.params.base.PluginParams; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -41,10 +41,9 @@ import org.springframework.context.event.EventListener; @ServletComponentScan @SpringBootApplication @ComponentScan("org.apache.dolphinscheduler") +@Slf4j public class ApiApplicationServer { - private final Logger logger = LoggerFactory.getLogger(ApiApplicationServer.class); - @Autowired private TaskPluginManager taskPluginManager; @@ -57,7 +56,7 @@ public class ApiApplicationServer { @EventListener public void run(ApplicationReadyEvent readyEvent) { - logger.info("Received spring application context ready event will load taskPlugin and write to 
DB"); + log.info("Received spring application context ready event will load taskPlugin and write to DB"); // install task plugin taskPluginManager.loadPlugin(); for (Map.Entry entry : taskPluginManager.getTaskChannelFactoryMap().entrySet()) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java index 2f28616487..19d783cad5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java @@ -34,23 +34,22 @@ import java.util.stream.IntStream; import javax.servlet.http.HttpServletRequest; +import lombok.extern.slf4j.Slf4j; + import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Pointcut; import org.aspectj.lang.reflect.MethodSignature; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import org.springframework.web.context.request.RequestContextHolder; import org.springframework.web.context.request.ServletRequestAttributes; @Aspect @Component +@Slf4j public class AccessLogAspect { - private static final Logger logger = LoggerFactory.getLogger(AccessLogAspect.class); - private static final String TRACE_ID = "traceId"; public static final String sensitiveDataRegEx = "(password=[\'\"]+)(\\S+)([\'\"]+)"; @@ -90,7 +89,7 @@ public class AccessLogAspect { String argsString = parseArgs(proceedingJoinPoint, annotation); // handle sensitive data in the string argsString = handleSensitiveData(argsString); - logger.info("REQUEST TRACE_ID:{}, LOGIN_USER:{}, URI:{}, METHOD:{}, HANDLER:{}, ARGS:{}", + log.info("REQUEST TRACE_ID:{}, LOGIN_USER:{}, URI:{}, METHOD:{}, HANDLER:{}, ARGS:{}", traceId, userName, 
request.getRequestURI(), @@ -106,7 +105,7 @@ public class AccessLogAspect { // log response if (!annotation.ignoreResponse()) { - logger.info("RESPONSE TRACE_ID:{}, BODY:{}, REQUEST DURATION:{} milliseconds", traceId, ob, + log.info("RESPONSE TRACE_ID:{}, BODY:{}, REQUEST DURATION:{} milliseconds", traceId, ob, (System.currentTimeMillis() - startTime)); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java index e7f1ba188c..8f3a057b7f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java @@ -28,13 +28,13 @@ import java.lang.reflect.Method; import java.util.Arrays; import java.util.List; +import lombok.extern.slf4j.Slf4j; + import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Aspect; import org.aspectj.lang.annotation.Pointcut; import org.aspectj.lang.reflect.MethodSignature; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cache.annotation.CacheConfig; import org.springframework.cache.annotation.CacheEvict; @@ -48,10 +48,9 @@ import org.springframework.stereotype.Component; */ @Aspect @Component +@Slf4j public class CacheEvictAspect { - private static final Logger logger = LoggerFactory.getLogger(CacheEvictAspect.class); - /** * symbol of spring el */ diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java index dd9b0847a9..6e47f87145 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java @@ -25,12 +25,13 @@ import java.util.concurrent.LinkedBlockingQueue; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component +@Slf4j public class AuditPublishService { private BlockingQueue auditMessageQueue = new LinkedBlockingQueue<>(); @@ -41,8 +42,6 @@ public class AuditPublishService { @Autowired private AuditConfiguration auditConfiguration; - private static final Logger logger = LoggerFactory.getLogger(AuditPublishService.class); - /** * create a daemon thread to process the message queue */ @@ -63,7 +62,7 @@ public class AuditPublishService { */ public void publish(AuditMessage message) { if (auditConfiguration.getEnabled() && !auditMessageQueue.offer(message)) { - logger.error("Publish audit message failed, message:{}", message); + log.error("Publish audit message failed, message:{}", message); } } @@ -79,11 +78,11 @@ public class AuditPublishService { try { subscriber.execute(message); } catch (Exception e) { - logger.error("Consume audit message failed, message:{}", message, e); + log.error("Consume audit message failed, message:{}", message, e); } } } catch (InterruptedException e) { - logger.error("Consume audit message failed, message:{}", message, e); + log.error("Consume audit message failed, message:{}", message, e); Thread.currentThread().interrupt(); break; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java index 114da5ac36..5798d7eee8 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java @@ -27,9 +27,8 @@ import java.util.List; import lombok.Getter; import lombok.Setter; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.PropertySource; @@ -41,9 +40,9 @@ import org.springframework.stereotype.Component; @ConfigurationProperties(prefix = "dynamic-task") @Getter @Setter +@Slf4j public class DynamicTaskTypeConfiguration { - private static final Logger logger = LoggerFactory.getLogger(DynamicTaskTypeConfiguration.class); private static final List defaultTaskCategories = Arrays.asList(Constants.TYPE_UNIVERSAL, Constants.TYPE_DATA_INTEGRATION, Constants.TYPE_CLOUD, Constants.TYPE_LOGIC, Constants.TYPE_DATA_QUALITY, Constants.TYPE_OTHER, @@ -83,12 +82,12 @@ public class DynamicTaskTypeConfiguration { } public void printDefaultTypes() { - logger.info("support default universal dynamic task types: {}", universal); - logger.info("support default cloud dynamic task types: {}", cloud); - logger.info("support default logic dynamic task types: {}", logic); - logger.info("support default dataIntegration dynamic task types: {}", dataIntegration); - logger.info("support default dataQuality dynamic task types: {}", dataQuality); - logger.info("support default machineLearning dynamic task types: {}", machineLearning); - logger.info("support default other dynamic task types: {}", other); + log.info("support default universal dynamic task types: {}", universal); + log.info("support default cloud dynamic task types: {}", cloud); + log.info("support default logic dynamic task types: {}", logic); 
+ log.info("support default dataIntegration dynamic task types: {}", dataIntegration); + log.info("support default dataQuality dynamic task types: {}", dataQuality); + log.info("support default machineLearning dynamic task types: {}", machineLearning); + log.info("support default other dynamic task types: {}", other); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java index 3f068f31ba..1393a70ced 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java @@ -28,9 +28,8 @@ import java.util.List; import lombok.Getter; import lombok.Setter; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.PropertySource; @@ -42,10 +41,9 @@ import org.springframework.stereotype.Component; @ConfigurationProperties(prefix = "task") @Getter @Setter +@Slf4j public class TaskTypeConfiguration { - private static final Logger logger = LoggerFactory.getLogger(TaskTypeConfiguration.class); - private List universal; private List cloud; private List logic; @@ -74,12 +72,12 @@ public class TaskTypeConfiguration { } public void printDefaultTypes() { - logger.info("support default universal task types: {}", universal); - logger.info("support default cloud task types: {}", cloud); - logger.info("support default logic task types: {}", logic); - logger.info("support default dataIntegration task types: {}", dataIntegration); - logger.info("support default dataQuality task types: {}", dataQuality); - 
logger.info("support default machineLearning task types: {}", machineLearning); - logger.info("support default other task types: {}", other); + log.info("support default universal task types: {}", universal); + log.info("support default cloud task types: {}", cloud); + log.info("support default logic task types: {}", logic); + log.info("support default dataIntegration task types: {}", dataIntegration); + log.info("support default dataQuality task types: {}", dataQuality); + log.info("support default machineLearning task types: {}", machineLearning); + log.info("support default other task types: {}", other); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java index aad91f7ad0..74406e73f3 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java @@ -35,8 +35,8 @@ import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.DeleteMapping; @@ -62,10 +62,9 @@ import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "ALERT_GROUP_TAG") @RestController @RequestMapping("/alert-groups") +@Slf4j public class AlertGroupController extends BaseController { - private static final Logger logger = LoggerFactory.getLogger(AlertGroupController.class); - @Autowired private AlertGroupService alertGroupService; @@ -238,7 +237,7 @@ public class AlertGroupController extends BaseController { boolean exist = alertGroupService.existGroupName(groupName); Result result = new 
Result(); if (exist) { - logger.error("group {} has exist, can't create again.", groupName); + log.error("group {} has exist, can't create again.", groupName); result.setCode(Status.ALERT_GROUP_EXIST.getCode()); result.setMsg(Status.ALERT_GROUP_EXIST.getMsg()); } else { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java index 0b7879f316..76efc79723 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java @@ -35,8 +35,8 @@ import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.DeleteMapping; @@ -62,10 +62,9 @@ import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "ALERT_PLUGIN_INSTANCE_TAG") @RestController @RequestMapping("alert-plugin-instances") +@Slf4j public class AlertPluginInstanceController extends BaseController { - private static final Logger logger = LoggerFactory.getLogger(AlertPluginInstanceController.class); - @Autowired private AlertPluginInstanceService alertPluginInstanceService; @@ -200,7 +199,7 @@ public class AlertPluginInstanceController extends BaseController { boolean exist = alertPluginInstanceService.checkExistPluginInstanceName(alertInstanceName); if (exist) { - logger.error("alert plugin instance {} has exist, can't create again.", alertInstanceName); + log.error("alert plugin instance {} has exist, can't create again.", alertInstanceName); return 
Result.error(Status.PLUGIN_INSTANCE_ALREADY_EXISTS); } else { return Result.success(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java index 4b61e2260c..16ec43dbdd 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java @@ -51,8 +51,8 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; @@ -76,10 +76,9 @@ import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "EXECUTOR_TAG") @RestController @RequestMapping("projects/{projectCode}/executors") +@Slf4j public class ExecutorController extends BaseController { - private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class); - @Autowired private ExecutorService execService; @@ -239,7 +238,7 @@ public class ExecutorController extends BaseController { @RequestParam(value = "complementDependentMode", required = false) ComplementDependentMode complementDependentMode) { if (timeout == null) { - logger.debug("Parameter timeout set to {} due to null.", Constants.MAX_TASK_TIMEOUT); + log.debug("Parameter timeout set to {} due to null.", Constants.MAX_TASK_TIMEOUT); timeout = Constants.MAX_TASK_TIMEOUT; } @@ -249,7 +248,7 @@ public class ExecutorController extends BaseController { } if (complementDependentMode == null) { - logger.debug("Parameter complementDependentMode set to {} due to null.", ComplementDependentMode.OFF_MODE); + log.debug("Parameter complementDependentMode 
set to {} due to null.", ComplementDependentMode.OFF_MODE); complementDependentMode = ComplementDependentMode.OFF_MODE; } @@ -268,11 +267,11 @@ public class ExecutorController extends BaseController { complementDependentMode, null); if (!Status.SUCCESS.equals(result.get(Constants.STATUS))) { - logger.error("Process definition start failed, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process definition start failed, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinitionCode); startFailedProcessDefinitionCodeList.add(String.valueOf(processDefinitionCode)); } else { - logger.info("Start process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.info("Start process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinitionCode); } } @@ -307,7 +306,7 @@ public class ExecutorController extends BaseController { @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode, @RequestParam("processInstanceId") Integer processInstanceId, @RequestParam("executeType") ExecuteType executeType) { - logger.info("Start to execute process instance, projectCode:{}, processInstanceId:{}.", projectCode, + log.info("Start to execute process instance, projectCode:{}, processInstanceId:{}.", projectCode, processInstanceId); Map result = execService.execute(loginUser, projectCode, processInstanceId, executeType); return returnDataList(result); @@ -347,11 +346,11 @@ public class ExecutorController extends BaseController { Map singleResult = execService.execute(loginUser, projectCode, processInstanceId, executeType); if (!Status.SUCCESS.equals(singleResult.get(Constants.STATUS))) { - logger.error("Start to execute process instance error, projectCode:{}, processInstanceId:{}.", + log.error("Start to execute process instance error, projectCode:{}, processInstanceId:{}.", projectCode, processInstanceId); 
executeFailedIdList.add((String) singleResult.get(Constants.MSG)); } else - logger.info("Start to execute process instance complete, projectCode:{}, processInstanceId:{}.", + log.info("Start to execute process instance complete, projectCode:{}, processInstanceId:{}.", projectCode, processInstanceId); } catch (Exception e) { executeFailedIdList @@ -445,7 +444,7 @@ public class ExecutorController extends BaseController { startParamMap = JSONUtils.toMap(startParams); } - logger.info("Start to execute stream task instance, projectCode:{}, taskDefinitionCode:{}, taskVersion:{}.", + log.info("Start to execute stream task instance, projectCode:{}, taskDefinitionCode:{}, taskVersion:{}.", projectCode, code, version); Map result = execService.execStreamTaskInstance(loginUser, projectCode, code, version, warningGroupId, workerGroup, environmentCode, startParamMap, dryRun); @@ -477,7 +476,7 @@ public class ExecutorController extends BaseController { @RequestParam("processInstanceId") Integer processInstanceId, @RequestParam("startNodeList") String startNodeList, @RequestParam("taskDependType") TaskDependType taskDependType) { - logger.info("Start to execute task in process instance, projectCode:{}, processInstanceId:{}.", + log.info("Start to execute task in process instance, projectCode:{}, processInstanceId:{}.", projectCode, processInstanceId); return execService.executeTask(loginUser, projectCode, processInstanceId, startNodeList, taskDependType); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java index 5a5d8613b7..4e3a5a7a06 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java @@ -53,8 +53,8 @@ import 
java.util.Map; import javax.servlet.http.HttpServletResponse; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.DeleteMapping; @@ -82,10 +82,9 @@ import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "PROCESS_DEFINITION_TAG") @RestController @RequestMapping("projects/{projectCode}/process-definition") +@Slf4j public class ProcessDefinitionController extends BaseController { - private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionController.class); - @Autowired private ProcessDefinitionService processDefinitionService; @@ -696,7 +695,7 @@ public class ProcessDefinitionController extends BaseController { try { processDefinitionService.batchExportProcessDefinitionByCodes(loginUser, projectCode, codes, response); } catch (Exception e) { - logger.error(Status.BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(), e); + log.error(Status.BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(), e); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java index aba285d358..a3e9cab585 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java @@ -39,8 +39,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.DeleteMapping; @@ -66,10 +66,9 @@ 
import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "PROCESS_INSTANCE_TAG") @RestController @RequestMapping("/projects/{projectCode}/process-instances") +@Slf4j public class ProcessInstanceController extends BaseController { - private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class); - @Autowired private ProcessInstanceService processInstanceService; @@ -400,7 +399,7 @@ public class ProcessInstanceController extends BaseController { try { processInstanceService.deleteProcessInstanceById(loginUser, processInstanceId); } catch (Exception e) { - logger.error("Delete workflow instance: {} error", strProcessInstanceId, e); + log.error("Delete workflow instance: {} error", strProcessInstanceId, e); deleteFailedIdList .add(MessageFormat.format(Status.PROCESS_INSTANCE_ERROR.getMsg(), strProcessInstanceId)); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java index dcfc774b4c..c699bd5dfd 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java @@ -35,8 +35,8 @@ import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.DeleteMapping; @@ -62,10 +62,9 @@ import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "PROJECT_TAG") @RestController @RequestMapping("projects") +@Slf4j public class ProjectController extends BaseController { - 
private static final Logger logger = LoggerFactory.getLogger(ProjectController.class); - @Autowired private ProjectService projectService; @@ -166,7 +165,7 @@ public class ProjectController extends BaseController { Result result = checkPageParams(pageNo, pageSize); if (!result.checkResult()) { - logger.warn("Pagination parameters check failed, pageNo:{}, pageSize:{}", pageNo, pageSize); + log.warn("Pagination parameters check failed, pageNo:{}, pageSize:{}", pageNo, pageSize); return result; } searchVal = ParameterUtils.handleEscapes(searchVal); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java index db4cad6822..156e47605d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java @@ -61,8 +61,8 @@ import org.apache.commons.lang3.StringUtils; import java.io.IOException; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.io.Resource; import org.springframework.http.HttpHeaders; @@ -93,10 +93,9 @@ import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "RESOURCES_TAG") @RestController @RequestMapping("resources") +@Slf4j public class ResourcesController extends BaseController { - private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class); - @Autowired private ResourcesService resourceService; @Autowired @@ -414,7 +413,7 @@ public class ResourcesController extends BaseController { @RequestParam(value = "content") String content, @RequestParam(value = "currentDir") String currentDir) { if (StringUtils.isEmpty(content)) { - logger.error("resource 
file contents are not allowed to be empty"); + log.error("resource file contents are not allowed to be empty"); return error(RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); } return resourceService.onlineCreateResource(loginUser, type, fileName, fileSuffix, description, content, @@ -442,7 +441,7 @@ public class ResourcesController extends BaseController { @RequestParam(value = "tenantCode") String tenantCode, @RequestParam(value = "content") String content) { if (StringUtils.isEmpty(content)) { - logger.error("The resource file contents are not allowed to be empty"); + log.error("The resource file contents are not allowed to be empty"); return error(RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); } return resourceService.updateResourceContent(loginUser, fullName, tenantCode, content); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java index 93c13b6f50..af07c8a006 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java @@ -46,8 +46,8 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; @@ -71,10 +71,9 @@ import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "USERS_TAG") @RestController @RequestMapping("/users") +@Slf4j public class UsersController extends BaseController { - private static final Logger logger = LoggerFactory.getLogger(UsersController.class); - @Autowired private UsersService usersService; @@ -539,7 
+538,7 @@ public class UsersController extends BaseController { Map result = usersService.authorizedUser(loginUser, alertgroupId); return returnDataList(result); } catch (Exception e) { - logger.error(Status.AUTHORIZED_USER_ERROR.getMsg(), e); + log.error(Status.AUTHORIZED_USER_ERROR.getMsg(), e); return error(Status.AUTHORIZED_USER_ERROR.getCode(), Status.AUTHORIZED_USER_ERROR.getMsg()); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java index cc3e70f5f5..cc25f16406 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java @@ -36,8 +36,8 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.GetMapping; @@ -61,10 +61,9 @@ import io.swagger.v3.oas.annotations.tags.Tag; @Tag(name = "WORK_FLOW_LINEAGE_TAG") @RestController @RequestMapping("projects/{projectCode}/lineages") +@Slf4j public class WorkFlowLineageController extends BaseController { - private static final Logger logger = LoggerFactory.getLogger(WorkFlowLineageController.class); - @Autowired private WorkFlowLineageService workFlowLineageService; @@ -80,7 +79,7 @@ public class WorkFlowLineageController extends BaseController { Map result = workFlowLineageService.queryWorkFlowLineageByName(projectCode, workFlowName); return returnDataList(result); } catch (Exception e) { - logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e); + log.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e); 
return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg()); } } @@ -96,7 +95,7 @@ public class WorkFlowLineageController extends BaseController { Map result = workFlowLineageService.queryWorkFlowLineageByCode(projectCode, workFlowCode); return returnDataList(result); } catch (Exception e) { - logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e); + log.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e); return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg()); } } @@ -111,7 +110,7 @@ public class WorkFlowLineageController extends BaseController { Map result = workFlowLineageService.queryWorkFlowLineage(projectCode); return returnDataList(result); } catch (Exception e) { - logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e); + log.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e); return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg()); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java index 863ad79a06..7a7b6700d5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java @@ -20,8 +20,8 @@ package org.apache.dolphinscheduler.api.exceptions; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.RestControllerAdvice; @@ -32,13 +32,12 @@ import org.springframework.web.method.HandlerMethod; */ @RestControllerAdvice 
@ResponseBody +@Slf4j public class ApiExceptionHandler { - private static final Logger logger = LoggerFactory.getLogger(ApiExceptionHandler.class); - @ExceptionHandler(ServiceException.class) public Result exceptionHandler(ServiceException e, HandlerMethod hm) { - logger.error("ServiceException: ", e); + log.error("ServiceException: ", e); return new Result(e.getCode(), e.getMessage()); } @@ -46,11 +45,11 @@ public class ApiExceptionHandler { public Result exceptionHandler(Exception e, HandlerMethod hm) { ApiException ce = hm.getMethodAnnotation(ApiException.class); if (ce == null) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); return Result.errorWithArgs(Status.INTERNAL_SERVER_ERROR_ARGS, e.getMessage()); } Status st = ce.value(); - logger.error(st.getMsg(), e); + log.error(st.getMsg(), e); return Result.error(st); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java index f371bae646..345c93b88a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java @@ -33,8 +33,8 @@ import java.util.Date; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.servlet.HandlerInterceptor; import org.springframework.web.servlet.ModelAndView; @@ -42,10 +42,9 @@ import org.springframework.web.servlet.ModelAndView; /** * login interceptor, must log in first */ +@Slf4j public class LoginHandlerInterceptor implements HandlerInterceptor { - private static final Logger logger = 
LoggerFactory.getLogger(LoginHandlerInterceptor.class); - @Autowired private UserMapper userMapper; @@ -70,14 +69,14 @@ public class LoginHandlerInterceptor implements HandlerInterceptor { // if user is null if (user == null) { response.setStatus(HttpStatus.SC_UNAUTHORIZED); - logger.info("user does not exist"); + log.info("user does not exist"); return false; } } else { user = userMapper.queryUserByToken(token, new Date()); if (user == null) { response.setStatus(HttpStatus.SC_UNAUTHORIZED); - logger.info("user token has expired"); + log.info("user token has expired"); return false; } } @@ -85,7 +84,7 @@ public class LoginHandlerInterceptor implements HandlerInterceptor { // check user state if (user.getState() == Flag.NO.ordinal()) { response.setStatus(HttpStatus.SC_UNAUTHORIZED); - logger.info(Status.USER_DISABLED.getMsg()); + log.info(Status.USER_DISABLED.getMsg()); return false; } request.setAttribute(Constants.SESSION_USER, user); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/RateLimitInterceptor.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/RateLimitInterceptor.java index c419a25ccb..22dcafcbdc 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/RateLimitInterceptor.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/RateLimitInterceptor.java @@ -29,8 +29,8 @@ import java.util.concurrent.TimeUnit; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.http.HttpStatus; import org.springframework.web.servlet.HandlerInterceptor; @@ -44,10 +44,9 @@ import com.google.common.util.concurrent.RateLimiter; * If the current coming tenant reaches his tenant-level request quota, his request will be reject fast. 
* If the current system request number reaches the global request quota, all coming request will be reject fast. */ +@Slf4j public class RateLimitInterceptor implements HandlerInterceptor { - private static final Logger logger = LoggerFactory.getLogger(RateLimitInterceptor.class); - private TrafficConfiguration trafficConfiguration; private RateLimiter globalRateLimiter; @@ -81,7 +80,7 @@ public class RateLimitInterceptor implements HandlerInterceptor { RateLimiter tenantRateLimiter = tenantRateLimiterCache.get(token); if (!tenantRateLimiter.tryAcquire()) { response.setStatus(HttpStatus.TOO_MANY_REQUESTS.value()); - logger.warn("Too many request, reach tenant rate limit, current tenant:{} qps is {}", token, + log.warn("Too many request, reach tenant rate limit, current tenant:{} qps is {}", token, tenantRateLimiter.getRate()); return false; } @@ -91,7 +90,7 @@ public class RateLimitInterceptor implements HandlerInterceptor { if (trafficConfiguration.isGlobalSwitch()) { if (!globalRateLimiter.tryAcquire()) { response.setStatus(HttpStatus.TOO_MANY_REQUESTS.value()); - logger.warn("Too many request, reach global rate limit, current qps is {}", + log.warn("Too many request, reach global rate limit, current qps is {}", globalRateLimiter.getRate()); return false; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/k8s/K8sManager.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/k8s/K8sManager.java index 6b573baa42..81452f3399 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/k8s/K8sManager.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/k8s/K8sManager.java @@ -25,8 +25,8 @@ import org.apache.dolphinscheduler.service.utils.ClusterConfUtils; import java.util.Hashtable; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.stereotype.Component; @@ -38,9 +38,9 @@ import io.fabric8.kubernetes.client.KubernetesClient; * use multiple environment feature */ @Component +@Slf4j public class K8sManager { - private static final Logger logger = LoggerFactory.getLogger(K8sManager.class); /** * cache k8s client */ @@ -112,7 +112,7 @@ public class K8sManager { client = getClient(k8sConfig); clientMap.put(clusterCode, client); } catch (RemotingException e) { - logger.error("cluster code ={},fail to get k8s ApiClient: {}", clusterCode, e.getMessage()); + log.error("cluster code ={},fail to get k8s ApiClient: {}", clusterCode, e.getMessage()); throw new RemotingException("fail to get k8s ApiClient:" + e.getMessage()); } } @@ -123,7 +123,7 @@ public class K8sManager { Config config = Config.fromKubeconfig(configYaml); return new DefaultKubernetesClient(config); } catch (Exception e) { - logger.error("Fail to get k8s ApiClient", e); + log.error("Fail to get k8s ApiClient", e); throw new RemotingException("fail to get k8s ApiClient:" + e.getMessage()); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java index 4e98bc3251..23c6dc66f4 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java @@ -78,16 +78,15 @@ import java.util.stream.Collectors; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component +@Slf4j public class PythonGateway { - private static final Logger logger = LoggerFactory.getLogger(PythonGateway.class); - private static final FailureStrategy DEFAULT_FAILURE_STRATEGY = 
FailureStrategy.CONTINUE; private static final Priority DEFAULT_PRIORITY = Priority.MEDIUM; private static final Long DEFAULT_ENVIRONMENT_CODE = -1L; @@ -304,7 +303,7 @@ public class PythonGateway { } else if (verifyStatus != Status.SUCCESS) { String msg = "Verify workflow exists status is invalid, neither SUCCESS or WORKFLOW_NAME_EXIST."; - logger.error(msg); + log.error(msg); throw new RuntimeException(msg); } @@ -501,11 +500,11 @@ public class PythonGateway { List dataSourceList = dataSourceMapper.queryDataSourceByName(datasourceName); if (dataSourceList == null || dataSourceList.isEmpty()) { String msg = String.format("Can not find any datasource by name %s", datasourceName); - logger.error(msg); + log.error(msg); throw new IllegalArgumentException(msg); } else if (dataSourceList.size() > 1) { String msg = String.format("Get more than one datasource by name %s", datasourceName); - logger.error(msg); + log.error(msg); throw new IllegalArgumentException(msg); } else { DataSource dataSource = dataSourceList.get(0); @@ -542,7 +541,7 @@ public class PythonGateway { result.put("code", processDefinition.getCode()); } else { String msg = String.format("Can not find valid workflow by name %s", workflowName); - logger.error(msg); + log.error(msg); throw new IllegalArgumentException(msg); } @@ -563,7 +562,7 @@ public class PythonGateway { Project project = projectMapper.queryByName(projectName); if (project == null) { String msg = String.format("Can not find valid project by name %s", projectName); - logger.error(msg); + log.error(msg); throw new IllegalArgumentException(msg); } long projectCode = project.getCode(); @@ -573,7 +572,7 @@ public class PythonGateway { processDefinitionMapper.queryByDefineName(projectCode, workflowName); if (processDefinition == null) { String msg = String.format("Can not find valid workflow by name %s", workflowName); - logger.error(msg); + log.error(msg); throw new IllegalArgumentException(msg); } result.put("processDefinitionCode", 
processDefinition.getCode()); @@ -604,7 +603,7 @@ public class PythonGateway { if (CollectionUtils.isEmpty(namedResources)) { String msg = String.format("Can not find valid resource by program type %s and name %s", programType, fullName); - logger.error(msg); + log.error(msg); throw new IllegalArgumentException(msg); } @@ -624,7 +623,7 @@ public class PythonGateway { if (result.get("data") == null) { String msg = String.format("Can not find valid environment by name %s", environmentName); - logger.error(msg); + log.error(msg); throw new IllegalArgumentException(msg); } EnvironmentDto environmentDto = EnvironmentDto.class.cast(result.get("data")); @@ -682,10 +681,10 @@ public class PythonGateway { } GatewayServer.turnLoggingOn(); - logger.info("PythonGatewayService started on: " + gatewayHost.toString()); + log.info("PythonGatewayService started on: " + gatewayHost.toString()); serverBuilder.build().start(); } catch (UnknownHostException e) { - logger.error("exception occurred while constructing PythonGatewayService().", e); + log.error("exception occurred while constructing PythonGatewayService().", e); } } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java index 3e3bb1ae79..7d0fedb1f9 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java @@ -22,8 +22,8 @@ import org.apache.dolphinscheduler.api.security.impl.pwd.PasswordAuthenticator; import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import 
org.springframework.beans.factory.config.AutowireCapableBeanFactory; @@ -31,10 +31,9 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration +@Slf4j public class SecurityConfig { - private static final Logger logger = LoggerFactory.getLogger(SecurityConfig.class); - @Value("${security.authentication.type:PASSWORD}") private String type; @@ -48,7 +47,7 @@ public class SecurityConfig { private void setAuthenticationType(String type) { if (StringUtils.isBlank(type)) { - logger.info("security.authentication.type configuration is empty, the default value 'PASSWORD'"); + log.info("security.authentication.type configuration is empty, the default value 'PASSWORD'"); this.authenticationType = AuthenticationType.PASSWORD; return; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java index 704f7585ca..1cbf7cc37c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java @@ -33,14 +33,13 @@ import java.util.Map; import javax.servlet.http.HttpServletRequest; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; +@Slf4j public abstract class AbstractAuthenticator implements Authenticator { - private static final Logger logger = LoggerFactory.getLogger(AbstractAuthenticator.class); - @Autowired protected UsersService userService; @@ -65,7 +64,7 @@ public abstract class AbstractAuthenticator implements Authenticator { Result> result = new Result<>(); User user = login(userId, password, extra); if (user == null) { - logger.error("Username or password 
entered incorrectly."); + log.error("Username or password entered incorrectly."); result.setCode(Status.USER_NAME_PASSWD_ERROR.getCode()); result.setMsg(Status.USER_NAME_PASSWD_ERROR.getMsg()); return result; @@ -73,7 +72,7 @@ public abstract class AbstractAuthenticator implements Authenticator { // check user state if (user.getState() == Flag.NO.ordinal()) { - logger.error("The current user is deactivated, userName:{}.", user.getUserName()); + log.error("The current user is deactivated, userName:{}.", user.getUserName()); result.setCode(Status.USER_DISABLED.getCode()); result.setMsg(Status.USER_DISABLED.getMsg()); return result; @@ -82,13 +81,13 @@ public abstract class AbstractAuthenticator implements Authenticator { // create session String sessionId = sessionService.createSession(user, extra); if (sessionId == null) { - logger.error("Failed to create session, userName:{}.", user.getUserName()); + log.error("Failed to create session, userName:{}.", user.getUserName()); result.setCode(Status.LOGIN_SESSION_FAILED.getCode()); result.setMsg(Status.LOGIN_SESSION_FAILED.getMsg()); return result; } - logger.info("Session is created and sessionId is :{}.", sessionId); + log.info("Session is created and sessionId is :{}.", sessionId); Map data = new HashMap<>(); data.put(Constants.SESSION_ID, sessionId); @@ -104,7 +103,7 @@ public abstract class AbstractAuthenticator implements Authenticator { public User getAuthUser(HttpServletRequest request) { Session session = sessionService.getSession(request); if (session == null) { - logger.info("session info is null "); + log.info("session info is null "); return null; } // get user object from session diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java index 236b5fe45e..14d3170cc2 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java @@ -34,8 +34,8 @@ import javax.naming.directory.SearchResult; import javax.naming.ldap.InitialLdapContext; import javax.naming.ldap.LdapContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Configuration; import org.springframework.ldap.filter.EqualsFilter; @@ -43,10 +43,9 @@ import org.springframework.stereotype.Component; @Component @Configuration +@Slf4j public class LdapService { - private static final Logger logger = LoggerFactory.getLogger(LdapService.class); - @Value("${security.authentication.ldap.user.admin:#{null}}") private String adminUserId; @@ -109,7 +108,7 @@ public class LdapService { try { new InitialDirContext(searchEnv); } catch (Exception e) { - logger.warn("invalid ldap credentials or ldap search error", e); + log.warn("invalid ldap credentials or ldap search error", e); return null; } Attribute attr = attrs.next(); @@ -119,7 +118,7 @@ public class LdapService { } } } catch (NamingException e) { - logger.error("ldap search error", e); + log.error("ldap search error", e); return null; } finally { try { @@ -127,7 +126,7 @@ public class LdapService { ctx.close(); } } catch (NamingException e) { - logger.error("ldap context close error", e); + log.error("ldap context close error", e); } } @@ -150,7 +149,7 @@ public class LdapService { public LdapUserNotExistActionType getLdapUserNotExistAction() { if (StringUtils.isBlank(ldapUserNotExistAction)) { - logger.info( + log.info( "security.authentication.ldap.user.not.exist.action configuration is empty, the default value 'CREATE'"); return LdapUserNotExistActionType.CREATE; } diff --git 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java index c8ee24917d..2a264d6948 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java @@ -41,8 +41,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -53,10 +53,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * access token service impl */ @Service +@Slf4j public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTokenService { - private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceImpl.class); - @Autowired private AccessTokenMapper accessTokenMapper; @@ -134,7 +133,7 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok // 2. 
check if user is existed if (userId <= 0) { String errorMsg = "User id should not less than or equals to 0."; - logger.error(errorMsg); + log.error(errorMsg); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, errorMsg); return result; } @@ -198,7 +197,7 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok AccessToken accessToken = accessTokenMapper.selectById(id); if (accessToken == null) { - logger.error("Access token does not exist, accessTokenId:{}.", id); + log.error("Access token does not exist, accessTokenId:{}.", id); putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST); return result; } @@ -235,7 +234,7 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok // 2. check if token is existed AccessToken accessToken = accessTokenMapper.selectById(id); if (accessToken == null) { - logger.error("Access token does not exist, accessTokenId:{}.", id); + log.error("Access token does not exist, accessTokenId:{}.", id); putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java index 134bea098e..82c5cd1e69 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java @@ -43,8 +43,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DuplicateKeyException; import org.springframework.stereotype.Service; @@ -57,10 +57,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * alert group service impl 
*/ @Service +@Slf4j public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroupService { - private Logger logger = LoggerFactory.getLogger(AlertGroupServiceImpl.class); - @Autowired private AlertGroupMapper alertGroupMapper; @@ -78,7 +77,7 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup alertGroups = alertGroupMapper.queryAllGroupList(); } else { Set ids = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, loginUser.getId(), log); if (ids.isEmpty()) { result.put(Constants.DATA_LIST, Collections.emptyList()); putMsg(result, Status.SUCCESS); @@ -139,7 +138,7 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup alertGroupPage = alertGroupMapper.queryAlertGroupPage(page, searchVal); } else { Set ids = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, loginUser.getId(), log); if (ids.isEmpty()) { result.setData(pageInfo); putMsg(result, Status.SUCCESS); @@ -175,7 +174,7 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup return result; } if (checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -196,14 +195,14 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup result.put(Constants.DATA_LIST, alertGroup); putMsg(result, Status.SUCCESS); permissionPostHandle(AuthorizationType.ALERT_GROUP, loginUser.getId(), - Collections.singletonList(alertGroup.getId()), logger); - logger.info("Create alert group complete, groupName:{}", alertGroup.getGroupName()); + Collections.singletonList(alertGroup.getId()), 
log); + log.info("Create alert group complete, groupName:{}", alertGroup.getGroupName()); } else { - logger.error("Create alert group error, groupName:{}", alertGroup.getGroupName()); + log.error("Create alert group error, groupName:{}", alertGroup.getGroupName()); putMsg(result, Status.CREATE_ALERT_GROUP_ERROR); } } catch (DuplicateKeyException ex) { - logger.error("Create alert group error, groupName:{}", alertGroup.getGroupName(), ex); + log.error("Create alert group error, groupName:{}", alertGroup.getGroupName(), ex); putMsg(result, Status.ALERT_GROUP_EXIST); } @@ -230,14 +229,14 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup return result; } if (checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } AlertGroup alertGroup = alertGroupMapper.selectById(id); if (alertGroup == null) { - logger.error("Alert group does not exist, id:{}.", id); + log.error("Alert group does not exist, id:{}.", id); putMsg(result, Status.ALERT_GROUP_NOT_EXIST); return result; @@ -254,10 +253,10 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup alertGroup.setAlertInstanceIds(alertInstanceIds); try { alertGroupMapper.updateById(alertGroup); - logger.info("Update alert group complete, groupName:{}", alertGroup.getGroupName()); + log.info("Update alert group complete, groupName:{}", alertGroup.getGroupName()); putMsg(result, Status.SUCCESS); } catch (DuplicateKeyException ex) { - logger.error("Update alert group error, groupName:{}", alertGroup.getGroupName(), ex); + log.error("Update alert group error, groupName:{}", alertGroup.getGroupName(), ex); putMsg(result, Status.ALERT_GROUP_EXIST); } return result; @@ -284,7 +283,7 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup // Not allow to delete the default alarm group ,because the module 
of service need to use it. if (id == 1) { - logger.warn("Not allow to delete the default alarm group."); + log.warn("Not allow to delete the default alarm group."); putMsg(result, Status.NOT_ALLOW_TO_DELETE_DEFAULT_ALARM_GROUP); return result; } @@ -292,13 +291,13 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup // check exist AlertGroup alertGroup = alertGroupMapper.selectById(id); if (alertGroup == null) { - logger.error("Alert group does not exist, id:{}.", id); + log.error("Alert group does not exist, id:{}.", id); putMsg(result, Status.ALERT_GROUP_NOT_EXIST); return result; } alertGroupMapper.deleteById(id); - logger.info("Delete alert group complete, groupId:{}", id); + log.info("Delete alert group complete, groupId:{}", id); putMsg(result, Status.SUCCESS); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java index 9af3a522ef..d7c7581f11 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java @@ -50,8 +50,8 @@ import java.util.Optional; import java.util.function.Function; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Service; @@ -64,10 +64,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; */ @Service @Lazy +@Slf4j public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements AlertPluginInstanceService { - private static final Logger logger = 
LoggerFactory.getLogger(AlertPluginInstanceServiceImpl.class); - @Autowired private AlertPluginInstanceMapper alertPluginInstanceMapper; @@ -100,7 +99,7 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A return result; } if (alertPluginInstanceMapper.existInstanceName(alertPluginInstance.getInstanceName()) == Boolean.TRUE) { - logger.error("Plugin instance with the same name already exists, name:{}.", + log.error("Plugin instance with the same name already exists, name:{}.", alertPluginInstance.getInstanceName()); putMsg(result, Status.PLUGIN_INSTANCE_ALREADY_EXISTS); return result; @@ -108,12 +107,12 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A int i = alertPluginInstanceMapper.insert(alertPluginInstance); if (i > 0) { - logger.info("Create alert plugin instance complete, name:{}", alertPluginInstance.getInstanceName()); + log.info("Create alert plugin instance complete, name:{}", alertPluginInstance.getInstanceName()); result.put(Constants.DATA_LIST, alertPluginInstance); putMsg(result, Status.SUCCESS); return result; } - logger.error("Create alert plugin instance error, name:{}", alertPluginInstance.getInstanceName()); + log.error("Create alert plugin instance error, name:{}", alertPluginInstance.getInstanceName()); putMsg(result, Status.SAVE_ERROR); return result; } @@ -143,12 +142,12 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A int i = alertPluginInstanceMapper.updateById(alertPluginInstance); if (i > 0) { - logger.info("Update alert plugin instance complete, instanceId:{}, name:{}", alertPluginInstance.getId(), + log.info("Update alert plugin instance complete, instanceId:{}, name:{}", alertPluginInstance.getId(), alertPluginInstance.getInstanceName()); putMsg(result, Status.SUCCESS); return result; } - logger.error("Update alert plugin instance error, instanceId:{}, name:{}", alertPluginInstance.getId(), + log.error("Update alert plugin instance 
error, instanceId:{}, name:{}", alertPluginInstance.getId(), alertPluginInstance.getInstanceName()); putMsg(result, Status.SAVE_ERROR); return result; @@ -167,7 +166,7 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A // check if there is an associated alert group boolean hasAssociatedAlertGroup = checkHasAssociatedAlertGroup(String.valueOf(id)); if (hasAssociatedAlertGroup) { - logger.warn("Delete alert plugin failed because alert group is using it, pluginId:{}.", id); + log.warn("Delete alert plugin failed because alert group is using it, pluginId:{}.", id); putMsg(result, Status.DELETE_ALERT_PLUGIN_INSTANCE_ERROR_HAS_ALERT_GROUP_ASSOCIATED); return result; } @@ -178,10 +177,10 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A int i = alertPluginInstanceMapper.deleteById(id); if (i > 0) { - logger.info("Delete alert plugin instance complete, instanceId:{}", id); + log.info("Delete alert plugin instance complete, instanceId:{}", id); putMsg(result, Status.SUCCESS); } - logger.error("Delete alert plugin instance error, instanceId:{}", id); + log.error("Delete alert plugin instance error, instanceId:{}", id); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java index bdb1295bc8..b2fb11d3de 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java @@ -36,17 +36,17 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import lombok.extern.slf4j.Slf4j; + import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; /** * base service impl */ +@Slf4j public class BaseServiceImpl 
implements BaseService { - private static final Logger logger = LoggerFactory.getLogger(BaseServiceImpl.class); - @Autowired protected ResourcePermissionCheckService resourcePermissionCheckService; @@ -56,7 +56,7 @@ public class BaseServiceImpl implements BaseService { try { resourcePermissionCheckService.postHandle(authorizationType, userId, ids, logger); } catch (Exception e) { - logger.error("Post handle error, userId:{}.", userId, e); + log.error("Post handle error, userId:{}.", userId, e); throw new RuntimeException("Resource association user error", e); } } @@ -178,9 +178,9 @@ public class BaseServiceImpl implements BaseService { @Override public boolean canOperatorPermissions(User user, Object[] ids, AuthorizationType type, String permissionKey) { boolean operationPermissionCheck = - resourcePermissionCheckService.operationPermissionCheck(type, user.getId(), permissionKey, logger); + resourcePermissionCheckService.operationPermissionCheck(type, user.getId(), permissionKey, log); boolean resourcePermissionCheck = resourcePermissionCheckService.resourcePermissionCheck(type, ids, - user.getUserType().equals(UserType.ADMIN_USER) ? 0 : user.getId(), logger); + user.getUserType().equals(UserType.ADMIN_USER) ? 
0 : user.getId(), log); return operationPermissionCheck && resourcePermissionCheck; } @@ -193,7 +193,7 @@ public class BaseServiceImpl implements BaseService { if (!StringUtils.isEmpty(startDateStr)) { start = DateUtils.stringToDate(startDateStr); if (Objects.isNull(start)) { - logger.warn("Parameter startDateStr is invalid."); + log.warn("Parameter startDateStr is invalid."); throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java index 7d8ceeb3b0..61e06a5841 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java @@ -44,8 +44,8 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.BeanUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -59,10 +59,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * cluster definition service impl */ @Service +@Slf4j public class ClusterServiceImpl extends BaseServiceImpl implements ClusterService { - private static final Logger logger = LoggerFactory.getLogger(ClusterServiceImpl.class); - @Autowired private ClusterMapper clusterMapper; @@ -84,7 +83,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic public Map createCluster(User loginUser, String name, String config, String desc) { Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { - logger.warn("Only admin can create cluster, current login user name:{}.", 
loginUser.getUserName()); + log.warn("Only admin can create cluster, current login user name:{}.", loginUser.getUserName()); return result; } @@ -95,7 +94,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic Cluster clusterExistByName = clusterMapper.queryByClusterName(name); if (clusterExistByName != null) { - logger.warn("Cluster with the same name already exists, clusterName:{}.", clusterExistByName.getName()); + log.warn("Cluster with the same name already exists, clusterName:{}.", clusterExistByName.getName()); putMsg(result, Status.CLUSTER_NAME_EXISTS, name); return result; } @@ -112,7 +111,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic code = CodeGenerateUtils.getInstance().genCode(); cluster.setCode(code); } catch (CodeGenerateException e) { - logger.error("Generate cluster code error.", e); + log.error("Generate cluster code error.", e); } if (code == 0L) { putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating cluster code"); @@ -120,11 +119,11 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic } if (clusterMapper.insert(cluster) > 0) { - logger.info("Cluster create complete, clusterName:{}.", cluster.getName()); + log.info("Cluster create complete, clusterName:{}.", cluster.getName()); result.put(Constants.DATA_LIST, cluster.getCode()); putMsg(result, Status.SUCCESS); } else { - logger.error("Cluster create error, clusterName:{}.", cluster.getName()); + log.error("Cluster create error, clusterName:{}.", cluster.getName()); putMsg(result, Status.CREATE_CLUSTER_ERROR); } return result; @@ -227,7 +226,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic Cluster cluster = clusterMapper.queryByClusterName(name); if (cluster == null) { - logger.warn("Cluster does not exist, name:{}.", name); + log.warn("Cluster does not exist, name:{}.", name); putMsg(result, Status.QUERY_CLUSTER_BY_NAME_ERROR, name); } else { 
@@ -250,7 +249,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic public Map deleteClusterByCode(User loginUser, Long code) { Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { - logger.warn("Only admin can delete cluster, current login user name:{}.", loginUser.getUserName()); + log.warn("Only admin can delete cluster, current login user name:{}.", loginUser.getUserName()); return result; } @@ -258,7 +257,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic .selectCount(new QueryWrapper().lambda().eq(K8sNamespace::getClusterCode, code)); if (relatedNamespaceNumber > 0) { - logger.warn("Delete cluster failed because {} namespace(s) is(are) using it, clusterCode:{}.", + log.warn("Delete cluster failed because {} namespace(s) is(are) using it, clusterCode:{}.", relatedNamespaceNumber, code); putMsg(result, Status.DELETE_CLUSTER_RELATED_NAMESPACE_EXISTS); return result; @@ -266,10 +265,10 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic int delete = clusterMapper.deleteByCode(code); if (delete > 0) { - logger.info("Delete cluster complete, clusterCode:{}.", code); + log.info("Delete cluster complete, clusterCode:{}.", code); putMsg(result, Status.SUCCESS); } else { - logger.error("Delete cluster error, clusterCode:{}.", code); + log.error("Delete cluster error, clusterCode:{}.", code); putMsg(result, Status.DELETE_CLUSTER_ERROR); } return result; @@ -289,12 +288,12 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic public Map updateClusterByCode(User loginUser, Long code, String name, String config, String desc) { Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { - logger.warn("Only admin can update cluster, current login user name:{}.", loginUser.getUserName()); + log.warn("Only admin can update cluster, current login user name:{}.", loginUser.getUserName()); return result; } if 
(checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -306,14 +305,14 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic Cluster clusterExistByName = clusterMapper.queryByClusterName(name); if (clusterExistByName != null && !clusterExistByName.getCode().equals(code)) { - logger.warn("Cluster with the same name already exists, name:{}.", clusterExistByName.getName()); + log.warn("Cluster with the same name already exists, name:{}.", clusterExistByName.getName()); putMsg(result, Status.CLUSTER_NAME_EXISTS, name); return result; } Cluster clusterExist = clusterMapper.queryByClusterCode(code); if (clusterExist == null) { - logger.error("Cluster does not exist, code:{}.", code); + log.error("Cluster does not exist, code:{}.", code); putMsg(result, Status.CLUSTER_NOT_EXISTS, name); return result; } @@ -323,7 +322,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic try { k8sManager.getAndUpdateK8sClient(code, true); } catch (RemotingException e) { - logger.error("Update K8s error.", e); + log.error("Update K8s error.", e); putMsg(result, Status.K8S_CLIENT_OPS_ERROR, name); return result; } @@ -335,7 +334,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic clusterExist.setDescription(desc); clusterMapper.updateById(clusterExist); // need not update relation - logger.info("Cluster update complete, clusterId:{}.", clusterExist.getId()); + log.info("Cluster update complete, clusterId:{}.", clusterExist.getId()); putMsg(result, Status.SUCCESS); return result; } @@ -351,14 +350,14 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic Map result = new HashMap<>(); if (StringUtils.isEmpty(clusterName)) { - logger.warn("Parameter cluster name is empty."); + log.warn("Parameter cluster name is empty."); 
putMsg(result, Status.CLUSTER_NAME_IS_NULL); return result; } Cluster cluster = clusterMapper.queryByClusterName(clusterName); if (cluster != null) { - logger.warn("Cluster with the same name already exists, name:{}.", cluster.getName()); + log.warn("Cluster with the same name already exists, name:{}.", cluster.getName()); putMsg(result, Status.CLUSTER_NAME_EXISTS, clusterName); return result; } @@ -370,12 +369,12 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic public Map checkParams(String name, String config) { Map result = new HashMap<>(); if (StringUtils.isEmpty(name)) { - logger.warn("Parameter cluster name is empty."); + log.warn("Parameter cluster name is empty."); putMsg(result, Status.CLUSTER_NAME_IS_NULL); return result; } if (StringUtils.isEmpty(config)) { - logger.warn("Parameter cluster config is empty."); + log.warn("Parameter cluster config is empty."); putMsg(result, Status.CLUSTER_CONFIG_IS_NULL); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java index 8888b3ee69..6ae4977b81 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java @@ -63,8 +63,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -72,10 +72,9 @@ import org.springframework.stereotype.Service; * data analysis service impl */ @Service +@Slf4j public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnalysisService { - 
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceImpl.class); - @Autowired private ProjectMapper projectMapper; @@ -176,7 +175,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal start = DateUtils.stringToDate(startDate); end = DateUtils.stringToDate(endDate); if (Objects.isNull(start) || Objects.isNull(end)) { - logger.warn("Parameter startDate or endDate is invalid."); + log.warn("Parameter startDate or endDate is invalid."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE); return result; } @@ -293,7 +292,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal private Pair, Map> getProjectIds(User loginUser, Map result) { Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); if (projectIds.isEmpty()) { List taskInstanceStateCounts = new ArrayList<>(); result.put(Constants.DATA_LIST, new TaskCountDto(taskInstanceStateCounts)); @@ -370,7 +369,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal Map result = new HashMap<>(); int count = 0; Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); if (!projectIds.isEmpty()) { List projects = projectMapper.selectBatchIds(projectIds); List projectCodes = projects.stream().map(project -> project.getCode()).collect(Collectors.toList()); @@ -392,7 +391,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal StatisticsStateRequest statisticsStateRequest) { Map result = new HashMap<>(); Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, 
loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); if (projectIds.isEmpty()) { putMsg(result, Status.SUCCESS); return result; @@ -416,7 +415,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal workflowCode = processDefinitionMapper.queryByDefineName(projectCode, workflowName).getCode(); } } catch (Exception e) { - logger.warn(e.getMessage()); + log.warn(e.getMessage()); } Date date = new Date(); @@ -466,7 +465,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal public Map countTaskStates(User loginUser, StatisticsStateRequest statisticsStateRequest) { Map result = new HashMap<>(); Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); if (projectIds.isEmpty()) { putMsg(result, Status.SUCCESS); return result; @@ -497,7 +496,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal // taskCode = relationMapper.queryTaskCodeByTaskName(workflowCode, taskName); } } catch (Exception e) { - logger.warn(e.getMessage()); + log.warn(e.getMessage()); } Date date = new Date(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java index 5a654c0504..e7df5f39e5 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java @@ -60,8 +60,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import 
lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DuplicateKeyException; import org.springframework.stereotype.Service; @@ -75,10 +75,9 @@ import com.fasterxml.jackson.databind.node.ObjectNode; * data source service impl */ @Service +@Slf4j public class DataSourceServiceImpl extends BaseServiceImpl implements DataSourceService { - private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceImpl.class); - @Autowired private DataSourceMapper dataSourceMapper; @@ -110,12 +109,12 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource } // check name can use or not if (checkName(datasourceParam.getName())) { - logger.warn("Datasource with the same name already exists, name:{}.", datasourceParam.getName()); + log.warn("Datasource with the same name already exists, name:{}.", datasourceParam.getName()); putMsg(result, Status.DATASOURCE_EXIST); return result; } if (checkDescriptionLength(datasourceParam.getNote())) { - logger.warn("Parameter description is too long, description:{}.", datasourceParam.getNote()); + log.warn("Parameter description is too long, description:{}.", datasourceParam.getNote()); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -139,11 +138,11 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource dataSourceMapper.insert(dataSource); putMsg(result, Status.SUCCESS); permissionPostHandle(AuthorizationType.DATASOURCE, loginUser.getId(), - Collections.singletonList(dataSource.getId()), logger); - logger.info("Datasource create complete, dbType:{}, datasourceName:{}.", dataSource.getType().getDescp(), + Collections.singletonList(dataSource.getId()), log); + log.info("Datasource create complete, dbType:{}, datasourceName:{}.", dataSource.getType().getDescp(), dataSource.getName()); } catch (DuplicateKeyException ex) { - logger.error("Datasource create error.", ex); + log.error("Datasource 
create error.", ex); putMsg(result, Status.DATASOURCE_EXIST); } @@ -164,7 +163,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource // determine whether the data source exists DataSource dataSource = dataSourceMapper.selectById(id); if (dataSource == null) { - logger.error("Datasource does not exist, id:{}.", id); + log.error("Datasource does not exist, id:{}.", id); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -177,12 +176,12 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource // check name can use or not if (!dataSourceParam.getName().trim().equals(dataSource.getName()) && checkName(dataSourceParam.getName())) { - logger.warn("Datasource with the same name already exists, name:{}.", dataSource.getName()); + log.warn("Datasource with the same name already exists, name:{}.", dataSource.getName()); putMsg(result, Status.DATASOURCE_EXIST); return result; } if (checkDescriptionLength(dataSourceParam.getNote())) { - logger.warn("Parameter description is too long, description:{}.", dataSourceParam.getNote()); + log.warn("Parameter description is too long, description:{}.", dataSourceParam.getNote()); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -211,11 +210,11 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource dataSource.setBindTestId(dataSourceParam.getBindTestId()); try { dataSourceMapper.updateById(dataSource); - logger.info("Update datasource complete, datasourceId:{}, datasourceName:{}.", dataSource.getId(), + log.info("Update datasource complete, datasourceId:{}, datasourceName:{}.", dataSource.getId(), dataSource.getName()); putMsg(result, Status.SUCCESS); } catch (DuplicateKeyException ex) { - logger.error("Update datasource error, datasourceId:{}, datasourceName:{}.", dataSource.getId(), + log.error("Update datasource error, datasourceId:{}, datasourceName:{}.", dataSource.getId(), dataSource.getName()); putMsg(result, 
Status.DATASOURCE_EXIST); } @@ -239,7 +238,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource Map result = new HashMap<>(); DataSource dataSource = dataSourceMapper.selectById(id); if (dataSource == null) { - logger.error("Datasource does not exist, id:{}.", id); + log.error("Datasource does not exist, id:{}.", id); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -282,7 +281,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal); } else { Set ids = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, loginUser.getId(), log); if (ids.isEmpty()) { result.setData(pageInfo); putMsg(result, Status.SUCCESS); @@ -337,7 +336,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource datasourceList = dataSourceMapper.queryDataSourceByType(0, type, testFlag); } else { Set ids = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, loginUser.getId(), log); if (ids.isEmpty()) { result.put(Constants.DATA_LIST, Collections.emptyList()); putMsg(result, Status.SUCCESS); @@ -363,7 +362,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource Result result = new Result<>(); List dataSourceList = dataSourceMapper.queryDataSourceByName(name); if (dataSourceList != null && !dataSourceList.isEmpty()) { - logger.warn("Datasource with the same name already exists, dataSourceName:{}.", + log.warn("Datasource with the same name already exists, dataSourceName:{}.", dataSourceList.get(0).getName()); putMsg(result, Status.DATASOURCE_EXIST); } else { @@ -386,12 +385,12 @@ public class DataSourceServiceImpl extends 
BaseServiceImpl implements DataSource Result result = new Result<>(); try (Connection connection = DataSourceClientProvider.getInstance().getConnection(type, connectionParam)) { if (connection == null) { - logger.error("Connection test to {} datasource failed, connectionParam:{}.", type.getDescp(), + log.error("Connection test to {} datasource failed, connectionParam:{}.", type.getDescp(), connectionParam); putMsg(result, Status.CONNECTION_TEST_FAILURE); return result; } - logger.info("Connection test to {} datasource success, connectionParam:{}", type.getDescp(), + log.info("Connection test to {} datasource success, connectionParam:{}", type.getDescp(), connectionParam); putMsg(result, Status.SUCCESS); return result; @@ -399,7 +398,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource String message = Optional.of(e).map(Throwable::getCause) .map(Throwable::getMessage) .orElse(e.getMessage()); - logger.error("Datasource test connection error, dbType:{}, connectionParam:{}, message:{}.", type, + log.error("Datasource test connection error, dbType:{}, connectionParam:{}, message:{}.", type, connectionParam, message); return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(), message); } @@ -416,7 +415,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource DataSource dataSource = dataSourceMapper.selectById(id); if (dataSource == null) { Result result = new Result<>(); - logger.error("Datasource does not exist, datasourceId:{}.", id); + log.error("Datasource does not exist, datasourceId:{}.", id); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -439,7 +438,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource // query datasource by id DataSource dataSource = dataSourceMapper.selectById(datasourceId); if (dataSource == null) { - logger.warn("Datasource does not exist, datasourceId:{}.", datasourceId); + log.warn("Datasource does not exist, datasourceId:{}.", 
datasourceId); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -451,10 +450,10 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource dataSourceMapper.deleteById(datasourceId); datasourceUserMapper.deleteByDatasourceId(datasourceId); clearBindTestId(datasourceId); - logger.info("Delete datasource complete, datasourceId:{}.", datasourceId); + log.info("Delete datasource complete, datasourceId:{}.", datasourceId); putMsg(result, Status.SUCCESS); } catch (Exception e) { - logger.error("Delete datasource complete, datasourceId:{}.", datasourceId, e); + log.error("Delete datasource complete, datasourceId:{}.", datasourceId, e); throw new ServiceException(Status.DELETE_DATA_SOURCE_FAILURE); } return result; @@ -547,7 +546,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource try { schema = metaData.getConnection().getSchema(); } catch (SQLException e) { - logger.error("Cant not get the schema, datasourceId:{}.", datasourceId, e); + log.error("Cant not get the schema, datasourceId:{}.", datasourceId, e); } tables = metaData.getTables( @@ -555,7 +554,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource getDbSchemaPattern(dataSource.getType(), schema, connectionParam), "%", TABLE_TYPES); if (null == tables) { - logger.error("Get datasource tables error, datasourceId:{}.", datasourceId); + log.error("Get datasource tables error, datasourceId:{}.", datasourceId); putMsg(result, Status.GET_DATASOURCE_TABLES_ERROR); return result; } @@ -567,7 +566,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource } } catch (Exception e) { - logger.error("Get datasource tables error, datasourceId:{}.", datasourceId, e); + log.error("Get datasource tables error, datasourceId:{}.", datasourceId, e); putMsg(result, Status.GET_DATASOURCE_TABLES_ERROR); return result; } finally { @@ -622,7 +621,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl 
implements DataSource columnList.add(rs.getString(COLUMN_NAME)); } } catch (Exception e) { - logger.error("Get datasource table columns error, datasourceId:{}.", dataSource.getId(), e); + log.error("Get datasource table columns error, datasourceId:{}.", dataSource.getId(), e); } finally { closeResult(rs); releaseConnection(connection); @@ -684,7 +683,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource try { connection.close(); } catch (Exception e) { - logger.error("Connection release error", e); + log.error("Connection release error", e); } } } @@ -694,7 +693,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource try { rs.close(); } catch (Exception e) { - logger.error("ResultSet close error", e); + log.error("ResultSet close error", e); } } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java index d23850ba4d..f61a82f76d 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java @@ -30,8 +30,8 @@ import org.apache.commons.lang3.StringUtils; import java.util.Date; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -42,10 +42,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * DqExecuteResultServiceImpl */ @Service +@Slf4j public class DqExecuteResultServiceImpl extends BaseServiceImpl implements DqExecuteResultService { - private final Logger logger = LoggerFactory.getLogger(DqExecuteResultServiceImpl.class); - @Autowired private DqExecuteResultMapper 
dqExecuteResultMapper; @@ -76,7 +75,7 @@ public class DqExecuteResultServiceImpl extends BaseServiceImpl implements DqExe end = DateUtils.stringToDate(endTime); } } catch (Exception e) { - logger.warn("Parameter startTime or endTime is invalid."); + log.warn("Parameter startTime or endTime is invalid."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startTime,endTime"); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java index 1fdce91955..1b5fc4fdb0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java @@ -64,8 +64,8 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -80,10 +80,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; * DqRuleServiceImpl */ @Service +@Slf4j public class DqRuleServiceImpl extends BaseServiceImpl implements DqRuleService { - private final Logger logger = LoggerFactory.getLogger(DqRuleServiceImpl.class); - @Autowired private DqRuleMapper dqRuleMapper; @@ -240,7 +239,7 @@ public class DqRuleServiceImpl extends BaseServiceImpl implements DqRuleService try { result = mapper.writeValueAsString(params); } catch (JsonProcessingException e) { - logger.error("Json parse error.", e); + log.error("Json parse error.", e); } return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java index fd3659bdc6..18de9c5412 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java @@ -55,8 +55,8 @@ import java.util.Set; import java.util.TreeSet; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.BeanUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -72,10 +72,9 @@ import com.fasterxml.jackson.core.type.TypeReference; * task definition service impl */ @Service +@Slf4j public class EnvironmentServiceImpl extends BaseServiceImpl implements EnvironmentService { - private static final Logger logger = LoggerFactory.getLogger(EnvironmentServiceImpl.class); - @Autowired private EnvironmentMapper environmentMapper; @@ -104,7 +103,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme return result; } if (checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -115,7 +114,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme Environment environment = environmentMapper.queryByEnvironmentName(name); if (environment != null) { - logger.warn("Environment with the same name already exist, environmentName:{}.", environment.getName()); + log.warn("Environment with the same name already exist, environmentName:{}.", environment.getName()); putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, name); return result; } @@ -132,7 +131,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements 
Environme code = CodeGenerateUtils.getInstance().genCode(); env.setCode(code); } catch (CodeGenerateException e) { - logger.error("Generate environment code error.", e); + log.error("Generate environment code error.", e); } if (code == 0L) { putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating environment code"); @@ -153,7 +152,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme relation.setCreateTime(new Date()); relation.setUpdateTime(new Date()); relationMapper.insert(relation); - logger.info( + log.info( "Environment-WorkerGroup relation create complete, environmentName:{}, workerGroup:{}.", env.getName(), relation.getWorkerGroup()); } @@ -163,10 +162,10 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme result.put(Constants.DATA_LIST, env.getCode()); putMsg(result, Status.SUCCESS); permissionPostHandle(AuthorizationType.ENVIRONMENT, loginUser.getId(), - Collections.singletonList(env.getId()), logger); - logger.info("Environment create complete, name:{}.", env.getName()); + Collections.singletonList(env.getId()), log); + log.info("Environment create complete, name:{}.", env.getName()); } else { - logger.error("Environment create error, name:{}.", env.getName()); + log.error("Environment create error, name:{}.", env.getName()); putMsg(result, Status.CREATE_ENVIRONMENT_ERROR); } return result; @@ -191,7 +190,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme environmentIPage = environmentMapper.queryEnvironmentListPaging(page, searchVal); } else { Set ids = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.ENVIRONMENT, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.ENVIRONMENT, loginUser.getId(), log); if (ids.isEmpty()) { result.setData(pageInfo); putMsg(result, Status.SUCCESS); @@ -235,7 +234,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme 
public Map queryAllEnvironmentList(User loginUser) { Map result = new HashMap<>(); Set ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.ENVIRONMENT, - loginUser.getId(), logger); + loginUser.getId(), log); if (ids.isEmpty()) { result.put(Constants.DATA_LIST, Collections.emptyList()); putMsg(result, Status.SUCCESS); @@ -335,7 +334,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme .selectCount(new QueryWrapper().lambda().eq(TaskDefinition::getEnvironmentCode, code)); if (relatedTaskNumber > 0) { - logger.warn("Delete environment failed because {} tasks is using it, environmentCode:{}.", + log.warn("Delete environment failed because {} tasks is using it, environmentCode:{}.", relatedTaskNumber, code); putMsg(result, Status.DELETE_ENVIRONMENT_RELATED_TASK_EXISTS); return result; @@ -346,10 +345,10 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme relationMapper.delete(new QueryWrapper() .lambda() .eq(EnvironmentWorkerGroupRelation::getEnvironmentCode, code)); - logger.info("Environment and relations delete complete, environmentCode:{}.", code); + log.info("Environment and relations delete complete, environmentCode:{}.", code); putMsg(result, Status.SUCCESS); } else { - logger.error("Environment delete error, environmentCode:{}.", code); + log.error("Environment delete error, environmentCode:{}.", code); putMsg(result, Status.DELETE_ENVIRONMENT_ERROR); } return result; @@ -380,14 +379,14 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme return checkResult; } if (checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } Environment environment = environmentMapper.queryByEnvironmentName(name); if (environment != null && !environment.getCode().equals(code)) { - logger.warn("Environment with 
the same name already exist, name:{}.", environment.getName()); + log.warn("Environment with the same name already exist, name:{}.", environment.getName()); putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, name); return result; } @@ -445,10 +444,10 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme relationMapper.insert(relation); } }); - logger.info("Environment and relations update complete, environmentId:{}.", env.getId()); + log.info("Environment and relations update complete, environmentId:{}.", env.getId()); putMsg(result, Status.SUCCESS); } else { - logger.error("Environment update error, environmentId:{}.", env.getId()); + log.error("Environment update error, environmentId:{}.", env.getId()); putMsg(result, Status.UPDATE_ENVIRONMENT_ERROR, name); } return result; @@ -465,14 +464,14 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme Map result = new HashMap<>(); if (StringUtils.isEmpty(environmentName)) { - logger.warn("parameter environment name is empty."); + log.warn("parameter environment name is empty."); putMsg(result, Status.ENVIRONMENT_NAME_IS_NULL); return result; } Environment environment = environmentMapper.queryByEnvironmentName(environmentName); if (environment != null) { - logger.warn("Environment with the same name already exist, name:{}.", environment.getName()); + log.warn("Environment with the same name already exist, name:{}.", environment.getName()); putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, environmentName); return result; } @@ -493,7 +492,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme if (Objects.nonNull(taskDefinitionList) && taskDefinitionList.size() != 0) { Set collect = taskDefinitionList.stream().map(TaskDefinition::getName).collect(Collectors.toSet()); - logger.warn("Environment {} and worker group {} is being used by task {}, so can not update.", + log.warn("Environment {} and worker group {} is being used by task {}, so 
can not update.", taskDefinitionList.get(0).getEnvironmentCode(), taskDefinitionList.get(0).getWorkerGroup(), collect); putMsg(result, Status.UPDATE_ENVIRONMENT_WORKER_GROUP_RELATION_ERROR, workerGroup, environmentName, @@ -508,12 +507,12 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme public Map checkParams(String name, String config, String workerGroups) { Map result = new HashMap<>(); if (StringUtils.isEmpty(name)) { - logger.warn("parameter environment name is empty."); + log.warn("parameter environment name is empty."); putMsg(result, Status.ENVIRONMENT_NAME_IS_NULL); return result; } if (StringUtils.isEmpty(config)) { - logger.warn("parameter environment config is empty."); + log.warn("parameter environment config is empty."); putMsg(result, Status.ENVIRONMENT_CONFIG_IS_NULL); return result; } @@ -521,7 +520,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme List workerGroupList = JSONUtils.parseObject(workerGroups, new TypeReference>() { }); if (Objects.isNull(workerGroupList)) { - logger.warn("Parameter worker groups list is invalid."); + log.warn("Parameter worker groups list is invalid."); putMsg(result, Status.ENVIRONMENT_WORKER_GROUPS_IS_INVALID); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentWorkerGroupRelationServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentWorkerGroupRelationServiceImpl.java index ce714ea33f..29272a1338 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentWorkerGroupRelationServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentWorkerGroupRelationServiceImpl.java @@ -27,8 +27,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import 
lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -36,12 +36,11 @@ import org.springframework.stereotype.Service; * task definition service impl */ @Service +@Slf4j public class EnvironmentWorkerGroupRelationServiceImpl extends BaseServiceImpl implements EnvironmentWorkerGroupRelationService { - private static final Logger logger = LoggerFactory.getLogger(EnvironmentWorkerGroupRelationServiceImpl.class); - @Autowired private EnvironmentWorkerGroupRelationMapper environmentWorkerGroupRelationMapper; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java index 5a31c8efdd..533a254eea 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java @@ -104,8 +104,8 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Service; @@ -118,10 +118,9 @@ import com.google.common.collect.Lists; * executor service impl */ @Service +@Slf4j public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorService { - private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceImpl.class); - @Autowired private ProjectMapper projectMapper; @@ -214,7 +213,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ } // timeout is invalid if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { - logger.warn("Parameter timeout is invalid, 
timeout:{}.", timeout); + log.warn("Parameter timeout is invalid, timeout:{}.", timeout); putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR); return result; } @@ -230,7 +229,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ // check current version whether include startNodeList checkStartNodeList(startNodeList, processDefinitionCode, processDefinition.getVersion()); if (!checkTenantSuitable(processDefinition)) { - logger.error( + log.error( "There is not any valid tenant for the process definition, processDefinitionCode:{}, processDefinitionName:{}.", processDefinition.getCode(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); @@ -264,12 +263,12 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ if (create > 0) { processDefinition.setWarningGroupId(warningGroupId); processDefinitionMapper.updateById(processDefinition); - logger.info("Create command complete, processDefinitionCode:{}, commandCount:{}.", + log.info("Create command complete, processDefinitionCode:{}, commandCount:{}.", processDefinition.getCode(), create); result.put(Constants.DATA_LIST, triggerCode); putMsg(result, Status.SUCCESS); } else { - logger.error("Start process instance failed because create command error, processDefinitionCode:{}.", + log.error("Start process instance failed because create command error, processDefinitionCode:{}.", processDefinition.getCode()); putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); } @@ -288,7 +287,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ // no master if (masterServers.isEmpty()) { - logger.error("Master does not exist."); + log.error("Master does not exist."); putMsg(result, Status.MASTER_NOT_EXISTS); return false; } @@ -311,7 +310,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ if (cronMap.containsKey(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST)) { String[] stringDates = 
cronMap.get(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST).split(COMMA); if (stringDates.length > SCHEDULE_TIME_MAX_LENGTH) { - logger.warn("Parameter cornTime is bigger than {}.", SCHEDULE_TIME_MAX_LENGTH); + log.warn("Parameter cornTime is bigger than {}.", SCHEDULE_TIME_MAX_LENGTH); return false; } } @@ -425,7 +424,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ return result; } if (!checkTenantSuitable(processDefinition)) { - logger.error( + log.error( "There is not any valid tenant for the process definition, processDefinitionId:{}, processDefinitionCode:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(result, Status.TENANT_NOT_SUITABLE); @@ -461,7 +460,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ break; case STOP: if (processInstance.getState() == WorkflowExecutionStatus.READY_STOP) { - logger.warn("Process instance status is already {}, processInstanceName:{}.", + log.warn("Process instance status is already {}, processInstanceName:{}.", WorkflowExecutionStatus.READY_STOP.getDesc(), processInstance.getName()); putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); @@ -473,7 +472,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ break; case PAUSE: if (processInstance.getState() == WorkflowExecutionStatus.READY_PAUSE) { - logger.warn("Process instance status is already {}, processInstanceName:{}.", + log.warn("Process instance status is already {}, processInstanceName:{}.", WorkflowExecutionStatus.READY_STOP.getDesc(), processInstance.getName()); putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); @@ -483,7 +482,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ } break; default: - logger.warn("Unknown execute type for process instance, processInstanceId:{}.", + 
log.warn("Unknown execute type for process instance, processInstanceId:{}.", processInstance.getId()); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); @@ -537,7 +536,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ .orElseThrow(() -> new ServiceException(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId)); if (!processInstance.getState().isFinished()) { - logger.error("Can not execute task for process instance which is not finished, processInstanceId:{}.", + log.error("Can not execute task for process instance which is not finished, processInstanceId:{}.", processInstanceId); putMsg(response, Status.WORKFLOW_INSTANCE_IS_NOT_FINISHED); return response; @@ -551,7 +550,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ processInstance.getProcessDefinitionVersion()); if (!checkTenantSuitable(processDefinition)) { - logger.error( + log.error( "There is not any valid tenant for the process definition, processDefinitionId:{}, processDefinitionCode:{}, ", processDefinition.getId(), processDefinition.getName()); putMsg(response, Status.TENANT_NOT_SUITABLE); @@ -564,7 +563,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ try { startNodeListLong = Long.parseLong(startNodeList); } catch (NumberFormatException e) { - logger.error("startNodeList is not a number"); + log.error("startNodeList is not a number"); putMsg(response, Status.REQUEST_PARAMS_NOT_VALID_ERROR, startNodeList); return response; } @@ -593,7 +592,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ command.setTaskDependType(taskDependType); if (!commandService.verifyIsNeedCreateCommand(command)) { - logger.warn( + log.warn( "Process instance is executing the command, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}.", processDefinition.getCode(), processDefinition.getVersion(), processInstanceId); 
putMsg(response, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, @@ -601,16 +600,16 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ return response; } - logger.info("Creating command, commandInfo:{}.", command); + log.info("Creating command, commandInfo:{}.", command); int create = commandService.createCommand(command); if (create > 0) { - logger.info("Create {} command complete, processDefinitionCode:{}, processDefinitionVersion:{}.", + log.info("Create {} command complete, processDefinitionCode:{}, processDefinitionVersion:{}.", command.getCommandType().getDescp(), command.getProcessDefinitionCode(), processDefinition.getVersion()); putMsg(response, Status.SUCCESS); } else { - logger.error( + log.error( "Execute process instance failed because create {} command error, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}.", command.getCommandType().getDescp(), command.getProcessDefinitionCode(), processDefinition.getVersion(), @@ -628,7 +627,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ // check process instance exist ProcessInstance processInstance = processInstanceMapper.selectById(taskGroupQueue.getProcessId()); if (processInstance == null) { - logger.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", + log.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", taskGroupQueue.getProjectCode(), taskGroupQueue.getProcessId()); putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, taskGroupQueue.getProcessId()); return result; @@ -735,7 +734,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ // determine whether the process is normal if (update > 0) { - logger.info("Process instance state is updated to {} in database, processInstanceName:{}.", + log.info("Process instance state is updated to {} in database, processInstanceName:{}.", executionStatus.getDesc(), processInstance.getName()); 
// directly send the process instance state change event to target master, not guarantee the event send // success @@ -745,7 +744,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ stateEventCallbackService.sendResult(host, workflowStateEventChangeCommand.convert2Command()); putMsg(result, Status.SUCCESS); } else { - logger.error("Process instance state update error, processInstanceName:{}.", processInstance.getName()); + log.error("Process instance state update error, processInstanceName:{}.", processInstance.getName()); putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); } return result; @@ -760,14 +759,14 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ private Map forceStart(ProcessInstance processInstance, TaskGroupQueue taskGroupQueue) { Map result = new HashMap<>(); if (taskGroupQueue.getStatus() != TaskGroupQueueStatus.WAIT_QUEUE) { - logger.warn("Task group queue already starts, taskGroupQueueId:{}.", taskGroupQueue.getId()); + log.warn("Task group queue already starts, taskGroupQueueId:{}.", taskGroupQueue.getId()); putMsg(result, Status.TASK_GROUP_QUEUE_ALREADY_START); return result; } taskGroupQueue.setForceStart(Flag.YES.getCode()); processService.updateTaskGroupQueue(taskGroupQueue); - logger.info("Sending force start command to master."); + log.info("Sending force start command to master."); processService.sendStartTask2Master(processInstance, taskGroupQueue.getTaskId(), org.apache.dolphinscheduler.remote.command.CommandType.TASK_FORCE_STATE_EVENT_REQUEST); putMsg(result, Status.SUCCESS); @@ -805,22 +804,22 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ command.setProcessInstanceId(instanceId); command.setTestFlag(testFlag); if (!commandService.verifyIsNeedCreateCommand(command)) { - logger.warn( + log.warn( "Process instance is executing the command, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}.", 
processDefinitionCode, processVersion, instanceId); putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, String.valueOf(processDefinitionCode)); return result; } - logger.info("Creating command, commandInfo:{}.", command); + log.info("Creating command, commandInfo:{}.", command); int create = commandService.createCommand(command); if (create > 0) { - logger.info("Create {} command complete, processDefinitionCode:{}, processDefinitionVersion:{}.", + log.info("Create {} command complete, processDefinitionCode:{}, processDefinitionVersion:{}.", command.getCommandType().getDescp(), command.getProcessDefinitionCode(), processVersion); putMsg(result, Status.SUCCESS); } else { - logger.error( + log.error( "Execute process instance failed because create {} command error, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}.", command.getCommandType().getDescp(), command.getProcessDefinitionCode(), processVersion, instanceId); @@ -843,7 +842,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); if (processDefinition == null) { - logger.error("Process definition is not be found, processDefinitionCode:{}.", processDefinitionCode); + log.error("Process definition is not be found, processDefinitionCode:{}.", processDefinitionCode); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "processDefinitionCode"); return result; } @@ -858,7 +857,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ * if there is no online process, exit directly */ if (processDefinitionTmp.getReleaseState() != ReleaseState.ONLINE) { - logger.warn("Subprocess definition {} of process definition {} is not {}.", + log.warn("Subprocess definition {} of process definition {} is not {}.", processDefinitionTmp.getName(), processDefinition.getName(), ReleaseState.ONLINE.getDescp()); putMsg(result, 
Status.PROCESS_DEFINE_NOT_RELEASE, processDefinitionTmp.getName()); @@ -942,7 +941,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ // determine whether to complement if (commandType == CommandType.COMPLEMENT_DATA) { if (schedule == null || StringUtils.isEmpty(schedule)) { - logger.error("Create {} type command error because parameter schedule is invalid.", + log.error("Create {} type command error because parameter schedule is invalid.", command.getCommandType().getDescp()); return 0; } @@ -950,7 +949,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ return 0; } try { - logger.info("Start to create {} command, processDefinitionCode:{}.", + log.info("Start to create {} command, processDefinitionCode:{}.", command.getCommandType().getDescp(), processDefineCode); return createComplementCommandList(triggerCode, schedule, runMode, command, expectedParallelismNumber, complementDependentMode); @@ -1000,18 +999,18 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ } switch (runMode) { case RUN_MODE_SERIAL: { - logger.info("RunMode of {} command is serial run, processDefinitionCode:{}.", + log.info("RunMode of {} command is serial run, processDefinitionCode:{}.", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); if (StringUtils.isNotEmpty(dateList)) { cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST, dateList); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - logger.info("Creating command, commandInfo:{}.", command); + log.info("Creating command, commandInfo:{}.", command); createCount = commandService.createCommand(command); if (createCount > 0) { - logger.info("Create {} command complete, processDefinitionCode:{}", + log.info("Create {} command complete, processDefinitionCode:{}", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); } else { - logger.error("Create {} command error, 
processDefinitionCode:{}", + log.error("Create {} command error, processDefinitionCode:{}", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); } } @@ -1019,13 +1018,13 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_START_DATE, startDate); cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_END_DATE, endDate); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - logger.info("Creating command, commandInfo:{}.", command); + log.info("Creating command, commandInfo:{}.", command); createCount = commandService.createCommand(command); if (createCount > 0) { - logger.info("Create {} command complete, processDefinitionCode:{}", + log.info("Create {} command complete, processDefinitionCode:{}", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); } else { - logger.error("Create {} command error, processDefinitionCode:{}", + log.error("Create {} command error, processDefinitionCode:{}", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); } // dependent process definition @@ -1033,11 +1032,11 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ command.getProcessDefinitionCode()); if (schedules.isEmpty() || complementDependentMode == ComplementDependentMode.OFF_MODE) { - logger.info( + log.info( "Complement dependent mode is off mode or Scheduler is empty, so skip create complement dependent command, processDefinitionCode:{}.", command.getProcessDefinitionCode()); } else { - logger.info( + log.info( "Complement dependent mode is all dependent and Scheduler is not empty, need create complement dependent command, processDefinitionCode:{}.", command.getProcessDefinitionCode()); dependentProcessDefinitionCreateCount += createComplementDependentCommand(schedules, command); @@ -1049,7 +1048,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ break; } case RUN_MODE_PARALLEL: { - 
logger.info("RunMode of {} command is parallel run, processDefinitionCode:{}.", + log.info("RunMode of {} command is parallel run, processDefinitionCode:{}.", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); if (startDate != null && endDate != null) { List schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode( @@ -1064,7 +1063,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ if (expectedParallelismNumber != null && expectedParallelismNumber != 0) { createCount = Math.min(createCount, expectedParallelismNumber); } - logger.info("Complement command run in parallel mode, current expectedParallelismNumber:{}.", + log.info("Complement command run in parallel mode, current expectedParallelismNumber:{}.", createCount); // Distribute the number of tasks equally to each command. @@ -1090,22 +1089,22 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(listDate.get(endDateIndex))); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - logger.info("Creating command, commandInfo:{}.", command); + log.info("Creating command, commandInfo:{}.", command); if (commandService.createCommand(command) > 0) { - logger.info("Create {} command complete, processDefinitionCode:{}", + log.info("Create {} command complete, processDefinitionCode:{}", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); triggerRelationService.saveTriggerToDb(ApiTriggerType.COMMAND, triggerCode, command.getId()); } else { - logger.error("Create {} command error, processDefinitionCode:{}", + log.error("Create {} command error, processDefinitionCode:{}", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); } if (schedules.isEmpty() || complementDependentMode == ComplementDependentMode.OFF_MODE) { - logger.info( + log.info( "Complement dependent mode is off mode or Scheduler is 
empty, so skip create complement dependent command, processDefinitionCode:{}.", command.getProcessDefinitionCode()); } else { - logger.info( + log.info( "Complement dependent mode is all dependent and Scheduler is not empty, need create complement dependent command, processDefinitionCode:{}.", command.getProcessDefinitionCode()); dependentProcessDefinitionCreateCount += @@ -1121,17 +1120,17 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ if (expectedParallelismNumber != null && expectedParallelismNumber != 0) { createCount = Math.min(createCount, expectedParallelismNumber); } - logger.info("Complement command run in parallel mode, current expectedParallelismNumber:{}.", + log.info("Complement command run in parallel mode, current expectedParallelismNumber:{}.", createCount); for (List stringDate : Lists.partition(listDate, createCount)) { cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST, String.join(COMMA, stringDate)); command.setCommandParam(JSONUtils.toJsonString(cmdParam)); - logger.info("Creating command, commandInfo:{}.", command); + log.info("Creating command, commandInfo:{}.", command); if (commandService.createCommand(command) > 0) { - logger.info("Create {} command complete, processDefinitionCode:{}", + log.info("Create {} command complete, processDefinitionCode:{}", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); } else { - logger.error("Create {} command error, processDefinitionCode:{}", + log.error("Create {} command error, processDefinitionCode:{}", command.getCommandType().getDescp(), command.getProcessDefinitionCode()); } } @@ -1142,7 +1141,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ default: break; } - logger.info("Create complement command count:{}, Create dependent complement command count:{}", createCount, + log.info("Create complement command count:{}, Create dependent complement command count:{}", createCount, 
dependentProcessDefinitionCreateCount); return createCount; } @@ -1157,7 +1156,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ try { dependentCommand = (Command) BeanUtils.cloneBean(command); } catch (Exception e) { - logger.error("Copy dependent command error.", e); + log.error("Copy dependent command error.", e); return dependentProcessDefinitionCreateCount; } @@ -1175,7 +1174,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ Map cmdParam = JSONUtils.toMap(dependentCommand.getCommandParam()); cmdParam.put(CMD_PARAM_START_NODES, String.valueOf(dependentProcessDefinition.getTaskDefinitionCode())); dependentCommand.setCommandParam(JSONUtils.toJsonString(cmdParam)); - logger.info("Creating complement dependent command, commandInfo:{}.", command); + log.info("Creating complement dependent command, commandInfo:{}.", command); dependentProcessDefinitionCreateCount += commandService.createCommand(dependentCommand); } @@ -1255,13 +1254,13 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ return false; } if (start.isAfter(end)) { - logger.error( + log.error( "Complement data parameter error, start time should be before end time, startDate:{}, endDate:{}.", start, end); return false; } } catch (Exception ex) { - logger.warn("Parse schedule time error, startDate:{}, endDate:{}.", startDate, endDate); + log.warn("Parse schedule time error, startDate:{}, endDate:{}.", startDate, endDate); return false; } } @@ -1289,7 +1288,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ public WorkflowExecuteDto queryExecutingWorkflowByProcessInstanceId(Integer processInstanceId) { ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId).orElse(null); if (processInstance == null) { - logger.error("Process instance does not exist, processInstanceId:{}.", processInstanceId); + log.error("Process instance does 
not exist, processInstanceId:{}.", processInstanceId); return null; } Host host = new Host(processInstance.getHost()); @@ -1298,7 +1297,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ org.apache.dolphinscheduler.remote.command.Command command = stateEventCallbackService.sendSync(host, requestCommand.convert2Command()); if (command == null) { - logger.error("Query executing process instance from master error, processInstanceId:{}.", + log.error("Query executing process instance from master error, processInstanceId:{}.", processInstanceId); return null; } @@ -1344,10 +1343,10 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ org.apache.dolphinscheduler.remote.command.Command response = stateEventCallbackService.sendSync(host, taskExecuteStartCommand.convert2Command()); if (response != null) { - logger.info("Send task execute start command complete, response is {}.", response); + log.info("Send task execute start command complete, response is {}.", response); putMsg(result, Status.SUCCESS); } else { - logger.error( + log.error( "Start to execute stream task instance error, projectCode:{}, taskDefinitionCode:{}, taskVersion:{}.", projectCode, taskDefinitionCode, taskDefinitionVersion); putMsg(result, Status.START_TASK_INSTANCE_ERROR); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java index 908d80cd2f..194feb9d77 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java @@ -43,8 +43,8 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import 
lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -55,10 +55,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * k8s namespace service impl */ @Service +@Slf4j public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNamespaceService { - private static final Logger logger = LoggerFactory.getLogger(K8SNamespaceServiceImpl.class); - private static String resourceYaml = "apiVersion: v1\n" + "kind: ResourceQuota\n" + "metadata:\n" @@ -91,7 +90,7 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames public Result queryListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { Result result = new Result(); if (!isAdmin(loginUser)) { - logger.warn("Only admin can query namespace list, current login user name:{}.", loginUser.getUserName()); + log.warn("Only admin can query namespace list, current login user name:{}.", loginUser.getUserName()); putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } @@ -125,43 +124,43 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames Integer limitsMemory) { Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { - logger.warn("Only admin can create K8s namespace, current login user name:{}.", loginUser.getUserName()); + log.warn("Only admin can create K8s namespace, current login user name:{}.", loginUser.getUserName()); return result; } if (StringUtils.isEmpty(namespace)) { - logger.warn("Parameter namespace is empty."); + log.warn("Parameter namespace is empty."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.NAMESPACE); return result; } if (clusterCode == null) { - logger.warn("Parameter clusterCode is null."); + log.warn("Parameter clusterCode is null."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.CLUSTER); return result; } if (limitsCpu != null && limitsCpu < 0.0) 
{ - logger.warn("Parameter limitsCpu is invalid."); + log.warn("Parameter limitsCpu is invalid."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.LIMITS_CPU); return result; } if (limitsMemory != null && limitsMemory < 0) { - logger.warn("Parameter limitsMemory is invalid."); + log.warn("Parameter limitsMemory is invalid."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.LIMITS_MEMORY); return result; } if (checkNamespaceExistInDb(namespace, clusterCode)) { - logger.warn("K8S namespace already exists."); + log.warn("K8S namespace already exists."); putMsg(result, Status.K8S_NAMESPACE_EXIST, namespace, clusterCode); return result; } Cluster cluster = clusterMapper.queryByClusterCode(clusterCode); if (cluster == null) { - logger.error("Cluster does not exist, clusterCode:{}", clusterCode); + log.error("Cluster does not exist, clusterCode:{}", clusterCode); putMsg(result, Status.CLUSTER_NOT_EXISTS, namespace, clusterCode); return result; } @@ -171,7 +170,7 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames code = CodeGenerateUtils.getInstance().genCode(); cluster.setCode(code); } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("Generate cluster code error.", e); + log.error("Generate cluster code error.", e); } if (code == 0L) { putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating cluster code"); @@ -198,14 +197,14 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames String yamlStr = genDefaultResourceYaml(k8sNamespaceObj); k8sClientService.upsertNamespaceAndResourceToK8s(k8sNamespaceObj, yamlStr); } catch (Exception e) { - logger.error("Namespace create to k8s error", e); + log.error("Namespace create to k8s error", e); putMsg(result, Status.K8S_CLIENT_OPS_ERROR, e.getMessage()); return result; } } k8sNamespaceMapper.insert(k8sNamespaceObj); - logger.info("K8s namespace create complete, namespace:{}.", k8sNamespaceObj.getNamespace()); + 
log.info("K8s namespace create complete, namespace:{}.", k8sNamespaceObj.getNamespace()); putMsg(result, Status.SUCCESS); return result; @@ -225,25 +224,25 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames Integer limitsMemory) { Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { - logger.warn("Only admin can update K8s namespace, current login user name:{}.", loginUser.getUserName()); + log.warn("Only admin can update K8s namespace, current login user name:{}.", loginUser.getUserName()); return result; } if (limitsCpu != null && limitsCpu < 0.0) { - logger.warn("Parameter limitsCpu is invalid."); + log.warn("Parameter limitsCpu is invalid."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.LIMITS_CPU); return result; } if (limitsMemory != null && limitsMemory < 0) { - logger.warn("Parameter limitsMemory is invalid."); + log.warn("Parameter limitsMemory is invalid."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.LIMITS_MEMORY); return result; } K8sNamespace k8sNamespaceObj = k8sNamespaceMapper.selectById(id); if (k8sNamespaceObj == null) { - logger.error("K8s namespace does not exist, namespaceId:{}.", id); + log.error("K8s namespace does not exist, namespaceId:{}.", id); putMsg(result, Status.K8S_NAMESPACE_NOT_EXIST, id); return result; } @@ -258,14 +257,14 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames String yamlStr = genDefaultResourceYaml(k8sNamespaceObj); k8sClientService.upsertNamespaceAndResourceToK8s(k8sNamespaceObj, yamlStr); } catch (Exception e) { - logger.error("Namespace update to k8s error", e); + log.error("Namespace update to k8s error", e); putMsg(result, Status.K8S_CLIENT_OPS_ERROR, e.getMessage()); return result; } } // update to db k8sNamespaceMapper.updateById(k8sNamespaceObj); - logger.info("K8s namespace update complete, namespace:{}.", k8sNamespaceObj.getNamespace()); + log.info("K8s namespace update complete, 
namespace:{}.", k8sNamespaceObj.getNamespace()); putMsg(result, Status.SUCCESS); return result; } @@ -281,19 +280,19 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames public Result verifyNamespaceK8s(String namespace, Long clusterCode) { Result result = new Result<>(); if (StringUtils.isEmpty(namespace)) { - logger.warn("Parameter namespace is empty."); + log.warn("Parameter namespace is empty."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.NAMESPACE); return result; } if (clusterCode == null) { - logger.warn("Parameter clusterCode is null."); + log.warn("Parameter clusterCode is null."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.CLUSTER); return result; } if (checkNamespaceExistInDb(namespace, clusterCode)) { - logger.warn("K8S namespace already exists."); + log.warn("K8S namespace already exists."); putMsg(result, Status.K8S_NAMESPACE_EXIST, namespace, clusterCode); return result; } @@ -313,13 +312,13 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames public Map deleteNamespaceById(User loginUser, int id) { Map result = new HashMap<>(); if (isNotAdmin(loginUser, result)) { - logger.warn("Only admin can delete K8s namespace, current login user name:{}.", loginUser.getUserName()); + log.warn("Only admin can delete K8s namespace, current login user name:{}.", loginUser.getUserName()); return result; } K8sNamespace k8sNamespaceObj = k8sNamespaceMapper.selectById(id); if (k8sNamespaceObj == null) { - logger.error("K8s namespace does not exist, namespaceId:{}.", id); + log.error("K8s namespace does not exist, namespaceId:{}.", id); putMsg(result, Status.K8S_NAMESPACE_NOT_EXIST, id); return result; } @@ -327,13 +326,13 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames try { k8sClientService.deleteNamespaceToK8s(k8sNamespaceObj.getNamespace(), k8sNamespaceObj.getClusterCode()); } catch (RemotingException e) { - 
logger.error("Namespace delete in k8s error, namespaceId:{}.", id, e); + log.error("Namespace delete in k8s error, namespaceId:{}.", id, e); putMsg(result, Status.K8S_CLIENT_OPS_ERROR, id); return result; } } k8sNamespaceMapper.deleteById(id); - logger.info("K8s namespace delete complete, namespace:{}.", k8sNamespaceObj.getNamespace()); + log.info("K8s namespace delete complete, namespace:{}.", k8sNamespaceObj.getNamespace()); putMsg(result, Status.SUCCESS); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java index 227f74c12d..4fe9e96892 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java @@ -42,8 +42,8 @@ import org.apache.commons.lang3.StringUtils; import java.nio.charset.StandardCharsets; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -53,10 +53,9 @@ import com.google.common.primitives.Bytes; * logger service impl */ @Service +@Slf4j public class LoggerServiceImpl extends BaseServiceImpl implements LoggerService { - private static final Logger logger = LoggerFactory.getLogger(LoggerServiceImpl.class); - private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s"; @Autowired @@ -90,11 +89,11 @@ public class LoggerServiceImpl extends BaseServiceImpl implements LoggerService TaskInstance taskInstance = taskInstanceDao.findTaskInstanceById(taskInstId); if (taskInstance == null) { - logger.error("Task instance does not exist, taskInstanceId:{}.", taskInstId); + log.error("Task instance does not exist, 
taskInstanceId:{}.", taskInstId); return Result.error(Status.TASK_INSTANCE_NOT_FOUND); } if (StringUtils.isBlank(taskInstance.getHost())) { - logger.error("Host of task instance is null, taskInstanceId:{}.", taskInstId); + log.error("Host of task instance is null, taskInstanceId:{}.", taskInstId); return Result.error(Status.TASK_INSTANCE_HOST_IS_NULL); } Project project = projectMapper.queryProjectByTaskInstanceId(taskInstId); @@ -200,7 +199,7 @@ public class LoggerServiceImpl extends BaseServiceImpl implements LoggerService private String queryLog(TaskInstance taskInstance, int skipLineNum, int limit) { Host host = Host.of(taskInstance.getHost()); - logger.info("Query task instance log, taskInstanceId:{}, taskInstanceName:{}, host:{}, logPath:{}, port:{}", + log.info("Query task instance log, taskInstanceId:{}, taskInstanceName:{}, host:{}, logPath:{}, port:{}", taskInstance.getId(), taskInstance.getName(), host.getIp(), taskInstance.getLogPath(), host.getPort()); StringBuilder log = new StringBuilder(); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java index 2b13f4d5c9..2a0b89c6e1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java @@ -34,8 +34,8 @@ import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -45,10 +45,9 @@ import com.google.common.collect.Sets; * monitor service impl */ @Service +@Slf4j public class MonitorServiceImpl extends BaseServiceImpl implements 
MonitorService { - public static final Logger logger = LoggerFactory.getLogger(MonitorServiceImpl.class); - @Autowired private MonitorDBDao monitorDBDao; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java index 5afc8b32ea..c7e36efb3a 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java @@ -156,9 +156,8 @@ import javax.servlet.ServletOutputStream; import javax.servlet.http.HttpServletResponse; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import org.springframework.http.MediaType; @@ -177,10 +176,9 @@ import com.google.common.collect.Lists; * process definition service impl */ @Service +@Slf4j public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements ProcessDefinitionService { - private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class); - private static final String RELEASESTATE = "releaseState"; @Autowired @@ -292,13 +290,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } if (checkDescriptionLength(description)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); throw new ServiceException(Status.DESCRIPTION_TOO_LONG_ERROR); } // check whether the new process define name exist ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name); if (definition != null) { - logger.warn("Process definition with the 
same name {} already exists, processDefinitionCode:{}.", + log.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.", definition.getName(), definition.getCode()); throw new ServiceException(Status.PROCESS_DEFINITION_NAME_EXIST, name); } @@ -308,7 +306,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro if (!Constants.DEFAULT.equals(tenantCode)) { Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); if (tenant == null) { - logger.error("Tenant does not exist."); + log.error("Tenant does not exist."); throw new ServiceException(Status.TENANT_NOT_EXIST); } tenantId = tenant.getId(); @@ -406,29 +404,29 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs, Boolean.TRUE); if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) { - logger.info("The task has not changed, so skip"); + log.info("The task has not changed, so skip"); } if (saveTaskResult == Constants.DEFINITION_FAILURE) { - logger.error("Save task definition error."); + log.error("Save task definition error."); throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR); } int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); if (insertVersion == 0) { - logger.error("Save process definition error, processCode:{}.", processDefinition.getCode()); + log.error("Save process definition error, processCode:{}.", processDefinition.getCode()); throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR); } else { - logger.info("Save process definition complete, processCode:{}, processVersion:{}.", + log.info("Save process definition complete, processCode:{}, processVersion:{}.", processDefinition.getCode(), insertVersion); } int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), 
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE); if (insertResult != Constants.EXIT_CODE_SUCCESS) { - logger.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.", + log.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion); throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR); } else { - logger.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.", + log.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion); } @@ -443,7 +441,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro try { List taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class); if (CollectionUtils.isEmpty(taskDefinitionLogs)) { - logger.error("Generate task definition list failed, the given taskDefinitionJson is invalided: {}", + log.error("Generate task definition list failed, the given taskDefinitionJson is invalided: {}", taskDefinitionJson); throw new ServiceException(Status.DATA_IS_NOT_VALID, taskDefinitionJson); } @@ -453,7 +451,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro .taskParams(taskDefinitionLog.getTaskParams()) .dependence(taskDefinitionLog.getDependence()) .build())) { - logger.error( + log.error( "Generate task definition list failed, the given task definition parameter is invalided, taskName: {}, taskDefinition: {}", taskDefinitionLog.getName(), taskDefinitionLog); throw new ServiceException(Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName()); @@ -463,7 +461,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } catch (ServiceException ex) { throw 
ex; } catch (Exception e) { - logger.error("Generate task definition list failed, meet an unknown exception", e); + log.error("Generate task definition list failed, meet an unknown exception", e); throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR); } } @@ -474,7 +472,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro List taskRelationList = JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class); if (CollectionUtils.isEmpty(taskRelationList)) { - logger.error("Generate task relation list failed the taskRelation list is empty, taskRelationJson: {}", + log.error("Generate task relation list failed the taskRelation list is empty, taskRelationJson: {}", taskRelationJson); throw new ServiceException(Status.DATA_IS_NOT_VALID); } @@ -490,19 +488,19 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro Collection codes = CollectionUtils.subtract(postTaskCodes, taskNodeCodes); if (CollectionUtils.isNotEmpty(codes)) { String taskCodes = StringUtils.join(codes, Constants.COMMA); - logger.error("Task definitions do not exist, taskCodes:{}.", taskCodes); + log.error("Task definitions do not exist, taskCodes:{}.", taskCodes); throw new ServiceException(Status.TASK_DEFINE_NOT_EXIST, taskCodes); } } if (graphHasCycle(taskNodeList)) { - logger.error("Process DAG has cycle."); + log.error("Process DAG has cycle."); throw new ServiceException(Status.PROCESS_NODE_HAS_CYCLE); } // check whether the task relation json is normal for (ProcessTaskRelationLog processTaskRelationLog : taskRelationList) { if (processTaskRelationLog.getPostTaskCode() == 0) { - logger.error("The post_task_code or post_task_version of processTaskRelationLog can not be zero, " + + log.error("The post_task_code or post_task_version of processTaskRelationLog can not be zero, " + "processTaskRelationLogId:{}.", processTaskRelationLog.getId()); throw new ServiceException(Status.CHECK_PROCESS_TASK_RELATION_ERROR); } @@ -511,7 
+509,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } catch (ServiceException ex) { throw ex; } catch (Exception e) { - logger.error("Check task relation list error, meet an unknown exception, given taskRelationJson: {}", + log.error("Check task relation list error, meet an unknown exception, given taskRelationJson: {}", taskRelationJson, e); throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR); } @@ -686,7 +684,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processCode:{}.", code); + log.error("Process definition does not exist, processCode:{}.", code); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code)); } else { Tenant tenant = tenantMapper.queryById(processDefinition.getTenantId()); @@ -738,7 +736,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(projectCode, name); if (processDefinition == null) { - logger.error("Process definition does not exist, projectCode:{}.", projectCode); + log.error("Process definition does not exist, projectCode:{}.", projectCode); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, name); } else { DagData dagData = processService.genDagData(processDefinition); @@ -790,7 +788,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } if (checkDescriptionLength(description)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -801,7 +799,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro if 
(!Constants.DEFAULT.equals(tenantCode)) { Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); if (tenant == null) { - logger.error("Tenant does not exist."); + log.error("Tenant does not exist."); putMsg(result, Status.TENANT_NOT_EXIST); return result; } @@ -811,13 +809,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); // check process definition exists if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processCode:{}.", code); + log.error("Process definition does not exist, processCode:{}.", code); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code)); return result; } if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { // online can not permit edit - logger.warn("Process definition is not allowed to be modified due to {}, processDefinitionCode:{}.", + log.warn("Process definition is not allowed to be modified due to {}, processDefinitionCode:{}.", ReleaseState.ONLINE.getDescp(), processDefinition.getCode()); putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName()); return result; @@ -826,7 +824,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro // check whether the new process define name exist ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name); if (definition != null) { - logger.warn("Process definition with the same name already exists, processDefinitionCode:{}.", + log.warn("Process definition with the same name already exists, processDefinitionCode:{}.", definition.getCode()); putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name); return result; @@ -865,7 +863,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro taskDepMsg.ifPresent(sb::append); } if (sb.length() != 0) { - 
logger.error("Task cannot be deleted because it is dependent"); + log.error("Task cannot be deleted because it is dependent"); throw new ServiceException(sb.toString()); } } @@ -881,10 +879,10 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(), taskDefinitionLogs, Boolean.TRUE); if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) { - logger.info("The task has not changed, so skip"); + log.info("The task has not changed, so skip"); } if (saveTaskResult == Constants.DEFINITION_FAILURE) { - logger.error("Update task definitions error, projectCode:{}, processCode:{}.", + log.error("Update task definitions error, projectCode:{}, processCode:{}.", processDefinition.getProjectCode(), processDefinition.getCode()); putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); @@ -911,17 +909,17 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro isChange = true; } if (isChange) { - logger.info("Process definition needs to be updated, projectCode:{}, processCode:{}, processVersion:{}.", + log.info("Process definition needs to be updated, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), processDefinition.getVersion()); processDefinition.setUpdateTime(new Date()); int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); if (insertVersion <= 0) { - logger.error("Update process definition error, processCode:{}.", processDefinition.getCode()); + log.error("Update process definition error, processCode:{}.", processDefinition.getCode()); putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); } else { - logger.info("Update process definition complete, processCode:{}, 
processVersion:{}.", + log.info("Update process definition complete, processCode:{}, processVersion:{}.", processDefinition.getCode(), insertVersion); } @@ -929,20 +927,20 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE); if (insertResult == Constants.EXIT_CODE_SUCCESS) { - logger.info( + log.info( "Update process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, processDefinition); } else { - logger.error("Update process task relations error, projectCode:{}, processCode:{}, processVersion:{}.", + log.error("Update process task relations error, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion); putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); } saveOtherRelation(loginUser, processDefinition, result, otherParamsJson); } else { - logger.info( + log.info( "Process definition does not need to be updated because there is no change, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), processDefinition.getVersion()); putMsg(result, Status.SUCCESS); @@ -979,7 +977,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro putMsg(result, Status.SUCCESS); return result; } - logger.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.", + log.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.", processDefinition.getName(), processDefinition.getCode()); 
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name.trim()); return result; @@ -990,7 +988,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro public Map batchDeleteProcessDefinitionByCodes(User loginUser, long projectCode, String codes) { Map result = new HashMap<>(); if (StringUtils.isEmpty(codes)) { - logger.error("Parameter processDefinitionCodes is empty, projectCode is {}.", projectCode); + log.error("Parameter processDefinitionCodes is empty, projectCode is {}.", projectCode); putMsg(result, Status.PROCESS_DEFINITION_CODES_IS_EMPTY); return result; } @@ -1005,7 +1003,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro definitionCodes.stream().filter(code -> !queryCodes.contains(code)).collect(Collectors.toSet()); if (CollectionUtils.isNotEmpty(diffCode)) { - logger.error("Process definition does not exist, processCodes:{}.", + log.error("Process definition does not exist, processCodes:{}.", diffCode.stream().map(String::valueOf).collect(Collectors.joining(Constants.COMMA))); throw new ServiceException(Status.BATCH_DELETE_PROCESS_DEFINE_BY_CODES_ERROR, diffCode.stream().map(code -> code + "[process definition not exist]") @@ -1099,7 +1097,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro // we delete the workflow definition at last to avoid using transaction here. // If delete error, we can call this interface again. 
processDefinitionDao.deleteByWorkflowDefinitionCode(processDefinition.getCode()); - logger.info("Success delete workflow definition workflowDefinitionCode: {}", code); + log.info("Success delete workflow definition workflowDefinitionCode: {}", code); } /** @@ -1131,7 +1129,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", code); + log.error("Process definition does not exist, processDefinitionCode:{}.", code); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code)); return result; } @@ -1140,13 +1138,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro List relationList = processService.findRelationByCode(code, processDefinition.getVersion()); if (CollectionUtils.isEmpty(relationList)) { - logger.warn("Process definition has no task relation, processDefinitionCode:{}.", code); + log.warn("Process definition has no task relation, processDefinitionCode:{}.", code); putMsg(result, Status.PROCESS_DAG_IS_EMPTY); return result; } processDefinition.setReleaseState(releaseState); processDefinitionMapper.updateById(processDefinition); - logger.info("Set process definition online, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.info("Set process definition online, projectCode:{}, processDefinitionCode:{}.", projectCode, code); break; case OFFLINE: @@ -1154,20 +1152,20 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro int updateProcess = processDefinitionMapper.updateById(processDefinition); Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(code); if (updateProcess > 0) { - logger.info("Set process definition offline, projectCode:{}, processDefinitionCode:{}.", + log.info("Set process 
definition offline, projectCode:{}, processDefinitionCode:{}.", projectCode, code); if (schedule != null) { // set status schedule.setReleaseState(releaseState); int updateSchedule = scheduleMapper.updateById(schedule); if (updateSchedule == 0) { - logger.error( + log.error( "Set schedule offline error, projectCode:{}, processDefinitionCode:{}, scheduleId:{}", projectCode, code, schedule.getId()); putMsg(result, Status.OFFLINE_SCHEDULE_ERROR); throw new ServiceException(Status.OFFLINE_SCHEDULE_ERROR); } else { - logger.info("Set schedule offline, projectCode:{}, processDefinitionCode:{}, scheduleId:{}", + log.info("Set schedule offline, projectCode:{}, processDefinitionCode:{}, scheduleId:{}", projectCode, code, schedule.getId()); } schedulerService.deleteSchedule(project.getId(), schedule.getId()); @@ -1190,7 +1188,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro public void batchExportProcessDefinitionByCodes(User loginUser, long projectCode, String codes, HttpServletResponse response) { if (StringUtils.isEmpty(codes)) { - logger.warn("Process definition codes to be exported is empty."); + log.warn("Process definition codes to be exported is empty."); return; } Project project = projectMapper.queryByCode(projectCode); @@ -1204,7 +1202,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro .collect(Collectors.toSet()); List processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet); if (CollectionUtils.isEmpty(processDefinitionList)) { - logger.error("Process definitions to be exported do not exist, processDefinitionCodes:{}.", defineCodeSet); + log.error("Process definitions to be exported do not exist, processDefinitionCodes:{}.", defineCodeSet); return; } // check processDefinition exist in project @@ -1213,10 +1211,10 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro List dagDataSchedules = 
processDefinitionListInProject.stream().map(this::exportProcessDagData).collect(Collectors.toList()); if (CollectionUtils.isNotEmpty(dagDataSchedules)) { - logger.info("Start download process definition file, processDefinitionCodes:{}.", defineCodeSet); + log.info("Start download process definition file, processDefinitionCodes:{}.", defineCodeSet); downloadProcessDefinitionFile(response, dagDataSchedules); } else { - logger.error("There is no exported process dag data."); + log.error("There is no exported process dag data."); } } @@ -1234,20 +1232,20 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro buff.flush(); buff.close(); } catch (IOException e) { - logger.warn("Export process definition fail", e); + log.warn("Export process definition fail", e); } finally { if (null != buff) { try { buff.close(); } catch (Exception e) { - logger.warn("Buffer does not close", e); + log.warn("Buffer does not close", e); } } if (null != out) { try { out.close(); } catch (Exception e) { - logger.warn("Output stream does not close", e); + log.warn("Output stream does not close", e); } } } @@ -1290,7 +1288,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } // check file content if (CollectionUtils.isEmpty(dagDataScheduleList)) { - logger.warn("Process definition file content is empty."); + log.warn("Process definition file content is empty."); putMsg(result, Status.DATA_IS_NULL, "fileContent"); return result; } @@ -1408,7 +1406,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro dataSource = queryDatasourceByNameAndUser(datasourceName, loginUser); } if (dataSource == null) { - logger.error("Datasource does not found, may be its name is illegal."); + log.error("Datasource does not found, may be its name is illegal."); putMsg(result, Status.DATASOURCE_NAME_ILLEGAL); return result; } @@ -1433,7 +1431,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements 
Pro } } } catch (Exception e) { - logger.error("Import process definition error.", e); + log.error("Import process definition error.", e); putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR); return result; } @@ -1538,7 +1536,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro try { processDefinition.setCode(CodeGenerateUtils.getInstance().genCode()); } catch (CodeGenerateException e) { - logger.error( + log.error( "Save process definition error because generate process definition code error, projectCode:{}.", projectCode, e); putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR); @@ -1563,7 +1561,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro taskCodeMap.put(taskDefinitionLog.getCode(), code); taskDefinitionLog.setCode(code); } catch (CodeGenerateException e) { - logger.error("Generate task definition code error, projectCode:{}, processDefinitionCode:{}", + log.error("Generate task definition code error, projectCode:{}, processDefinitionCode:{}", projectCode, processDefinition.getCode(), e); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code"); return false; @@ -1573,7 +1571,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro int insert = taskDefinitionMapper.batchInsert(taskDefinitionLogList); int logInsert = taskDefinitionLogMapper.batchInsert(taskDefinitionLogList); if ((logInsert & insert) == 0) { - logger.error("Save task definition error, projectCode:{}, processDefinitionCode:{}", projectCode, + log.error("Save task definition error, projectCode:{}, processDefinitionCode:{}", projectCode, processDefinition.getCode()); putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR); @@ -1617,7 +1615,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro putMsg(createDagResult, Status.SUCCESS); } else { result.putAll(createDagResult); 
- logger.error("Import process definition error, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Import process definition error, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinition.getCode()); throw new ServiceException(Status.IMPORT_PROCESS_DEFINE_ERROR); } @@ -1631,7 +1629,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro schedule.setUpdateTime(now); int scheduleInsert = scheduleMapper.insert(schedule); if (0 == scheduleInsert) { - logger.error( + log.error( "Import process definition error due to save schedule fail, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinition.getCode()); putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR); @@ -1639,7 +1637,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } } - logger.info("Import process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.info("Import process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinition.getCode()); return true; } @@ -1649,17 +1647,17 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro */ private boolean checkImportanceParams(DagDataSchedule dagDataSchedule, Map result) { if (dagDataSchedule.getProcessDefinition() == null) { - logger.warn("Process definition is null."); + log.warn("Process definition is null."); putMsg(result, Status.DATA_IS_NULL, "ProcessDefinition"); return false; } if (CollectionUtils.isEmpty(dagDataSchedule.getTaskDefinitionList())) { - logger.warn("Task definition list is null."); + log.warn("Task definition list is null."); putMsg(result, Status.DATA_IS_NULL, "TaskDefinitionList"); return false; } if (CollectionUtils.isEmpty(dagDataSchedule.getProcessTaskRelationList())) { - logger.warn("Process task relation list is null."); + log.warn("Process task relation list is null."); putMsg(result, Status.DATA_IS_NULL, 
"ProcessTaskRelationList"); return false; } @@ -1694,7 +1692,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro Map result = new HashMap<>(); try { if (processTaskRelationJson == null) { - logger.error("Process task relation data is null."); + log.error("Process task relation data is null."); putMsg(result, Status.DATA_IS_NOT_VALID, processTaskRelationJson); return result; } @@ -1705,14 +1703,14 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro List taskNodes = processService.transformTask(taskRelationList, taskDefinitionLogsList); if (CollectionUtils.isEmpty(taskNodes)) { - logger.error("Task node data is empty."); + log.error("Task node data is empty."); putMsg(result, Status.PROCESS_DAG_IS_EMPTY); return result; } // check has cycle if (graphHasCycle(taskNodes)) { - logger.error("Process DAG has cycle."); + log.error("Process DAG has cycle."); putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); return result; } @@ -1725,7 +1723,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro .dependence(taskNode.getDependence()) .switchResult(taskNode.getSwitchResult()) .build())) { - logger.error("Task node {} parameter invalid.", taskNode.getName()); + log.error("Task node {} parameter invalid.", taskNode.getName()); putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName()); return result; } @@ -1737,7 +1735,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } catch (Exception e) { result.put(Constants.STATUS, Status.INTERNAL_SERVER_ERROR_ARGS); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, e.getMessage()); - logger.error(Status.INTERNAL_SERVER_ERROR_ARGS.getMsg(), e); + log.error(Status.INTERNAL_SERVER_ERROR_ARGS.getMsg(), e); } return result; } @@ -1760,7 +1758,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } ProcessDefinition processDefinition = 
processDefinitionMapper.queryByCode(code); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", code); + log.error("Process definition does not exist, processDefinitionCode:{}.", code); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code)); return result; } @@ -1792,7 +1790,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro .collect(Collectors.toSet()); List processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet); if (CollectionUtils.isEmpty(processDefinitionList)) { - logger.error("Process definitions do not exist, codes:{}.", defineCodeSet); + log.error("Process definitions do not exist, codes:{}.", defineCodeSet); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes); return result; } @@ -1806,7 +1804,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro if (CollectionUtils.isEmpty(processDefinitionListInProject)) { Set codesInProject = processDefinitionListInProject.stream() .map(ProcessDefinition::getCode).collect(Collectors.toSet()); - logger.error("Process definitions do not exist in project, projectCode:{}, processDefinitionsCodes:{}.", + log.error("Process definitions do not exist in project, projectCode:{}, processDefinitionsCodes:{}.", processDefinitionListInProject.get(0).getProjectCode(), codesInProject); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes); return result; @@ -1924,7 +1922,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); if (null == processDefinition || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, code:{}.", code); + log.error("Process definition does not exist, code:{}.", code); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, 
String.valueOf(code)); return result; } @@ -2113,7 +2111,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } if (projectCode == targetProjectCode) { - logger.warn("Project code is same as target project code, projectCode:{}.", projectCode); + log.warn("Project code is same as target project code, projectCode:{}.", projectCode); return result; } @@ -2135,7 +2133,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } if (StringUtils.isEmpty(processDefinitionCodes)) { - logger.error("Parameter processDefinitionCodes is empty, projectCode is {}.", projectCode); + log.error("Parameter processDefinitionCodes is empty, projectCode is {}.", projectCode); putMsg(result, Status.PROCESS_DEFINITION_CODES_IS_EMPTY, processDefinitionCodes); return result; } @@ -2176,7 +2174,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro processDefinition.setProjectCode(targetProjectCode); String otherParamsJson = doOtherOperateProcess(loginUser, processDefinition); if (isCopy) { - logger.info("Copy process definition..."); + log.info("Copy process definition..."); List taskDefinitionLogs = taskDefinitionLogDao.getTaskDefineLogList(processTaskRelations); Map taskCodeMap = new HashMap<>(); @@ -2186,7 +2184,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro taskCodeMap.put(taskDefinitionLog.getCode(), taskCode); taskDefinitionLog.setCode(taskCode); } catch (CodeGenerateException e) { - logger.error("Generate task definition code error, projectCode:{}.", targetProjectCode, e); + log.error("Generate task definition code error, projectCode:{}.", targetProjectCode, e); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS); throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS); } @@ -2207,7 +2205,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro try { 
processDefinition.setCode(CodeGenerateUtils.getInstance().genCode()); } catch (CodeGenerateException e) { - logger.error("Generate process definition code error, projectCode:{}.", targetProjectCode, e); + log.error("Generate process definition code error, projectCode:{}.", targetProjectCode, e); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS); throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS); } @@ -2238,7 +2236,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro scheduleObj.setUpdateTime(date); int insertResult = scheduleMapper.insert(scheduleObj); if (insertResult != 1) { - logger.error("Schedule create error, processDefinitionCode:{}.", processDefinition.getCode()); + log.error("Schedule create error, processDefinitionCode:{}.", processDefinition.getCode()); putMsg(result, Status.CREATE_SCHEDULE_ERROR); throw new ServiceException(Status.CREATE_SCHEDULE_ERROR); } @@ -2247,18 +2245,18 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro result.putAll(createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs, otherParamsJson)); } catch (Exception e) { - logger.error("Copy process definition error, processDefinitionCode from {} to {}.", + log.error("Copy process definition error, processDefinitionCode from {} to {}.", oldProcessDefinitionCode, processDefinition.getCode(), e); putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.COPY_PROCESS_DEFINITION_ERROR); } } else { - logger.info("Move process definition..."); + log.info("Move process definition..."); try { result.putAll(updateDagDefine(loginUser, taskRelationList, processDefinition, null, Lists.newArrayList(), otherParamsJson)); } catch (Exception e) { - logger.error("Move process definition error, processDefinitionCode:{}.", + log.error("Move process definition error, processDefinitionCode:{}.", processDefinition.getCode(), e); putMsg(result, 
Status.MOVE_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.MOVE_PROCESS_DEFINITION_ERROR); @@ -2315,7 +2313,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); if (Objects.isNull(processDefinition) || projectCode != processDefinition.getProjectCode()) { - logger.error( + log.error( "Switch process definition error because it does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, code); putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR, code); @@ -2325,7 +2323,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinitionLog processDefinitionLog = processDefinitionLogMapper.queryByDefinitionCodeAndVersion(code, version); if (Objects.isNull(processDefinitionLog)) { - logger.error( + log.error( "Switch process definition error because version does not exist, projectCode:{}, processDefinitionCode:{}, version:{}.", projectCode, code, version); putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR, @@ -2334,13 +2332,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } int switchVersion = processService.switchVersion(processDefinition, processDefinitionLog); if (switchVersion <= 0) { - logger.error( + log.error( "Switch process definition version error, projectCode:{}, processDefinitionCode:{}, version:{}.", projectCode, code, version); putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR); throw new ServiceException(Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR); } - logger.info("Switch process definition version complete, projectCode:{}, processDefinitionCode:{}, version:{}.", + log.info("Switch process definition version complete, projectCode:{}, processDefinitionCode:{}, version:{}.", projectCode, code, version); putMsg(result, Status.SUCCESS); return 
result; @@ -2360,18 +2358,18 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro if (!failedProcessList.isEmpty()) { String failedProcess = String.join(",", failedProcessList); if (isCopy) { - logger.error( + log.error( "Copy process definition error, srcProjectCode:{}, targetProjectCode:{}, failedProcessList:{}.", srcProjectCode, targetProjectCode, failedProcess); putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectCode, targetProjectCode, failedProcess); } else { - logger.error( + log.error( "Move process definition error, srcProjectCode:{}, targetProjectCode:{}, failedProcessList:{}.", srcProjectCode, targetProjectCode, failedProcess); putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectCode, targetProjectCode, failedProcess); } } else { - logger.info("Batch {} process definition complete, srcProjectCode:{}, targetProjectCode:{}.", + log.info("Batch {} process definition complete, srcProjectCode:{}, targetProjectCode:{}.", isCopy ? 
"copy" : "move", srcProjectCode, targetProjectCode); putMsg(result, Status.SUCCESS); } @@ -2438,11 +2436,11 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, code:{}.", code); + log.error("Process definition does not exist, code:{}.", code); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code)); } else { if (processDefinition.getVersion() == version) { - logger.warn( + log.warn( "Process definition can not be deleted due to version is being used, projectCode:{}, processDefinitionCode:{}, version:{}.", projectCode, code, version); putMsg(result, Status.MAIN_TABLE_USING_VERSION); @@ -2451,14 +2449,14 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro int deleteLog = processDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(code, version); int deleteRelationLog = processTaskRelationLogMapper.deleteByCode(code, version); if (deleteLog == 0 || deleteRelationLog == 0) { - logger.error( + log.error( "Delete process definition version error, projectCode:{}, processDefinitionCode:{}, version:{}.", projectCode, code, version); putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR); throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR); } deleteOtherRelation(project, result, processDefinition); - logger.info( + log.info( "Delete process definition version complete, projectCode:{}, processDefinitionCode:{}, version:{}.", projectCode, code, version); putMsg(result, Status.SUCCESS); @@ -2499,14 +2497,14 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } if (checkDescriptionLength(description)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); 
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } // check whether the new process define name exist ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name); if (definition != null) { - logger.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.", + log.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.", definition.getName(), definition.getCode()); putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name); return result; @@ -2516,7 +2514,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro if (!Constants.DEFAULT.equals(tenantCode)) { Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); if (tenant == null) { - logger.error("Tenant does not exist."); + log.error("Tenant does not exist."); putMsg(result, Status.TENANT_NOT_EXIST); return result; } @@ -2526,7 +2524,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro try { processDefinitionCode = CodeGenerateUtils.getInstance().genCode(); } catch (CodeGenerateException e) { - logger.error("Generate process definition code error, projectCode:{}.", projectCode, e); + log.error("Generate process definition code error, projectCode:{}.", projectCode, e); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS); return result; } @@ -2536,7 +2534,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro processDefinition.setExecutionType(executionType); result = createEmptyDagDefine(loginUser, processDefinition); if (result.get(Constants.STATUS) != Status.SUCCESS) { - logger.error("Create empty process definition error."); + log.error("Create empty process definition error."); return result; } @@ -2558,7 +2556,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro Map result = new HashMap<>(); int insertVersion = processService.saveProcessDefine(loginUser, 
processDefinition, Boolean.TRUE, Boolean.TRUE); if (insertVersion == 0) { - logger.error("Save process definition error, processDefinitionCode:{}.", processDefinition.getCode()); + log.error("Save process definition error, processDefinitionCode:{}.", processDefinition.getCode()); putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR); } @@ -2578,13 +2576,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro Date now = new Date(); scheduleObj.setProcessDefinitionCode(processDefinition.getCode()); if (DateUtils.differSec(scheduleObj.getStartTime(), scheduleObj.getEndTime()) == 0) { - logger.warn("The schedule start time must not be the same as the end, processDefinitionCode:{}.", + log.warn("The schedule start time must not be the same as the end, processDefinitionCode:{}.", processDefinition.getCode()); putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return result; } if (!org.quartz.CronExpression.isValidExpression(scheduleObj.getCrontab())) { - logger.error("CronExpression verify failure, cron:{}.", scheduleObj.getCrontab()); + log.error("CronExpression verify failure, cron:{}.", scheduleObj.getCrontab()); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleObj.getCrontab()); return result; } @@ -2647,7 +2645,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro return result; } if (checkDescriptionLength(description)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -2655,7 +2653,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro if (!Constants.DEFAULT.equals(tenantCode)) { Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); if (tenant == null) { - logger.error("Tenant does not exist."); + log.error("Tenant does not exist."); putMsg(result, 
Status.TENANT_NOT_EXIST); return result; } @@ -2665,13 +2663,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); // check process definition exists if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, code:{}.", code); + log.error("Process definition does not exist, code:{}.", code); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code)); return result; } if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { // online can not permit edit - logger.warn("Process definition is not allowed to be modified due to {}, processDefinitionCode:{}.", + log.warn("Process definition is not allowed to be modified due to {}, processDefinitionCode:{}.", ReleaseState.ONLINE.getDescp(), processDefinition.getCode()); putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName()); return result; @@ -2680,7 +2678,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro // check whether the new process define name exist ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name); if (definition != null) { - logger.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.", + log.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.", definition.getName(), definition.getCode()); putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name); return result; @@ -2695,7 +2693,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro result = updateDagDefine(loginUser, taskRelationList, processDefinition, processDefinitionDeepCopy, Lists.newArrayList(), otherParamsJson); if (result.get(Constants.STATUS) != Status.SUCCESS) { - logger.error("Update process definition basic info error."); + 
log.error("Update process definition basic info error."); return result; } @@ -2768,7 +2766,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro } int insertVersion = this.saveProcessDefine(loginUser, processDefinitionUpdate); if (insertVersion == 0) { - logger.error("Update process definition error, projectCode:{}, processDefinitionName:{}.", + log.error("Update process definition error, projectCode:{}, processDefinitionName:{}.", processDefinitionUpdate.getCode(), processDefinitionUpdate.getName()); throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); @@ -2776,11 +2774,11 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro int insertRelationVersion = this.saveTaskRelation(loginUser, processDefinitionUpdate, insertVersion); if (insertRelationVersion != Constants.EXIT_CODE_SUCCESS) { - logger.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.", + log.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion); throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR); } - logger.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.", + log.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion); processDefinitionUpdate.setVersion(insertVersion); return processDefinitionUpdate; @@ -2859,7 +2857,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro boolean isSame = CollectionUtils.isEqualCollection(processTaskRelationSet, taskRelationSet); if (isSame) { - logger.info("process task relations is non-existent, projectCode:{}, processCode:{}.", + log.info("process task relations is non-existent, projectCode:{}, processCode:{}.", 
processDefinition.getProjectCode(), processDefinition.getCode()); return Constants.EXIT_CODE_SUCCESS; } @@ -2939,13 +2937,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); if (processDefinition == null) { - logger.error("Process definition does not exist, code:{}.", code); + log.error("Process definition does not exist, code:{}.", code); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code)); return result; } Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code); if (scheduleObj == null) { - logger.error("Schedule cron does not exist, processDefinitionCode:{}.", code); + log.error("Schedule cron does not exist, processDefinitionCode:{}.", code); putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, "processDefinitionCode:" + code); return result; } @@ -2954,7 +2952,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro List relationList = processService.findRelationByCode(code, processDefinition.getVersion()); if (CollectionUtils.isEmpty(relationList)) { - logger.warn("Process definition has no task relation, processDefinitionCode:{}.", code); + log.warn("Process definition has no task relation, processDefinitionCode:{}.", code); putMsg(result, Status.PROCESS_DAG_IS_EMPTY); return result; } @@ -2966,13 +2964,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro processDefinition.setReleaseState(releaseState); int updateProcess = processDefinitionMapper.updateById(processDefinition); if (updateProcess > 0) { - logger.info("Set schedule offline, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.", + log.info("Set schedule offline, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.", projectCode, code, scheduleObj.getId()); // set status scheduleObj.setReleaseState(ReleaseState.OFFLINE); int updateSchedule = scheduleMapper.updateById(scheduleObj); if 
(updateSchedule == 0) { - logger.error( + log.error( "Set schedule offline error, projectCode:{}, processDefinitionCode:{}, scheduleId:{}", projectCode, code, scheduleObj.getId()); putMsg(result, Status.OFFLINE_SCHEDULE_ERROR); @@ -3035,7 +3033,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code); if (Objects.isNull(processDefinition) || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, code); putMsg(result, PROCESS_DEFINE_NOT_EXIST, code); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java index af4d9ab96e..9d151e528f 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java @@ -102,8 +102,8 @@ import java.util.Objects; import java.util.function.Function; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Service; @@ -117,10 +117,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * process instance service impl */ @Service +@Slf4j public class ProcessInstanceServiceImpl extends BaseServiceImpl implements ProcessInstanceService { - private static final Logger logger = 
LoggerFactory.getLogger(ProcessInstanceServiceImpl.class); - public static final String TASK_TYPE = "taskType"; public static final String LOCAL_PARAMS_LIST = "localParamsList"; @@ -263,7 +262,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce processInstance.getProcessDefinitionVersion()); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, projectCode:{}.", projectCode); + log.error("Process definition does not exist, projectCode:{}.", projectCode); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId); } else { Tenant tenant = tenantMapper.queryById(processDefinition.getTenantId()); @@ -468,7 +467,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); if (processDefinition != null && projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, projectCode:{}, processDefinitionId:{}.", projectCode, + log.error("Process definition does not exist, projectCode:{}, processDefinitionId:{}.", projectCode, processId); putMsg(result, PROCESS_INSTANCE_NOT_EXIST, processId); return result; @@ -491,7 +490,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce private void addDependResultForTaskList(User loginUser, List taskInstanceList) throws IOException { for (TaskInstance taskInstance : taskInstanceList) { if (TASK_TYPE_DEPENDENT.equalsIgnoreCase(taskInstance.getTaskType())) { - logger.info("DEPENDENT type task instance need to set dependent result, taskCode:{}, taskInstanceId:{}", + log.info("DEPENDENT type task instance need to set dependent result, taskCode:{}, taskInstanceId:{}", taskInstance.getTaskCode(), taskInstance.getId()); Result logResult = loggerService.queryLog(loginUser, taskInstance.getId(), 
Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT); @@ -505,14 +504,14 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce } @Override - public Map parseLogForDependentResult(String log) throws IOException { + public Map parseLogForDependentResult(String content) throws IOException { Map resultMap = new HashMap<>(); - if (StringUtils.isEmpty(log)) { - logger.warn("Log content is empty."); + if (StringUtils.isEmpty(content)) { + log.warn("Log content is empty."); return resultMap; } - BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes( + BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(content.getBytes( StandardCharsets.UTF_8)), StandardCharsets.UTF_8)); String line; while ((line = br.readLine()) != null) { @@ -555,21 +554,21 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce TaskInstance taskInstance = taskInstanceDao.findTaskInstanceById(taskId); if (taskInstance == null) { - logger.error("Task instance does not exist, projectCode:{}, taskInstanceId{}.", projectCode, taskId); + log.error("Task instance does not exist, projectCode:{}, taskInstanceId{}.", projectCode, taskId); putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); return result; } TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskInstance.getTaskCode()); if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) { - logger.error("Task definition does not exist, projectCode:{}, taskDefinitionCode:{}.", projectCode, + log.error("Task definition does not exist, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskInstance.getTaskCode()); putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); return result; } if (!taskInstance.isSubProcess()) { - logger.warn("Task instance is not {} type instance, projectCode:{}, taskInstanceId:{}.", + log.warn("Task instance is not {} type instance, 
projectCode:{}, taskInstanceId:{}.", TASK_TYPE_SUB_PROCESS, projectCode, taskId); putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); return result; @@ -578,7 +577,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessInstance subWorkflowInstance = processService.findSubProcessInstance( taskInstance.getProcessInstanceId(), taskInstance.getId()); if (subWorkflowInstance == null) { - logger.error("SubProcess instance does not exist, projectCode:{}, taskInstanceId:{}.", projectCode, + log.error("SubProcess instance does not exist, projectCode:{}, taskInstanceId:{}.", projectCode, taskInstance.getId()); putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId); return result; @@ -628,14 +627,14 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessDefinition processDefinition0 = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); if (processDefinition0 != null && projectCode != processDefinition0.getProjectCode()) { - logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, processInstance.getProcessDefinitionCode()); putMsg(result, PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; } // check process instance status if (!processInstance.getState().isFinished()) { - logger.warn("Process Instance state is {} so can not update process instance, processInstanceId:{}.", + log.warn("Process Instance state is {} so can not update process instance, processInstanceId:{}.", processInstance.getState().getDesc(), processInstanceId); putMsg(result, PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), processInstance.getState().toString(), "update"); @@ -654,7 +653,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce 
setProcessInstance(processInstance, tenantCode, scheduleTime, globalParams, timeout, timezoneId); List taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class); if (taskDefinitionLogs.isEmpty()) { - logger.warn("Parameter taskDefinitionJson is empty"); + log.warn("Parameter taskDefinitionJson is empty"); putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson); return result; } @@ -664,14 +663,14 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce .taskParams(taskDefinitionLog.getTaskParams()) .dependence(taskDefinitionLog.getDependence()) .build())) { - logger.error("Task parameters are invalid, taskDefinitionName:{}.", taskDefinitionLog.getName()); + log.error("Task parameters are invalid, taskDefinitionName:{}.", taskDefinitionLog.getName()); putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName()); return result; } } int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, syncDefine); if (saveTaskResult == Constants.DEFINITION_FAILURE) { - logger.error("Update task definition error, projectCode:{}, processInstanceId:{}", projectCode, + log.error("Update task definition error, projectCode:{}, processInstanceId:{}", projectCode, processInstanceId); putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); @@ -689,7 +688,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce if (!Constants.DEFAULT.equals(tenantCode)) { Tenant tenant = tenantMapper.queryByTenantCode(tenantCode); if (tenant == null) { - logger.error("Tenant does not exist."); + log.error("Tenant does not exist."); putMsg(result, Status.TENANT_NOT_EXIST); return result; } @@ -700,23 +699,23 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce processDefinition.setUpdateTime(new Date()); int insertVersion = processService.saveProcessDefine(loginUser, 
processDefinition, syncDefine, Boolean.FALSE); if (insertVersion == 0) { - logger.error("Update process definition error, projectCode:{}, processDefinitionName:{}.", projectCode, + log.error("Update process definition error, projectCode:{}, processDefinitionName:{}.", projectCode, processDefinition.getName()); putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); } else - logger.info("Update process definition complete, projectCode:{}, processDefinitionName:{}.", projectCode, + log.info("Update process definition complete, projectCode:{}, processDefinitionName:{}.", projectCode, processDefinition.getName()); int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, syncDefine); if (insertResult == Constants.EXIT_CODE_SUCCESS) { - logger.info( + log.info( "Update task relations complete, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}.", projectCode, processDefinition.getCode(), insertVersion); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, processDefinition); } else { - logger.info( + log.info( "Update task relations error, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}.", projectCode, processDefinition.getCode(), insertVersion); putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); @@ -725,13 +724,13 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce processInstance.setProcessDefinitionVersion(insertVersion); int update = processInstanceDao.updateProcessInstance(processInstance); if (update == 0) { - logger.error( + log.error( "Update process instance version error, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}", projectCode, processDefinition.getCode(), insertVersion); putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR); throw new 
ServiceException(Status.UPDATE_PROCESS_INSTANCE_ERROR); } - logger.info( + log.info( "Update process instance complete, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}", projectCode, processDefinition.getCode(), insertVersion, processInstanceId); putMsg(result, Status.SUCCESS); @@ -780,7 +779,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId) .orElseThrow(() -> new ServiceException(PROCESS_INSTANCE_NOT_EXIST, subId)); if (subInstance.getIsSubProcess() == Flag.NO) { - logger.warn( + log.warn( "Process instance is not sub process instance type, processInstanceId:{}, processInstanceName:{}.", subId, subInstance.getName()); putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName()); @@ -789,7 +788,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId); if (parentWorkflowInstance == null) { - logger.error("Parent process instance does not exist, projectCode:{}, subProcessInstanceId:{}.", + log.error("Parent process instance does not exist, projectCode:{}, subProcessInstanceId:{}.", projectCode, subId); putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST); return result; @@ -822,7 +821,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ApiFuncIdentificationConstant.INSTANCE_DELETE); // check process instance status if (!processInstance.getState().isFinished()) { - logger.warn("Process Instance state is {} so can not delete process instance, processInstanceId:{}.", + log.warn("Process Instance state is {} so can not delete process instance, processInstanceId:{}.", processInstance.getState().getDesc(), processInstanceId); throw new ServiceException(PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), 
processInstance.getState(), "delete"); @@ -844,7 +843,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); if (processInstance == null) { - logger.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", projectCode, + log.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", projectCode, processInstanceId); putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; @@ -853,7 +852,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessDefinition processDefinition = processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); if (processDefinition != null && projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, processInstance.getProcessDefinitionCode()); putMsg(result, PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; @@ -938,7 +937,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); if (processInstance == null) { - logger.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", projectCode, + log.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", projectCode, processInstanceId); putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; @@ -948,7 +947,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion()); if (processDefinition == null || projectCode != 
processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, processInstance.getProcessDefinitionCode()); putMsg(result, PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; @@ -1056,17 +1055,17 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce if (CollectionUtils.isEmpty(processInstances)) { break; } - logger.info("Begin to delete workflow instance, workflow definition code: {}", workflowDefinitionCode); + log.info("Begin to delete workflow instance, workflow definition code: {}", workflowDefinitionCode); for (ProcessInstance processInstance : processInstances) { if (!processInstance.getState().isFinished()) { - logger.warn("Workflow instance is not finished cannot delete, process instance id:{}", + log.warn("Workflow instance is not finished cannot delete, process instance id:{}", processInstance.getId()); throw new ServiceException(PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), processInstance.getState(), "delete"); } deleteProcessInstanceById(processInstance.getId()); } - logger.info("Success delete workflow instance, workflow definition code: {}, size: {}", + log.info("Success delete workflow instance, workflow definition code: {}, size: {}", workflowDefinitionCode, processInstances.size()); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java index dbbcedb465..2f1f0ba4c0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java @@ -60,8 +60,8 @@ import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -74,10 +74,9 @@ import com.google.common.collect.Lists; * process task relation service impl */ @Service +@Slf4j public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements ProcessTaskRelationService { - private static final Logger logger = LoggerFactory.getLogger(ProcessTaskRelationServiceImpl.class); - @Autowired private ProjectMapper projectMapper; @@ -127,12 +126,12 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); if (processDefinition == null) { - logger.error("Process definition does not exist, processCode:{}.", processDefinitionCode); + log.error("Process definition does not exist, processCode:{}.", processDefinitionCode); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode)); return result; } if (processDefinition.getProjectCode() != projectCode) { - logger.error("Process definition's project does not match project {}.", projectCode); + log.error("Process definition's project does not match project {}.", projectCode); putMsg(result, Status.PROJECT_PROCESS_NOT_MATCH); return result; } @@ -278,12 +277,12 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P ProcessDefinition processDefinition) { int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); if (insertVersion <= 0) { - logger.error("Update process 
definition error, projectCode:{}, processDefinitionCode:{}.", + log.error("Update process definition error, projectCode:{}, processDefinitionCode:{}.", processDefinition.getProjectCode(), processDefinition.getCode()); putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); } else - logger.info( + log.info( "Update process definition complete, new version is {}, projectCode:{}, processDefinitionCode:{}.", insertVersion, processDefinition.getProjectCode(), processDefinition.getCode()); processDefinition.setVersion(insertVersion); @@ -309,7 +308,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P return result; } if (taskCode == 0) { - logger.error( + log.error( "Delete task process relation error due to parameter taskCode is 0, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinitionCode); putMsg(result, Status.DELETE_TASK_PROCESS_RELATION_ERROR); @@ -317,13 +316,13 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); if (processDefinition == null) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); + log.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode)); return result; } TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); if (null == taskDefinition) { - logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); + log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode)); return result; } @@ -331,7 +330,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P 
processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); List processTaskRelationList = Lists.newArrayList(processTaskRelations); if (CollectionUtils.isEmpty(processTaskRelationList)) { - logger.error("Process task relations are empty, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process task relations are empty, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinitionCode); putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList"); return result; @@ -347,7 +346,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P } if (CollectionUtils.isNotEmpty(downstreamList)) { String downstream = StringUtils.join(downstreamList, ","); - logger.warn( + log.warn( "Relation can not be deleted because task has downstream tasks:[{}], projectCode:{}, processDefinitionCode:{}, taskDefinitionCode:{}.", downstream, projectCode, processDefinitionCode, taskCode); putMsg(result, Status.TASK_HAS_DOWNSTREAM, downstream); @@ -360,11 +359,11 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P || TASK_TYPE_SUB_PROCESS.equals(taskDefinition.getTaskType())) { int deleteTaskDefinition = taskDefinitionMapper.deleteByCode(taskCode); if (0 == deleteTaskDefinition) { - logger.error("Delete task definition error, taskDefinitionCode:{}.", taskCode); + log.error("Delete task definition error, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR); throw new ServiceException(Status.DELETE_TASK_DEFINE_BY_CODE_ERROR); } else - logger.info("Delete {} type task definition complete, taskDefinitionCode:{}.", + log.info("Delete {} type task definition complete, taskDefinitionCode:{}.", taskDefinition.getTaskType(), taskCode); } putMsg(result, Status.SUCCESS); @@ -500,11 +499,11 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P // batch sync to process task relation log int saveTaskRelationResult = 
saveTaskRelation(loginUser, processDefinition, insertVersion); if (saveTaskRelationResult != Constants.EXIT_CODE_SUCCESS) { - logger.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.", + log.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion); throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR); } - logger.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.", + log.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion); processTaskRelations.get(0).setProcessDefinitionVersion(insertVersion); return processTaskRelations; @@ -598,13 +597,13 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P processDefinition.getCode(), processDefinition.getVersion(), relationLogs, Lists.newArrayList(), Boolean.TRUE); if (insertResult == Constants.EXIT_CODE_SUCCESS) { - logger.info( + log.info( "Update task relations complete, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), processDefinition.getVersion()); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, processDefinition); } else { - logger.error( + log.error( "Update task relations error, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), processDefinition.getVersion()); putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR); @@ -632,13 +631,13 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P return result; } if (StringUtils.isEmpty(preTaskCodes)) { - logger.warn("Parameter preTaskCodes is empty."); + log.warn("Parameter preTaskCodes is 
empty."); putMsg(result, Status.DATA_IS_NULL, "preTaskCodes"); return result; } List upstreamList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode); if (CollectionUtils.isEmpty(upstreamList)) { - logger.error("Upstream tasks based on the task do not exist, theTaskDefinitionCode:{}.", taskCode); + log.error("Upstream tasks based on the task do not exist, theTaskDefinitionCode:{}.", taskCode); putMsg(result, Status.DATA_IS_NULL, "taskCode"); return result; } @@ -646,14 +645,14 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P List preTaskCodeList = Lists.newArrayList(preTaskCodes.split(Constants.COMMA)).stream() .map(Long::parseLong).collect(Collectors.toList()); if (preTaskCodeList.contains(0L)) { - logger.warn("Parameter preTaskCodes contain 0."); + log.warn("Parameter preTaskCodes contain 0."); putMsg(result, Status.DATA_IS_NULL, "preTaskCodes"); return result; } List currentUpstreamList = upstreamList.stream().map(ProcessTaskRelation::getPreTaskCode).collect(Collectors.toList()); if (currentUpstreamList.contains(0L)) { - logger.error("Upstream taskCodes based on the task contain, theTaskDefinitionCode:{}.", taskCode); + log.error("Upstream taskCodes based on the task contain, theTaskDefinitionCode:{}.", taskCode); putMsg(result, Status.DATA_IS_NOT_VALID, "currentUpstreamList"); return result; } @@ -662,14 +661,14 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P preTaskCodeList.removeAll(currentUpstreamList); if (!preTaskCodeList.isEmpty()) { String invalidPreTaskCodes = StringUtils.join(preTaskCodeList, Constants.COMMA); - logger.error("Some upstream taskCodes are invalid, preTaskCodeList:{}.", invalidPreTaskCodes); + log.error("Some upstream taskCodes are invalid, preTaskCodeList:{}.", invalidPreTaskCodes); putMsg(result, Status.DATA_IS_NOT_VALID, invalidPreTaskCodes); return result; } ProcessDefinition processDefinition = 
processDefinitionMapper.queryByCode(upstreamList.get(0).getProcessDefinitionCode()); if (processDefinition == null) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", + log.error("Process definition does not exist, processDefinitionCode:{}.", upstreamList.get(0).getProcessDefinitionCode()); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(upstreamList.get(0).getProcessDefinitionCode())); @@ -719,28 +718,28 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P return result; } if (StringUtils.isEmpty(postTaskCodes)) { - logger.warn("Parameter postTaskCodes is empty."); + log.warn("Parameter postTaskCodes is empty."); putMsg(result, Status.DATA_IS_NULL, "postTaskCodes"); return result; } List downstreamList = processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode); if (CollectionUtils.isEmpty(downstreamList)) { - logger.error("Downstream tasks based on the task do not exist, theTaskDefinitionCode:{}.", taskCode); + log.error("Downstream tasks based on the task do not exist, theTaskDefinitionCode:{}.", taskCode); putMsg(result, Status.DATA_IS_NULL, "taskCode"); return result; } List postTaskCodeList = Lists.newArrayList(postTaskCodes.split(Constants.COMMA)).stream() .map(Long::parseLong).collect(Collectors.toList()); if (postTaskCodeList.contains(0L)) { - logger.warn("Parameter postTaskCodes contains 0."); + log.warn("Parameter postTaskCodes contains 0."); putMsg(result, Status.DATA_IS_NULL, "postTaskCodes"); return result; } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(downstreamList.get(0).getProcessDefinitionCode()); if (processDefinition == null) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", + log.error("Process definition does not exist, processDefinitionCode:{}.", downstreamList.get(0).getProcessDefinitionCode()); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, 
String.valueOf(downstreamList.get(0).getProcessDefinitionCode())); @@ -853,7 +852,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); if (processDefinition == null) { - logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinitionCode); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode)); return result; @@ -862,7 +861,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode); List processTaskRelationList = Lists.newArrayList(processTaskRelations); if (CollectionUtils.isEmpty(processTaskRelationList)) { - logger.error("Process task relations are empty, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process task relations are empty, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinitionCode); putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList"); return result; @@ -886,13 +885,13 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P if (processTaskRelation.getPreTaskCode() == preTaskCode) { int delete = processTaskRelationMapper.deleteById(processTaskRelation.getId()); if (delete == 0) { - logger.error( + log.error( "Delete task relation edge error, processTaskRelationId:{}, preTaskCode:{}, postTaskCode:{}", processTaskRelation.getId(), preTaskCode, postTaskCode); putMsg(result, Status.DELETE_EDGE_ERROR); throw new ServiceException(Status.DELETE_EDGE_ERROR); } else - logger.info( + log.info( "Delete task relation edge complete, processTaskRelationId:{}, preTaskCode:{}, postTaskCode:{}", processTaskRelation.getId(), preTaskCode, postTaskCode); 
processTaskRelationList.remove(processTaskRelation); @@ -904,7 +903,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P processTaskRelation.setPreTaskVersion(0); processTaskRelation.setPreTaskCode(0L); processTaskRelationList.add(processTaskRelation); - logger.info( + log.info( "Delete task relation through set invalid value for it: preTaskCode from {} to 0, processTaskRelationId:{}.", preTaskCode, processTaskRelation.getId()); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java index 2279edff6e..bd1349f4fa 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java @@ -57,9 +57,8 @@ import java.util.Set; import javax.annotation.Nullable; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Service; @@ -72,10 +71,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * project service impl **/ @Service +@Slf4j public class ProjectServiceImpl extends BaseServiceImpl implements ProjectService { - private static final Logger logger = LoggerFactory.getLogger(ProjectServiceImpl.class); - @Lazy @Autowired private TaskGroupService taskGroupService; @@ -116,7 +114,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic Project project = projectMapper.queryByName(name); if (project != null) { - logger.warn("Project {} already exists.", project.getName()); + log.warn("Project {} already exists.", project.getName()); putMsg(result, 
Status.PROJECT_ALREADY_EXISTS, name); return result; } @@ -135,19 +133,19 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic .updateTime(now) .build(); } catch (CodeGenerateException e) { - logger.error("Generate process definition code error.", e); + log.error("Generate process definition code error.", e); putMsg(result, Status.CREATE_PROJECT_ERROR); return result; } if (projectMapper.insert(project) > 0) { - logger.info("Project is created and id is :{}", project.getId()); + log.info("Project is created and id is :{}", project.getId()); result.setData(project); putMsg(result, Status.SUCCESS); permissionPostHandle(AuthorizationType.PROJECTS, loginUser.getId(), - Collections.singletonList(project.getId()), logger); + Collections.singletonList(project.getId()), log); } else { - logger.error("Project create error, projectName:{}.", project.getName()); + log.error("Project create error, projectName:{}.", project.getName()); putMsg(result, Status.CREATE_PROJECT_ERROR); } return result; @@ -161,7 +159,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic */ public static void checkDesc(Result result, String desc) { if (!StringUtils.isEmpty(desc) && desc.codePointCount(0, desc.length()) > 255) { - logger.warn("Parameter description check failed."); + log.warn("Parameter description check failed."); result.setCode(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode()); result.setMsg(MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "desc length")); } else { @@ -218,12 +216,12 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic String permission) { Map result = new HashMap<>(); if (project == null) { - logger.error("Project does not exist, projectCode:{}.", projectCode); + log.error("Project does not exist, projectCode:{}.", projectCode); putMsg(result, Status.PROJECT_NOT_EXIST); } else if (!canOperatorPermissions(loginUser, new Object[]{project.getId()}, 
AuthorizationType.PROJECTS, permission)) { // check read permission - logger.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.", + log.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.", permission, loginUser.getUserName(), projectCode); putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectCode); } else { @@ -248,11 +246,11 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic public boolean hasProjectAndPerm(User loginUser, Project project, Map result, String permission) { boolean checkResult = false; if (project == null) { - logger.error("Project does not exist."); + log.error("Project does not exist."); putMsg(result, Status.PROJECT_NOT_FOUND, ""); } else if (!canOperatorPermissions(loginUser, new Object[]{project.getId()}, AuthorizationType.PROJECTS, permission)) { - logger.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.", + log.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.", permission, loginUser.getUserName(), project.getCode()); putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getCode()); } else { @@ -265,7 +263,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic public boolean hasProjectAndWritePerm(User loginUser, Project project, Result result) { boolean checkResult = false; if (project == null) { - logger.error("Project does not exist."); + log.error("Project does not exist."); putMsg(result, Status.PROJECT_NOT_FOUND, ""); } else { // case 1: user is admin @@ -292,7 +290,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic public boolean hasProjectAndWritePerm(User loginUser, Project project, Map result) { boolean checkResult = false; if (project == null) { - logger.error("Project does not exist."); + log.error("Project does not 
exist."); putMsg(result, Status.PROJECT_NOT_FOUND, ""); } else { // case 1: user is admin @@ -319,11 +317,11 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic public boolean hasProjectAndPerm(User loginUser, Project project, Result result, String permission) { boolean checkResult = false; if (project == null) { - logger.error("Project does not exist."); + log.error("Project does not exist."); putMsg(result, Status.PROJECT_NOT_FOUND, ""); } else if (!canOperatorPermissions(loginUser, new Object[]{project.getId()}, AuthorizationType.PROJECTS, permission)) { - logger.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.", + log.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.", permission, loginUser.getUserName(), project.getCode()); putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName()); } else { @@ -347,7 +345,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Page page = new Page<>(pageNo, pageSize); Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); if (projectIds.isEmpty()) { result.setData(pageInfo); putMsg(result, Status.SUCCESS); @@ -386,9 +384,9 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Page page = new Page<>(pageNo, pageSize); Set allProjectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); Set userProjectIds = resourcePermissionCheckService - 
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, userId, logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, userId, log); if (allProjectIds.isEmpty()) { result.setData(pageInfo); putMsg(result, Status.SUCCESS); @@ -448,7 +446,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic processDefinitionMapper.queryAllDefinitionList(project.getCode()); if (!processDefinitionList.isEmpty()) { - logger.warn("Please delete the process definitions in project first! project code:{}.", projectCode); + log.warn("Please delete the process definitions in project first! project code:{}.", projectCode); putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL); return result; } @@ -457,11 +455,11 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic int delete = projectMapper.deleteById(project.getId()); if (delete > 0) { - logger.info("Project is deleted and id is :{}.", project.getId()); + log.info("Project is deleted and id is :{}.", project.getId()); result.setData(Boolean.TRUE); putMsg(result, Status.SUCCESS); } else { - logger.error("Project delete error, project code:{}, project name:{}.", projectCode, project.getName()); + log.error("Project delete error, project code:{}, project name:{}.", projectCode, project.getName()); putMsg(result, Status.DELETE_PROJECT_ERROR); } return result; @@ -515,7 +513,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic } User user = userMapper.queryByUserNameAccurately(userName); if (user == null) { - logger.error("User does not exist."); + log.error("User does not exist."); putMsg(result, Status.USER_NOT_EXIST, userName); return result; } @@ -525,11 +523,11 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic project.setUserId(user.getId()); int update = projectMapper.updateById(project); if (update > 0) { - logger.info("Project is updated and id is :{}", project.getId()); + 
log.info("Project is updated and id is :{}", project.getId()); result.setData(project); putMsg(result, Status.SUCCESS); } else { - logger.error("Project update error, projectCode:{}, projectName:{}.", project.getCode(), project.getName()); + log.error("Project update error, projectCode:{}, projectName:{}.", project.getCode(), project.getName()); putMsg(result, Status.UPDATE_PROJECT_ERROR); } return result; @@ -546,7 +544,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic Result result = new Result(); Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); List projectList = projectMapper.listAuthorizedProjects( loginUser.getUserType().equals(UserType.ADMIN_USER) ? 0 : loginUser.getId(), new ArrayList<>(projectIds)); @@ -589,7 +587,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic Result result = new Result(); Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); if (projectIds.isEmpty()) { result.setData(Collections.emptyList()); putMsg(result, Status.SUCCESS); @@ -702,7 +700,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic Result result = new Result(); Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); if (projectIds.isEmpty()) { result.setData(Collections.emptyList()); putMsg(result, Status.SUCCESS); @@ -784,7 +782,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic public void checkProjectAndAuth(Result result, 
User loginUser, Project project, long projectCode, String permission) { if (project == null) { - logger.error("Project does not exist, project code:{}.", projectCode); + log.error("Project does not exist, project code:{}.", projectCode); putMsg(result, Status.PROJECT_NOT_EXIST); } else if (!canOperatorPermissions(loginUser, new Object[]{project.getId()}, AuthorizationType.PROJECTS, permission)) { diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java index 12c518e223..ecdb3773ed 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java @@ -42,8 +42,8 @@ import java.util.List; import java.util.Objects; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -55,10 +55,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * queue service impl */ @Service +@Slf4j public class QueueServiceImpl extends BaseServiceImpl implements QueueService { - private static final Logger logger = LoggerFactory.getLogger(QueueServiceImpl.class); - @Autowired private QueueMapper queueMapper; @@ -120,7 +119,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { public Result queryList(User loginUser) { Result result = new Result(); Set ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.QUEUE, - loginUser.getId(), logger); + loginUser.getId(), log); if (loginUser.getUserType().equals(UserType.GENERAL_USER)) { ids = ids.isEmpty() ? 
new HashSet<>() : ids; ids.add(Constants.DEFAULT_QUEUE_ID); @@ -145,7 +144,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { Result result = new Result(); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Set ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.QUEUE, - loginUser.getId(), logger); + loginUser.getId(), log); if (ids.isEmpty()) { result.setData(pageInfo); putMsg(result, Status.SUCCESS); @@ -183,10 +182,10 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { queueMapper.insert(queueObj); result.setData(queueObj); - logger.info("Queue create complete, queueName:{}.", queueObj.getQueueName()); + log.info("Queue create complete, queueName:{}.", queueObj.getQueueName()); putMsg(result, Status.SUCCESS); permissionPostHandle(AuthorizationType.QUEUE, loginUser.getId(), Collections.singletonList(queueObj.getId()), - logger); + log); return result; } @@ -215,7 +214,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { // update user related old queue Integer relatedUserNums = userMapper.updateUserQueue(existsQueue.getQueueName(), updateQueue.getQueueName()); - logger.info("Old queue have related {} users, exec update user success.", relatedUserNums); + log.info("Old queue have related {} users, exec update user success.", relatedUserNums); } queueMapper.updateById(updateQueue); @@ -290,13 +289,13 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService { public Queue createQueueIfNotExists(String queue, String queueName) { Queue existsQueue = queueMapper.queryQueueName(queue, queueName); if (!Objects.isNull(existsQueue)) { - logger.info("Queue exists, so return it, queueName:{}.", queueName); + log.info("Queue exists, so return it, queueName:{}.", queueName); return existsQueue; } Queue queueObj = new Queue(queueName, queue); createQueueValid(queueObj); queueMapper.insert(queueObj); - logger.info("Queue 
create complete, queueName:{}.", queueObj.getQueueName()); + log.info("Queue create complete, queueName:{}.", queueObj.getQueueName()); return queueObj; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java index 70d4a90ac1..05c78e0849 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java @@ -88,8 +88,8 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -104,10 +104,9 @@ import com.google.common.io.Files; * resources service impl */ @Service +@Slf4j public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesService { - private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceImpl.class); - @Autowired private ResourceMapper resourcesMapper; @@ -170,21 +169,21 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe return result; } if (FileUtils.directoryTraversal(name)) { - logger.warn("Parameter name is invalid, name:{}.", RegexUtils.escapeNRT(name)); + log.warn("Parameter name is invalid, name:{}.", RegexUtils.escapeNRT(name)); putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); return result; } User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } 
Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -192,13 +191,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, "")) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } if (checkDescriptionLength(description)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -209,12 +208,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { if (checkResourceExists(fullName)) { - logger.error("resource directory {} has exist, can't recreate", fullName); + log.error("resource directory {} has exist, can't recreate", fullName); putMsg(result, Status.RESOURCE_EXIST); return result; } } catch (Exception e) { - logger.warn("Resource exists, can not create again, fullName:{}.", fullName, e); + log.warn("Resource exists, can not create again, fullName:{}.", fullName, e); throw new ServiceException("resource already exists, can't recreate"); } @@ -256,14 +255,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, 
Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -271,7 +270,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, "")) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } @@ -288,7 +287,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { if (checkResourceExists(currDirNFileName)) { - logger.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name)); + log.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name)); putMsg(result, Status.RESOURCE_EXIST); return result; } @@ -296,7 +295,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe throw new ServiceException("resource already exists, can't recreate"); } if (currDirNFileName.length() > Constants.RESOURCE_FULL_NAME_MAX_LENGTH) { - logger.error( + log.error( "Resource file's name is longer than max full name length, fullName:{}, " + "fullNameSize:{}, maxFullNameSize:{}", RegexUtils.escapeNRT(name), currDirNFileName.length(), Constants.RESOURCE_FULL_NAME_MAX_LENGTH); @@ -306,13 +305,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe // fail upload if (!upload(loginUser, currDirNFileName, file, type)) { - logger.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name), + log.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name), RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.STORE_OPERATE_CREATE_ERROR); throw new ServiceException( String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); } else - logger.info("Upload resource file complete, resourceName:{}, fileName:{}.", + log.info("Upload resource file 
complete, resourceName:{}, fileName:{}.", RegexUtils.escapeNRT(name), RegexUtils.escapeNRT(file.getOriginalFilename())); return result; } @@ -339,7 +338,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe parentResource.setSize(0L); } resourcesMapper.updateById(parentResource); - logger.info("Resource size update complete, resourceFullName:{}, newSize:{}.", + log.info("Resource size update complete, resourceFullName:{}, newSize:{}.", parentResource.getFullName(), parentResource.getSize()); } } @@ -358,7 +357,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { existResource = storageOperate.exists(fullName); } catch (IOException e) { - logger.error("error occurred when checking resource: " + fullName, e); + log.error("error occurred when checking resource: " + fullName, e); } return Boolean.TRUE.equals(existResource); } @@ -394,14 +393,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -409,7 +408,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } @@ -420,13 +419,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements 
ResourcesSe try { resource = storageOperate.getFileStatus(resourceFullName, defaultPath, resTenantCode, type); } catch (Exception e) { - logger.error("Get file status fail, resource path: {}", resourceFullName, e); + log.error("Get file status fail, resource path: {}", resourceFullName, e); putMsg(result, Status.RESOURCE_NOT_EXIST); throw new ServiceException((String.format("Get file status fail, resource path: %s", resourceFullName))); } if (!PropertyUtils.getResUploadStartupState()) { - logger.error("Storage does not start up, resource upload startup state: {}.", + log.error("Storage does not start up, resource upload startup state: {}.", PropertyUtils.getResUploadStartupState()); putMsg(result, Status.STORAGE_NOT_STARTUP); return result; @@ -435,13 +434,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe // TODO: deal with OSS if (resource.isDirectory() && storageOperate.returnStorageType().equals(ResUploadType.S3) && !resource.getFileName().equals(name)) { - logger.warn("Directory in S3 storage can not be renamed."); + log.warn("Directory in S3 storage can not be renamed."); putMsg(result, Status.S3_CANNOT_RENAME); return result; } if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) { - logger.info("Resource does not need to be updated due to no change, resource full name:{}.", + log.info("Resource does not need to be updated due to no change, resource full name:{}.", resourceFullName); putMsg(result, Status.SUCCESS); return result; @@ -460,7 +459,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (!originResourceName.equals(name)) { try { if (checkResourceExists(fullName)) { - logger.error("resource {} already exists, can't recreate", fullName); + log.error("resource {} already exists, can't recreate", fullName); putMsg(result, Status.RESOURCE_EXIST); return result; } @@ -505,8 +504,8 @@ public class ResourcesServiceImpl extends BaseServiceImpl 
implements ResourcesSe for (ResourcesTask existResource : existResourcesList) { int taskId = existResource.getTaskId(); if (processService.isTaskOnline(taskDefinitionMapper.selectById(taskId).getCode())) { - logger.error("can't be updated,because it is used of process definition that's online"); - logger.error("resource task relation id:{} is used of task code {}", existResource.getId(), + log.error("can't be updated,because it is used of process definition that's online"); + log.error("resource task relation id:{} is used of task code {}", existResource.getId(), taskDefinitionMapper.selectById(taskId).getCode()); putMsg(result, Status.RESOURCE_IS_USED); return result; @@ -576,7 +575,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (file != null) { // fail upload if (!upload(loginUser, fullName, file, type)) { - logger.error("Storage operation error, resourceName:{}, originFileName:{}.", + log.error("Storage operation error, resourceName:{}, originFileName:{}.", name, RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.HDFS_OPERATION_ERROR); throw new ServiceException( @@ -586,7 +585,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { storageOperate.delete(originFullName, false); } catch (IOException e) { - logger.error("Resource delete error, resourceFullName:{}.", originFullName, e); + log.error("Resource delete error, resourceFullName:{}.", originFullName, e); throw new ServiceException(String.format("delete resource: %s failed.", originFullName)); } } @@ -597,10 +596,10 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe // get the path of dest file in hdfs String destHdfsFileName = fullName; try { - logger.info("start copy {} -> {}", originFullName, destHdfsFileName); + log.info("start copy {} -> {}", originFullName, destHdfsFileName); storageOperate.copy(originFullName, destHdfsFileName, true, true); } catch (Exception e) { - 
logger.error(MessageFormat.format(" copy {0} -> {1} fail", originFullName, destHdfsFileName), e); + log.error(MessageFormat.format(" copy {0} -> {1} fail", originFullName, destHdfsFileName), e); putMsg(result, Status.HDFS_COPY_FAIL); throw new ServiceException(MessageFormat.format( Status.HDFS_COPY_FAIL.getMsg(), originFullName, destHdfsFileName)); @@ -614,13 +613,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe putMsg(result, Status.SUCCESS); if (FileUtils.directoryTraversal(name)) { - logger.warn("Parameter file alias name verify failed, fileAliasName:{}.", RegexUtils.escapeNRT(name)); + log.warn("Parameter file alias name verify failed, fileAliasName:{}.", RegexUtils.escapeNRT(name)); putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); return result; } if (file != null && FileUtils.directoryTraversal(Objects.requireNonNull(file.getOriginalFilename()))) { - logger.warn("File original name verify failed, fileOriginalName:{}.", + log.warn("File original name verify failed, fileOriginalName:{}.", RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); return result; @@ -629,7 +628,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (file != null) { // file is empty if (file.isEmpty()) { - logger.warn("Parameter file is empty, fileOriginalName:{}.", + log.warn("Parameter file is empty, fileOriginalName:{}.", RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); return result; @@ -642,7 +641,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe // determine file suffix if (!fileSuffix.equalsIgnoreCase(nameSuffix)) { // rename file suffix and original suffix must be consistent - logger.warn("Rename file suffix and original suffix must be consistent, fileOriginalName:{}.", + log.warn("Rename file suffix and original suffix must be consistent, fileOriginalName:{}.", 
RegexUtils.escapeNRT(file.getOriginalFilename())); putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); return result; @@ -650,12 +649,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe // If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(fileSuffix)) { - logger.warn(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); + log.warn(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); return result; } if (file.getSize() > Constants.MAX_FILE_SIZE) { - logger.warn( + log.warn( "Resource file size is larger than max file size, fileOriginalName:{}, fileSize:{}, maxFileSize:{}.", RegexUtils.escapeNRT(file.getOriginalFilename()), file.getSize(), Constants.MAX_FILE_SIZE); putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); @@ -686,20 +685,20 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe Result> result = new Result<>(); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); if (storageOperate == null) { - logger.warn("The resource storage is not opened."); + log.warn("The resource storage is not opened."); return Result.success(pageInfo); } User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -707,7 +706,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - 
logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } @@ -734,7 +733,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe visitedTenantEntityCode.add(tenantEntityCode); } catch (Exception e) { - logger.error(e.getMessage() + " Resource path: {}", defaultPath, e); + log.error(e.getMessage() + " Resource path: {}", defaultPath, e); putMsg(result, Status.RESOURCE_NOT_EXIST); throw new ServiceException(String.format(e.getMessage() + " make sure resource path: %s exists in %s", defaultPath, resourceStorageType)); @@ -754,7 +753,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe resourcesList = storageOperate.listFilesStatus(fullName, defaultPath, tenantCode, type); } } catch (Exception e) { - logger.error(e.getMessage() + " Resource path: {}", fullName, e); + log.error(e.getMessage() + " Resource path: {}", fullName, e); putMsg(result, Status.RESOURCE_NOT_EXIST); throw new ServiceException(String.format(e.getMessage() + " make sure resource path: %s exists in %s", defaultPath, resourceStorageType)); @@ -796,12 +795,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe } if (!storageOperate.mkdir(tenantCode, fullName)) { - logger.error("create resource directory {} failed", fullName); + log.error("create resource directory {} failed", fullName); putMsg(result, Status.STORE_OPERATE_CREATE_ERROR); // throw new ServiceException(String.format("create resource directory: %s failed.", fullName)); } } catch (Exception e) { - logger.error("create resource directory {} failed", fullName); + log.error("create resource directory {} failed", fullName); putMsg(result, Status.STORE_OPERATE_CREATE_ERROR); throw new ServiceException(String.format("create resource directory: %s failed.", fullName)); } @@ -841,7 +840,7 @@ public class ResourcesServiceImpl extends 
BaseServiceImpl implements ResourcesSe storageOperate.upload(tenantCode, localFilename, fullName, true, true); } catch (Exception e) { FileUtils.deleteFile(localFilename); - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); return false; } return true; @@ -866,14 +865,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return null; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return null; } @@ -935,7 +934,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe Result result = new Result<>(); Set resourceIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(checkResourceType(type), loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(checkResourceType(type), loginUser.getId(), log); if (resourceIds.isEmpty()) { result.setData(Collections.emptyList()); putMsg(result, Status.SUCCESS); @@ -1009,14 +1008,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -1024,7 +1023,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements 
ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } @@ -1034,13 +1033,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { resource = storageOperate.getFileStatus(fullName, defaultPath, resTenantCode, null); } catch (Exception e) { - logger.error(e.getMessage() + " Resource path: {}", fullName, e); + log.error(e.getMessage() + " Resource path: {}", fullName, e); putMsg(result, Status.RESOURCE_NOT_EXIST); throw new ServiceException(String.format(e.getMessage() + " Resource path: %s", fullName)); } if (resource == null) { - logger.error("Resource does not exist, resource full name:{}.", fullName); + log.error("Resource does not exist, resource full name:{}.", fullName); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -1063,7 +1062,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (resource.getType() == (ResourceType.UDF)) { List udfFuncs = udfFunctionMapper.listUdfByResourceFullName(allChildrenFullNameArray); if (CollectionUtils.isNotEmpty(udfFuncs)) { - logger.warn("Resource can not be deleted because it is bound by UDF functions, udfFuncIds:{}", + log.warn("Resource can not be deleted because it is bound by UDF functions, udfFuncIds:{}", udfFuncs); putMsg(result, Status.UDF_RESOURCE_IS_BOUND, udfFuncs.get(0).getFuncName()); return result; @@ -1075,8 +1074,8 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe for (ResourcesTask resourcesTask : resourcesNeedToDeleteSet) { int taskId = resourcesTask.getTaskId(); if (processService.isTaskOnline(taskDefinitionMapper.selectById(taskId).getCode())) { - logger.error("can't be deleted,because it is used of process definition that's online"); - 
logger.error("resource task relation id:{} is used of task code {}", resourcesTask.getId(), + log.error("can't be deleted,because it is used of process definition that's online"); + log.error("resource task relation id:{} is used of task code {}", resourcesTask.getId(), taskDefinitionMapper.selectById(taskId).getCode()); putMsg(result, Status.RESOURCE_IS_USED); return result; @@ -1227,7 +1226,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe Result result = new Result<>(); putMsg(result, Status.SUCCESS); if (checkResourceExists(fullName)) { - logger.error("Resource with same name exists so can not create again, resourceType:{}, resourceName:{}.", + log.error("Resource with same name exists so can not create again, resourceType:{}, resourceName:{}.", type, RegexUtils.escapeNRT(fullName)); putMsg(result, Status.RESOURCE_EXIST); } @@ -1255,14 +1254,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -1270,7 +1269,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } @@ -1284,7 +1283,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { file = 
storageOperate.getFileStatus(defaultPath + fileName, defaultPath, resTenantCode, type); } catch (Exception e) { - logger.error(e.getMessage() + " Resource path: {}", defaultPath + fileName, e); + log.error(e.getMessage() + " Resource path: {}", defaultPath + fileName, e); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -1308,14 +1307,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -1323,7 +1322,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } @@ -1337,7 +1336,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { file = storageOperate.getFileStatus(fullName, defaultPath, resTenantCode, type); } catch (Exception e) { - logger.error(e.getMessage() + " Resource path: {}", fullName, e); + log.error(e.getMessage() + " Resource path: {}", fullName, e); putMsg(result, Status.RESOURCE_NOT_EXIST); throw new ServiceException(String.format(e.getMessage() + " Resource path: %s", fullName)); } @@ -1366,14 +1365,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - 
logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -1381,7 +1380,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } @@ -1392,7 +1391,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (StringUtils.isNotEmpty(resourceViewSuffixes)) { List strList = Arrays.asList(resourceViewSuffixes.split(",")); if (!strList.contains(nameSuffix)) { - logger.error("Resource suffix does not support view,resourceFullName:{}, suffix:{}.", fullName, + log.error("Resource suffix does not support view,resourceFullName:{}, suffix:{}.", fullName, nameSuffix); putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); return result; @@ -1404,13 +1403,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (storageOperate.exists(fullName)) { content = storageOperate.vimFile(tenantCode, fullName, skipLineNum, limit); } else { - logger.error("read file {} not exist in storage", fullName); + log.error("read file {} not exist in storage", fullName); putMsg(result, Status.RESOURCE_FILE_NOT_EXIST, fullName); return result; } } catch (Exception e) { - logger.error("Resource {} read failed", fullName, e); + log.error("Resource {} read failed", fullName, e); putMsg(result, Status.HDFS_OPERATION_ERROR); return result; } @@ -1449,14 +1448,14 @@ 
public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -1464,13 +1463,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, "")) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } if (FileUtils.directoryTraversal(fileName)) { - logger.warn("File name verify failed, fileName:{}.", RegexUtils.escapeNRT(fileName)); + log.warn("File name verify failed, fileName:{}.", RegexUtils.escapeNRT(fileName)); putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED); return result; } @@ -1481,7 +1480,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (StringUtils.isNotEmpty(resourceViewSuffixes)) { List strList = Arrays.asList(resourceViewSuffixes.split(",")); if (!strList.contains(nameSuffix)) { - logger.warn("Resource suffix does not support view, suffix:{}.", nameSuffix); + log.warn("Resource suffix does not support view, suffix:{}.", nameSuffix); putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); return result; } @@ -1534,7 +1533,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe AuthorizationType authorizationType = resourceType.equals(ResourceType.FILE) ? 
AuthorizationType.RESOURCE_FILE_ID : AuthorizationType.UDF_FILE; - permissionPostHandle(authorizationType, loginUser.getId(), Collections.singletonList(resourceId), logger); + permissionPostHandle(authorizationType, loginUser.getId(), Collections.singletonList(resourceId), log); } private Result checkResourceUploadStartupState() { @@ -1542,7 +1541,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe putMsg(result, Status.SUCCESS); // if resource upload startup if (!PropertyUtils.getResUploadStartupState()) { - logger.error("Storage does not start up, resource upload startup state: {}.", + log.error("Storage does not start up, resource upload startup state: {}.", PropertyUtils.getResUploadStartupState()); putMsg(result, Status.STORAGE_NOT_STARTUP); return result; @@ -1564,12 +1563,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (pid != -1) { Resource parentResource = resourcesMapper.selectById(pid); if (parentResource == null) { - logger.error("Parent resource does not exist, parentResourceId:{}.", pid); + log.error("Parent resource does not exist, parentResourceId:{}.", pid); putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST); return result; } if (!canOperator(loginUser, parentResource.getUserId())) { - logger.warn("User does not have operation privilege, loginUserName:{}.", loginUser.getUserName()); + log.warn("User does not have operation privilege, loginUserName:{}.", loginUser.getUserName()); putMsg(result, Status.USER_NO_OPERATION_PERM); return result; } @@ -1597,14 +1596,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == 
null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -1612,7 +1611,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String tenantCode = tenant.getTenantCode(); if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) { - logger.error("current user does not have permission"); + log.error("current user does not have permission"); putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION); return result; } @@ -1621,13 +1620,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe try { resource = storageOperate.getFileStatus(fullName, "", resTenantCode, ResourceType.FILE); } catch (Exception e) { - logger.error("error occurred when fetching resource information , resource full name {}", fullName); + log.error("error occurred when fetching resource information , resource full name {}", fullName); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } if (resource == null) { - logger.error("Resource does not exist, resource full name:{}.", fullName); + log.error("Resource does not exist, resource full name:{}.", fullName); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -1638,7 +1637,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (StringUtils.isNotEmpty(resourceViewSuffixes)) { List strList = Arrays.asList(resourceViewSuffixes.split(",")); if (!strList.contains(nameSuffix)) { - logger.warn("Resource suffix does not support view, resource full name:{}, suffix:{}.", + log.warn("Resource suffix does not support view, resource full name:{}, suffix:{}.", fullName, nameSuffix); putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); return result; @@ -1650,7 +1649,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (!result.getCode().equals(Status.SUCCESS.getCode())) { throw new 
ServiceException(result.getMsg()); } else - logger.info("Update resource content complete, resource full name:{}.", fullName); + log.info("Update resource content complete, resource full name:{}.", fullName); return result; } @@ -1668,7 +1667,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (!FileUtils.writeContent2File(content, localFilename)) { // write file fail - logger.error("Write file error, fileName:{}, content:{}.", localFilename, + log.error("Write file error, fileName:{}, content:{}.", localFilename, RegexUtils.escapeNRT(content)); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; @@ -1676,12 +1675,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe // get resource file path String resourcePath = storageOperate.getResDir(tenantCode); - logger.info("resource path is {}, resource dir is {}", fullName, resourcePath); + log.info("resource path is {}, resource dir is {}", fullName, resourcePath); if (!storageOperate.exists(resourcePath)) { // create if tenant dir not exists storageOperate.createTenantDirIfNotExists(tenantCode); - logger.info("Create tenant dir because path {} does not exist, tenantCode:{}.", resourcePath, + log.info("Create tenant dir because path {} does not exist, tenantCode:{}.", resourcePath, tenantCode); } if (storageOperate.exists(fullName)) { @@ -1690,13 +1689,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe storageOperate.upload(tenantCode, localFilename, fullName, true, true); } catch (Exception e) { - logger.error("Upload content to storage error, tenantCode:{}, destFileName:{}.", tenantCode, localFilename, + log.error("Upload content to storage error, tenantCode:{}, destFileName:{}.", tenantCode, localFilename, e); result.setCode(Status.HDFS_OPERATION_ERROR.getCode()); result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, fullName)); return result; } - logger.info("Upload content to storage 
complete, tenantCode:{}, destFileName:{}.", tenantCode, localFilename); + log.info("Upload content to storage complete, tenantCode:{}, destFileName:{}.", tenantCode, localFilename); putMsg(result, Status.SUCCESS); return result; } @@ -1711,20 +1710,20 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String fullName) throws IOException { // if resource upload startup if (!PropertyUtils.getResUploadStartupState()) { - logger.warn("Storage does not start up, resource upload startup state: {}.", + log.warn("Storage does not start up, resource upload startup state: {}.", PropertyUtils.getResUploadStartupState()); throw new ServiceException("hdfs not startup"); } if (fullName.endsWith("/")) { - logger.error("resource id {} is directory,can't download it", fullName); + log.error("resource id {} is directory,can't download it", fullName); throw new ServiceException("can't download directory"); } int userId = loginUser.getId(); User user = userMapper.selectById(userId); if (user == null) { - logger.error("User does not exits, userId:{}.", userId); + log.error("User does not exits, userId:{}.", userId); throw new ServiceException(String.format("Resource owner id %d does not exist", userId)); } @@ -1733,7 +1732,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe if (user.getTenantId() != 0) { Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("Tenant id {} not exists", user.getTenantId()); + log.error("Tenant id {} not exists", user.getTenantId()); throw new ServiceException( String.format("The tenant id %d of resource owner not exist", user.getTenantId())); } @@ -1743,13 +1742,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe String[] aliasArr = fullName.split("/"); String alias = aliasArr[aliasArr.length - 1]; String localFileName = FileUtils.getDownloadFilename(alias); - logger.info("Resource path is {}, download local 
filename is {}", alias, localFileName); + log.info("Resource path is {}, download local filename is {}", alias, localFileName); try { storageOperate.download(tenantCode, fullName, localFileName, false, true); return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName); } catch (IOException e) { - logger.error("Download resource error, the path is {}, and local filename is {}, the error message is {}", + log.error("Download resource error, the path is {}, and local filename is {}, the error message is {}", fullName, localFileName, e.getMessage()); throw new ServiceException("Download the resource file failed ,it may be related to your storage"); } @@ -1811,14 +1810,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe User user = userMapper.selectById(loginUser.getId()); if (user == null) { - logger.error("user {} not exists", loginUser.getId()); + log.error("user {} not exists", loginUser.getId()); putMsg(result, Status.USER_NOT_EXIST, loginUser.getId()); return result; } Tenant tenant = tenantMapper.queryById(user.getTenantId()); if (tenant == null) { - logger.error("tenant not exists"); + log.error("tenant not exists"); putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST); return result; } @@ -1835,7 +1834,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe storageEntities = new ArrayList<>( storageOperate.listFilesStatus(baseFolder, baseFolder, tenantCode, ResourceType.FILE)); } catch (Exception e) { - logger.error("delete data transfer data error", e); + log.error("delete data transfer data error", e); putMsg(result, Status.DELETE_RESOURCE_ERROR); return result; } @@ -1851,7 +1850,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe storageOperate.delete(storageEntity.getFullName(), true); successList.add(storageEntity.getFullName()); } catch (Exception ex) { - logger.error("delete data transfer data {} error, please delete it 
manually", date, ex); + log.error("delete data transfer data {} error, please delete it manually", date, ex); failList.add(storageEntity.getFullName()); } } @@ -1981,10 +1980,10 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe .collect(Collectors.toList()); Visitor visitor = new ResourceTreeVisitor(transformedResourceList); String visit = JSONUtils.toJsonString(visitor.visit(""), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); - logger.info(visit); + log.info(visit); String jsonTreeStr = JSONUtils.toJsonString(visitor.visit("").getChildren(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS); - logger.info(jsonTreeStr); + log.info(jsonTreeStr); result.put(Constants.DATA_LIST, visitor.visit("").getChildren()); putMsg(result, Status.SUCCESS); return result; @@ -2046,7 +2045,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe */ private List queryAuthoredResourceList(User loginUser, ResourceType type) { Set resourceIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(checkResourceType(type), loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(checkResourceType(type), loginUser.getId(), log); if (resourceIds.isEmpty()) { return Collections.emptyList(); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java index f546bc4b64..1251add8c1 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java @@ -73,10 +73,9 @@ import java.util.TimeZone; import java.util.stream.Collectors; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; import org.quartz.CronExpression; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -86,10 +85,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; import com.cronutils.model.Cron; @Service +@Slf4j public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService { - private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceImpl.class); - @Autowired private ProjectService projectService; @@ -172,12 +170,12 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { - logger.warn("The start time must not be the same as the end or time can not be null."); + log.warn("The start time must not be the same as the end or time can not be null."); putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return result; } if (scheduleParam.getStartTime().getTime() > scheduleParam.getEndTime().getTime()) { - logger.warn("The start time must smaller than end time"); + log.warn("The start time must smaller than end time"); putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR); return result; } @@ -185,7 +183,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe scheduleObj.setStartTime(scheduleParam.getStartTime()); scheduleObj.setEndTime(scheduleParam.getEndTime()); if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { - logger.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab()); + log.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab()); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab()); return result; } @@ -213,7 +211,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl 
implements SchedulerSe // return scheduler object with ID result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId())); putMsg(result, Status.SUCCESS); - logger.info("Schedule create complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.", + log.info("Schedule create complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.", projectCode, processDefineCode, scheduleObj.getId()); result.put("scheduleId", scheduleObj.getId()); return result; @@ -334,14 +332,14 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe Schedule schedule = scheduleMapper.selectById(id); if (schedule == null) { - logger.error("Schedule does not exist, scheduleId:{}.", id); + log.error("Schedule does not exist, scheduleId:{}.", id); putMsg(result, Status.SCHEDULE_NOT_EXISTS, id); return result; } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode()); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", + log.error("Process definition does not exist, processDefinitionCode:{}.", schedule.getProcessDefinitionCode()); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(schedule.getProcessDefinitionCode())); return result; @@ -438,19 +436,19 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe Schedule scheduleObj = scheduleMapper.selectById(id); if (scheduleObj == null) { - logger.error("Schedule does not exist, scheduleId:{}.", id); + log.error("Schedule does not exist, scheduleId:{}.", id); throw new ServiceException(Status.SCHEDULE_CRON_NOT_EXISTS, id); } // check schedule release state if (scheduleObj.getReleaseState() == scheduleStatus) { - logger.warn("Schedule state does not need to change due to schedule state is already {}, scheduleId:{}.", + log.warn("Schedule state does not need to change due to schedule state 
is already {}, scheduleId:{}.", scheduleObj.getReleaseState().getDescp(), scheduleObj.getId()); throw new ServiceException(Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(scheduleObj.getProcessDefinitionCode()); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", + log.error("Process definition does not exist, processDefinitionCode:{}.", scheduleObj.getProcessDefinitionCode()); throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(scheduleObj.getProcessDefinitionCode())); @@ -458,14 +456,14 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe List processTaskRelations = processTaskRelationMapper.queryByProcessCode(projectCode, scheduleObj.getProcessDefinitionCode()); if (processTaskRelations.isEmpty()) { - logger.error("Process task relations do not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, + log.error("Process task relations do not exist, projectCode:{}, processDefinitionCode:{}.", projectCode, processDefinition.getCode()); throw new ServiceException(Status.PROCESS_DAG_IS_EMPTY); } if (scheduleStatus == ReleaseState.ONLINE) { // check process definition release state if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { - logger.warn("Only process definition state is {} can change schedule state, processDefinitionCode:{}.", + log.warn("Only process definition state is {} can change schedule state, processDefinitionCode:{}.", ReleaseState.ONLINE.getDescp(), processDefinition.getCode()); throw new ServiceException(Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); } @@ -473,7 +471,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe List subProcessDefineCodes = new ArrayList<>(); 
processService.recurseFindSubProcess(processDefinition.getCode(), subProcessDefineCodes); if (!subProcessDefineCodes.isEmpty()) { - logger.info( + log.info( "Need to check sub process definition state before change schedule state, subProcessDefineCodes:{}.", org.apache.commons.lang.StringUtils.join(subProcessDefineCodes, ",")); List subProcessDefinitionList = @@ -484,7 +482,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe * if there is no online process, exit directly */ if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) { - logger.warn( + log.warn( "Only sub process definition state is {} can change schedule state, subProcessDefinitionCode:{}.", ReleaseState.ONLINE.getDescp(), subProcessDefinition.getCode()); throw new ServiceException(Status.PROCESS_DEFINE_NOT_RELEASE, @@ -499,7 +497,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe List masterServers = monitorService.getServerListFromRegistry(true); if (masterServers.isEmpty()) { - logger.error("Master does not exist."); + log.error("Master does not exist."); throw new ServiceException(Status.MASTER_NOT_EXISTS); } @@ -511,12 +509,12 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe try { switch (scheduleStatus) { case ONLINE: - logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}", + log.info("Call master client set schedule online, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); setSchedule(project.getId(), scheduleObj); break; case OFFLINE: - logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}", + log.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}", project.getId(), processDefinition.getId(), masterServers); deleteSchedule(project.getId(), id); break; @@ -524,7 +522,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl 
implements SchedulerSe throw new ServiceException(Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString()); } } catch (Exception e) { - logger.error("Set schedule state to {} error, projectCode:{}, scheduleId:{}.", scheduleStatus.getDescp(), + log.error("Set schedule state to {} error, projectCode:{}, scheduleId:{}.", scheduleStatus.getDescp(), projectCode, scheduleObj.getId()); Status status = scheduleStatus == ReleaseState.ONLINE ? Status.PUBLISH_SCHEDULE_ONLINE_ERROR : Status.OFFLINE_SCHEDULE_ERROR; @@ -558,7 +556,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefineCode); + log.error("Process definition does not exist, processDefinitionCode:{}.", processDefineCode); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefineCode)); return result; } @@ -645,7 +643,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe } public void setSchedule(int projectId, Schedule schedule) { - logger.info("Set schedule state {}, project id: {}, scheduleId: {}", schedule.getReleaseState().getDescp(), + log.info("Set schedule state {}, project id: {}, scheduleId: {}", schedule.getReleaseState().getDescp(), projectId, schedule.getId()); schedulerApi.insertOrUpdateScheduleTask(projectId, schedule); } @@ -659,7 +657,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe */ @Override public void deleteSchedule(int projectId, int scheduleId) { - logger.info("Delete schedule of project, projectId:{}, scheduleId:{}", projectId, scheduleId); + log.info("Delete schedule of project, projectId:{}, scheduleId:{}", projectId, scheduleId); schedulerApi.deleteScheduleTask(projectId, scheduleId); } @@ -731,7 
+729,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe try { cron = CronUtils.parse2Cron(scheduleParam.getCrontab()); } catch (CronParseException e) { - logger.error("Parse cron to cron expression error, crontab:{}.", scheduleParam.getCrontab(), e); + log.error("Parse cron to cron expression error, crontab:{}.", scheduleParam.getCrontab(), e); putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR); return result; } @@ -778,7 +776,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe // check schedule exists Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode); if (schedule == null) { - logger.error("Schedule of process definition does not exist, processDefinitionCode:{}.", + log.error("Schedule of process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, processDefinitionCode); return result; @@ -786,7 +784,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); + log.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode)); return result; } @@ -802,7 +800,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe long environmentCode) { if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { - logger.warn("Schedule can not be updated due to schedule is {}, scheduleId:{}.", + log.warn("Schedule can not be updated due to schedule is {}, scheduleId:{}.", 
ReleaseState.ONLINE.getDescp(), schedule.getId()); return; } @@ -813,17 +811,17 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe if (!StringUtils.isEmpty(scheduleExpression)) { ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); if (scheduleParam == null) { - logger.warn("Parameter scheduleExpression is invalid, so parse cron error."); + log.warn("Parameter scheduleExpression is invalid, so parse cron error."); putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR); return; } if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) { - logger.warn("The start time must not be the same as the end or time can not be null."); + log.warn("The start time must not be the same as the end or time can not be null."); putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME); return; } if (scheduleParam.getStartTime().getTime() > scheduleParam.getEndTime().getTime()) { - logger.warn("The start time must smaller than end time"); + log.warn("The start time must smaller than end time"); putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR); return; } @@ -831,7 +829,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe schedule.setStartTime(scheduleParam.getStartTime()); schedule.setEndTime(scheduleParam.getEndTime()); if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { - logger.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab()); + log.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab()); putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); return; } @@ -859,7 +857,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe processDefinitionMapper.updateById(processDefinition); - logger.info("Schedule update complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.", + log.info("Schedule 
update complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.", processDefinition.getProjectCode(), processDefinition.getCode(), schedule.getId()); putMsg(result, Status.SUCCESS); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java index f61f67cafc..baf5088108 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java @@ -34,8 +34,8 @@ import java.util.UUID; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -45,10 +45,9 @@ import org.springframework.web.util.WebUtils; * session service implement */ @Service +@Slf4j public class SessionServiceImpl extends BaseServiceImpl implements SessionService { - private static final Logger logger = LoggerFactory.getLogger(SessionService.class); - @Autowired private SessionMapper sessionMapper; @@ -75,7 +74,7 @@ public class SessionServiceImpl extends BaseServiceImpl implements SessionServic } String ip = BaseController.getClientIpAddress(request); - logger.debug("Get session: {}, ip: {}.", sessionId, ip); + log.debug("Get session: {}, ip: {}.", sessionId, ip); return sessionMapper.selectById(sessionId); } @@ -156,7 +155,7 @@ public class SessionServiceImpl extends BaseServiceImpl implements SessionServic // delete session sessionMapper.deleteById(session.getId()); } catch (Exception e) { - logger.warn("userId : {} , ip : {} , find more one session", loginUser.getId(), ip, e); + 
log.warn("userId : {} , ip : {} , find more one session", loginUser.getId(), ip, e); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java index ab71e9445c..a19f69496c 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java @@ -86,8 +86,8 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -100,10 +100,9 @@ import com.google.common.collect.Lists; * task definition service impl */ @Service +@Slf4j public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDefinitionService { - private static final Logger logger = LoggerFactory.getLogger(TaskDefinitionServiceImpl.class); - private static final String RELEASESTATE = "releaseState"; @Autowired @@ -165,7 +164,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe List taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class); if (CollectionUtils.isEmpty(taskDefinitionLogs)) { - logger.warn("Parameter taskDefinitionJson is invalid."); + log.warn("Parameter taskDefinitionJson is invalid."); putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson); return result; } @@ -175,14 +174,14 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe .taskParams(taskDefinitionLog.getTaskParams()) .dependence(taskDefinitionLog.getDependence()) 
.build())) { - logger.warn("Task definition {} parameters are invalid.", taskDefinitionLog.getName()); + log.warn("Task definition {} parameters are invalid.", taskDefinitionLog.getName()); putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName()); return result; } } int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, Boolean.TRUE); if (saveTaskResult == Constants.DEFINITION_FAILURE) { - logger.error("Create task definition error, projectCode:{}.", projectCode); + log.error("Create task definition error, projectCode:{}.", projectCode); putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR); } @@ -309,19 +308,19 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe } ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); if (processDefinition == null || projectCode != processDefinition.getProjectCode()) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); + log.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode)); return result; } if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { - logger.warn("Task definition can not be created due to process definition is {}, processDefinitionCode:{}.", + log.warn("Task definition can not be created due to process definition is {}, processDefinitionCode:{}.", ReleaseState.ONLINE.getDescp(), processDefinition.getCode()); putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, String.valueOf(processDefinitionCode)); return result; } TaskDefinitionLog taskDefinition = JSONUtils.parseObject(taskDefinitionJsonObj, TaskDefinitionLog.class); if (taskDefinition == null) { - logger.warn("Parameter taskDefinitionJsonObj is invalid json."); + 
log.warn("Parameter taskDefinitionJsonObj is invalid json."); putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJsonObj); return result; } @@ -330,7 +329,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe .taskParams(taskDefinition.getTaskParams()) .dependence(taskDefinition.getDependence()) .build())) { - logger.error("Task definition {} parameters are invalid", taskDefinition.getName()); + log.error("Task definition {} parameters are invalid", taskDefinition.getName()); putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinition.getName()); return result; } @@ -355,7 +354,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe .collect(Collectors.toSet()); if (CollectionUtils.isNotEmpty(diffCode)) { String taskCodes = StringUtils.join(diffCode, Constants.COMMA); - logger.error("Some task definitions with parameter upstreamCodes do not exist, taskDefinitionCodes:{}.", + log.error("Some task definitions with parameter upstreamCodes do not exist, taskDefinitionCodes:{}.", taskCodes); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCodes); return result; @@ -384,25 +383,25 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe processDefinition.getVersion(), processTaskRelationLogList, Lists.newArrayList(), Boolean.TRUE); if (insertResult != Constants.EXIT_CODE_SUCCESS) { - logger.error( + log.error( "Save new version process task relations error, processDefinitionCode:{}, processDefinitionVersion:{}.", processDefinition.getCode(), processDefinition.getVersion()); putMsg(result, Status.CREATE_PROCESS_TASK_RELATION_ERROR); throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR); } else - logger.info( + log.info( "Save new version process task relations complete, processDefinitionCode:{}, processDefinitionVersion:{}.", processDefinition.getCode(), processDefinition.getVersion()); int saveTaskResult = processService.saveTaskDefine(loginUser, 
projectCode, Lists.newArrayList(taskDefinition), Boolean.TRUE); if (saveTaskResult == Constants.DEFINITION_FAILURE) { - logger.error("Save task definition error, projectCode:{}, taskDefinitionCode:{}.", projectCode, + log.error("Save task definition error, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskDefinition.getCode()); putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR); } else - logger.info("Save task definition complete, projectCode:{}, taskDefinitionCode:{}.", projectCode, + log.info("Save task definition complete, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskDefinition.getCode()); putMsg(result, Status.SUCCESS); result.put(Constants.DATA_LIST, taskDefinition); @@ -430,7 +429,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(project.getCode(), processCode, taskName); if (taskDefinition == null) { - logger.error("Task definition does not exist, taskName:{}.", taskName); + log.error("Task definition does not exist, taskName:{}.", taskName); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskName); } else { result.put(Constants.DATA_LIST, taskDefinition); @@ -498,7 +497,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe List taskRelationList = processTaskRelationMapper.queryUpstreamByCode(taskDefinition.getProjectCode(), taskCode); if (CollectionUtils.isNotEmpty(taskRelationList)) { - logger.debug( + log.debug( "Task definition has upstream tasks, start handle them after delete task, taskDefinitionCode:{}.", taskCode); long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode(); @@ -515,16 +514,16 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe List taskDefinitionLogs) { ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); if 
(processDefinition == null) { - logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); + log.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode); throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST); } int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE); if (insertVersion <= 0) { - logger.error("Update process definition error, projectCode:{}, processDefinitionCode:{}.", + log.error("Update process definition error, projectCode:{}, processDefinitionCode:{}.", processDefinition.getProjectCode(), processDefinitionCode); throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); } else - logger.info( + log.info( "Save new version process definition complete, projectCode:{}, processDefinitionCode:{}, newVersion:{}.", processDefinition.getProjectCode(), processDefinitionCode, insertVersion); List relationLogs = @@ -533,11 +532,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe processDefinition.getCode(), insertVersion, relationLogs, taskDefinitionLogs, Boolean.TRUE); if (insertResult == Constants.EXIT_CODE_SUCCESS) { - logger.info( + log.info( "Save new version task relations complete, projectCode:{}, processDefinitionCode:{}, newVersion:{}.", processDefinition.getProjectCode(), processDefinitionCode, insertVersion); } else { - logger.error("Update task relations error, projectCode:{}, processDefinitionCode:{}.", + log.error("Update task relations error, projectCode:{}, processDefinitionCode:{}.", processDefinition.getProjectCode(), processDefinitionCode); throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR); } @@ -564,7 +563,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe List taskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode); if (CollectionUtils.isNotEmpty(taskRelationList)) { - 
logger.info( + log.info( "Task definition has upstream tasks, start handle them after update task, taskDefinitionCode:{}.", taskCode); long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode(); @@ -573,7 +572,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe updateDag(loginUser, processDefinitionCode, processTaskRelations, Lists.newArrayList(taskDefinitionToUpdate)); } - logger.info("Update task definition complete, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskCode); + log.info("Update task definition complete, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskCode); result.put(Constants.DATA_LIST, taskCode); putMsg(result, Status.SUCCESS); return result; @@ -639,7 +638,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe processTaskRelationMapper.queryUpstreamByCode(taskDefinitionUpdate.getProjectCode(), taskCode); if (CollectionUtils.isNotEmpty(taskRelationList)) { - logger.info( + log.info( "Task definition has upstream tasks, start handle them after update task, taskDefinitionCode:{}.", taskCode); long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode(); @@ -713,14 +712,14 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); if (taskDefinition == null) { - logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); + log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode)); return null; } if (processService.isTaskOnline(taskCode) && taskDefinition.getFlag() == Flag.YES) { // if stream, can update task definition without online check if (taskDefinition.getTaskExecuteType() != TaskExecuteType.STREAM) { - logger.warn("Only {} type task can be updated without online check, taskDefinitionCode:{}.", + log.warn("Only 
{} type task can be updated without online check, taskDefinitionCode:{}.", TaskExecuteType.STREAM, taskCode); putMsg(result, Status.NOT_SUPPORT_UPDATE_TASK_DEFINITION); return null; @@ -732,12 +731,12 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe taskDefinition.setTimeoutNotifyStrategy(null); } if (taskDefinition.equals(taskDefinitionToUpdate)) { - logger.warn("Task definition does not need update because no change, taskDefinitionCode:{}.", taskCode); + log.warn("Task definition does not need update because no change, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.TASK_DEFINITION_NOT_MODIFY_ERROR, String.valueOf(taskCode)); return null; } if (taskDefinitionToUpdate == null) { - logger.warn("Parameter taskDefinitionJson is invalid."); + log.warn("Parameter taskDefinitionJson is invalid."); putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJsonObj); return null; } @@ -746,14 +745,14 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe .taskParams(taskDefinitionToUpdate.getTaskParams()) .dependence(taskDefinitionToUpdate.getDependence()) .build())) { - logger.warn("Task definition parameters are invalid, taskDefinitionName:{}.", + log.warn("Task definition parameters are invalid, taskDefinitionName:{}.", taskDefinitionToUpdate.getName()); putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionToUpdate.getName()); return null; } Integer version = taskDefinitionLogMapper.queryMaxVersionForDefinition(taskCode); if (version == null || version == 0) { - logger.error("Max version task definitionLog can not be found in database, taskDefinitionCode:{}.", + log.error("Max version task definitionLog can not be found in database, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.DATA_IS_NOT_VALID, taskCode); return null; @@ -774,12 +773,12 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe taskDefinitionToUpdate.setId(null); int insert = 
taskDefinitionLogMapper.insert(taskDefinitionToUpdate); if ((update & insert) != 1) { - logger.error("Update task definition or definitionLog error, projectCode:{}, taskDefinitionCode:{}.", + log.error("Update task definition or definitionLog error, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskCode); putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); } else - logger.info( + log.info( "Update task definition and definitionLog complete, projectCode:{}, taskDefinitionCode:{}, newTaskVersion:{}.", projectCode, taskCode, taskDefinitionToUpdate.getVersion()); // update process task relation @@ -794,7 +793,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe } int count = processTaskRelationMapper.updateProcessTaskRelationTaskVersion(processTaskRelation); if (count != 1) { - logger.error("batch update process task relation error, projectCode:{}, taskDefinitionCode:{}.", + log.error("batch update process task relation error, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskCode); putMsg(result, Status.PROCESS_TASK_RELATION_BATCH_UPDATE_ERROR); throw new ServiceException(Status.PROCESS_TASK_RELATION_BATCH_UPDATE_ERROR); @@ -842,7 +841,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe upstreamTaskCodes.removeAll(queryUpStreamTaskCodeMap.keySet()); if (CollectionUtils.isNotEmpty(upstreamTaskCodes)) { String notExistTaskCodes = StringUtils.join(upstreamTaskCodes, Constants.COMMA); - logger.error("Some task definitions in parameter upstreamTaskCodes do not exist, notExistTaskCodes:{}.", + log.error("Some task definitions in parameter upstreamTaskCodes do not exist, notExistTaskCodes:{}.", notExistTaskCodes); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, notExistTaskCodes); return result; @@ -883,7 +882,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe 
processTaskRelationList.add(processTaskRelationList.get(0)); } } - logger.info( + log.info( "Update task with upstream tasks complete, projectCode:{}, taskDefinitionCode:{}, upstreamTaskCodes:{}.", projectCode, taskCode, upstreamTaskCodes); result.put(Constants.DATA_LIST, taskCode); @@ -996,7 +995,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe return result; } if (processService.isTaskOnline(taskCode)) { - logger.warn( + log.warn( "Task definition version can not be switched due to process definition is {}, taskDefinitionCode:{}.", ReleaseState.ONLINE.getDescp(), taskCode); putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE); @@ -1004,7 +1003,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe } TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) { - logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); + log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode)); return result; } @@ -1018,7 +1017,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe List taskRelationList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode); if (CollectionUtils.isNotEmpty(taskRelationList)) { - logger.info( + log.info( "Task definition has upstream tasks, start handle them after switch task, taskDefinitionCode:{}.", taskCode); long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode(); @@ -1027,13 +1026,13 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe updateDag(loginUser, processDefinitionCode, processTaskRelations, Lists.newArrayList(taskDefinitionUpdate)); } else { - logger.info( + log.info( "Task definition version switch complete, switch task version to {}, taskDefinitionCode:{}.", 
version, taskCode); putMsg(result, Status.SUCCESS); } } else { - logger.error("Task definition version switch error, taskDefinitionCode:{}.", taskCode); + log.error("Task definition version switch error, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.SWITCH_TASK_DEFINITION_VERSION_ERROR); } return result; @@ -1081,11 +1080,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); if (taskDefinition == null) { - logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); + log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode)); } else { if (taskDefinition.getVersion() == version) { - logger.warn( + log.warn( "Task definition can not be deleted due to version is being used, projectCode:{}, taskDefinitionCode:{}, version:{}.", projectCode, taskCode, version); putMsg(result, Status.MAIN_TABLE_USING_VERSION); @@ -1093,12 +1092,12 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe } int delete = taskDefinitionLogMapper.deleteByCodeAndVersion(taskCode, version); if (delete > 0) { - logger.info( + log.info( "Task definition version delete complete, projectCode:{}, taskDefinitionCode:{}, version:{}.", projectCode, taskCode, version); putMsg(result, Status.SUCCESS); } else { - logger.error("Task definition version delete error, projectCode:{}, taskDefinitionCode:{}, version:{}.", + log.error("Task definition version delete error, projectCode:{}, taskDefinitionCode:{}, version:{}.", projectCode, taskCode, version); putMsg(result, Status.DELETE_TASK_DEFINITION_VERSION_ERROR); } @@ -1118,7 +1117,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode); if (taskDefinition == null || projectCode != 
taskDefinition.getProjectCode()) { - logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); + log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode)); } else { List taskRelationList = processTaskRelationMapper @@ -1217,7 +1216,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe public Map genTaskCodeList(Integer genNum) { Map result = new HashMap<>(); if (genNum == null || genNum < 1 || genNum > 100) { - logger.warn("Parameter genNum must be great than 1 and less than 100."); + log.warn("Parameter genNum must be great than 1 and less than 100."); putMsg(result, Status.DATA_IS_NOT_VALID, genNum); return result; } @@ -1227,7 +1226,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } } catch (CodeGenerateException e) { - logger.error("Generate task definition code error.", e); + log.error("Generate task definition code error.", e); putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code"); } putMsg(result, Status.SUCCESS); @@ -1268,7 +1267,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe TaskDefinitionLog taskDefinitionLog = taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(code, taskDefinition.getVersion()); if (taskDefinitionLog == null) { - logger.error("Task definition does not exist, taskDefinitionCode:{}.", code); + log.error("Task definition does not exist, taskDefinitionCode:{}.", code); putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(code)); return result; } @@ -1283,11 +1282,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new); PermissionCheck permissionCheck = new 
PermissionCheck(AuthorizationType.RESOURCE_FILE_ID, - processService, resourceIdArray, loginUser.getId(), logger); + processService, resourceIdArray, loginUser.getId(), log); try { permissionCheck.checkPermission(); } catch (Exception e) { - logger.error("Resources permission check error, resourceIds:{}.", resourceIds, e); + log.error("Resources permission check error, resourceIds:{}.", resourceIds, e); putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION); return result; } @@ -1296,18 +1295,18 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe taskDefinitionLog.setFlag(Flag.YES); break; default: - logger.warn("Parameter releaseState is invalid."); + log.warn("Parameter releaseState is invalid."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE); return result; } int update = taskDefinitionMapper.updateById(taskDefinition); int updateLog = taskDefinitionLogMapper.updateById(taskDefinitionLog); if ((update == 0 && updateLog == 1) || (update == 1 && updateLog == 0)) { - logger.error("Update taskDefinition state or taskDefinitionLog state error, taskDefinitionCode:{}.", code); + log.error("Update taskDefinition state or taskDefinitionLog state error, taskDefinitionCode:{}.", code); putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR); throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR); } - logger.error("Update taskDefinition state or taskDefinitionLog state to complete, taskDefinitionCode:{}.", + log.error("Update taskDefinition state or taskDefinitionLog state to complete, taskDefinitionCode:{}.", code); putMsg(result, Status.SUCCESS); return result; diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupQueueServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupQueueServiceImpl.java index 89f2c4f133..d33115479f 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupQueueServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupQueueServiceImpl.java @@ -35,8 +35,8 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -47,6 +47,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * task group queue service */ @Service +@Slf4j public class TaskGroupQueueServiceImpl extends BaseServiceImpl implements TaskGroupQueueService { @Autowired @@ -55,8 +56,6 @@ public class TaskGroupQueueServiceImpl extends BaseServiceImpl implements TaskGr @Autowired private ProjectMapper projectMapper; - private static final Logger logger = LoggerFactory.getLogger(TaskGroupQueueServiceImpl.class); - /** * query tasks in task group queue by group id * @@ -73,7 +72,7 @@ public class TaskGroupQueueServiceImpl extends BaseServiceImpl implements TaskGr Page page = new Page<>(pageNo, pageSize); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Set projectIds = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log); if (projectIds.isEmpty()) { result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java index 232268970a..bf88ba7d5b 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java +++ 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java @@ -42,8 +42,8 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -56,6 +56,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * task Group Service */ @Service +@Slf4j public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupService { @Autowired @@ -67,8 +68,6 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe @Autowired private ExecutorService executorService; - private static final Logger logger = LoggerFactory.getLogger(TaskGroupServiceImpl.class); - /** * create a Task group * @@ -90,23 +89,23 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe return result; } if (checkDescriptionLength(description)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } if (name == null) { - logger.warn("Parameter name can ot be null."); + log.warn("Parameter name can ot be null."); putMsg(result, Status.NAME_NULL); return result; } if (groupSize <= 0) { - logger.warn("Parameter task group size is must bigger than 1."); + log.warn("Parameter task group size is must bigger than 1."); putMsg(result, Status.TASK_GROUP_SIZE_ERROR); return result; } TaskGroup taskGroup1 = taskGroupMapper.queryByName(loginUser.getId(), name); if (taskGroup1 != null) { - logger.warn("Task group with the same name already exists, taskGroupName:{}.", taskGroup1.getName()); + log.warn("Task group with the same name already exists, taskGroupName:{}.", taskGroup1.getName()); 
putMsg(result, Status.TASK_GROUP_NAME_EXSIT); return result; } @@ -124,11 +123,11 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe if (taskGroupMapper.insert(taskGroup) > 0) { permissionPostHandle(AuthorizationType.TASK_GROUP, loginUser.getId(), - Collections.singletonList(taskGroup.getId()), logger); - logger.info("Create task group complete, taskGroupName:{}.", taskGroup.getName()); + Collections.singletonList(taskGroup.getId()), log); + log.info("Create task group complete, taskGroupName:{}.", taskGroup.getName()); putMsg(result, Status.SUCCESS); } else { - logger.error("Create task group error, taskGroupName:{}.", taskGroup.getName()); + log.error("Create task group error, taskGroupName:{}.", taskGroup.getName()); putMsg(result, Status.CREATE_TASK_GROUP_ERROR); return result; } @@ -155,17 +154,17 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe return result; } if (checkDescriptionLength(description)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } if (name == null) { - logger.warn("Parameter name can ot be null."); + log.warn("Parameter name can ot be null."); putMsg(result, Status.NAME_NULL); return result; } if (groupSize <= 0) { - logger.warn("Parameter task group size is must bigger than 1."); + log.warn("Parameter task group size is must bigger than 1."); putMsg(result, Status.TASK_GROUP_SIZE_ERROR); return result; } @@ -175,13 +174,13 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe .ne(TaskGroup::getId, id)); if (exists > 0) { - logger.error("Task group with the same name already exists."); + log.error("Task group with the same name already exists."); putMsg(result, Status.TASK_GROUP_NAME_EXSIT); return result; } TaskGroup taskGroup = taskGroupMapper.selectById(id); if (taskGroup.getStatus() != Flag.YES.getCode()) { - 
logger.warn("Task group has been closed, taskGroupId:{}.", id); + log.warn("Task group has been closed, taskGroupId:{}.", id); putMsg(result, Status.TASK_GROUP_STATUS_ERROR); return result; } @@ -193,10 +192,10 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe } int i = taskGroupMapper.updateById(taskGroup); if (i > 0) { - logger.info("Update task group complete, taskGroupId:{}.", id); + log.info("Update task group complete, taskGroupId:{}.", id); putMsg(result, Status.SUCCESS); } else { - logger.error("Update task group error, taskGroupId:{}.", id); + log.error("Update task group error, taskGroupId:{}.", id); putMsg(result, Status.UPDATE_TASK_GROUP_ERROR); } return result; @@ -256,7 +255,7 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe Page page = new Page<>(pageNo, pageSize); PageInfo emptyPageInfo = new PageInfo<>(pageNo, pageSize); Set ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.TASK_GROUP, - loginUser.getId(), logger); + loginUser.getId(), log); if (ids.isEmpty()) { result.put(Constants.DATA_LIST, emptyPageInfo); putMsg(result, Status.SUCCESS); @@ -314,7 +313,7 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe Page page = new Page<>(pageNo, pageSize); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Set ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.TASK_GROUP, - userId, logger); + userId, log); if (ids.isEmpty()) { result.put(Constants.DATA_LIST, pageInfo); putMsg(result, Status.SUCCESS); @@ -345,16 +344,16 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe } TaskGroup taskGroup = taskGroupMapper.selectById(id); if (taskGroup.getStatus() == Flag.NO.getCode()) { - logger.info("Task group has been closed, taskGroupId:{}.", id); + log.info("Task group has been closed, taskGroupId:{}.", id); putMsg(result, Status.TASK_GROUP_STATUS_CLOSED); 
return result; } taskGroup.setStatus(Flag.NO.getCode()); int update = taskGroupMapper.updateById(taskGroup); if (update > 0) - logger.info("Task group close complete, taskGroupId:{}.", id); + log.info("Task group close complete, taskGroupId:{}.", id); else - logger.error("Task group close error, taskGroupId:{}.", id); + log.error("Task group close error, taskGroupId:{}.", id); putMsg(result, Status.SUCCESS); return result; } @@ -378,7 +377,7 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe } TaskGroup taskGroup = taskGroupMapper.selectById(id); if (taskGroup.getStatus() == Flag.YES.getCode()) { - logger.info("Task group has been started, taskGroupId:{}.", id); + log.info("Task group has been started, taskGroupId:{}.", id); putMsg(result, Status.TASK_GROUP_STATUS_OPENED); return result; } @@ -386,9 +385,9 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe taskGroup.setUpdateTime(new Date(System.currentTimeMillis())); int update = taskGroupMapper.updateById(taskGroup); if (update > 0) - logger.info("Task group start complete, taskGroupId:{}.", id); + log.info("Task group start complete, taskGroupId:{}.", id); else - logger.error("Task group start error, taskGroupId:{}.", id); + log.error("Task group start error, taskGroupId:{}.", id); putMsg(result, Status.SUCCESS); return result; } @@ -423,7 +422,7 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe return result; } taskGroupQueueService.modifyPriority(queueId, priority); - logger.info("Modify task group queue priority complete, queueId:{}, priority:{}.", queueId, priority); + log.info("Modify task group queue priority complete, queueId:{}, priority:{}.", queueId, priority); putMsg(result, Status.SUCCESS); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java index 5738d29555..8ab4557635 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java @@ -61,8 +61,8 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -74,10 +74,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * task instance service impl */ @Service +@Slf4j public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInstanceService { - private static final Logger logger = LoggerFactory.getLogger(TaskInstanceServiceImpl.class); - @Autowired ProjectMapper projectMapper; @@ -232,7 +231,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst // check whether the task instance can be found TaskInstance task = taskInstanceMapper.selectById(taskInstanceId); if (task == null) { - logger.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, + log.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, taskInstanceId); putMsg(result, Status.TASK_INSTANCE_NOT_FOUND); return result; @@ -240,7 +239,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(task.getTaskCode()); if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) { - logger.error("Task definition can not be found, projectCode:{}, taskDefinitionCode:{}.", projectCode, + 
log.error("Task definition can not be found, projectCode:{}, taskDefinitionCode:{}.", projectCode, task.getTaskCode()); putMsg(result, Status.TASK_INSTANCE_NOT_FOUND, taskInstanceId); return result; @@ -248,7 +247,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst // check whether the task instance state type is failure or cancel if (!task.getState().isFailure() && !task.getState().isKill()) { - logger.warn("{} type task instance can not perform force success, projectCode:{}, taskInstanceId:{}.", + log.warn("{} type task instance can not perform force success, projectCode:{}, taskInstanceId:{}.", task.getState().getDesc(), projectCode, taskInstanceId); putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString()); return result; @@ -259,11 +258,11 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst int changedNum = taskInstanceMapper.updateById(task); if (changedNum > 0) { processService.forceProcessInstanceSuccessByTaskInstanceId(taskInstanceId); - logger.info("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}", projectCode, + log.info("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}", projectCode, taskInstanceId); putMsg(result, Status.SUCCESS); } else { - logger.error("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}", + log.error("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}", projectCode, taskInstanceId); putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR); } @@ -286,7 +285,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstanceId); if (taskInstance == null) { - logger.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, + log.error("Task definition can not be found, projectCode:{}, 
taskInstanceId:{}.", projectCode, taskInstanceId); putMsg(result, Status.TASK_INSTANCE_NOT_FOUND); return result; @@ -317,7 +316,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstanceId); if (taskInstance == null) { - logger.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, + log.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, taskInstanceId); putMsg(result, Status.TASK_INSTANCE_NOT_FOUND); return result; @@ -338,7 +337,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst projectService.checkProjectAndAuthThrowException(loginUser, project, FORCED_SUCCESS); TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstanceId); if (taskInstance == null) { - logger.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, + log.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, taskInstanceId); } return taskInstance; @@ -354,7 +353,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstanceId); if (taskInstance == null) { - logger.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, + log.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode, taskInstanceId); putMsg(result, Status.TASK_INSTANCE_NOT_FOUND); return new TaskInstanceRemoveCacheResponse(result); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java index a0bfd599d4..c2d9be0d42 100644 --- 
a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java @@ -54,8 +54,8 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -67,10 +67,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * tenant service impl */ @Service +@Slf4j public class TenantServiceImpl extends BaseServiceImpl implements TenantService { - private static final Logger logger = LoggerFactory.getLogger(TenantServiceImpl.class); - @Autowired private TenantMapper tenantMapper; @@ -115,7 +114,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService private void updateTenantValid(Tenant existsTenant, Tenant updateTenant) throws ServiceException { // Check the exists tenant if (Objects.isNull(existsTenant)) { - logger.error("Tenant does not exist."); + log.error("Tenant does not exist."); throw new ServiceException(Status.TENANT_NOT_EXIST); } // Check the update tenant parameters @@ -153,7 +152,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService throw new ServiceException(Status.USER_NO_OPERATION_PERM); } if (checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -166,7 +165,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService storageOperate.createTenantDirIfNotExists(tenantCode); } permissionPostHandle(AuthorizationType.TENANT, loginUser.getId(), Collections.singletonList(tenant.getId()), - 
logger); + log); result.put(Constants.DATA_LIST, tenant); putMsg(result, Status.SUCCESS); return result; @@ -187,7 +186,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService Result result = new Result<>(); PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); Set ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.TENANT, - loginUser.getId(), logger); + loginUser.getId(), log); if (ids.isEmpty()) { result.setData(pageInfo); putMsg(result, Status.SUCCESS); @@ -224,7 +223,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService throw new ServiceException(Status.USER_NO_OPERATION_PERM); } if (checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -240,10 +239,10 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService } int update = tenantMapper.updateById(updateTenant); if (update > 0) { - logger.info("Tenant is updated and id is {}.", updateTenant.getId()); + log.info("Tenant is updated and id is {}.", updateTenant.getId()); putMsg(result, Status.SUCCESS); } else { - logger.error("Tenant update error, id:{}.", updateTenant.getId()); + log.error("Tenant update error, id:{}.", updateTenant.getId()); putMsg(result, Status.UPDATE_TENANT_ERROR); } return result; @@ -268,13 +267,13 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService Tenant tenant = tenantMapper.queryById(id); if (Objects.isNull(tenant)) { - logger.error("Tenant does not exist, userId:{}.", id); + log.error("Tenant does not exist, userId:{}.", id); throw new ServiceException(Status.TENANT_NOT_EXIST); } List processInstances = getProcessInstancesByTenant(tenant); if (CollectionUtils.isNotEmpty(processInstances)) { - logger.warn("Delete tenant failed, because there are {} executing process instances using it.", 
+ log.warn("Delete tenant failed, because there are {} executing process instances using it.", processInstances.size()); throw new ServiceException(Status.DELETE_TENANT_BY_ID_FAIL, processInstances.size()); } @@ -282,14 +281,14 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService List processDefinitions = processDefinitionMapper.queryDefinitionListByTenant(tenant.getId()); if (CollectionUtils.isNotEmpty(processDefinitions)) { - logger.warn("Delete tenant failed, because there are {} process definitions using it.", + log.warn("Delete tenant failed, because there are {} process definitions using it.", processDefinitions.size()); throw new ServiceException(Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, processDefinitions.size()); } List userList = userMapper.queryUserListByTenant(tenant.getId()); if (CollectionUtils.isNotEmpty(userList)) { - logger.warn("Delete tenant failed, because there are {} users using it.", userList.size()); + log.warn("Delete tenant failed, because there are {} users using it.", userList.size()); throw new ServiceException(Status.DELETE_TENANT_BY_ID_FAIL_USERS, userList.size()); } @@ -301,10 +300,10 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService int delete = tenantMapper.deleteById(id); if (delete > 0) { processInstanceMapper.updateProcessInstanceByTenantId(id, -1); - logger.info("Tenant is deleted and id is {}.", id); + log.info("Tenant is deleted and id is {}.", id); putMsg(result, Status.SUCCESS); } else { - logger.error("Tenant delete failed, tenantId:{}.", id); + log.error("Tenant delete failed, tenantId:{}.", id); putMsg(result, Status.DELETE_TENANT_BY_ID_ERROR); } @@ -327,7 +326,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService Map result = new HashMap<>(); Set ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.TENANT, - loginUser.getId(), logger); + loginUser.getId(), log); if (ids.isEmpty()) { 
result.put(Constants.DATA_LIST, Collections.emptyList()); putMsg(result, Status.SUCCESS); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java index fcee1608f8..798386c002 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java @@ -41,8 +41,8 @@ import java.util.Date; import java.util.List; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -54,10 +54,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * udf func service impl */ @Service +@Slf4j public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncService { - private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceImpl.class); - @Autowired private ResourceMapper resourceMapper; @@ -101,13 +100,13 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic return result; } if (checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } // if resource upload startup if (!PropertyUtils.getResUploadStartupState()) { - logger.error("Storage does not start up, resource upload startup state: {}.", + log.error("Storage does not start up, resource upload startup state: {}.", PropertyUtils.getResUploadStartupState()); putMsg(result, Status.HDFS_NOT_STARTUP); return result; @@ -115,7 +114,7 @@ public class UdfFuncServiceImpl 
extends BaseServiceImpl implements UdfFuncServic // verify udf func name exist if (checkUdfFuncNameExists(funcName)) { - logger.warn("Udf function with the same name already exists."); + log.warn("Udf function with the same name already exists."); putMsg(result, Status.UDF_FUNCTION_EXISTS); return result; } @@ -124,11 +123,11 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic try { existResource = storageOperate.exists(fullName); } catch (IOException e) { - logger.error("Check resource error: {}", fullName, e); + log.error("Check resource error: {}", fullName, e); } if (!existResource) { - logger.error("resource full name {} is not exist", fullName); + log.error("resource full name {} is not exist", fullName); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -155,9 +154,9 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic udf.setUpdateTime(now); udfFuncMapper.insert(udf); - logger.info("UDF function create complete, udfFuncName:{}.", udf.getFuncName()); + log.info("UDF function create complete, udfFuncName:{}.", udf.getFuncName()); putMsg(result, Status.SUCCESS); - permissionPostHandle(AuthorizationType.UDF, loginUser.getId(), Collections.singletonList(udf.getId()), logger); + permissionPostHandle(AuthorizationType.UDF, loginUser.getId(), Collections.singletonList(udf.getId()), log); return result; } @@ -188,7 +187,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic } UdfFunc udfFunc = udfFuncMapper.selectById(id); if (udfFunc == null) { - logger.error("Resource does not exist, udf func id:{}.", id); + log.error("Resource does not exist, udf func id:{}.", id); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -229,7 +228,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic return result; } if (checkDescriptionLength(desc)) { - logger.warn("Parameter description is too long."); + log.warn("Parameter 
description is too long."); putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR); return result; } @@ -237,7 +236,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId); if (udf == null) { - logger.error("UDF function does not exist, udfFuncId:{}.", udfFuncId); + log.error("UDF function does not exist, udfFuncId:{}.", udfFuncId); result.setCode(Status.UDF_FUNCTION_NOT_EXIST.getCode()); result.setMsg(Status.UDF_FUNCTION_NOT_EXIST.getMsg()); return result; @@ -245,7 +244,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic // if resource upload startup if (!PropertyUtils.getResUploadStartupState()) { - logger.error("Storage does not start up, resource upload startup state: {}.", + log.error("Storage does not start up, resource upload startup state: {}.", PropertyUtils.getResUploadStartupState()); putMsg(result, Status.HDFS_NOT_STARTUP); return result; @@ -254,7 +253,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic // verify udfFuncName is exist if (!funcName.equals(udf.getFuncName())) { if (checkUdfFuncNameExists(funcName)) { - logger.warn("Udf function exists, can not create again, udfFuncName:{}.", funcName); + log.warn("Udf function exists, can not create again, udfFuncName:{}.", funcName); result.setCode(Status.UDF_FUNCTION_EXISTS.getCode()); result.setMsg(Status.UDF_FUNCTION_EXISTS.getMsg()); return result; @@ -265,14 +264,14 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic try { doesResExist = storageOperate.exists(fullName); } catch (Exception e) { - logger.error("udf resource checking error", fullName); + log.error("udf resource checking error", fullName); result.setCode(Status.RESOURCE_NOT_EXIST.getCode()); result.setMsg(Status.RESOURCE_NOT_EXIST.getMsg()); return result; } if (!doesResExist) { - logger.error("resource full name {} is not exist", fullName); + 
log.error("resource full name {} is not exist", fullName); result.setCode(Status.RESOURCE_NOT_EXIST.getCode()); result.setMsg(Status.RESOURCE_NOT_EXIST.getMsg()); return result; @@ -294,7 +293,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic udf.setUpdateTime(now); udfFuncMapper.updateById(udf); - logger.info("UDF function update complete, udfFuncId:{}, udfFuncName:{}.", udfFuncId, funcName); + log.info("UDF function update complete, udfFuncId:{}, udfFuncName:{}.", udfFuncId, funcName); putMsg(result, Status.SUCCESS); return result; } @@ -337,7 +336,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic */ private IPage getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) { Set udfFuncIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF, - loginUser.getId(), logger); + loginUser.getId(), log); Page page = new Page<>(pageNo, pageSize); if (udfFuncIds.isEmpty()) { return page; @@ -363,7 +362,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic return result; } Set udfFuncIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF, - loginUser.getId(), logger); + loginUser.getId(), log); if (udfFuncIds.isEmpty()) { result.setData(Collections.emptyList()); putMsg(result, Status.SUCCESS); @@ -395,7 +394,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic } udfFuncMapper.deleteById(id); udfUserMapper.deleteByUdfFuncId(id); - logger.info("UDF function delete complete, udfFuncId:{}.", id); + log.info("UDF function delete complete, udfFuncId:{}.", id); putMsg(result, Status.SUCCESS); return result; } @@ -417,7 +416,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic } if (checkUdfFuncNameExists(name)) { - logger.warn("Udf function with the same already exists."); + log.warn("Udf function with the same already 
exists."); putMsg(result, Status.UDF_FUNCTION_EXISTS); } else { putMsg(result, Status.SUCCESS); diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java index e6f7bb169d..a444d11c99 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java @@ -30,8 +30,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -39,10 +39,9 @@ import org.springframework.stereotype.Service; * ui plugin service impl */ @Service +@Slf4j public class UiPluginServiceImpl extends BaseServiceImpl implements UiPluginService { - private static final Logger logger = LoggerFactory.getLogger(UiPluginServiceImpl.class); - @Autowired PluginDefineMapper pluginDefineMapper; @@ -50,14 +49,14 @@ public class UiPluginServiceImpl extends BaseServiceImpl implements UiPluginServ public Map queryUiPluginsByType(PluginType pluginType) { Map result = new HashMap<>(); if (!pluginType.getHasUi()) { - logger.warn("Plugin does not have UI."); + log.warn("Plugin does not have UI."); putMsg(result, Status.PLUGIN_NOT_A_UI_COMPONENT); return result; } List pluginDefines = pluginDefineMapper.queryByPluginType(pluginType.getDesc()); if (CollectionUtils.isEmpty(pluginDefines)) { - logger.warn("Query plugins result is null, check status of plugins."); + log.warn("Query plugins result is null, check status of plugins."); putMsg(result, Status.QUERY_PLUGINS_RESULT_IS_NULL); return result; } @@ -72,7 +71,7 @@ public class UiPluginServiceImpl extends BaseServiceImpl 
implements UiPluginServ Map result = new HashMap<>(); PluginDefine pluginDefine = pluginDefineMapper.queryDetailById(id); if (null == pluginDefine) { - logger.warn("Query plugins result is empty, pluginId:{}.", id); + log.warn("Query plugins result is empty, pluginId:{}.", id); putMsg(result, Status.QUERY_PLUGIN_DETAIL_RESULT_IS_NULL); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java index 54dc99bc71..a06c69b1ef 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java @@ -74,8 +74,8 @@ import java.util.Set; import java.util.TimeZone; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -87,10 +87,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page; * users service impl */ @Service +@Slf4j public class UsersServiceImpl extends BaseServiceImpl implements UsersService { - private static final Logger logger = LoggerFactory.getLogger(UsersServiceImpl.class); - @Autowired private AccessTokenMapper accessTokenMapper; @@ -173,7 +172,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } if (!checkTenantExists(tenantId)) { - logger.warn("Tenant does not exist, tenantId:{}.", tenantId); + log.warn("Tenant does not exist, tenantId:{}.", tenantId); putMsg(result, Status.TENANT_NOT_EXIST); return result; } @@ -186,7 +185,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { 
storageOperate.createTenantDirIfNotExists(tenant.getTenantCode()); } - logger.info("User is created and id is {}.", user.getId()); + log.info("User is created and id is {}.", user.getId()); result.put(Constants.DATA_LIST, user); putMsg(result, Status.SUCCESS); return result; @@ -340,7 +339,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { return result; } if (!isAdmin(loginUser)) { - logger.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(), + log.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(), loginUser.getUserName()); putMsg(result, Status.USER_NO_OPERATION_PERM); return result; @@ -392,27 +391,27 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { return result; } if (check(result, !canOperator(loginUser, userId), Status.USER_NO_OPERATION_PERM)) { - logger.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(), + log.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(), loginUser.getUserName()); return result; } User user = userMapper.selectById(userId); if (user == null) { - logger.error("User does not exist, userId:{}.", userId); + log.error("User does not exist, userId:{}.", userId); putMsg(result, Status.USER_NOT_EXIST, userId); return result; } if (StringUtils.isNotEmpty(userName)) { if (!CheckUtils.checkUserName(userName)) { - logger.warn("Parameter userName check failed."); + log.warn("Parameter userName check failed."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName); return result; } User tempUser = userMapper.queryByUserNameAccurately(userName); if (tempUser != null && tempUser.getId() != userId) { - logger.warn("User name already exists, userName:{}.", tempUser.getUserName()); + log.warn("User name already exists, userName:{}.", tempUser.getUserName()); putMsg(result, 
Status.USER_NAME_EXIST); return result; } @@ -421,7 +420,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { if (StringUtils.isNotEmpty(userPassword)) { if (!CheckUtils.checkPasswordLength(userPassword)) { - logger.warn("Parameter userPassword check failed."); + log.warn("Parameter userPassword check failed."); putMsg(result, Status.USER_PASSWORD_LENGTH_ERROR); return result; } @@ -430,7 +429,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { if (StringUtils.isNotEmpty(email)) { if (!CheckUtils.checkEmail(email)) { - logger.warn("Parameter email check failed."); + log.warn("Parameter email check failed."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, email); return result; } @@ -438,13 +437,13 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } if (StringUtils.isNotEmpty(phone) && !CheckUtils.checkPhone(phone)) { - logger.warn("Parameter phone check failed."); + log.warn("Parameter phone check failed."); putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone); return result; } if (state == 0 && user.getState() != state && Objects.equals(loginUser.getId(), user.getId())) { - logger.warn("Not allow to disable your own account, userId:{}, userName:{}.", user.getId(), + log.warn("Not allow to disable your own account, userId:{}, userName:{}.", user.getId(), user.getUserName()); putMsg(result, Status.NOT_ALLOW_TO_DISABLE_OWN_ACCOUNT); return result; @@ -452,7 +451,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { if (StringUtils.isNotEmpty(timeZone)) { if (!CheckUtils.checkTimeZone(timeZone)) { - logger.warn("Parameter time zone is illegal."); + log.warn("Parameter time zone is illegal."); putMsg(result, Status.TIME_ZONE_ILLEGAL, timeZone); return result; } @@ -468,10 +467,10 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { // updateProcessInstance user int update = 
userMapper.updateById(user); if (update > 0) { - logger.info("User is updated and id is :{}.", userId); + log.info("User is updated and id is :{}.", userId); putMsg(result, Status.SUCCESS); } else { - logger.error("User update error, userId:{}.", userId); + log.error("User update error, userId:{}.", userId); putMsg(result, Status.UPDATE_USER_ERROR); } @@ -496,7 +495,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } // only admin can operate if (!isAdmin(loginUser)) { - logger.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(), + log.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(), loginUser.getUserName()); putMsg(result, Status.USER_NO_OPERATION_PERM, id); return result; @@ -504,7 +503,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { // check exist User tempUser = userMapper.selectById(id); if (tempUser == null) { - logger.error("User does not exist, userId:{}.", id); + log.error("User does not exist, userId:{}.", id); putMsg(result, Status.USER_NOT_EXIST, id); return result; } @@ -513,7 +512,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { if (CollectionUtils.isNotEmpty(projects)) { String projectNames = projects.stream().map(Project::getName).collect(Collectors.joining(",")); putMsg(result, Status.TRANSFORM_PROJECT_OWNERSHIP, projectNames); - logger.warn("Please transfer the project ownership before deleting the user, userId:{}, projects:{}.", id, + log.warn("Please transfer the project ownership before deleting the user, userId:{}, projects:{}.", id, projectNames); return result; } @@ -523,11 +522,11 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { accessTokenMapper.deleteAccessTokenByUserId(id); if (userMapper.deleteById(id) > 0) { - logger.info("User is deleted and id is :{}.", id); + log.info("User is deleted and id is 
:{}.", id); putMsg(result, Status.SUCCESS); return result; } else { - logger.error("User delete error, userId:{}.", id); + log.error("User delete error, userId:{}.", id); putMsg(result, Status.DELETE_USER_BY_ID_ERROR); return result; } @@ -645,13 +644,13 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { // check exist User tempUser = userMapper.selectById(userId); if (tempUser == null) { - logger.error("User does not exist, userId:{}.", userId); + log.error("User does not exist, userId:{}.", userId); putMsg(result, Status.USER_NOT_EXIST, userId); return result; } if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) { - logger.warn("Parameter projectIds is empty."); + log.warn("Parameter projectIds is empty."); return result; } Arrays.stream(projectIds.split(",")).distinct().forEach(projectId -> { @@ -694,7 +693,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { // 1. check if user is existed User tempUser = this.userMapper.selectById(userId); if (tempUser == null) { - logger.error("User does not exist, userId:{}.", userId); + log.error("User does not exist, userId:{}.", userId); this.putMsg(result, Status.USER_NOT_EXIST, userId); return result; } @@ -702,14 +701,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { // 2. check if project is existed Project project = this.projectMapper.queryByCode(projectCode); if (project == null) { - logger.error("Project does not exist, projectCode:{}.", projectCode); + log.error("Project does not exist, projectCode:{}.", projectCode); this.putMsg(result, Status.PROJECT_NOT_FOUND, projectCode); return result; } // 3. 
only project owner can operate if (!this.canOperator(loginUser, project.getUserId())) { - logger.warn("User does not have permission for project, userId:{}, userName:{}, projectCode:{}.", + log.warn("User does not have permission for project, userId:{}, userName:{}, projectCode:{}.", loginUser.getId(), loginUser.getUserName(), projectCode); this.putMsg(result, Status.USER_NO_OPERATION_PERM); return result; @@ -727,7 +726,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { projectUser.setUpdateTime(today); this.projectUserMapper.insert(projectUser); } - logger.info("User is granted permission for projects, userId:{}, projectCode:{}.", userId, projectCode); + log.info("User is granted permission for projects, userId:{}, projectCode:{}.", userId, projectCode); this.putMsg(result, Status.SUCCESS); return result; } @@ -751,14 +750,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } // 1. only admin can operate if (this.check(result, !this.isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - logger.warn("Only admin can revoke the project permission."); + log.warn("Only admin can revoke the project permission."); return result; } // 2. check if user is existed User user = this.userMapper.selectById(userId); if (user == null) { - logger.error("User does not exist, userId:{}.", userId); + log.error("User does not exist, userId:{}.", userId); this.putMsg(result, Status.USER_NOT_EXIST, userId); return result; } @@ -766,14 +765,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { // 3. check if project is existed Project project = this.projectMapper.queryByCode(projectCode); if (project == null) { - logger.error("Project does not exist, projectCode:{}.", projectCode); + log.error("Project does not exist, projectCode:{}.", projectCode); this.putMsg(result, Status.PROJECT_NOT_FOUND, projectCode); return result; } // 4. 
delete th relationship between project and user this.projectUserMapper.deleteProjectRelation(project.getId(), user.getId()); - logger.info("User is revoked permission for projects, userId:{}, projectCode:{}.", userId, projectCode); + log.info("User is revoked permission for projects, userId:{}, projectCode:{}.", userId, projectCode); this.putMsg(result, Status.SUCCESS); return result; } @@ -797,7 +796,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } User user = userMapper.selectById(userId); if (user == null) { - logger.error("User does not exist, userId:{}.", userId); + log.error("User does not exist, userId:{}.", userId); putMsg(result, Status.USER_NOT_EXIST, userId); return result; } @@ -837,7 +836,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { resourceIdSet.retainAll(oldAuthorizedResIds); if (CollectionUtils.isNotEmpty(resourceIdSet)) { for (Integer resId : resourceIdSet) { - logger.error("Resource id:{} is used by process definition {}", resId, + log.error("Resource id:{} is used by process definition {}", resId, resourceProcessMap.get(resId)); } putMsg(result, Status.RESOURCE_IS_USED); @@ -849,14 +848,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { resourceUserMapper.deleteResourceUser(userId, 0); if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) { - logger.warn("Parameter resourceIds is empty."); + log.warn("Parameter resourceIds is empty."); return result; } for (int resourceIdValue : needAuthorizeResIds) { Resource resource = resourceMapper.selectById(resourceIdValue); if (resource == null) { - logger.error("Resource does not exist, resourceId:{}.", resourceIdValue); + log.error("Resource does not exist, resourceId:{}.", resourceIdValue); putMsg(result, Status.RESOURCE_NOT_EXIST); return result; } @@ -877,7 +876,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } - logger.info("User is 
granted permission for resources, userId:{}, resourceIds:{}.", user.getId(), + log.info("User is granted permission for resources, userId:{}, resourceIds:{}.", user.getId(), needAuthorizeResIds); putMsg(result, Status.SUCCESS); @@ -904,7 +903,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } User user = userMapper.selectById(userId); if (user == null) { - logger.error("User does not exist, userId:{}.", userId); + log.error("User does not exist, userId:{}.", userId); putMsg(result, Status.USER_NOT_EXIST, userId); return result; } @@ -912,7 +911,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { udfUserMapper.deleteByUserId(userId); if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) { - logger.warn("Parameter udfIds is empty."); + log.warn("Parameter udfIds is empty."); return result; } @@ -929,7 +928,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { udfUserMapper.insert(udfUser); } - logger.info("User is granted permission for UDF, userName:{}.", user.getUserName()); + log.info("User is granted permission for UDF, userName:{}.", user.getUserName()); putMsg(result, Status.SUCCESS); @@ -955,14 +954,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } // only admin can operate if (this.check(result, !this.isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - logger.warn("Only admin can grant namespaces."); + log.warn("Only admin can grant namespaces."); return result; } // check exist User tempUser = userMapper.selectById(userId); if (tempUser == null) { - logger.error("User does not exist, userId:{}.", userId); + log.error("User does not exist, userId:{}.", userId); putMsg(result, Status.USER_NOT_EXIST, userId); return result; } @@ -982,7 +981,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } } - logger.info("User is granted permission for namespace, userId:{}.", 
tempUser.getId()); + log.info("User is granted permission for namespace, userId:{}.", tempUser.getId()); putMsg(result, Status.SUCCESS); @@ -1098,7 +1097,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } // only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - logger.warn("Only admin can query all general users."); + log.warn("Only admin can query all general users."); return result; } @@ -1167,7 +1166,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } // only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - logger.warn("Only admin can deauthorize user."); + log.warn("Only admin can deauthorize user."); return result; } @@ -1208,7 +1207,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } // only admin can operate if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) { - logger.warn("Only admin can authorize user."); + log.warn("Only admin can authorize user."); return result; } List userList = userMapper.queryUserListByAlertGroupId(alertGroupId); @@ -1233,16 +1232,16 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { String msg = null; if (!CheckUtils.checkUserName(userName)) { - logger.warn("Parameter userName check failed."); + log.warn("Parameter userName check failed."); msg = userName; } else if (!CheckUtils.checkPassword(password)) { - logger.warn("Parameter password check failed."); + log.warn("Parameter password check failed."); msg = password; } else if (!CheckUtils.checkEmail(email)) { - logger.warn("Parameter email check failed."); + log.warn("Parameter email check failed."); msg = email; } else if (!CheckUtils.checkPhone(phone)) { - logger.warn("Parameter phone check failed."); + log.warn("Parameter phone check failed."); msg = phone; } @@ -1268,7 +1267,7 @@ public class UsersServiceImpl extends BaseServiceImpl 
implements UsersService { // verify whether exist if (!storageOperate.exists( String.format(Constants.FORMAT_S_S, srcBasePath, component.getFullName()))) { - logger.error("Resource file: {} does not exist, copy error.", component.getFullName()); + log.error("Resource file: {} does not exist, copy error.", component.getFullName()); throw new ServiceException(Status.RESOURCE_NOT_EXIST); } @@ -1293,7 +1292,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService { } } catch (IOException e) { - logger.error("copy the resources failed,the error message is {}", e.getMessage()); + log.error("copy the resources failed,the error message is {}", e.getMessage()); } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java index 4d90d05552..35a28eccf8 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java @@ -52,8 +52,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.util.CollectionUtils; @@ -61,11 +61,10 @@ import org.springframework.util.CollectionUtils; /** * work flow lineage service impl */ +@Slf4j @Service public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkFlowLineageService { - private static final Logger logger = LoggerFactory.getLogger(WorkFlowLineageServiceImpl.class); - @Autowired private WorkFlowLineageMapper workFlowLineageMapper; @@ -83,7 +82,7 @@ public class 
WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkF Map result = new HashMap<>(); Project project = projectMapper.queryByCode(projectCode); if (project == null) { - logger.error("Project does not exist, projectCode:{}.", projectCode); + log.error("Project does not exist, projectCode:{}.", projectCode); putMsg(result, Status.PROJECT_NOT_FOUND, projectCode); return result; } @@ -99,7 +98,7 @@ public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkF Map result = new HashMap<>(); Project project = projectMapper.queryByCode(projectCode); if (project == null) { - logger.error("Project does not exist, projectCode:{}.", projectCode); + log.error("Project does not exist, projectCode:{}.", projectCode); putMsg(result, Status.PROJECT_NOT_FOUND, projectCode); return result; } @@ -172,7 +171,7 @@ public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkF Map result = new HashMap<>(); Project project = projectMapper.queryByCode(projectCode); if (project == null) { - logger.error("Project does not exist, projectCode:{}.", projectCode); + log.error("Project does not exist, projectCode:{}.", projectCode); putMsg(result, Status.PROJECT_NOT_FOUND, projectCode); return result; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java index daf8c06338..adadf66529 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java @@ -55,8 +55,8 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; 
import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -67,10 +67,9 @@ import com.facebook.presto.jdbc.internal.guava.base.Strings; * worker group service impl */ @Service +@Slf4j public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGroupService { - private static final Logger logger = LoggerFactory.getLogger(WorkerGroupServiceImpl.class); - @Autowired private WorkerGroupMapper workerGroupMapper; @@ -108,7 +107,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro return result; } if (StringUtils.isEmpty(name)) { - logger.warn("Parameter name can ot be null."); + log.warn("Parameter name can ot be null."); putMsg(result, Status.NAME_NULL); return result; } @@ -127,18 +126,18 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro workerGroup.setDescription(description); if (checkWorkerGroupNameExists(workerGroup)) { - logger.warn("Worker group with the same name already exists, name:{}.", workerGroup.getName()); + log.warn("Worker group with the same name already exists, name:{}.", workerGroup.getName()); putMsg(result, Status.NAME_EXIST, workerGroup.getName()); return result; } String invalidAddr = checkWorkerGroupAddrList(workerGroup); if (invalidAddr != null) { - logger.warn("Worker group address is invalid, invalidAddr:{}.", invalidAddr); + log.warn("Worker group address is invalid, invalidAddr:{}.", invalidAddr); putMsg(result, Status.WORKER_ADDRESS_INVALID, invalidAddr); return result; } handleDefaultWorkGroup(workerGroupMapper, workerGroup, loginUser, otherParamsJson); - logger.info("Worker group save complete, workerGroupName:{}.", workerGroup.getName()); + log.info("Worker group save complete, workerGroupName:{}.", workerGroup.getName()); putMsg(result, Status.SUCCESS); return result; } @@ -150,7 +149,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro } else { 
workerGroupMapper.insert(workerGroup); permissionPostHandle(AuthorizationType.WORKER_GROUP, loginUser.getId(), - Collections.singletonList(workerGroup.getId()), logger); + Collections.singletonList(workerGroup.getId()), log); } } @@ -219,7 +218,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro workerGroups = getWorkerGroups(null); } else { Set ids = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.WORKER_GROUP, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.WORKER_GROUP, loginUser.getId(), log); workerGroups = getWorkerGroups(ids.isEmpty() ? Collections.emptyList() : new ArrayList<>(ids)); } List resultDataList = new ArrayList<>(); @@ -269,7 +268,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro workerGroups = getWorkerGroups(null); } else { Set ids = resourcePermissionCheckService - .userOwnedResourceIdsAcquisition(AuthorizationType.WORKER_GROUP, loginUser.getId(), logger); + .userOwnedResourceIdsAcquisition(AuthorizationType.WORKER_GROUP, loginUser.getId(), log); workerGroups = getWorkerGroups(ids.isEmpty() ? 
Collections.emptyList() : new ArrayList<>(ids)); } List availableWorkerGroupList = workerGroups.stream() @@ -326,7 +325,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro } WorkerGroup workerGroup = workerGroupMapper.selectById(id); if (workerGroup == null) { - logger.error("Worker group does not exist, workerGroupId:{}.", id); + log.error("Worker group does not exist, workerGroupId:{}.", id); putMsg(result, Status.DELETE_WORKER_GROUP_NOT_EXIST); return result; } @@ -336,7 +335,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro if (CollectionUtils.isNotEmpty(processInstances)) { List processInstanceIds = processInstances.stream().map(ProcessInstance::getId).collect(Collectors.toList()); - logger.warn( + log.warn( "Delete worker group failed because there are {} processInstances are using it, processInstanceIds:{}.", processInstances.size(), processInstanceIds); putMsg(result, Status.DELETE_WORKER_GROUP_BY_ID_FAIL, processInstances.size()); @@ -351,7 +350,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro } workerGroupMapper.deleteById(id); processInstanceMapper.updateProcessInstanceByWorkerGroupName(workerGroup.getName(), ""); - logger.info("Delete worker group complete, workerGroupName:{}.", workerGroup.getName()); + log.info("Delete worker group complete, workerGroupName:{}.", workerGroup.getName()); putMsg(result, Status.SUCCESS); return result; } @@ -387,7 +386,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro if (processInstance != null) { return processInstance.getWorkerGroup(); } - logger.info("task : {} will use default worker group", taskInstance.getId()); + log.info("task : {} will use default worker group", taskInstance.getId()); return Constants.DEFAULT_WORKER_GROUP; } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java 
b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java index 94771a4191..652e40f168 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java @@ -26,8 +26,8 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Path; import java.nio.file.Paths; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.core.io.Resource; import org.springframework.core.io.UrlResource; import org.springframework.web.multipart.MultipartFile; @@ -35,10 +35,9 @@ import org.springframework.web.multipart.MultipartFile; /** * file utils */ +@Slf4j public class FileUtils { - private static final Logger logger = LoggerFactory.getLogger(FileUtils.class); - /** * copy source InputStream to target file * @param file @@ -48,7 +47,7 @@ public class FileUtils { try { org.apache.commons.io.FileUtils.copyInputStreamToFile(file.getInputStream(), new File(destFilename)); } catch (IOException e) { - logger.error("failed to copy file , {} is empty file", file.getOriginalFilename(), e); + log.error("failed to copy file , {} is empty file", file.getOriginalFilename(), e); } } @@ -66,7 +65,7 @@ public class FileUtils { if (resource.exists() || resource.isReadable()) { return resource; } else { - logger.error("File can not be read, fileName:{}", filename); + log.error("File can not be read, fileName:{}", filename); } return null; } @@ -80,7 +79,7 @@ public class FileUtils { try (InputStream inputStream = file.getInputStream()) { return IOUtils.toString(inputStream, StandardCharsets.UTF_8); } catch (IOException e) { - logger.error("file convert to string failed: {}", file.getName()); + log.error("file convert to string failed: {}", file.getName()); } return ""; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java index 7b3df72e9d..e1c6c12cf3 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java @@ -32,8 +32,7 @@ import java.util.Set; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * analysis of DAG @@ -41,10 +40,9 @@ import org.slf4j.LoggerFactory; * NodeInfo:node description information * EdgeInfo: edge description information */ +@Slf4j public class DAG { - private static final Logger logger = LoggerFactory.getLogger(DAG.class); - private final ReadWriteLock lock = new ReentrantReadWriteLock(); /** @@ -123,7 +121,7 @@ public class DAG { try { // Whether an edge can be successfully added(fromNode -> toNode) if (!isLegalAddEdge(fromNode, toNode, createNode)) { - logger.error("serious error: add edge({} -> {}) is invalid, cause cycle!", fromNode, toNode); + log.error("serious error: add edge({} -> {}) is invalid, cause cycle!", fromNode, toNode); return false; } @@ -381,13 +379,13 @@ public class DAG { */ private boolean isLegalAddEdge(Node fromNode, Node toNode, boolean createNode) { if (fromNode.equals(toNode)) { - logger.error("edge fromNode({}) can't equals toNode({})", fromNode, toNode); + log.error("edge fromNode({}) can't equals toNode({})", fromNode, toNode); return false; } if (!createNode) { if (!containsNode(fromNode) || !containsNode(toNode)) { - logger.error("edge fromNode({}) or toNode({}) is not in vertices map", fromNode, toNode); + log.error("edge fromNode({}) or toNode({}) is not in vertices map", fromNode, toNode); return false; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java index 913f4500e6..6477445c6e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java @@ -28,8 +28,7 @@ import java.util.TimerTask; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * A base class for running a Unix command. @@ -38,10 +37,9 @@ import org.slf4j.LoggerFactory; * df. It also offers facilities to gate commands by * time-intervals. */ +@Slf4j public abstract class AbstractShell { - private static final Logger logger = LoggerFactory.getLogger(AbstractShell.class); - /** * Time after which the executing script would be timedout */ @@ -168,7 +166,7 @@ public abstract class AbstractShell { line = errReader.readLine(); } } catch (IOException ioe) { - logger.warn("Error reading the error stream", ioe); + log.warn("Error reading the error stream", ioe); } } }; @@ -179,7 +177,7 @@ public abstract class AbstractShell { try { parseExecResult(inReader); } catch (IOException ioe) { - logger.warn("Error reading the in stream", ioe); + log.warn("Error reading the in stream", ioe); } super.run(); } @@ -188,7 +186,7 @@ public abstract class AbstractShell { errThread.start(); inThread.start(); } catch (IllegalStateException ise) { - logger.warn("Illegal while starting the error and in thread", ise); + log.warn("Illegal while starting the error and in thread", ise); } try { // parse the output @@ -198,7 +196,7 @@ public abstract class AbstractShell { errThread.join(); inThread.join(); } catch (InterruptedException ie) { - logger.warn("Interrupted while reading the error and in stream", ie); + log.warn("Interrupted while reading the error and in stream", ie); } 
completed.compareAndSet(false, true); // the timeout thread handling @@ -216,7 +214,7 @@ public abstract class AbstractShell { try { inReader.close(); } catch (IOException ioe) { - logger.warn("Error while closing the input stream", ioe); + log.warn("Error while closing the input stream", ioe); } if (!completed.get()) { errThread.interrupt(); @@ -224,7 +222,7 @@ public abstract class AbstractShell { try { errReader.close(); } catch (IOException ioe) { - logger.warn("Error while closing the error stream", ioe); + log.warn("Error while closing the error stream", ioe); } ProcessContainer.removeProcess(process); process.destroy(); @@ -347,11 +345,11 @@ public abstract class AbstractShell { try { entry.getValue().destroy(); } catch (Exception e) { - logger.error("Destroy All Processes error", e); + log.error("Destroy All Processes error", e); } } - logger.info("close " + set.size() + " executing process tasks"); + log.info("close " + set.size() + " executing process tasks"); } } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java index f73c40b960..3b66552274 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java @@ -23,17 +23,14 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import lombok.experimental.UtilityClass; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.util.concurrent.ThreadFactoryBuilder; @UtilityClass +@Slf4j public class ThreadUtils { - private static final Logger logger = LoggerFactory.getLogger(ThreadUtils.class); - /** * Wrapper over newDaemonFixedThreadExecutor. 
* @@ -62,7 +59,7 @@ public class ThreadUtils { Thread.sleep(millis); } catch (final InterruptedException interruptedException) { Thread.currentThread().interrupt(); - logger.error("Current thread sleep error", interruptedException); + log.error("Current thread sleep error", interruptedException); } } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java index f0cd8f2fe0..1c1f464c1d 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java @@ -20,13 +20,11 @@ package org.apache.dolphinscheduler.common.utils; import java.util.Arrays; import java.util.Objects; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class ConnectionUtils { - public static final Logger logger = LoggerFactory.getLogger(ConnectionUtils.class); - private ConnectionUtils() { throw new UnsupportedOperationException("Construct ConnectionUtils"); } @@ -46,7 +44,7 @@ public class ConnectionUtils { try { resource.close(); } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } }); } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java index 5eaccfb831..de16f0e252 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java @@ -35,9 +35,9 @@ import java.util.TimeZone; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import 
lombok.extern.slf4j.Slf4j; +@Slf4j public final class DateUtils { static final long C0 = 1L; @@ -48,7 +48,6 @@ public final class DateUtils { static final long C5 = C4 * 60L; static final long C6 = C5 * 24L; - private static final Logger logger = LoggerFactory.getLogger(DateUtils.class); private static final DateTimeFormatter YYYY_MM_DD_HH_MM_SS = DateTimeFormatter.ofPattern(DateConstants.YYYY_MM_DD_HH_MM_SS); @@ -218,7 +217,7 @@ public final class DateUtils { } return localDateTime2Date(ldt, ZoneId.of(timezone)); } catch (Exception e) { - logger.error("error while parse date:" + date, e); + log.error("error while parse date:" + date, e); } return null; } @@ -357,7 +356,7 @@ public final class DateUtils { end = new Date(); } if (start.after(end)) { - logger.warn("start Time {} is later than end Time {}", start, end); + log.warn("start Time {} is later than end Time {}", start, end); return null; } return format2Duration(differMs(start, end)); @@ -721,7 +720,7 @@ public final class DateUtils { LocalDateTime ldt = LocalDateTime.parse(date, DateTimeFormatter.ofPattern(format)); return localDateTime2Date(ldt); } catch (Exception e) { - logger.error("error while parse date:" + date, e); + log.error("error while parse date:" + date, e); } return null; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java index 3514117d82..36f075e00e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java @@ -37,16 +37,14 @@ import java.nio.file.NoSuchFileException; import java.util.zip.CRC32; import java.util.zip.CheckedInputStream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * file utils */ +@Slf4j public class FileUtils { - public 
static final Logger logger = LoggerFactory.getLogger(FileUtils.class); - public static final String DATA_BASEDIR = PropertyUtils.getString(DATA_BASEDIR_PATH, "/tmp/dolphinscheduler"); public static final String APPINFO_PATH = "appInfo.log"; @@ -160,7 +158,7 @@ public class FileUtils { // create work dir org.apache.commons.io.FileUtils.forceMkdir(execLocalPathFile); String mkdirLog = "create dir success " + execLocalPath; - logger.info(mkdirLog); + log.info(mkdirLog); } /** @@ -175,13 +173,13 @@ public class FileUtils { try { File distFile = new File(filePath); if (!distFile.getParentFile().exists() && !distFile.getParentFile().mkdirs()) { - logger.error("mkdir parent failed"); + log.error("mkdir parent failed"); return false; } fos = new FileOutputStream(filePath); IOUtils.write(content, fos, StandardCharsets.UTF_8); } catch (IOException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); return false; } finally { IOUtils.closeQuietly(fos); @@ -241,7 +239,7 @@ public class FileUtils { } return output.toString(UTF_8); } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e); } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java index 17d6fdfb55..b9a7653063 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java @@ -48,16 +48,14 @@ import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * http utils */ +@Slf4j public class HttpUtils { - public static final Logger logger = LoggerFactory.getLogger(HttpUtils.class); - private HttpUtils() { 
throw new UnsupportedOperationException("Construct HttpUtils"); } @@ -106,9 +104,9 @@ public class HttpUtils { ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS); ctx.init(null, new TrustManager[]{xtm}, null); } catch (NoSuchAlgorithmException e) { - logger.error("SSLContext init with NoSuchAlgorithmException", e); + log.error("SSLContext init with NoSuchAlgorithmException", e); } catch (KeyManagementException e) { - logger.error("SSLContext init with KeyManagementException", e); + log.error("SSLContext init with KeyManagementException", e); } socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE); /** set timeout、request time、socket timeout */ @@ -149,7 +147,7 @@ public class HttpUtils { */ public static String getResponseContentString(HttpGet httpget, CloseableHttpClient httpClient) { if (Objects.isNull(httpget) || Objects.isNull(httpClient)) { - logger.error("HttpGet or HttpClient parameter is null"); + log.error("HttpGet or HttpClient parameter is null"); return null; } String responseContent = null; @@ -162,13 +160,13 @@ public class HttpUtils { if (entity != null) { responseContent = EntityUtils.toString(entity, Constants.UTF_8); } else { - logger.warn("http entity is null"); + log.warn("http entity is null"); } } else { - logger.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode()); + log.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode()); } } catch (IOException ioe) { - logger.error(ioe.getMessage(), ioe); + log.error(ioe.getMessage(), ioe); } finally { try { if (response != null) { @@ -176,7 +174,7 @@ public class HttpUtils { response.close(); } } catch (IOException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } if (!httpget.isAborted()) { httpget.releaseConnection(); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java index 55d548ca23..5e216ff27e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java @@ -39,8 +39,7 @@ import java.util.TimeZone; import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; @@ -65,12 +64,11 @@ import com.google.common.base.Strings; /** * json utils */ +@Slf4j public class JSONUtils { - private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class); - static { - logger.info("init timezone: {}", TimeZone.getDefault()); + log.info("init timezone: {}", TimeZone.getDefault()); } private static final ObjectMapper objectMapper = JsonMapper.builder() @@ -117,7 +115,7 @@ public class JSONUtils { ObjectWriter writer = objectMapper.writer(feature); return writer.writeValueAsString(object); } catch (Exception e) { - logger.error("object to json exception!", e); + log.error("object to json exception!", e); } return null; @@ -145,7 +143,7 @@ public class JSONUtils { try { return objectMapper.readValue(json, clazz); } catch (Exception e) { - logger.error("Parse object exception, jsonStr: {}, class: {}", json, clazz, e); + log.error("Parse object exception, jsonStr: {}, class: {}", json, clazz, e); } return null; } @@ -183,7 +181,7 @@ public class JSONUtils { CollectionType listType = objectMapper.getTypeFactory().constructCollectionType(ArrayList.class, clazz); return objectMapper.readValue(json, listType); } catch (Exception e) { - logger.error("parse list exception!", e); + log.error("parse list exception!", e); } return Collections.emptyList(); @@ -205,7 +203,7 @@ public class JSONUtils { objectMapper.readTree(json); return true; } catch 
(IOException e) { - logger.error("check json object valid exception!", e); + log.error("check json object valid exception!", e); } return false; @@ -261,7 +259,7 @@ public class JSONUtils { return objectMapper.readValue(json, new TypeReference>() { }); } catch (Exception e) { - logger.error("json to map exception!", e); + log.error("json to map exception!", e); } return Collections.emptyMap(); @@ -302,7 +300,7 @@ public class JSONUtils { try { return objectMapper.readValue(json, type); } catch (Exception e) { - logger.error("json to map exception!", e); + log.error("json to map exception!", e); } return null; @@ -345,7 +343,7 @@ public class JSONUtils { try { json = toJsonString(obj); } catch (Exception e) { - logger.error("json serialize exception.", e); + log.error("json serialize exception.", e); } return json.getBytes(UTF_8); diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java index 891655428a..fbcc3a9f57 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java @@ -40,16 +40,14 @@ import javax.security.auth.login.Configuration; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * kerberos http client */ +@Slf4j public class KerberosHttpClient { - public static final Logger logger = LoggerFactory.getLogger(KerberosHttpClient.class); - private String principal; private String keyTabLocation; @@ -92,7 +90,7 @@ public class KerberosHttpClient { } public String get(final String url, final String userId) { - logger.info("Calling KerberosHttpClient {} {} {}", this.principal, this.keyTabLocation, 
url); + log.info("Calling KerberosHttpClient {} {} {}", this.principal, this.keyTabLocation, url); Configuration config = new Configuration() { @SuppressWarnings("serial") @@ -128,7 +126,7 @@ public class KerberosHttpClient { return HttpUtils.getResponseContentString(httpget, httpClient); }); } catch (LoginException le) { - logger.error("Kerberos authentication failed ", le); + log.error("Kerberos authentication failed ", le); } return null; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java index e1d39121b2..9445e71d89 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java @@ -36,18 +36,17 @@ import java.util.List; import java.util.Objects; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * NetUtils */ +@Slf4j public class NetUtils { private static final String NETWORK_PRIORITY_DEFAULT = "default"; private static final String NETWORK_PRIORITY_INNER = "inner"; private static final String NETWORK_PRIORITY_OUTER = "outer"; - private static final Logger logger = LoggerFactory.getLogger(NetUtils.class); private static InetAddress LOCAL_ADDRESS = null; private static volatile String HOST_ADDRESS; @@ -134,7 +133,7 @@ public class NetUtils { return LOCAL_ADDRESS; } } catch (IOException e) { - logger.warn("test address id reachable io exception", e); + log.warn("test address id reachable io exception", e); } } } @@ -142,7 +141,7 @@ public class NetUtils { localAddress = InetAddress.getLocalHost(); } catch (UnknownHostException e) { - logger.warn("InetAddress get LocalHost exception", e); + log.warn("InetAddress get LocalHost exception", e); } Optional addressOp = toValidAddress(localAddress); if 
(addressOp.isPresent()) { @@ -171,7 +170,7 @@ public class NetUtils { try { return InetAddress.getByName(addr.substring(0, i) + '%' + address.getScopeId()); } catch (UnknownHostException e) { - logger.debug("Unknown IPV6 address: ", e); + log.debug("Unknown IPV6 address: ", e); } } return address; @@ -210,7 +209,7 @@ public class NetUtils { try { validNetworkInterfaces = getValidNetworkInterfaces(); } catch (SocketException e) { - logger.warn("ValidNetworkInterfaces exception", e); + log.warn("ValidNetworkInterfaces exception", e); } NetworkInterface result = null; @@ -279,7 +278,7 @@ public class NetUtils { } else if (NETWORK_PRIORITY_OUTER.equalsIgnoreCase(networkPriority)) { return findOuterAddress(validNetworkInterfaces); } else { - logger.error("There is no matching network card acquisition policy!"); + log.error("There is no matching network card acquisition policy!"); return null; } } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java index 59f9adbdec..e95886ba48 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java @@ -45,16 +45,14 @@ import java.util.List; import java.util.StringTokenizer; import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * os utils */ +@Slf4j public class OSUtils { - private static final Logger logger = LoggerFactory.getLogger(OSUtils.class); - private static final SystemInfo SI = new SystemInfo(); public static final String TWO_DECIMAL = "0.00"; @@ -138,7 +136,7 @@ public class OSUtils { OperatingSystemMXBean osBean = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class); loadAverage = osBean.getSystemLoadAverage(); } catch (Exception e) { - 
logger.error("get operation system load average exception, try another method ", e); + log.error("get operation system load average exception, try another method ", e); loadAverage = hal.getProcessor().getSystemLoadAverage(1)[0]; if (Double.isNaN(loadAverage)) { return NEGATIVE_ONE; @@ -185,7 +183,7 @@ public class OSUtils { return getUserListFromLinux(); } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } return Collections.emptyList(); @@ -283,7 +281,7 @@ public class OSUtils { } catch (Exception e) { // because ShellExecutor method throws exception to the linux return status is not 0 // not exist user return status is 1 - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } return false; } @@ -297,7 +295,7 @@ public class OSUtils { // if not exists this user, then create if (!getUserList().contains(userName)) { boolean isSuccess = createUser(userName); - logger.info("create user {} {}", userName, isSuccess ? "success" : "fail"); + log.info("create user {} {}", userName, isSuccess ? 
"success" : "fail"); } } @@ -312,7 +310,7 @@ public class OSUtils { String userGroup = getGroup(); if (StringUtils.isEmpty(userGroup)) { String errorLog = String.format("%s group does not exist for this operating system.", userGroup); - logger.error(errorLog); + log.error(errorLog); return false; } if (SystemUtils.IS_OS_MAC) { @@ -324,7 +322,7 @@ public class OSUtils { } return true; } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } return false; @@ -338,9 +336,9 @@ public class OSUtils { * @throws IOException in case of an I/O error */ private static void createLinuxUser(String userName, String userGroup) throws IOException { - logger.info("create linux os user: {}", userName); + log.info("create linux os user: {}", userName); String cmd = String.format("sudo useradd -g %s %s", userGroup, userName); - logger.info("execute cmd: {}", cmd); + log.info("execute cmd: {}", cmd); exeCmd(cmd); } @@ -352,14 +350,14 @@ public class OSUtils { * @throws IOException in case of an I/O error */ private static void createMacUser(String userName, String userGroup) throws IOException { - logger.info("create mac os user: {}", userName); + log.info("create mac os user: {}", userName); String createUserCmd = String.format("sudo sysadminctl -addUser %s -password %s", userName, userName); - logger.info("create user command: {}", createUserCmd); + log.info("create user command: {}", createUserCmd); exeCmd(createUserCmd); String appendGroupCmd = String.format("sudo dseditgroup -o edit -a %s -t user %s", userName, userGroup); - logger.info("append user to group: {}", appendGroupCmd); + log.info("append user to group: {}", appendGroupCmd); exeCmd(appendGroupCmd); } @@ -371,14 +369,14 @@ public class OSUtils { * @throws IOException in case of an I/O error */ private static void createWindowsUser(String userName, String userGroup) throws IOException { - logger.info("create windows os user: {}", userName); + log.info("create windows os user: {}", 
userName); String userCreateCmd = String.format("net user \"%s\" /add", userName); - logger.info("execute create user command: {}", userCreateCmd); + log.info("execute create user command: {}", userCreateCmd); exeCmd(userCreateCmd); String appendGroupCmd = String.format("net localgroup \"%s\" \"%s\" /add", userGroup, userName); - logger.info("execute append user to group: {}", appendGroupCmd); + log.info("execute append user to group: {}", appendGroupCmd); exeCmd(appendGroupCmd); } @@ -478,12 +476,12 @@ public class OSUtils { // system available physical memory double availablePhysicalMemorySize = availablePhysicalMemorySize(); if (loadAverage > maxCpuLoadAvg) { - logger.warn("Current cpu load average {} is too high, max.cpuLoad.avg={}", loadAverage, maxCpuLoadAvg); + log.warn("Current cpu load average {} is too high, max.cpuLoad.avg={}", loadAverage, maxCpuLoadAvg); return true; } if (availablePhysicalMemorySize < reservedMemory) { - logger.warn( + log.warn( "Current available memory {}G is too low, reserved.memory={}G", maxCpuLoadAvg, reservedMemory); return true; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java index 66285365d2..87278cdfae 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java @@ -31,15 +31,13 @@ import java.util.Map; import java.util.Properties; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Strings; +@Slf4j public class PropertyUtils { - private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class); - private static final Properties properties = new Properties(); private PropertyUtils() { @@ -56,11 +54,11 @@ 
public class PropertyUtils { Properties subProperties = new Properties(); subProperties.load(fis); subProperties.forEach((k, v) -> { - logger.debug("Get property {} -> {}", k, v); + log.debug("Get property {} -> {}", k, v); }); properties.putAll(subProperties); } catch (IOException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); System.exit(1); } } @@ -68,7 +66,7 @@ public class PropertyUtils { // Override from system properties System.getProperties().forEach((k, v) -> { final String key = String.valueOf(k); - logger.info("Overriding property from system property: {}", key); + log.info("Overriding property from system property: {}", key); PropertyUtils.setValue(key, String.valueOf(v)); }); } @@ -140,7 +138,7 @@ public class PropertyUtils { try { return Integer.parseInt(value); } catch (NumberFormatException e) { - logger.info(e.getMessage(), e); + log.info(e.getMessage(), e); } return defaultValue; } @@ -183,7 +181,7 @@ public class PropertyUtils { try { return Long.parseLong(value); } catch (NumberFormatException e) { - logger.info(e.getMessage(), e); + log.info(e.getMessage(), e); } return defaultValue; } @@ -210,7 +208,7 @@ public class PropertyUtils { try { return Double.parseDouble(value); } catch (NumberFormatException e) { - logger.info(e.getMessage(), e); + log.info(e.getMessage(), e); } return defaultValue; } @@ -247,7 +245,7 @@ public class PropertyUtils { try { return Enum.valueOf(type, value); } catch (IllegalArgumentException e) { - logger.info(e.getMessage(), e); + log.info(e.getMessage(), e); } return defaultValue; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java index e3212931f2..6bef7ce894 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java @@ -27,16 +27,14 @@ import java.sql.Statement; import java.util.ArrayList; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * Tool to run database scripts */ +@Slf4j public class ScriptRunner { - public static final Logger logger = LoggerFactory.getLogger(ScriptRunner.class); - private static final String DEFAULT_DELIMITER = ";"; private final Connection connection; @@ -103,7 +101,7 @@ public class ScriptRunner { } String trimmedLine = line.trim(); if (trimmedLine.startsWith("--")) { - logger.info("\n{}", trimmedLine); + log.info("\n{}", trimmedLine); } else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) { // Do nothing } else if (trimmedLine.startsWith("delimiter")) { @@ -113,7 +111,7 @@ public class ScriptRunner { } else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter()) || fullLineDelimiter && trimmedLine.equals(getDelimiter())) { command.add(line.substring(0, line.lastIndexOf(getDelimiter()))); - logger.info("\n{}", String.join("\n", command)); + log.info("\n{}", String.join("\n", command)); try (Statement statement = conn.createStatement()) { statement.execute(String.join(" ", command)); @@ -123,20 +121,20 @@ public class ScriptRunner { int cols = md.getColumnCount(); for (int i = 1; i < cols; i++) { String name = md.getColumnLabel(i); - logger.info("{} \t", name); + log.info("{} \t", name); } - logger.info(""); + log.info(""); while (rs.next()) { for (int i = 1; i < cols; i++) { String value = rs.getString(i); - logger.info("{} \t", value); + log.info("{} \t", value); } - logger.info(""); + log.info(""); } } } } catch (SQLException e) { - logger.error("SQLException", e); + log.error("SQLException", e); throw e; } @@ -148,11 +146,11 @@ public class ScriptRunner { } } catch (SQLException e) { - logger.error("Error executing: {}", command); + log.error("Error 
executing: {}", command); throw e; } catch (IOException e) { e.fillInStackTrace(); - logger.error("Error executing: {}", command); + log.error("Error executing: {}", command); throw e; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java index 6d7a26ee1c..c2a25d758e 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java @@ -49,8 +49,8 @@ import java.util.List; import java.util.Optional; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; @@ -60,13 +60,9 @@ import com.google.common.base.Strings; import com.google.common.collect.Lists; @Component +@Slf4j public class AlertDao { - /** - * logger of AlertDao - */ - private static final Logger logger = LoggerFactory.getLogger(AlertDao.class); - private static final int QUERY_ALERT_THRESHOLD = 100; @Value("${alert.alarm-suppression.crash:60}") @@ -92,14 +88,14 @@ public class AlertDao { */ public int addAlert(Alert alert) { if (null == alert.getAlertGroupId() || NumberUtils.INTEGER_ZERO.equals(alert.getAlertGroupId())) { - logger.warn("the value of alertGroupId is null or 0 "); + log.warn("the value of alertGroupId is null or 0 "); return 0; } String sign = generateSign(alert); alert.setSign(sign); int count = alertMapper.insert(alert); - logger.info("add alert to db , alert: {}", alert); + log.info("add alert to db , alert: {}", alert); return count; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java index dcdb1f8015..54b9809eea 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java @@ -30,16 +30,15 @@ import java.util.List; import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component +@Slf4j public class MonitorDBDao { - private static final Logger logger = LoggerFactory.getLogger(MonitorDBDao.class); - public static final String VARIABLE_NAME = "variable_name"; @Autowired @@ -56,7 +55,7 @@ public class MonitorDBDao { return new H2Performance().getMonitorRecord(conn); } } catch (Exception e) { - logger.error("SQLException: {}", e.getMessage(), e); + log.error("SQLException: {}", e.getMessage(), e); } return null; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java index 06b960eb2f..cb0eeb4f24 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java @@ -34,8 +34,8 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; @@ -45,10 +45,9 @@ import com.google.common.collect.Lists; * Task Definition DAO Implementation */ @Repository +@Slf4j public class TaskDefinitionDaoImpl 
implements TaskDefinitionDao { - private final Logger logger = LoggerFactory.getLogger(TaskDefinitionDaoImpl.class); - @Autowired private ProcessDefinitionMapper processDefinitionMapper; @@ -65,7 +64,7 @@ public class TaskDefinitionDaoImpl implements TaskDefinitionDao { public List getTaskDefinitionListByDefinition(long processDefinitionCode) { ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode); if (processDefinition == null) { - logger.error("Cannot find process definition, code: {}", processDefinitionCode); + log.error("Cannot find process definition, code: {}", processDefinitionCode); return Lists.newArrayList(); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java index 8daef54308..e1e27f5c7e 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java @@ -33,8 +33,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; @@ -46,8 +44,6 @@ import com.google.common.collect.Lists; @Repository public class TaskDefinitionLogDaoImpl implements TaskDefinitionLogDao { - private final Logger logger = LoggerFactory.getLogger(TaskDefinitionLogDaoImpl.class); - @Autowired private TaskDefinitionDao taskDefinitionDao; diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java index d9ce044b88..48ec1cd15e 100644 
--- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java @@ -35,8 +35,8 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; @@ -44,10 +44,9 @@ import org.springframework.stereotype.Repository; * Task Instance DAO implementation */ @Repository +@Slf4j public class TaskInstanceDaoImpl implements TaskInstanceDao { - private final Logger logger = LoggerFactory.getLogger(TaskInstanceDaoImpl.class); - @Autowired private TaskInstanceMapper taskInstanceMapper; @@ -82,7 +81,7 @@ public class TaskInstanceDaoImpl implements TaskInstanceDao { public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance) { WorkflowExecutionStatus processInstanceState = processInstance.getState(); if (processInstanceState.isFinished() || processInstanceState == WorkflowExecutionStatus.READY_STOP) { - logger.warn("processInstance: {} state was: {}, skip submit this task, taskCode: {}", + log.warn("processInstance: {} state was: {}, skip submit this task, taskCode: {}", processInstance.getId(), processInstanceState, taskInstance.getTaskCode()); @@ -180,7 +179,7 @@ public class TaskInstanceDaoImpl implements TaskInstanceDao { taskInstanceMapper.clearCacheByCacheKey(cacheKey); return true; } catch (Exception e) { - logger.error("clear cache by cacheKey failed", e); + log.error("clear cache by cacheKey failed", e); return false; } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java index 25bdb8b3cc..0388e741de 
100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java @@ -26,13 +26,11 @@ import java.sql.Date; import java.sql.PreparedStatement; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class JsonSplitDao { - public static final Logger logger = LoggerFactory.getLogger(JsonSplitDao.class); - /** * executeJsonSplitProcessDefinition * @@ -89,7 +87,7 @@ public class JsonSplitDao { processUpdate.close(); insertLog.close(); } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e); } } @@ -155,7 +153,7 @@ public class JsonSplitDao { insert.close(); insertLog.close(); } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e); } } @@ -243,7 +241,7 @@ public class JsonSplitDao { insert.close(); insertLog.close(); } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e); } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java index 61fbf81900..2c66b67414 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java @@ -31,13 +31,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class ProcessDefinitionDao { - public static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionDao.class); - /** * 
queryAllProcessDefinition * @@ -60,7 +58,7 @@ public class ProcessDefinitionDao { } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } @@ -84,7 +82,7 @@ public class ProcessDefinitionDao { } } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } } @@ -116,7 +114,7 @@ public class ProcessDefinitionDao { processDefinitions.add(processDefinition); } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } return processDefinitions; @@ -151,7 +149,7 @@ public class ProcessDefinitionDao { } } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java index e8e3f9ad2c..3b267f8b5f 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java @@ -25,13 +25,11 @@ import java.sql.ResultSet; import java.util.HashMap; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class ProjectDao { - public static final Logger logger = LoggerFactory.getLogger(ProjectDao.class); - /** * queryAllProject * @@ -53,7 +51,7 @@ public class ProjectDao { projectMap.put(id, code); } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } return projectMap; @@ -76,7 +74,7 @@ public class ProjectDao { } } } catch (Exception e) { - logger.error(e.getMessage(), e); + 
log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java index 3717df0a6b..b79e40b0ce 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java @@ -24,13 +24,11 @@ import java.time.Clock; import java.util.HashMap; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class ScheduleDao { - public static final Logger logger = LoggerFactory.getLogger(ScheduleDao.class); - /** * queryAllSchedule * @@ -49,7 +47,7 @@ public class ScheduleDao { scheduleMap.put(id, processDefinitionCode); } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } return scheduleMap; @@ -84,7 +82,7 @@ public class ScheduleDao { } } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java index e6a7b00104..25164a6059 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java @@ -28,8 +28,8 @@ import java.util.Collections; import java.util.List; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.core.io.ClassPathResource; import 
com.google.common.base.Strings; @@ -37,10 +37,9 @@ import com.google.common.base.Strings; /** * Metadata related common classes */ +@Slf4j public class SchemaUtils { - private static final Logger logger = LoggerFactory.getLogger(SchemaUtils.class); - private SchemaUtils() { throw new UnsupportedOperationException("Construct SchemaUtils"); } @@ -67,7 +66,7 @@ public class SchemaUtils { return -1; } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e); } }).collect(Collectors.toList()); @@ -113,7 +112,7 @@ public class SchemaUtils { softVersion = FileUtils.readFile2Str(inputStream); softVersion = Strings.nullToEmpty(softVersion).replaceAll("\\s+|\r|\n", ""); } catch (FileNotFoundException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException( "Failed to get the product version description file. The file could not be found", e); } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDao.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDao.java index 797e4b94ea..44b5d85ad2 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDao.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDao.java @@ -23,13 +23,11 @@ import java.sql.ResultSet; import java.util.HashMap; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class WorkerGroupDao { - public static final Logger logger = LoggerFactory.getLogger(WorkerGroupDao.class); - /** * query all old worker group * @param conn jdbc connection @@ -50,7 +48,7 @@ public class WorkerGroupDao { } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } diff --git 
a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/H2Performance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/H2Performance.java index f32c9cb357..70d68dea9b 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/H2Performance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/H2Performance.java @@ -27,16 +27,14 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Date; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * H2 MEMORY DB Performance Monitor */ +@Slf4j public class H2Performance extends BaseDBPerformance { - private static final Logger logger = LoggerFactory.getLogger(H2Performance.class); - /** * return the current database performance * @@ -60,7 +58,7 @@ public class H2Performance extends BaseDBPerformance { } } catch (SQLException e) { monitorRecord.setState(Flag.NO); - logger.error("SQLException ", e); + log.error("SQLException ", e); } return monitorRecord; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MySQLPerformance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MySQLPerformance.java index e8591364d0..f09483f5e7 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MySQLPerformance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/MySQLPerformance.java @@ -28,16 +28,14 @@ import java.sql.ResultSet; import java.sql.Statement; import java.util.Date; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * MySQL performance */ +@Slf4j public class MySQLPerformance extends BaseDBPerformance { - private static Logger logger = LoggerFactory.getLogger(MySQLPerformance.class); - /** * get monitor record * @param conn connection @@ -72,7 +70,7 @@ public class MySQLPerformance 
extends BaseDBPerformance { } } catch (Exception e) { monitorRecord.setState(Flag.NO); - logger.error("SQLException ", e); + log.error("SQLException ", e); } return monitorRecord; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgreSQLPerformance.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgreSQLPerformance.java index 8440636ae8..08454ff5ac 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgreSQLPerformance.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/PostgreSQLPerformance.java @@ -26,13 +26,11 @@ import java.sql.ResultSet; import java.sql.Statement; import java.util.Date; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class PostgreSQLPerformance extends BaseDBPerformance { - private static final Logger logger = LoggerFactory.getLogger(PostgreSQLPerformance.class); - /** * get monitor record * @@ -68,7 +66,7 @@ public class PostgreSQLPerformance extends BaseDBPerformance { } } catch (Exception e) { monitorRecord.setState(Flag.NO); - logger.error("SQLException ", e); + log.error("SQLException ", e); } return monitorRecord; } diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtils.java index 2c6a7d0960..0b35e58d53 100644 --- a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtils.java +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/TaskCacheUtils.java @@ -44,15 +44,13 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.databind.JsonNode; +@Slf4j public class TaskCacheUtils { 
- protected static final Logger logger = LoggerFactory.getLogger(TaskCacheUtils.class); - private TaskCacheUtils() { throw new IllegalStateException("Utility class"); } @@ -182,7 +180,7 @@ public class TaskCacheUtils { String resourceCRCPath = fileProperty.getValue() + CRC_SUFFIX; String resourceCRCWholePath = storageOperate.getResourceFileName(context.getTenantCode(), resourceCRCPath); String targetPath = String.format("%s/%s", context.getExecutePath(), resourceCRCPath); - logger.info("{} --- Remote:{} to Local:{}", "CRC file", resourceCRCWholePath, targetPath); + log.info("{} --- Remote:{} to Local:{}", "CRC file", resourceCRCWholePath, targetPath); String crcString = ""; try { storageOperate.download(context.getTenantCode(), resourceCRCWholePath, targetPath, false, @@ -190,7 +188,7 @@ public class TaskCacheUtils { crcString = FileUtils.readFile2Str(new FileInputStream(targetPath)); fileProperty.setValue(crcString); } catch (IOException e) { - logger.error("Replace checksum failed for file property {}.", fileProperty.getProp()); + log.error("Replace checksum failed for file property {}.", fileProperty.getProp()); } return crcString; } diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/DataQualityApplication.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/DataQualityApplication.java index 191e557f44..91e8716975 100644 --- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/DataQualityApplication.java +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/DataQualityApplication.java @@ -26,8 +26,7 @@ import org.apache.dolphinscheduler.data.quality.context.DataQualityContext; import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; import org.apache.dolphinscheduler.data.quality.utils.JsonUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import 
lombok.extern.slf4j.Slf4j; import com.google.common.base.Strings; @@ -37,14 +36,13 @@ import com.google.common.base.Strings; * These three components realize the functions of connecting data, executing intermediate SQL * and writing execution results and error data to the specified storage engine */ +@Slf4j public class DataQualityApplication { - private static final Logger logger = LoggerFactory.getLogger(DataQualityApplication.class); - public static void main(String[] args) throws Exception { if (args.length < 1) { - logger.error("Can not find DataQualityConfiguration"); + log.error("Can not find DataQualityConfiguration"); System.exit(-1); } @@ -53,7 +51,7 @@ public class DataQualityApplication { DataQualityConfiguration dataQualityConfiguration = JsonUtils.fromJson(dataQualityParameter, DataQualityConfiguration.class); if (dataQualityConfiguration == null) { - logger.info("DataQualityConfiguration is null"); + log.info("DataQualityConfiguration is null"); System.exit(-1); } else { dataQualityConfiguration.validate(); diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java index f94d103508..f20cc85687 100644 --- a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java @@ -29,8 +29,7 @@ import org.apache.dolphinscheduler.data.quality.Constants; import java.text.SimpleDateFormat; import java.util.TimeZone; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Strings; @@ -38,10 +37,9 @@ import com.google.common.base.Strings; /** * JsonUtil */ +@Slf4j public class JsonUtils { - private static final Logger logger = 
LoggerFactory.getLogger(JsonUtils.class); - /** * can use static singleton, inject: just make sure to reuse! */ @@ -67,7 +65,7 @@ public class JsonUtils { try { return MAPPER.readValue(json, clazz); } catch (Exception e) { - logger.error("parse object exception!", e); + log.error("parse object exception!", e); } return null; diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java index cf09b4b05a..c87b3453a1 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/client/CommonDataSourceClient.java @@ -28,17 +28,16 @@ import java.sql.Connection; import java.sql.SQLException; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.jdbc.core.JdbcTemplate; import com.google.common.base.Stopwatch; import com.zaxxer.hikari.HikariDataSource; +@Slf4j public class CommonDataSourceClient implements DataSourceClient { - private static final Logger logger = LoggerFactory.getLogger(CommonDataSourceClient.class); - public static final String COMMON_USER = "root"; public static final String COMMON_VALIDATION_QUERY = "select 1"; @@ -55,7 +54,7 @@ public class CommonDataSourceClient implements DataSourceClient { } protected void preInit() { - logger.info("preInit in CommonDataSourceClient"); + log.info("preInit in CommonDataSourceClient"); } protected void checkEnv(BaseConnectionParam baseConnectionParam) { @@ -97,7 +96,7 @@ public class 
CommonDataSourceClient implements DataSourceClient { } catch (Exception e) { throw new RuntimeException("JDBC connect failed", e); } finally { - logger.info("Time to execute check jdbc client with sql {} for {} ms ", + log.info("Time to execute check jdbc client with sql {} for {} ms ", this.baseConnectionParam.getValidationQuery(), stopwatch.elapsed(TimeUnit.MILLISECONDS)); } } @@ -107,14 +106,14 @@ public class CommonDataSourceClient implements DataSourceClient { try { return this.dataSource.getConnection(); } catch (SQLException e) { - logger.error("get druidDataSource Connection fail SQLException: {}", e.getMessage(), e); + log.error("get druidDataSource Connection fail SQLException: {}", e.getMessage(), e); return null; } } @Override public void close() { - logger.info("do close dataSource {}.", baseConnectionParam.getDatabase()); + log.info("do close dataSource {}.", baseConnectionParam.getDatabase()); try (HikariDataSource closedDatasource = dataSource) { // only close the resource } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java index 2d8cbd3d34..29e1549385 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceClientProvider.java @@ -31,23 +31,21 @@ import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import 
com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.cache.RemovalListener; +@Slf4j public class DataSourceClientProvider { - private static final Logger logger = LoggerFactory.getLogger(DataSourceClientProvider.class); - private static final long duration = PropertyUtils.getLong(TaskConstants.KERBEROS_EXPIRE_TIME, 24); private static final Cache uniqueId2dataSourceClientCache = CacheBuilder.newBuilder() .expireAfterWrite(duration, TimeUnit.HOURS) .removalListener((RemovalListener) notification -> { try (DataSourceClient closedClient = notification.getValue()) { - logger.info("Datasource: {} is removed from cache due to expire", notification.getKey()); + log.info("Datasource: {} is removed from cache due to expire", notification.getKey()); } }) .maximumSize(100) @@ -70,7 +68,7 @@ public class DataSourceClientProvider { public Connection getConnection(DbType dbType, ConnectionParam connectionParam) throws ExecutionException { BaseConnectionParam baseConnectionParam = (BaseConnectionParam) connectionParam; String datasourceUniqueId = DataSourceUtils.getDatasourceUniqueId(baseConnectionParam, dbType); - logger.info("Get connection from datasource {}", datasourceUniqueId); + log.info("Get connection from datasource {}", datasourceUniqueId); DataSourceClient dataSourceClient = uniqueId2dataSourceClientCache.get(datasourceUniqueId, () -> { Map dataSourceChannelMap = dataSourcePluginManager.getDataSourceChannelMap(); diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourcePluginManager.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourcePluginManager.java index 6425064b3d..f1d4391221 100644 --- 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourcePluginManager.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourcePluginManager.java @@ -27,13 +27,11 @@ import java.util.Collections; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class DataSourcePluginManager { - private static final Logger logger = LoggerFactory.getLogger(DataSourcePluginManager.class); - private final Map datasourceClientMap = new ConcurrentHashMap<>(); public Map getDataSourceChannelMap() { @@ -48,7 +46,7 @@ public class DataSourcePluginManager { final DataSourceChannelFactory factory = entry.getValue(); final String name = entry.getKey(); - logger.info("Registering datasource plugin: {}", name); + log.info("Registering datasource plugin: {}", name); if (datasourceClientMap.containsKey(name)) { throw new IllegalStateException(format("Duplicate datasource plugins named '%s'", name)); @@ -56,7 +54,7 @@ public class DataSourcePluginManager { loadDatasourceClient(factory); - logger.info("Registered datasource plugin: {}", name); + log.info("Registered datasource plugin: {}", name); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorManager.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorManager.java index dbdbe3bf68..421d42dcf0 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorManager.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorManager.java @@ -26,13 +26,11 @@ import java.util.Map; import java.util.ServiceLoader; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class DataSourceProcessorManager { - private static final Logger logger = LoggerFactory.getLogger(DataSourceProcessorManager.class); - private static final Map dataSourceProcessorMap = new ConcurrentHashMap<>(); public Map getDataSourceProcessorMap() { @@ -44,13 +42,13 @@ public class DataSourceProcessorManager { ServiceLoader.load(DataSourceProcessor.class).forEach(factory -> { final String name = factory.getDbType().name(); - logger.info("start register processor: {}", name); + log.info("start register processor: {}", name); if (dataSourceProcessorMap.containsKey(name)) { throw new IllegalStateException(format("Duplicate datasource plugins named '%s'", name)); } loadDatasourceClient(factory); - logger.info("done register processor: {}", name); + log.info("done register processor: {}", name); }); } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorProvider.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorProvider.java index 6e21117c9f..4ec1124325 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorProvider.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/plugin/DataSourceProcessorProvider.java @@ -23,14 +23,11 @@ import 
org.apache.dolphinscheduler.spi.enums.DbType; import java.util.Map; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +@Slf4j public class DataSourceProcessorProvider { - private static final Logger logger = LoggerFactory.getLogger(DataSourceProcessorProvider.class); - private DataSourceProcessorManager dataSourcePluginManager; private DataSourceProcessorProvider() { diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JDBCDataSourceProvider.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JDBCDataSourceProvider.java index 1a62eb3a66..386b4ec822 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JDBCDataSourceProvider.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/provider/JDBCDataSourceProvider.java @@ -30,20 +30,18 @@ import org.apache.commons.lang3.StringUtils; import java.sql.Driver; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.zaxxer.hikari.HikariDataSource; /** * Jdbc Data Source Provider */ +@Slf4j public class JDBCDataSourceProvider { - private static final Logger logger = LoggerFactory.getLogger(JDBCDataSourceProvider.class); - public static HikariDataSource createJdbcDataSource(BaseConnectionParam properties, DbType dbType) { - logger.info("Creating HikariDataSource pool for maxActive:{}", + log.info("Creating HikariDataSource pool for maxActive:{}", PropertyUtils.getInt(DataSourceConstants.SPRING_DATASOURCE_MAX_ACTIVE, 50)); HikariDataSource dataSource = new HikariDataSource(); @@ -64,7 +62,7 @@ public class JDBCDataSourceProvider { 
properties.getOther().forEach(dataSource::addDataSourceProperty); } - logger.info("Creating HikariDataSource pool success."); + log.info("Creating HikariDataSource pool success."); return dataSource; } @@ -72,7 +70,7 @@ public class JDBCDataSourceProvider { * @return One Session Jdbc DataSource */ public static HikariDataSource createOneSessionJdbcDataSource(BaseConnectionParam properties, DbType dbType) { - logger.info("Creating OneSession HikariDataSource pool for maxActive:{}", + log.info("Creating OneSession HikariDataSource pool for maxActive:{}", PropertyUtils.getInt(DataSourceConstants.SPRING_DATASOURCE_MAX_ACTIVE, 50)); HikariDataSource dataSource = new HikariDataSource(); @@ -93,7 +91,7 @@ public class JDBCDataSourceProvider { properties.getOther().forEach(dataSource::addDataSourceProperty); } - logger.info("Creating OneSession HikariDataSource pool success."); + log.info("Creating OneSession HikariDataSource pool success."); return dataSource; } @@ -105,7 +103,7 @@ public class JDBCDataSourceProvider { final Class clazz = Class.forName(drv, true, classLoader); final Driver driver = (Driver) clazz.newInstance(); if (!driver.acceptsURL(properties.getJdbcUrl())) { - logger.warn("Jdbc driver loading error. Driver {} cannot accept url.", drv); + log.warn("Jdbc driver loading error. 
Driver {} cannot accept url.", drv); throw new RuntimeException("Jdbc driver loading error."); } if (dbType.equals(DbType.MYSQL)) { @@ -116,7 +114,7 @@ public class JDBCDataSourceProvider { } } } catch (final Exception e) { - logger.warn("The specified driver not suitable."); + log.warn("The specified driver not suitable."); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DataSourceUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DataSourceUtils.java index 0b2c15a4e5..af8a99eb26 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DataSourceUtils.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DataSourceUtils.java @@ -27,18 +27,16 @@ import org.apache.dolphinscheduler.spi.enums.DbType; import java.sql.Connection; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.databind.JsonNode; +@Slf4j public class DataSourceUtils { public DataSourceUtils() { } - private static final Logger logger = LoggerFactory.getLogger(DataSourceUtils.class); - /** * check datasource param * @@ -56,7 +54,7 @@ public class DataSourceUtils { public static ConnectionParam buildConnectionParams(BaseDataSourceParamDTO baseDataSourceParamDTO) { ConnectionParam connectionParams = getDatasourceProcessor(baseDataSourceParamDTO.getType()) .createConnectionParams(baseDataSourceParamDTO); - logger.info("Parameters map:{}", connectionParams); + log.info("Parameters map:{}", connectionParams); return connectionParams; } diff --git 
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/PasswordUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/PasswordUtils.java index 37f23e8b12..779e617148 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/PasswordUtils.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/PasswordUtils.java @@ -28,13 +28,11 @@ import org.apache.commons.lang3.StringUtils; import java.nio.charset.StandardCharsets; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class PasswordUtils { - private static final Logger logger = LoggerFactory.getLogger(PasswordUtils.class); - private static final Base64 BASE64 = new Base64(); private PasswordUtils() { @@ -79,7 +77,7 @@ public class PasswordUtils { String salt = PropertyUtils.getString(DATASOURCE_ENCRYPTION_SALT, DATASOURCE_ENCRYPTION_SALT_DEFAULT); String passwordWithSalt = new String(BASE64.decode(password), StandardCharsets.UTF_8); if (!passwordWithSalt.startsWith(salt)) { - logger.warn("There is a password and salt mismatch: {} ", password); + log.warn("There is a password and salt mismatch: {} ", password); return password; } return new String(BASE64.decode(passwordWithSalt.substring(salt.length())), StandardCharsets.UTF_8); diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-azure-sql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/azuresql/AzureSQLDataSourceClient.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-azure-sql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/azuresql/AzureSQLDataSourceClient.java index 
11feaa1c6a..cf7db2e3b2 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-azure-sql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/azuresql/AzureSQLDataSourceClient.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-azure-sql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/azuresql/AzureSQLDataSourceClient.java @@ -29,15 +29,13 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Stopwatch; +@Slf4j public class AzureSQLDataSourceClient extends CommonDataSourceClient { - private static final Logger logger = LoggerFactory.getLogger(AzureSQLDataSourceClient.class); - public AzureSQLDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { super(baseConnectionParam, dbType); } @@ -64,7 +62,7 @@ public class AzureSQLDataSourceClient extends CommonDataSourceClient { } catch (Exception e) { throw new RuntimeException("JDBC connect failed", e); } finally { - logger.info("Time to execute check jdbc client with sql {} for {} ms ", + log.info("Time to execute check jdbc client with sql {} for {} ms ", this.baseConnectionParam.getValidationQuery(), stopwatch.elapsed(TimeUnit.MILLISECONDS)); } } else { @@ -75,7 +73,7 @@ public class AzureSQLDataSourceClient extends CommonDataSourceClient { } catch (SQLException e) { throw new RuntimeException(e); } finally { - logger.info("Time to execute check azure sql token client with sql {} for {} ms ", + log.info("Time to execute check azure sql token client with sql {} for {} ms ", this.baseConnectionParam.getValidationQuery(), stopwatch.elapsed(TimeUnit.MILLISECONDS)); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/param/DamengDataSourceProcessor.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/param/DamengDataSourceProcessor.java index b15345864d..cc1733662a 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/param/DamengDataSourceProcessor.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-dameng/src/main/java/org/apache/dolphinscheduler/plugin/datasource/dameng/param/DamengDataSourceProcessor.java @@ -38,16 +38,11 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.auto.service.AutoService; @AutoService(DataSourceProcessor.class) public class DamengDataSourceProcessor extends AbstractDataSourceProcessor { - private final Logger logger = LoggerFactory.getLogger(DamengDataSourceProcessor.class); - @Override public BaseDataSourceParamDTO castDatasourceParamDTO(String paramJson) { return JSONUtils.parseObject(paramJson, DamengDataSourceParamDTO.class); diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java index c7eef482da..15270f60a3 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-hive/src/main/java/org/apache/dolphinscheduler/plugin/datasource/hive/HiveDataSourceClient.java @@ -36,32 +36,31 @@ import java.lang.reflect.Field; import java.sql.Connection; import java.sql.SQLException; -import org.slf4j.Logger; -import 
org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.jdbc.core.JdbcTemplate; +@Slf4j public class HiveDataSourceClient extends CommonDataSourceClient { - private static final Logger logger = LoggerFactory.getLogger(HiveDataSourceClient.class); - public HiveDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { super(baseConnectionParam, dbType); } @Override protected void preInit() { - logger.info("PreInit in {}", getClass().getName()); + log.info("PreInit in {}", getClass().getName()); } @Override protected void initClient(BaseConnectionParam baseConnectionParam, DbType dbType) { - logger.info("Create UserGroupInformation."); + log.info("Create UserGroupInformation."); UserGroupInformationFactory.login(baseConnectionParam.getUser()); - logger.info("Create ugi success."); + log.info("Create ugi success."); this.dataSource = JDBCDataSourceProvider.createOneSessionJdbcDataSource(baseConnectionParam, dbType); this.jdbcTemplate = new JdbcTemplate(dataSource); - logger.info("Init {} success.", getClass().getName()); + log.info("Init {} success.", getClass().getName()); } @Override @@ -108,7 +107,7 @@ public class HiveDataSourceClient extends CommonDataSourceClient { } finally { UserGroupInformationFactory.logout(baseConnectionParam.getUser()); } - logger.info("Closed Hive datasource client."); + log.info("Closed Hive datasource client."); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/param/MySQLDataSourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/param/MySQLDataSourceProcessor.java index 3bf05b95a6..67dbe5469d 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/param/MySQLDataSourceProcessor.java +++ 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-mysql/src/main/java/org/apache/dolphinscheduler/plugin/datasource/mysql/param/MySQLDataSourceProcessor.java @@ -38,16 +38,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.auto.service.AutoService; @AutoService(DataSourceProcessor.class) +@Slf4j public class MySQLDataSourceProcessor extends AbstractDataSourceProcessor { - private final Logger logger = LoggerFactory.getLogger(MySQLDataSourceProcessor.class); - private static final String ALLOW_LOAD_LOCAL_IN_FILE_NAME = "allowLoadLocalInfile"; private static final String AUTO_DESERIALIZE = "autoDeserialize"; @@ -133,12 +131,12 @@ public class MySQLDataSourceProcessor extends AbstractDataSourceProcessor { Class.forName(getDatasourceDriver()); String user = mysqlConnectionParam.getUser(); if (user.contains(AUTO_DESERIALIZE)) { - logger.warn("sensitive param : {} in username field is filtered", AUTO_DESERIALIZE); + log.warn("sensitive param : {} in username field is filtered", AUTO_DESERIALIZE); user = user.replace(AUTO_DESERIALIZE, ""); } String password = PasswordUtils.decodePassword(mysqlConnectionParam.getPassword()); if (password.contains(AUTO_DESERIALIZE)) { - logger.warn("sensitive param : {} in password field is filtered", AUTO_DESERIALIZE); + log.warn("sensitive param : {} in password field is filtered", AUTO_DESERIALIZE); password = password.replace(AUTO_DESERIALIZE, ""); } return DriverManager.getConnection(getJdbcUrl(connectionParam), user, password); diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-redshift/src/main/java/org/apache/dolphinscheduler/plugin/datasource/redshift/RedshiftDataSourceClient.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-redshift/src/main/java/org/apache/dolphinscheduler/plugin/datasource/redshift/RedshiftDataSourceClient.java index 
944d507d6d..186e5afd19 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-redshift/src/main/java/org/apache/dolphinscheduler/plugin/datasource/redshift/RedshiftDataSourceClient.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-redshift/src/main/java/org/apache/dolphinscheduler/plugin/datasource/redshift/RedshiftDataSourceClient.java @@ -29,15 +29,13 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Stopwatch; +@Slf4j public class RedshiftDataSourceClient extends CommonDataSourceClient { - private static final Logger logger = LoggerFactory.getLogger(RedshiftDataSourceClient.class); - public RedshiftDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { super(baseConnectionParam, dbType); } @@ -63,7 +61,7 @@ public class RedshiftDataSourceClient extends CommonDataSourceClient { } catch (Exception e) { throw new RuntimeException("JDBC connect failed", e); } finally { - logger.info("Time to execute check jdbc client with sql {} for {} ms ", + log.info("Time to execute check jdbc client with sql {} for {} ms ", this.baseConnectionParam.getValidationQuery(), stopwatch.elapsed(TimeUnit.MILLISECONDS)); } } else { @@ -74,7 +72,7 @@ public class RedshiftDataSourceClient extends CommonDataSourceClient { } catch (SQLException e) { throw new RuntimeException(e); } finally { - logger.info("Time to execute check redshift access key with sql {} for {} ms ", + log.info("Time to execute check redshift access key with sql {} for {} ms ", this.baseConnectionParam.getValidationQuery(), stopwatch.elapsed(TimeUnit.MILLISECONDS)); } } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-spark/src/main/java/org/apache/dolphinscheduler/plugin/datasource/spark/SparkDataSourceClient.java 
b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-spark/src/main/java/org/apache/dolphinscheduler/plugin/datasource/spark/SparkDataSourceClient.java index 26ac9b3ce3..8354dfa392 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-spark/src/main/java/org/apache/dolphinscheduler/plugin/datasource/spark/SparkDataSourceClient.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-spark/src/main/java/org/apache/dolphinscheduler/plugin/datasource/spark/SparkDataSourceClient.java @@ -21,13 +21,8 @@ import org.apache.dolphinscheduler.plugin.datasource.hive.HiveDataSourceClient; import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public class SparkDataSourceClient extends HiveDataSourceClient { - private static final Logger logger = LoggerFactory.getLogger(SparkDataSourceClient.class); - public SparkDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { super(baseConnectionParam, dbType); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java index 353f2ae129..c8c8f887fa 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/MasterServer.java @@ -32,9 +32,9 @@ import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import javax.annotation.PostConstruct; +import lombok.extern.slf4j.Slf4j; + import org.quartz.SchedulerException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import 
org.springframework.boot.autoconfigure.SpringBootApplication; @@ -46,10 +46,9 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; @ComponentScan("org.apache.dolphinscheduler") @EnableTransactionManagement @EnableCaching +@Slf4j public class MasterServer implements IStoppable { - private static final Logger logger = LoggerFactory.getLogger(MasterServer.class); - @Autowired private SpringApplicationContext springApplicationContext; @@ -118,7 +117,7 @@ public class MasterServer implements IStoppable { // set stop signal is true // execute only once if (!ServerLifeCycleManager.toStopped()) { - logger.warn("MasterServer is already stopped, current cause: {}", cause); + log.warn("MasterServer is already stopped, current cause: {}", cause); return; } // thread sleep 3 seconds for thread quietly stop @@ -132,12 +131,12 @@ public class MasterServer implements IStoppable { // like ServerNodeManager,HostManager,TaskResponseService,CuratorZookeeperClient,etc SpringApplicationContext closedSpringContext = springApplicationContext) { - logger.info("Master server is stopping, current cause : {}", cause); + log.info("Master server is stopping, current cause : {}", cause); } catch (Exception e) { - logger.error("MasterServer stop failed, current cause: {}", cause, e); + log.error("MasterServer stop failed, current cause: {}", cause, e); return; } - logger.info("MasterServer stopped, current cause: {}", cause); + log.info("MasterServer stopped, current cause: {}", cause); } @Override diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java index 684cb10f91..ca89ca2337 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/builder/TaskExecutionContextBuilder.java @@ -45,7 +45,7 @@ import org.slf4j.LoggerFactory; public class TaskExecutionContextBuilder { - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); public static TaskExecutionContextBuilder get() { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java index 0167c284f6..3971d1e290 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/config/MasterConfig.java @@ -28,9 +28,8 @@ import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import java.time.Duration; import lombok.Data; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.validation.Errors; @@ -41,10 +40,9 @@ import org.springframework.validation.annotation.Validated; @Validated @Configuration @ConfigurationProperties(prefix = "master") +@Slf4j public class MasterConfig implements Validator { - private Logger logger = LoggerFactory.getLogger(MasterConfig.class); - /** * The master RPC server listen port. 
*/ @@ -152,23 +150,23 @@ public class MasterConfig implements Validator { } private void printConfig() { - logger.info("Master config: listenPort -> {} ", listenPort); - logger.info("Master config: fetchCommandNum -> {} ", fetchCommandNum); - logger.info("Master config: preExecThreads -> {} ", preExecThreads); - logger.info("Master config: execThreads -> {} ", execThreads); - logger.info("Master config: dispatchTaskNumber -> {} ", dispatchTaskNumber); - logger.info("Master config: hostSelector -> {} ", hostSelector); - logger.info("Master config: heartbeatInterval -> {} ", heartbeatInterval); - logger.info("Master config: taskCommitRetryTimes -> {} ", taskCommitRetryTimes); - logger.info("Master config: taskCommitInterval -> {} ", taskCommitInterval); - logger.info("Master config: stateWheelInterval -> {} ", stateWheelInterval); - logger.info("Master config: maxCpuLoadAvg -> {} ", maxCpuLoadAvg); - logger.info("Master config: reservedMemory -> {} ", reservedMemory); - logger.info("Master config: failoverInterval -> {} ", failoverInterval); - logger.info("Master config: killYarnJobWhenTaskFailover -> {} ", killYarnJobWhenTaskFailover); - logger.info("Master config: registryDisconnectStrategy -> {} ", registryDisconnectStrategy); - logger.info("Master config: masterAddress -> {} ", masterAddress); - logger.info("Master config: masterRegistryPath -> {} ", masterRegistryPath); - logger.info("Master config: workerGroupRefreshInterval -> {} ", workerGroupRefreshInterval); + log.info("Master config: listenPort -> {} ", listenPort); + log.info("Master config: fetchCommandNum -> {} ", fetchCommandNum); + log.info("Master config: preExecThreads -> {} ", preExecThreads); + log.info("Master config: execThreads -> {} ", execThreads); + log.info("Master config: dispatchTaskNumber -> {} ", dispatchTaskNumber); + log.info("Master config: hostSelector -> {} ", hostSelector); + log.info("Master config: heartbeatInterval -> {} ", heartbeatInterval); + log.info("Master config: 
taskCommitRetryTimes -> {} ", taskCommitRetryTimes); + log.info("Master config: taskCommitInterval -> {} ", taskCommitInterval); + log.info("Master config: stateWheelInterval -> {} ", stateWheelInterval); + log.info("Master config: maxCpuLoadAvg -> {} ", maxCpuLoadAvg); + log.info("Master config: reservedMemory -> {} ", reservedMemory); + log.info("Master config: failoverInterval -> {} ", failoverInterval); + log.info("Master config: killYarnJobWhenTaskFailover -> {} ", killYarnJobWhenTaskFailover); + log.info("Master config: registryDisconnectStrategy -> {} ", registryDisconnectStrategy); + log.info("Master config: masterAddress -> {} ", masterAddress); + log.info("Master config: masterRegistryPath -> {} ", masterRegistryPath); + log.info("Master config: workerGroupRefreshInterval -> {} ", workerGroupRefreshInterval); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java index 8af2f1e30a..c28c01ee58 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java @@ -61,8 +61,8 @@ import java.util.concurrent.TimeUnit; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -70,13 +70,9 @@ import org.springframework.stereotype.Component; * TaskUpdateQueue consumer */ @Component +@Slf4j public class TaskPriorityQueueConsumer extends BaseDaemonThread { - /** - * logger of TaskUpdateQueueConsumer - */ - private static final Logger logger = 
LoggerFactory.getLogger(TaskPriorityQueueConsumer.class); - /** * taskUpdateQueue */ @@ -129,9 +125,9 @@ public class TaskPriorityQueueConsumer extends BaseDaemonThread { public void init() { this.consumerThreadPoolExecutor = (ThreadPoolExecutor) ThreadUtils .newDaemonFixedThreadExecutor("TaskUpdateQueueConsumerThread", masterConfig.getDispatchTaskNumber()); - logger.info("Task priority queue consume thread staring"); + log.info("Task priority queue consume thread starting"); super.start(); - logger.info("Task priority queue consume thread started"); + log.info("Task priority queue consume thread started"); } @Override @@ -142,20 +138,20 @@ public class TaskPriorityQueueConsumer extends BaseDaemonThread { List failedDispatchTasks = this.batchDispatch(fetchTaskNum); if (CollectionUtils.isNotEmpty(failedDispatchTasks)) { - logger.info("{} tasks dispatch failed, will retry to dispatch", failedDispatchTasks.size()); + log.info("{} tasks dispatch failed, will retry to dispatch", failedDispatchTasks.size()); TaskMetrics.incTaskDispatchFailed(failedDispatchTasks.size()); for (TaskPriority dispatchFailedTask : failedDispatchTasks) { taskPriorityQueue.put(dispatchFailedTask); } // If the all task dispatch failed, will sleep for 1s to avoid the master cpu higher.
if (fetchTaskNum == failedDispatchTasks.size()) { - logger.info("All tasks dispatch failed, will sleep a while to avoid the master cpu higher"); + log.info("All tasks dispatch failed, will sleep a while to avoid the master cpu higher"); TimeUnit.MILLISECONDS.sleep(Constants.SLEEP_TIME_MILLIS); } } } catch (Exception e) { TaskMetrics.incTaskDispatchError(); - logger.error("dispatcher task error", e); + log.error("dispatcher task error", e); } } } @@ -186,7 +182,7 @@ public class TaskPriorityQueueConsumer extends BaseDaemonThread { } catch (ExecuteException e) { failedDispatchTasks.add(taskPriority); } catch (Exception e) { - logger.error("Dispatch task error, meet an unknown exception", e); + log.error("Dispatch task error, meet an unknown exception", e); failedDispatchTasks.add(taskPriority); } } finally { @@ -212,13 +208,13 @@ public class TaskPriorityQueueConsumer extends BaseDaemonThread { WorkflowExecuteRunnable workflowExecuteRunnable = processInstanceExecCacheManager.getByProcessInstanceId(taskPriority.getProcessInstanceId()); if (workflowExecuteRunnable == null) { - logger.error("Cannot find the related processInstance of the task, taskPriority: {}", taskPriority); + log.error("Cannot find the related processInstance of the task, taskPriority: {}", taskPriority); return; } Optional taskInstanceOptional = workflowExecuteRunnable.getTaskInstance(taskPriority.getTaskId()); if (!taskInstanceOptional.isPresent()) { - logger.error("Cannot find the task instance from related processInstance, taskPriority: {}", + log.error("Cannot find the task instance from related processInstance, taskPriority: {}", taskPriority); // we return true, so that we will drop this task. 
return; @@ -235,7 +231,7 @@ public class TaskPriorityQueueConsumer extends BaseDaemonThread { if (isTaskNeedToCheck(taskPriority)) { if (taskInstanceIsFinalState(taskPriority.getTaskId())) { // when task finish, ignore this task, there is no need to dispatch anymore - logger.info("Task {} is already finished, no need to dispatch, task instance id: {}", + log.info("Task {} is already finished, no need to dispatch, task instance id: {}", taskInstance.getName(), taskInstance.getId()); return; } @@ -247,7 +243,7 @@ public class TaskPriorityQueueConsumer extends BaseDaemonThread { } dispatcher.dispatch(executionContext); - logger.info("Master success dispatch task to worker, taskInstanceId: {}, worker: {}", + log.info("Master success dispatch task to worker, taskInstanceId: {}, worker: {}", taskPriority.getTaskId(), executionContext.getHost()); addDispatchEvent(context, executionContext); @@ -331,7 +327,7 @@ public class TaskPriorityQueueConsumer extends BaseDaemonThread { TaskInstance cacheTaskInstance = taskInstanceDao.findTaskInstanceByCacheKey(cacheKey); // if we can find the cache task instance, we will add cache event, and return true. 
if (cacheTaskInstance != null) { - logger.info("Task {} is cache, no need to dispatch, task instance id: {}", + log.info("Task {} is cache, no need to dispatch, task instance id: {}", taskInstance.getName(), taskInstance.getId()); addCacheEvent(taskInstance, cacheTaskInstance); taskInstance.setCacheKey(TaskCacheUtils.generateTagCacheKey(cacheTaskInstance.getId(), cacheKey)); @@ -341,7 +337,7 @@ public class TaskPriorityQueueConsumer extends BaseDaemonThread { taskInstance.setCacheKey(TaskCacheUtils.generateTagCacheKey(taskInstance.getId(), cacheKey)); } } catch (Exception e) { - logger.error("checkIsCacheExecution error", e); + log.error("checkIsCacheExecution error", e); } return false; } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java index 90cb07fbb2..1880f9295f 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcher.java @@ -29,8 +29,8 @@ import org.apache.commons.lang3.StringUtils; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -39,10 +39,9 @@ import org.springframework.stereotype.Service; * executor dispatcher */ @Service +@Slf4j public class ExecutorDispatcher implements InitializingBean { - private static final Logger logger = LoggerFactory.getLogger(ExecutorDispatcher.class); - /** * netty executor manager */ @@ -84,7 +83,7 @@ public class ExecutorDispatcher implements InitializingBean { // host select Host host = 
hostManager.select(context); if (StringUtils.isEmpty(host.getAddress())) { - logger.warn("fail to execute : {} due to no suitable worker, current task needs worker group {} to execute", + log.warn("fail to execute : {} due to no suitable worker, current task needs worker group {} to execute", context.getCommand(), context.getWorkerGroup()); throw new ExecuteException("no suitable worker"); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java index cccf16ef72..3df401f8fd 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManager.java @@ -41,8 +41,8 @@ import java.util.Set; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -50,10 +50,9 @@ import org.springframework.stereotype.Service; * netty executor manager */ @Service +@Slf4j public class NettyExecutorManager extends AbstractExecutorManager { - private final Logger logger = LoggerFactory.getLogger(NettyExecutorManager.class); - /** * server node manager */ @@ -112,14 +111,14 @@ public class NettyExecutorManager extends AbstractExecutorManager { context.getTaskInstance().setHost(host.getAddress()); return; } catch (ExecuteException ex) { - logger.error("Execute command {} error", command, ex); + log.error("Execute command {} error", command, ex); try { failNodeSet.add(host.getAddress()); Set tmpAllIps = new HashSet<>(allNodes); Collection remained = CollectionUtils.subtract(tmpAllIps, failNodeSet); if 
(CollectionUtils.isNotEmpty(remained)) { host = Host.of(remained.iterator().next()); - logger.error("retry execute command : {} host : {}", command, host); + log.error("retry execute command : {} host : {}", command, host); } else { throw new ExecuteException("fail after try all nodes"); } @@ -152,7 +151,7 @@ public class NettyExecutorManager extends AbstractExecutorManager { nettyRemotingClient.send(host, command); success = true; } catch (Exception ex) { - logger.error("Send command to {} error, command: {}", host, command, ex); + log.error("Send command to {} error, command: {}", host, command, ex); retryCount--; ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java index a6d920a61c..5c615563ab 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java @@ -40,16 +40,14 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * lower weight host manager */ +@Slf4j public class LowerWeightHostManager extends CommonHostManager { - private final Logger logger = LoggerFactory.getLogger(LowerWeightHostManager.class); - /** * selector */ @@ -125,7 +123,7 @@ public class LowerWeightHostManager extends CommonHostManager { } syncWorkerHostWeight(workerHostWeights); } catch (Throwable ex) { - logger.error("Sync worker resource error", ex); + log.error("Sync worker resource error", ex); } } @@ -142,16 +140,16 @@ public class LowerWeightHostManager extends CommonHostManager { public 
Optional getHostWeight(String addr, String workerGroup, WorkerHeartBeat heartBeat) { if (heartBeat == null) { - logger.warn("worker {} in work group {} have not received the heartbeat", addr, workerGroup); + log.warn("worker {} in work group {} has not received the heartbeat", addr, workerGroup); return Optional.empty(); } if (Constants.ABNORMAL_NODE_STATUS == heartBeat.getServerStatus()) { - logger.warn("worker {} current cpu load average {} is too high or available memory {}G is too low", + log.warn("worker {} current cpu load average {} is too high or available memory {}G is too low", addr, heartBeat.getLoadAverage(), heartBeat.getAvailablePhysicalMemorySize()); return Optional.empty(); } if (Constants.BUSY_NODE_STATUE == heartBeat.getServerStatus()) { - logger.warn("worker {} is busy, current waiting task count {} is large than worker thread count {}", + log.warn("worker {} is busy, current waiting task count {} is larger than worker thread count {}", addr, heartBeat.getWorkerWaitingTaskCount(), heartBeat.getWorkerExecThreadCount()); return Optional.empty(); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskDelayEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskDelayEventHandler.java index 18168056f0..6355e8278d 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskDelayEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskDelayEventHandler.java @@ -31,16 +31,15 @@ import org.apache.dolphinscheduler.service.process.ProcessService; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component +@Slf4j public class TaskDelayEventHandler implements TaskEventHandler
{ - private final Logger logger = LoggerFactory.getLogger(TaskDelayEventHandler.class); - @Autowired private ProcessInstanceExecCacheManager processInstanceExecCacheManager; @@ -71,7 +70,7 @@ public class TaskDelayEventHandler implements TaskEventHandler { } TaskInstance taskInstance = taskInstanceOptional.get(); if (taskInstance.getState().isFinished()) { - logger.warn( + log.warn( "The current task status is: {}, will not handle the running event, this event is delay, will discard this event: {}", taskInstance.getState(), taskEvent); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskDispatchEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskDispatchEventHandler.java index 08c13f2de1..efc6340703 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskDispatchEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskDispatchEventHandler.java @@ -26,16 +26,15 @@ import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheM import org.apache.dolphinscheduler.server.master.processor.queue.TaskEvent; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component +@Slf4j public class TaskDispatchEventHandler implements TaskEventHandler { - private final Logger logger = LoggerFactory.getLogger(TaskDispatchEventHandler.class); - @Autowired private ProcessInstanceExecCacheManager processInstanceExecCacheManager; @@ -55,7 +54,7 @@ public class TaskDispatchEventHandler implements TaskEventHandler { TaskInstance taskInstance = workflowExecuteRunnable.getTaskInstance(taskInstanceId) .orElseThrow(() -> new 
TaskEventHandleError("Cannot find related taskInstance from cache")); if (taskInstance.getState() != TaskExecutionStatus.SUBMITTED_SUCCESS) { - logger.warn( + log.warn( "The current taskInstance status is not SUBMITTED_SUCCESS, so the dispatch event will be discarded, the current is a delay event, event: {}", taskEvent); return; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskRetryStateEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskRetryStateEventHandler.java index d4ab7fc948..074be18102 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskRetryStateEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskRetryStateEventHandler.java @@ -24,22 +24,20 @@ import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.auto.service.AutoService; @AutoService(StateEventHandler.class) +@Slf4j public class TaskRetryStateEventHandler implements StateEventHandler { - private static final Logger logger = LoggerFactory.getLogger(TaskRetryStateEventHandler.class); - @Override public boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, StateEvent stateEvent) throws StateEventHandleException { TaskStateEvent taskStateEvent = (TaskStateEvent) stateEvent; - logger.info("Handle task instance retry event, taskCode: {}", taskStateEvent.getTaskCode()); + log.info("Handle task instance retry event, taskCode: {}", taskStateEvent.getTaskCode()); TaskMetrics.incTaskInstanceByState("retry"); Map waitToRetryTaskInstanceMap = workflowExecuteRunnable.getWaitToRetryTaskInstanceMap(); diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskRunningEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskRunningEventHandler.java index 85a10da40d..5bfa75733d 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskRunningEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskRunningEventHandler.java @@ -30,16 +30,12 @@ import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThreadPoo import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class TaskRunningEventHandler implements TaskEventHandler { - private final Logger logger = LoggerFactory.getLogger(TaskRunningEventHandler.class); - @Autowired private ProcessInstanceExecCacheManager processInstanceExecCacheManager; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskStateEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskStateEventHandler.java index 935fa9db37..676e7c37a8 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskStateEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskStateEventHandler.java @@ -27,16 +27,14 @@ import org.apache.dolphinscheduler.server.master.runner.task.TaskAction; import java.util.Map; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.auto.service.AutoService; @AutoService(StateEventHandler.class) +@Slf4j public class TaskStateEventHandler implements StateEventHandler { - private static final Logger 
logger = LoggerFactory.getLogger(TaskStateEventHandler.class); - @Override public boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, StateEvent stateEvent) throws StateEventHandleException, StateEventHandleError { @@ -54,7 +52,7 @@ public class TaskStateEventHandler implements StateEventHandler { throw new StateEventHandleError("Task state event handle error due to task state is null"); } - logger.info( + log.info( "Handle task instance state event, the current task instance state {} will be changed to {}", task.getState(), taskStateEvent.getStatus()); @@ -63,12 +61,12 @@ public class TaskStateEventHandler implements StateEventHandler { if (task.getState().isFinished()) { if (completeTaskMap.containsKey(task.getTaskCode()) && completeTaskMap.get(task.getTaskCode()) == task.getId()) { - logger.warn("The task instance is already complete, stateEvent: {}", stateEvent); + log.warn("The task instance is already complete, stateEvent: {}", stateEvent); return true; } workflowExecuteRunnable.taskFinished(task); if (task.getTaskGroupId() > 0) { - logger.info("The task instance need to release task Group: {}", task.getTaskGroupId()); + log.info("The task instance need to release task Group: {}", task.getTaskGroupId()); workflowExecuteRunnable.releaseTaskGroup(task); } return true; @@ -98,7 +96,7 @@ public class TaskStateEventHandler implements StateEventHandler { private void measureTaskState(TaskStateEvent taskStateEvent) { if (taskStateEvent == null || taskStateEvent.getStatus() == null) { // the event is broken - logger.warn("The task event is broken..., taskEvent: {}", taskStateEvent); + log.warn("The task event is broken..., taskEvent: {}", taskStateEvent); return; } if (taskStateEvent.getStatus().isFinished()) { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskTimeoutStateEventHandler.java 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskTimeoutStateEventHandler.java index 8a2854e6af..dfa60f87ad 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskTimeoutStateEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskTimeoutStateEventHandler.java @@ -28,16 +28,14 @@ import org.apache.dolphinscheduler.server.master.runner.task.TaskAction; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.auto.service.AutoService; @AutoService(StateEventHandler.class) +@Slf4j public class TaskTimeoutStateEventHandler implements StateEventHandler { - private static final Logger logger = LoggerFactory.getLogger(TaskTimeoutStateEventHandler.class); - @Override public boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, StateEvent stateEvent) throws StateEventHandleError { @@ -52,7 +50,7 @@ public class TaskTimeoutStateEventHandler implements StateEventHandler { "Cannot find the task instance from workflow execute runnable, taskInstanceId: %s", taskStateEvent.getTaskInstanceId()))); - logger.info("Handle task instance state timeout event, taskInstanceId: {}", taskStateEvent.getTaskInstanceId()); + log.info("Handle task instance state timeout event, taskInstanceId: {}", taskStateEvent.getTaskInstanceId()); if (TimeoutFlag.CLOSE == taskInstance.getTaskDefine().getTimeoutFlag()) { return true; @@ -67,7 +65,7 @@ public class TaskTimeoutStateEventHandler implements StateEventHandler { ITaskProcessor taskProcessor = activeTaskProcessMap.get(taskInstance.getTaskCode()); taskProcessor.action(TaskAction.TIMEOUT); } else { - logger.warn( + log.warn( "cannot find the task processor for task {}, so skip task processor action.", taskInstance.getTaskCode()); } diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskWaitTaskGroupStateHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskWaitTaskGroupStateHandler.java index b5fd02258e..4859e896b9 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskWaitTaskGroupStateHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/TaskWaitTaskGroupStateHandler.java @@ -20,24 +20,22 @@ package org.apache.dolphinscheduler.server.master.event; import org.apache.dolphinscheduler.common.enums.StateEventType; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.auto.service.AutoService; @AutoService(StateEventHandler.class) +@Slf4j public class TaskWaitTaskGroupStateHandler implements StateEventHandler { - private static final Logger logger = LoggerFactory.getLogger(TaskWaitTaskGroupStateHandler.class); - @Override public boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, StateEvent stateEvent) { - logger.info("Handle task instance wait task group event, taskInstanceId: {}", stateEvent.getTaskInstanceId()); + log.info("Handle task instance wait task group event, taskInstanceId: {}", stateEvent.getTaskInstanceId()); if (workflowExecuteRunnable.checkForceStartAndWakeUp(stateEvent)) { - logger.info("Success wake up task instance, taskInstanceId: {}", stateEvent.getTaskInstanceId()); + log.info("Success wake up task instance, taskInstanceId: {}", stateEvent.getTaskInstanceId()); } else { - logger.info("Failed to wake up task instance, taskInstanceId: {}", stateEvent.getTaskInstanceId()); + log.info("Failed to wake up task instance, taskInstanceId: {}", stateEvent.getTaskInstanceId()); } return true; } diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowBlockStateEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowBlockStateEventHandler.java index 5c9f901e68..07f89fc544 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowBlockStateEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowBlockStateEventHandler.java @@ -25,20 +25,18 @@ import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.auto.service.AutoService; @AutoService(StateEventHandler.class) +@Slf4j public class WorkflowBlockStateEventHandler implements StateEventHandler { - private static final Logger logger = LoggerFactory.getLogger(WorkflowBlockStateEventHandler.class); - @Override public boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, StateEvent stateEvent) throws StateEventHandleError { - logger.info("Handle workflow instance state block event"); + log.info("Handle workflow instance state block event"); Optional taskInstanceOptional = workflowExecuteRunnable.getTaskInstance(stateEvent.getTaskInstanceId()); if (!taskInstanceOptional.isPresent()) { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowEventQueue.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowEventQueue.java index 2c6e647d3e..18e47ba86e 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowEventQueue.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowEventQueue.java @@ -19,15 +19,14 @@ package 
org.apache.dolphinscheduler.server.master.event; import java.util.concurrent.LinkedBlockingQueue; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Component; @Component +@Slf4j public class WorkflowEventQueue { - private final Logger logger = LoggerFactory.getLogger(WorkflowEventQueue.class); - private static final LinkedBlockingQueue workflowEventQueue = new LinkedBlockingQueue<>(); /** @@ -35,7 +34,7 @@ public class WorkflowEventQueue { */ public void addEvent(WorkflowEvent workflowEvent) { workflowEventQueue.add(workflowEvent); - logger.info("Added workflow event to workflowEvent queue, event: {}", workflowEvent); + log.info("Added workflow event to workflowEvent queue, event: {}", workflowEvent); } /** diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowStartEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowStartEventHandler.java index b66c972ed6..e6e9113d48 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowStartEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowStartEventHandler.java @@ -27,16 +27,15 @@ import org.apache.dolphinscheduler.server.master.runner.WorkflowSubmitStatue; import java.util.concurrent.CompletableFuture; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component +@Slf4j public class WorkflowStartEventHandler implements WorkflowEventHandler { - private final Logger logger = LoggerFactory.getLogger(WorkflowStartEventHandler.class); - @Autowired private ProcessInstanceExecCacheManager processInstanceExecCacheManager; @@ -51,7 +50,7 @@ public class 
WorkflowStartEventHandler implements WorkflowEventHandler { @Override public void handleWorkflowEvent(final WorkflowEvent workflowEvent) throws WorkflowEventHandleError { - logger.info("Handle workflow start event, begin to start a workflow, event: {}", workflowEvent); + log.info("Handle workflow start event, begin to start a workflow, event: {}", workflowEvent); WorkflowExecuteRunnable workflowExecuteRunnable = processInstanceExecCacheManager.getByProcessInstanceId( workflowEvent.getWorkflowInstanceId()); if (workflowExecuteRunnable == null) { @@ -63,13 +62,13 @@ public class WorkflowStartEventHandler implements WorkflowEventHandler { CompletableFuture.supplyAsync(workflowExecuteRunnable::call, workflowExecuteThreadPool) .thenAccept(workflowSubmitStatue -> { if (WorkflowSubmitStatue.SUCCESS == workflowSubmitStatue) { - logger.info("Success submit the workflow instance"); + log.info("Success submit the workflow instance"); if (processInstance.getTimeout() > 0) { stateWheelExecuteThread.addProcess4TimeoutCheck(processInstance); } } else { // submit failed will resend the event to workflow event queue - logger.error("Failed to submit the workflow instance, will resend the workflow start event: {}", + log.error("Failed to submit the workflow instance, will resend the workflow start event: {}", workflowEvent); workflowEventQueue.addEvent(workflowEvent); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowStateEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowStateEventHandler.java index 65015f0458..73b444a559 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowStateEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowStateEventHandler.java @@ -23,16 +23,14 @@ import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import 
org.apache.dolphinscheduler.server.master.metrics.ProcessInstanceMetrics; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.auto.service.AutoService; @AutoService(StateEventHandler.class) +@Slf4j public class WorkflowStateEventHandler implements StateEventHandler { - private static final Logger logger = LoggerFactory.getLogger(WorkflowStateEventHandler.class); - @Override public boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, StateEvent stateEvent) throws StateEventHandleException { @@ -41,7 +39,7 @@ public class WorkflowStateEventHandler implements StateEventHandler { ProcessInstance processInstance = workflowExecuteRunnable.getProcessInstance(); ProcessDefinition processDefinition = processInstance.getProcessDefinition(); - logger.info( + log.info( "Handle workflow instance state event, the current workflow instance state {} will be changed to {}", processInstance.getState(), workflowStateEvent.getStatus()); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowTimeoutStateEventHandler.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowTimeoutStateEventHandler.java index d68e97c5e3..e7bc579ea4 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowTimeoutStateEventHandler.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/event/WorkflowTimeoutStateEventHandler.java @@ -21,19 +21,17 @@ import org.apache.dolphinscheduler.common.enums.StateEventType; import org.apache.dolphinscheduler.server.master.metrics.ProcessInstanceMetrics; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import 
com.google.auto.service.AutoService; @AutoService(StateEventHandler.class) +@Slf4j public class WorkflowTimeoutStateEventHandler implements StateEventHandler { - private static final Logger logger = LoggerFactory.getLogger(WorkflowTimeoutStateEventHandler.class); - @Override public boolean handleStateEvent(WorkflowExecuteRunnable workflowExecuteRunnable, StateEvent stateEvent) { - logger.info("Handle workflow instance timeout event"); + log.info("Handle workflow instance timeout event"); ProcessInstanceMetrics.incProcessInstanceByState("timeout"); workflowExecuteRunnable.processTimeout(); return true; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessor.java index 39c4776ce1..b69937357a 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/CacheProcessor.java @@ -24,8 +24,8 @@ import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cache.Cache; import org.springframework.cache.CacheManager; @@ -38,10 +38,9 @@ import io.netty.channel.Channel; * cache process from master/api */ @Component +@Slf4j public class CacheProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(CacheProcessor.class); - @Autowired private CacheManager cacheManager; @@ -52,7 +51,7 @@ public class CacheProcessor implements NettyRequestProcessor { CacheExpireCommand cacheExpireCommand = 
JSONUtils.parseObject(command.getBody(), CacheExpireCommand.class); - logger.info("received command : {}", cacheExpireCommand); + log.info("received command : {}", cacheExpireCommand); this.cacheExpire(cacheExpireCommand); } @@ -67,7 +66,7 @@ public class CacheProcessor implements NettyRequestProcessor { Cache cache = cacheManager.getCache(cacheType.getCacheName()); if (cache != null) { cache.evict(cacheExpireCommand.getCacheKey()); - logger.info("cache evict, type:{}, key:{}", cacheType.getCacheName(), cacheExpireCommand.getCacheKey()); + log.info("cache evict, type:{}, key:{}", cacheType.getCacheName(), cacheExpireCommand.getCacheKey()); } } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/HostUpdateResponseProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/HostUpdateResponseProcessor.java index c96d4fec06..c5a2af34da 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/HostUpdateResponseProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/HostUpdateResponseProcessor.java @@ -22,16 +22,14 @@ import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Preconditions; import io.netty.channel.Channel; +@Slf4j public class HostUpdateResponseProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(HostUpdateResponseProcessor.class); - @Override public void process(Channel channel, Command command) { Preconditions.checkArgument(CommandType.PROCESS_HOST_UPDATE_RESPONSE == command.getType(), @@ -39,6 +37,6 @@ public class HostUpdateResponseProcessor 
implements NettyRequestProcessor { HostUpdateResponseProcessor responseCommand = JSONUtils.parseObject(command.getBody(), HostUpdateResponseProcessor.class); - logger.info("received process host response command : {}", responseCommand); + log.info("received process host response command : {}", responseCommand); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/StateEventProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/StateEventProcessor.java index c9d9b6fe8e..de856c393a 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/StateEventProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/StateEventProcessor.java @@ -30,8 +30,8 @@ import org.apache.dolphinscheduler.server.master.event.WorkflowStateEvent; import org.apache.dolphinscheduler.server.master.processor.queue.StateEventResponseService; import org.apache.dolphinscheduler.service.utils.LoggerUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -42,10 +42,9 @@ import io.netty.channel.Channel; * handle state event received from master/api */ @Component +@Slf4j public class StateEventProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(StateEventProcessor.class); - @Autowired private StateEventResponseService stateEventResponseService; @@ -67,7 +66,7 @@ public class StateEventProcessor implements NettyRequestProcessor { LoggerUtils.setWorkflowAndTaskInstanceIDMDC(stateEvent.getProcessInstanceId(), stateEvent.getTaskInstanceId()); - logger.info("Received state change command, event: {}", stateEvent); + log.info("Received state change command, event: {}", stateEvent); 
stateEventResponseService.addStateChangeEvent(stateEvent); } finally { LoggerUtils.removeWorkflowAndTaskInstanceIdMDC(); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskEventProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskEventProcessor.java index e5f0e0f099..a1ea9822d2 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskEventProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskEventProcessor.java @@ -27,8 +27,8 @@ import org.apache.dolphinscheduler.server.master.event.TaskStateEvent; import org.apache.dolphinscheduler.server.master.processor.queue.StateEventResponseService; import org.apache.dolphinscheduler.service.utils.LoggerUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,10 +39,9 @@ import io.netty.channel.Channel; * handle state event received from master/api */ @Component +@Slf4j public class TaskEventProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskEventProcessor.class); - @Autowired private StateEventResponseService stateEventResponseService; @@ -63,7 +62,7 @@ public class TaskEventProcessor implements NettyRequestProcessor { try { LoggerUtils.setWorkflowAndTaskInstanceIDMDC(stateEvent.getProcessInstanceId(), stateEvent.getTaskInstanceId()); - logger.info("Received task event change command, event: {}", stateEvent); + log.info("Received task event change command, event: {}", stateEvent); stateEventResponseService.addEvent2WorkflowExecute(stateEvent); } finally { LoggerUtils.removeWorkflowAndTaskInstanceIdMDC(); diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteResponseProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteResponseProcessor.java index 378562609c..4bc43184f9 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteResponseProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteResponseProcessor.java @@ -26,8 +26,8 @@ import org.apache.dolphinscheduler.server.master.processor.queue.TaskEvent; import org.apache.dolphinscheduler.server.master.processor.queue.TaskEventService; import org.apache.dolphinscheduler.service.utils.LoggerUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -38,10 +38,9 @@ import io.netty.channel.Channel; * task execute response processor */ @Component +@Slf4j public class TaskExecuteResponseProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskExecuteResponseProcessor.class); - @Autowired private TaskEventService taskEventService; @@ -65,7 +64,7 @@ public class TaskExecuteResponseProcessor implements NettyRequestProcessor { try { LoggerUtils.setWorkflowAndTaskInstanceIDMDC(taskResultEvent.getProcessInstanceId(), taskResultEvent.getTaskInstanceId()); - logger.info("Received task execute result, event: {}", taskResultEvent); + log.info("Received task execute result, event: {}", taskResultEvent); taskEventService.addEvent(taskResultEvent); } finally { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteRunningProcessor.java 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteRunningProcessor.java index 2a0d265033..1a1788f5df 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteRunningProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteRunningProcessor.java @@ -25,8 +25,8 @@ import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.master.processor.queue.TaskEvent; import org.apache.dolphinscheduler.server.master.processor.queue.TaskEventService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -37,10 +37,9 @@ import io.netty.channel.Channel; * task execute running processor */ @Component +@Slf4j public class TaskExecuteRunningProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskExecuteRunningProcessor.class); - @Autowired private TaskEventService taskEventService; @@ -56,7 +55,7 @@ public class TaskExecuteRunningProcessor implements NettyRequestProcessor { String.format("invalid command type : %s", command.getType())); TaskExecuteRunningCommand taskExecuteRunningMessage = JSONUtils.parseObject(command.getBody(), TaskExecuteRunningCommand.class); - logger.info("taskExecuteRunningCommand: {}", taskExecuteRunningMessage); + log.info("taskExecuteRunningCommand: {}", taskExecuteRunningMessage); TaskEvent taskEvent = TaskEvent.newRunningEvent(taskExecuteRunningMessage, channel, diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteStartProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteStartProcessor.java index 
cf80bfbd19..b9d302e2fb 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteStartProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskExecuteStartProcessor.java @@ -27,8 +27,8 @@ import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.master.runner.StreamTaskExecuteRunnable; import org.apache.dolphinscheduler.server.master.runner.StreamTaskExecuteThreadPool; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,10 +39,9 @@ import io.netty.channel.Channel; * task execute start processor, from api to master */ @Component +@Slf4j public class TaskExecuteStartProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskExecuteStartProcessor.class); - @Autowired private StreamTaskExecuteThreadPool streamTaskExecuteThreadPool; @@ -55,12 +54,12 @@ public class TaskExecuteStartProcessor implements NettyRequestProcessor { String.format("invalid command type : %s", command.getType())); TaskExecuteStartCommand taskExecuteStartCommand = JSONUtils.parseObject(command.getBody(), TaskExecuteStartCommand.class); - logger.info("taskExecuteStartCommand: {}", taskExecuteStartCommand); + log.info("taskExecuteStartCommand: {}", taskExecuteStartCommand); TaskDefinition taskDefinition = taskDefinitionDao.findTaskDefinition( taskExecuteStartCommand.getTaskDefinitionCode(), taskExecuteStartCommand.getTaskDefinitionVersion()); if (taskDefinition == null) { - logger.error("Task definition can not be found, taskDefinitionCode:{}, taskDefinitionVersion:{}", + log.error("Task definition can not be found, taskDefinitionCode:{}, taskDefinitionVersion:{}", taskExecuteStartCommand.getTaskDefinitionCode(), 
taskExecuteStartCommand.getTaskDefinitionVersion()); return; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java index 1ff8cd2966..02ee30257a 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskKillResponseProcessor.java @@ -23,8 +23,8 @@ import org.apache.dolphinscheduler.remote.command.CommandType; import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Component; import com.google.common.base.Preconditions; @@ -34,10 +34,9 @@ import io.netty.channel.Channel; * task response processor */ @Component +@Slf4j public class TaskKillResponseProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskKillResponseProcessor.class); - /** * task final result response * need master process , state persistence @@ -52,7 +51,7 @@ public class TaskKillResponseProcessor implements NettyRequestProcessor { TaskKillResponseCommand responseCommand = JSONUtils.parseObject(command.getBody(), TaskKillResponseCommand.class); - logger.info("[TaskInstance-{}] Received task kill response command : {}", + log.info("[TaskInstance-{}] Received task kill response command : {}", responseCommand.getTaskInstanceId(), responseCommand); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskRecallProcessor.java 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskRecallProcessor.java index b023d4ded6..54ee517ae5 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskRecallProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/TaskRecallProcessor.java @@ -26,8 +26,8 @@ import org.apache.dolphinscheduler.server.master.processor.queue.TaskEvent; import org.apache.dolphinscheduler.server.master.processor.queue.TaskEventService; import org.apache.dolphinscheduler.service.utils.LoggerUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -38,10 +38,9 @@ import io.netty.channel.Channel; * task recall processor */ @Component +@Slf4j public class TaskRecallProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskRecallProcessor.class); - @Autowired private TaskEventService taskEventService; @@ -60,7 +59,7 @@ public class TaskRecallProcessor implements NettyRequestProcessor { try { LoggerUtils.setWorkflowAndTaskInstanceIDMDC(recallCommand.getProcessInstanceId(), recallCommand.getTaskInstanceId()); - logger.info("Receive task recall command: {}", recallCommand); + log.info("Receive task recall command: {}", recallCommand); taskEventService.addEvent(taskEvent); } finally { LoggerUtils.removeWorkflowAndTaskInstanceIdMDC(); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/WorkflowExecutingDataRequestProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/WorkflowExecutingDataRequestProcessor.java index 1408b8d42f..6977789b5b 100644 --- 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/WorkflowExecutingDataRequestProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/WorkflowExecutingDataRequestProcessor.java @@ -28,8 +28,8 @@ import org.apache.dolphinscheduler.server.master.service.ExecutingService; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -40,10 +40,9 @@ import io.netty.channel.Channel; * workflow executing data process from api/master */ @Component +@Slf4j public class WorkflowExecutingDataRequestProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(WorkflowExecutingDataRequestProcessor.class); - @Autowired private ExecutingService executingService; @@ -55,7 +54,7 @@ public class WorkflowExecutingDataRequestProcessor implements NettyRequestProces WorkflowExecutingDataRequestCommand requestCommand = JSONUtils.parseObject(command.getBody(), WorkflowExecutingDataRequestCommand.class); - logger.info("received command, processInstanceId:{}", requestCommand.getProcessInstanceId()); + log.info("received command, processInstanceId:{}", requestCommand.getProcessInstanceId()); Optional workflowExecuteDtoOptional = executingService.queryWorkflowExecutingData(requestCommand.getProcessInstanceId()); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/StateEventResponseService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/StateEventResponseService.java index bed95868ce..6633f11c7d 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/StateEventResponseService.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/StateEventResponseService.java @@ -34,21 +34,17 @@ import java.util.concurrent.LinkedBlockingQueue; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import io.netty.channel.Channel; @Component +@Slf4j public class StateEventResponseService { - /** - * logger - */ - private final Logger logger = LoggerFactory.getLogger(StateEventResponseService.class); - /** * attemptQueue */ @@ -98,7 +94,7 @@ public class StateEventResponseService { // check the event is validated eventQueue.put(stateEvent); } catch (InterruptedException e) { - logger.error("Put state event : {} error", stateEvent, e); + log.error("Put state event : {} error", stateEvent, e); Thread.currentThread().interrupt(); } } @@ -114,7 +110,7 @@ public class StateEventResponseService { @Override public void run() { - logger.info("State event loop service started"); + log.info("State event loop service started"); while (!ServerLifeCycleManager.isStopped()) { try { // if not task , blocking here @@ -123,14 +119,14 @@ public class StateEventResponseService { stateEvent.getTaskInstanceId()); persist(stateEvent); } catch (InterruptedException e) { - logger.warn("State event loop service interrupted, will stop this loop", e); + log.warn("State event loop service interrupted, will stop this loop", e); Thread.currentThread().interrupt(); break; } finally { LoggerUtils.removeWorkflowAndTaskInstanceIdMDC(); } } - logger.info("State event loop service stopped"); + log.info("State event loop service stopped"); } } @@ -145,7 +141,7 @@ public class StateEventResponseService { private void persist(StateEvent stateEvent) { try { if 
(!this.processInstanceExecCacheManager.contains(stateEvent.getProcessInstanceId())) { - logger.warn("Persist event into workflow execute thread error, " + log.warn("Persist event into workflow execute thread error, " + "cannot find the workflow instance from cache manager, event: {}", stateEvent); writeResponse(stateEvent); return; @@ -167,7 +163,7 @@ public class StateEventResponseService { // this response is not needed. writeResponse(stateEvent); } catch (Exception e) { - logger.error("Persist event queue error, event: {}", stateEvent, e); + log.error("Persist event queue error, event: {}", stateEvent, e); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskEventService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskEventService.java index 7bf3ca6dca..f19dce2c76 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskEventService.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskEventService.java @@ -30,8 +30,8 @@ import java.util.concurrent.TimeUnit; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,13 +39,9 @@ import org.springframework.stereotype.Component; * task manager */ @Component +@Slf4j public class TaskEventService { - /** - * logger - */ - private final Logger logger = LoggerFactory.getLogger(TaskEventService.class); - /** * attemptQueue */ @@ -64,14 +60,14 @@ public class TaskEventService { @PostConstruct public void start() { this.taskEventThread = new TaskEventDispatchThread(); - logger.info("TaskEvent dispatch thread starting"); + log.info("TaskEvent dispatch thread 
starting"); this.taskEventThread.start(); - logger.info("TaskEvent dispatch thread started"); + log.info("TaskEvent dispatch thread started"); this.taskEventHandlerThread = new TaskEventHandlerThread(); - logger.info("TaskEvent handle thread staring"); + log.info("TaskEvent handle thread staring"); this.taskEventHandlerThread.start(); - logger.info("TaskEvent handle thread started"); + log.info("TaskEvent handle thread started"); } @PreDestroy @@ -88,7 +84,7 @@ public class TaskEventService { taskExecuteThreadPool.eventHandler(); } } catch (Exception e) { - logger.error("TaskEventService stop error:", e); + log.error("TaskEventService stop error:", e); } } @@ -121,10 +117,10 @@ public class TaskEventService { Thread.currentThread().interrupt(); break; } catch (Exception e) { - logger.error("persist task error", e); + log.error("persist task error", e); } } - logger.info("StateEventResponseWorker stopped"); + log.info("StateEventResponseWorker stopped"); } } @@ -139,17 +135,17 @@ public class TaskEventService { @Override public void run() { - logger.info("event handler thread started"); + log.info("event handler thread started"); while (!ServerLifeCycleManager.isStopped()) { try { taskExecuteThreadPool.eventHandler(); TimeUnit.MILLISECONDS.sleep(Constants.SLEEP_TIME_MILLIS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.warn("TaskEvent handle thread interrupted, will return this loop"); + log.warn("TaskEvent handle thread interrupted, will return this loop"); break; } catch (Exception e) { - logger.error("event handler thread error", e); + log.error("event handler thread error", e); } } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskExecuteRunnable.java index f1b84fc8c8..78199f517d 100644 --- 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskExecuteRunnable.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskExecuteRunnable.java @@ -26,16 +26,14 @@ import org.apache.dolphinscheduler.server.master.event.TaskEventHandler; import java.util.Map; import java.util.concurrent.ConcurrentLinkedQueue; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * task execute thread */ +@Slf4j public class TaskExecuteRunnable implements Runnable { - private static final Logger logger = LoggerFactory.getLogger(TaskExecuteRunnable.class); - private final int processInstanceId; private final ConcurrentLinkedQueue events = new ConcurrentLinkedQueue<>(); @@ -54,20 +52,20 @@ public class TaskExecuteRunnable implements Runnable { TaskEvent event = this.events.peek(); try { LogUtils.setWorkflowAndTaskInstanceIDMDC(event.getProcessInstanceId(), event.getTaskInstanceId()); - logger.info("Handle task event begin: {}", event); + log.info("Handle task event begin: {}", event); taskEventHandlerMap.get(event.getEvent()).handleTaskEvent(event); events.remove(event); - logger.info("Handle task event finished: {}", event); + log.info("Handle task event finished: {}", event); } catch (TaskEventHandleException taskEventHandleException) { // we don't need to resubmit this event, since the worker will resubmit this event - logger.error("Handle task event failed, this event will be retry later, event: {}", event, + log.error("Handle task event failed, this event will be retry later, event: {}", event, taskEventHandleException); } catch (TaskEventHandleError taskEventHandleError) { - logger.error("Handle task event error, this event will be removed, event: {}", event, + log.error("Handle task event error, this event will be removed, event: {}", event, taskEventHandleError); events.remove(event); } catch (Exception unknownException) { - 
logger.error("Handle task event error, get a unknown exception, this event will be removed, event: {}", + log.error("Handle task event error, get a unknown exception, this event will be removed, event: {}", event, unknownException); events.remove(event); } finally { @@ -94,7 +92,7 @@ public class TaskExecuteRunnable implements Runnable { public boolean addEvent(TaskEvent event) { if (event.getProcessInstanceId() != this.processInstanceId) { - logger.warn( + log.warn( "event would be abounded, task instance id:{}, process instance id:{}, this.processInstanceId:{}", event.getTaskInstanceId(), event.getProcessInstanceId(), this.processInstanceId); return false; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskExecuteThreadPool.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskExecuteThreadPool.java index ece37e7e40..0bdfbb0e3e 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskExecuteThreadPool.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskExecuteThreadPool.java @@ -30,8 +30,8 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import org.springframework.stereotype.Component; @@ -39,10 +39,9 @@ import org.springframework.util.concurrent.ListenableFuture; import org.springframework.util.concurrent.ListenableFutureCallback; @Component +@Slf4j public class TaskExecuteThreadPool extends ThreadPoolTaskExecutor { - private static final Logger logger = LoggerFactory.getLogger(TaskExecuteThreadPool.class); - private final ConcurrentHashMap multiThreadFilterMap = new 
ConcurrentHashMap<>(); @Autowired @@ -83,7 +82,7 @@ public class TaskExecuteThreadPool extends ThreadPoolTaskExecutor { return; } if (!processInstanceExecCacheManager.contains(taskEvent.getProcessInstanceId())) { - logger.warn("Cannot find workflowExecuteThread from cacheManager, event: {}", taskEvent); + log.warn("Cannot find workflowExecuteThread from cacheManager, event: {}", taskEvent); return; } TaskExecuteRunnable taskExecuteRunnable = taskExecuteThreadMap.computeIfAbsent(taskEvent.getProcessInstanceId(), @@ -111,10 +110,10 @@ public class TaskExecuteThreadPool extends ThreadPoolTaskExecutor { @Override public void onFailure(Throwable ex) { Integer processInstanceId = taskExecuteThread.getProcessInstanceId(); - logger.error("[WorkflowInstance-{}] persist event failed", processInstanceId, ex); + log.error("[WorkflowInstance-{}] persist event failed", processInstanceId, ex); if (!processInstanceExecCacheManager.contains(processInstanceId)) { taskExecuteThreadMap.remove(processInstanceId); - logger.info( + log.info( "[WorkflowInstance-{}] Cannot find processInstance from cacheManager, remove process instance from threadMap", processInstanceId); } @@ -124,10 +123,10 @@ public class TaskExecuteThreadPool extends ThreadPoolTaskExecutor { @Override public void onSuccess(Object result) { Integer processInstanceId = taskExecuteThread.getProcessInstanceId(); - logger.info("[WorkflowInstance-{}] persist events succeeded", processInstanceId); + log.info("[WorkflowInstance-{}] persist events succeeded", processInstanceId); if (!processInstanceExecCacheManager.contains(processInstanceId)) { taskExecuteThreadMap.remove(processInstanceId); - logger.info( + log.info( "[WorkflowInstance-{}] Cannot find processInstance from cacheManager, remove process instance from threadMap", processInstanceId); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterConnectionStateListener.java 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterConnectionStateListener.java index b223c7e8c0..aa82178813 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterConnectionStateListener.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterConnectionStateListener.java @@ -24,14 +24,11 @@ import org.apache.dolphinscheduler.registry.api.RegistryClient; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +@Slf4j public class MasterConnectionStateListener implements ConnectionListener { - private static final Logger logger = LoggerFactory.getLogger(MasterConnectionStateListener.class); - private final MasterConfig masterConfig; private final RegistryClient registryClient; private final MasterConnectStrategy masterConnectStrategy; @@ -46,7 +43,7 @@ public class MasterConnectionStateListener implements ConnectionListener { @Override public void onUpdate(ConnectionState state) { - logger.info("Master received a {} event from registry, the current server state is {}", state, + log.info("Master received a {} event from registry, the current server state is {}", state, ServerLifeCycleManager.getServerStatus()); switch (state) { case CONNECTED: diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClient.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClient.java index c319d393ed..14e4fa24e9 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClient.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryClient.java @@ -33,8 +33,8 @@ import 
org.apache.dolphinscheduler.server.master.task.MasterHeartBeatTask; import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -43,10 +43,9 @@ import org.springframework.stereotype.Component; *

When the Master node startup, it will register in registry center. And start a {@link MasterHeartBeatTask} to update its metadata in registry. */ @Component +@Slf4j public class MasterRegistryClient implements AutoCloseable { - private static final Logger logger = LoggerFactory.getLogger(MasterRegistryClient.class); - @Autowired private FailoverService failoverService; @@ -92,29 +91,29 @@ public class MasterRegistryClient implements AutoCloseable { * @param failover is failover */ public void removeMasterNodePath(String path, NodeType nodeType, boolean failover) { - logger.info("{} node deleted : {}", nodeType, path); + log.info("{} node deleted : {}", nodeType, path); if (StringUtils.isEmpty(path)) { - logger.error("server down error: empty path: {}, nodeType:{}", path, nodeType); + log.error("server down error: empty path: {}, nodeType:{}", path, nodeType); return; } String serverHost = registryClient.getHostByEventDataPath(path); if (StringUtils.isEmpty(serverHost)) { - logger.error("server down error: unknown path: {}, nodeType:{}", path, nodeType); + log.error("server down error: unknown path: {}, nodeType:{}", path, nodeType); return; } try { if (!registryClient.exists(path)) { - logger.info("path: {} not exists", path); + log.info("path: {} not exists", path); } // failover server if (failover) { failoverService.failoverServerWhenDown(serverHost, nodeType); } } catch (Exception e) { - logger.error("{} server failover failed, host:{}", nodeType, serverHost, e); + log.error("{} server failover failed, host:{}", nodeType, serverHost, e); } } @@ -126,17 +125,17 @@ public class MasterRegistryClient implements AutoCloseable { * @param failover is failover */ public void removeWorkerNodePath(String path, NodeType nodeType, boolean failover) { - logger.info("{} node deleted : {}", nodeType, path); + log.info("{} node deleted : {}", nodeType, path); try { String serverHost = null; if (!StringUtils.isEmpty(path)) { serverHost = 
registryClient.getHostByEventDataPath(path); if (StringUtils.isEmpty(serverHost)) { - logger.error("server down error: unknown path: {}", path); + log.error("server down error: unknown path: {}", path); return; } if (!registryClient.exists(path)) { - logger.info("path: {} not exists", path); + log.info("path: {} not exists", path); } } // failover server @@ -144,7 +143,7 @@ public class MasterRegistryClient implements AutoCloseable { failoverService.failoverServerWhenDown(serverHost, nodeType); } } catch (Exception e) { - logger.error("{} server failover failed", nodeType, e); + log.error("{} server failover failed", nodeType, e); } } @@ -152,7 +151,7 @@ public class MasterRegistryClient implements AutoCloseable { * Registry the current master server itself to registry. */ void registry() { - logger.info("Master node : {} registering to registry center", masterConfig.getMasterAddress()); + log.info("Master node : {} registering to registry center", masterConfig.getMasterAddress()); String masterRegistryPath = masterConfig.getMasterRegistryPath(); // remove before persist @@ -160,7 +159,7 @@ public class MasterRegistryClient implements AutoCloseable { registryClient.persistEphemeral(masterRegistryPath, JSONUtils.toJsonString(masterHeartBeatTask.getHeartBeat())); while (!registryClient.checkNodeExists(NetUtils.getHost(), NodeType.MASTER)) { - logger.warn("The current master server node:{} cannot find in registry", NetUtils.getHost()); + log.warn("The current master server node:{} cannot find in registry", NetUtils.getHost()); ThreadUtils.sleep(SLEEP_TIME_MILLIS); } @@ -168,20 +167,20 @@ public class MasterRegistryClient implements AutoCloseable { ThreadUtils.sleep(SLEEP_TIME_MILLIS); masterHeartBeatTask.start(); - logger.info("Master node : {} registered to registry center successfully", masterConfig.getMasterAddress()); + log.info("Master node : {} registered to registry center successfully", masterConfig.getMasterAddress()); } public void deregister() { try { 
registryClient.remove(masterConfig.getMasterRegistryPath()); - logger.info("Master node : {} unRegistry to register center.", masterConfig.getMasterAddress()); + log.info("Master node : {} unRegistry to register center.", masterConfig.getMasterAddress()); if (masterHeartBeatTask != null) { masterHeartBeatTask.shutdown(); } registryClient.close(); } catch (Exception e) { - logger.error("MasterServer remove registry path exception ", e); + log.error("MasterServer remove registry path exception ", e); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java index 5a1c101e4f..513db20059 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryDataListener.java @@ -26,15 +26,13 @@ import org.apache.dolphinscheduler.registry.api.Event; import org.apache.dolphinscheduler.registry.api.SubscribeListener; import org.apache.dolphinscheduler.service.bean.SpringApplicationContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Strings; +@Slf4j public class MasterRegistryDataListener implements SubscribeListener { - private static final Logger logger = LoggerFactory.getLogger(MasterRegistryDataListener.class); - private final MasterRegistryClient masterRegistryClient; public MasterRegistryDataListener() { @@ -60,7 +58,7 @@ public class MasterRegistryDataListener implements SubscribeListener { final String path = event.path(); switch (event.type()) { case ADD: - logger.info("master node added : {}", path); + log.info("master node added : {}", path); break; case REMOVE: masterRegistryClient.removeMasterNodePath(path, 
NodeType.MASTER, true); @@ -75,10 +73,10 @@ public class MasterRegistryDataListener implements SubscribeListener { final String path = event.path(); switch (event.type()) { case ADD: - logger.info("worker node added : {}", path); + log.info("worker node added : {}", path); break; case REMOVE: - logger.info("worker node deleted : {}", path); + log.info("worker node deleted : {}", path); masterRegistryClient.removeWorkerNodePath(path, NodeType.WORKER, true); break; default: diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterStopStrategy.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterStopStrategy.java index 5ac361ddca..bbc38d35fe 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterStopStrategy.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterStopStrategy.java @@ -21,8 +21,8 @@ import org.apache.dolphinscheduler.registry.api.RegistryClient; import org.apache.dolphinscheduler.registry.api.StrategyType; import org.apache.dolphinscheduler.server.master.config.MasterConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Service; @@ -32,10 +32,9 @@ import org.springframework.stereotype.Service; */ @Service @ConditionalOnProperty(prefix = "master.registry-disconnect-strategy", name = "strategy", havingValue = "stop", matchIfMissing = true) +@Slf4j public class MasterStopStrategy implements MasterConnectStrategy { - private final Logger logger = LoggerFactory.getLogger(MasterStopStrategy.class); - @Autowired private RegistryClient registryClient; @Autowired @@ -49,7 +48,7 @@ public class MasterStopStrategy implements 
MasterConnectStrategy { @Override public void reconnect() { - logger.warn("The current connect strategy is stop, so the master will not reconnect to registry"); + log.warn("The current connect strategy is stop, so the master will not reconnect to registry"); } @Override diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterWaitingStrategy.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterWaitingStrategy.java index 4556500db6..6929317ebd 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterWaitingStrategy.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterWaitingStrategy.java @@ -32,8 +32,8 @@ import org.apache.dolphinscheduler.server.master.runner.StateWheelExecuteThread; import java.time.Duration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Service; @@ -43,10 +43,9 @@ import org.springframework.stereotype.Service; */ @Service @ConditionalOnProperty(prefix = "master.registry-disconnect-strategy", name = "strategy", havingValue = "waiting") +@Slf4j public class MasterWaitingStrategy implements MasterConnectStrategy { - private final Logger logger = LoggerFactory.getLogger(MasterWaitingStrategy.class); - @Autowired private MasterConfig masterConfig; @Autowired @@ -67,7 +66,7 @@ public class MasterWaitingStrategy implements MasterConnectStrategy { clearMasterResource(); Duration maxWaitingTime = masterConfig.getRegistryDisconnectStrategy().getMaxWaitingTime(); try { - logger.info("Master disconnect from registry will try to reconnect in {} s", + log.info("Master disconnect from registry will try to reconnect in {} s", 
maxWaitingTime.getSeconds()); registryClient.connectUntilTimeout(maxWaitingTime); } catch (RegistryException ex) { @@ -78,15 +77,15 @@ public class MasterWaitingStrategy implements MasterConnectStrategy { String errorMessage = String.format( "Disconnect from registry and change the current status to waiting error, the current server state is %s, will stop the current server", ServerLifeCycleManager.getServerStatus()); - logger.error(errorMessage, e); + log.error(errorMessage, e); registryClient.getStoppable().stop(errorMessage); } catch (RegistryException ex) { String errorMessage = "Disconnect from registry and waiting to reconnect failed, will stop the server"; - logger.error(errorMessage, ex); + log.error(errorMessage, ex); registryClient.getStoppable().stop(errorMessage); } catch (Exception ex) { String errorMessage = "Disconnect from registry and get an unknown exception, will stop the server"; - logger.error(errorMessage, ex); + log.error(errorMessage, ex); registryClient.getStoppable().stop(errorMessage); } } @@ -94,19 +93,19 @@ public class MasterWaitingStrategy implements MasterConnectStrategy { @Override public void reconnect() { if (ServerLifeCycleManager.isRunning()) { - logger.info("no need to reconnect, as the current server status is running"); + log.info("no need to reconnect, as the current server status is running"); } else { try { ServerLifeCycleManager.recoverFromWaiting(); reStartMasterResource(); - logger.info("Recover from waiting success, the current server status is {}", + log.info("Recover from waiting success, the current server status is {}", ServerLifeCycleManager.getServerStatus()); } catch (Exception e) { String errorMessage = String.format( "Recover from waiting failed, the current server status is %s, will stop the server", ServerLifeCycleManager.getServerStatus()); - logger.error(errorMessage, e); + log.error(errorMessage, e); registryClient.getStoppable().stop(errorMessage); } } @@ -120,19 +119,19 @@ public class 
MasterWaitingStrategy implements MasterConnectStrategy { private void clearMasterResource() { // close the worker resource, if close failed should stop the worker server masterRPCServer.close(); - logger.warn("Master closed RPC server due to lost registry connection"); + log.warn("Master closed RPC server due to lost registry connection"); workflowEventQueue.clearWorkflowEventQueue(); - logger.warn("Master clear workflow event queue due to lost registry connection"); + log.warn("Master clear workflow event queue due to lost registry connection"); processInstanceExecCacheManager.clearCache(); - logger.warn("Master clear process instance cache due to lost registry connection"); + log.warn("Master clear process instance cache due to lost registry connection"); stateWheelExecuteThread.clearAllTasks(); - logger.warn("Master clear all state wheel task due to lost registry connection"); + log.warn("Master clear all state wheel task due to lost registry connection"); } private void reStartMasterResource() { // reopen the resource, if reopen failed should stop the worker server masterRPCServer.start(); - logger.warn("Master restarted RPC server due to reconnect to registry"); + log.warn("Master restarted RPC server due to reconnect to registry"); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java index c6151866c0..d04b7d0510 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/registry/ServerNodeManager.java @@ -62,17 +62,16 @@ import java.util.stream.Collectors; import javax.annotation.PreDestroy; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import 
org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @Service +@Slf4j public class ServerNodeManager implements InitializingBean { - private final Logger logger = LoggerFactory.getLogger(ServerNodeManager.class); - private final Lock masterLock = new ReentrantLock(); private final ReentrantReadWriteLock workerGroupLock = new ReentrantReadWriteLock(); @@ -156,7 +155,7 @@ public class ServerNodeManager implements InitializingBean { // sync worker node info refreshWorkerNodesAndGroupMappings(); } catch (Exception e) { - logger.error("WorkerNodeInfoAndGroupDbSyncTask error:", e); + log.error("WorkerNodeInfoAndGroupDbSyncTask error:", e); } } } @@ -186,17 +185,17 @@ public class ServerNodeManager implements InitializingBean { final String workerAddress = parts[parts.length - 1]; // todo: update workerNodeInfo - logger.debug("received subscribe event : {}", event); + log.debug("received subscribe event : {}", event); if (type == Type.ADD) { - logger.info("Worker: {} added, currentNode : {}", path, workerAddress); + log.info("Worker: {} added, currentNode : {}", path, workerAddress); } else if (type == Type.REMOVE) { - logger.info("Worker node : {} down.", path); + log.info("Worker node : {} down.", path); alertDao.sendServerStoppedAlert(1, path, "WORKER"); } else if (type == Type.UPDATE) { syncSingleWorkerNodeInfo(workerAddress, JSONUtils.parseObject(data, WorkerHeartBeat.class)); } } catch (Exception ex) { - logger.error("WorkerGroupListener capture data change and get data failed", ex); + log.error("WorkerGroupListener capture data change and get data failed", ex); } } } @@ -220,16 +219,16 @@ public class ServerNodeManager implements InitializingBean { if (registryClient.isMasterPath(path)) { try { if (type.equals(Type.ADD)) { - logger.info("master node : {} added.", path); + log.info("master node : {} added.", path); updateMasterNodes(); } if 
(type.equals(Type.REMOVE)) { - logger.info("master node : {} down.", path); + log.info("master node : {} down.", path); updateMasterNodes(); alertDao.sendServerStoppedAlert(1, path, "MASTER"); } } catch (Exception ex) { - logger.error("MasterNodeListener capture data change and get data failed.", ex); + log.error("MasterNodeListener capture data change and get data failed.", ex); } } } @@ -246,7 +245,7 @@ public class ServerNodeManager implements InitializingBean { List masterNodeList = registryClient.getServerList(NodeType.MASTER); syncMasterNodes(currentNodes, masterNodeList); } catch (Exception e) { - logger.error("update master nodes error", e); + log.error("update master nodes error", e); } finally { registryClient.releaseLock(nodeLock); } @@ -312,9 +311,9 @@ public class ServerNodeManager implements InitializingBean { totalSlot = nodes.size(); currentSlot = index; } else { - logger.warn("Current master is not in active master list"); + log.warn("Current master is not in active master list"); } - logger.info("Update master nodes, total master size: {}, current slot: {}", totalSlot, currentSlot); + log.info("Update master nodes, total master size: {}, current slot: {}", totalSlot, currentSlot); } finally { masterLock.unlock(); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/rpc/MasterRPCServer.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/rpc/MasterRPCServer.java index 8255e9a3b0..1a070c10af 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/rpc/MasterRPCServer.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/rpc/MasterRPCServer.java @@ -32,8 +32,8 @@ import org.apache.dolphinscheduler.server.master.processor.TaskKillResponseProce import org.apache.dolphinscheduler.server.master.processor.TaskRecallProcessor; import 
org.apache.dolphinscheduler.server.master.processor.WorkflowExecutingDataRequestProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -41,10 +41,9 @@ import org.springframework.stereotype.Service; * Master RPC Server, used to send/receive request to other system. */ @Service +@Slf4j public class MasterRPCServer implements AutoCloseable { - private static final Logger logger = LoggerFactory.getLogger(MasterRPCServer.class); - private NettyRemotingServer nettyRemotingServer; @Autowired @@ -81,7 +80,7 @@ public class MasterRPCServer implements AutoCloseable { private TaskExecuteStartProcessor taskExecuteStartProcessor; public void start() { - logger.info("Starting Master RPC Server..."); + log.info("Starting Master RPC Server..."); // init remoting server NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(masterConfig.getListenPort()); @@ -98,21 +97,21 @@ public class MasterRPCServer implements AutoCloseable { workflowExecutingDataRequestProcessor); this.nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_START, taskExecuteStartProcessor); - // logger server + // log server this.nettyRemotingServer.registerProcessor(CommandType.GET_LOG_BYTES_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.registerProcessor(CommandType.ROLL_VIEW_LOG_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.registerProcessor(CommandType.VIEW_WHOLE_LOG_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.registerProcessor(CommandType.REMOVE_TAK_LOG_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.start(); - logger.info("Started Master RPC Server..."); + log.info("Started Master RPC Server..."); } @Override public void close() { - logger.info("Closing Master RPC Server..."); + log.info("Closing Master RPC Server..."); this.nettyRemotingServer.close(); - 
logger.info("Closed Master RPC Server..."); + log.info("Closed Master RPC Server..."); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/EventExecuteService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/EventExecuteService.java index a36f01c534..305d0ee680 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/EventExecuteService.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/EventExecuteService.java @@ -26,16 +26,15 @@ import org.apache.dolphinscheduler.service.utils.LoggerUtils; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @Service +@Slf4j public class EventExecuteService extends BaseDaemonThread { - private static final Logger logger = LoggerFactory.getLogger(EventExecuteService.class); - @Autowired private ProcessInstanceExecCacheManager processInstanceExecCacheManager; @@ -54,9 +53,9 @@ public class EventExecuteService extends BaseDaemonThread { @Override public synchronized void start() { - logger.info("Master Event execute service starting"); + log.info("Master Event execute service starting"); super.start(); - logger.info("Master Event execute service started"); + log.info("Master Event execute service started"); } @Override @@ -67,11 +66,11 @@ public class EventExecuteService extends BaseDaemonThread { streamTaskEventHandler(); TimeUnit.MILLISECONDS.sleep(Constants.SLEEP_TIME_MILLIS_SHORT); } catch (InterruptedException interruptedException) { - logger.warn("Master event service interrupted, will exit this loop", interruptedException); + log.warn("Master event service interrupted, will exit this loop", interruptedException); Thread.currentThread().interrupt(); break; 
} catch (Exception e) { - logger.error("Master event execute service error", e); + log.error("Master event execute service error", e); } } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/FailoverExecuteThread.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/FailoverExecuteThread.java index 2ae65c7cb8..068a8c4c3a 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/FailoverExecuteThread.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/FailoverExecuteThread.java @@ -24,16 +24,15 @@ import org.apache.dolphinscheduler.common.thread.ThreadUtils; import org.apache.dolphinscheduler.server.master.config.MasterConfig; import org.apache.dolphinscheduler.server.master.service.MasterFailoverService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @Service +@Slf4j public class FailoverExecuteThread extends BaseDaemonThread { - private static final Logger logger = LoggerFactory.getLogger(FailoverExecuteThread.class); - @Autowired private MasterConfig masterConfig; @@ -49,9 +48,9 @@ public class FailoverExecuteThread extends BaseDaemonThread { @Override public synchronized void start() { - logger.info("Master failover thread staring"); + log.info("Master failover thread staring"); super.start(); - logger.info("Master failover thread stared"); + log.info("Master failover thread stared"); } @Override @@ -68,7 +67,7 @@ public class FailoverExecuteThread extends BaseDaemonThread { // This kind of check may only need to be executed when a master server start masterFailoverService.checkMasterFailover(); } catch (Exception e) { - logger.error("Master failover thread execute error", e); + log.error("Master failover thread execute error", e); 
} finally { ThreadUtils.sleep(masterConfig.getFailoverInterval().toMillis()); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerBootstrap.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerBootstrap.java index 42bbe075ab..e688e758d1 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerBootstrap.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterSchedulerBootstrap.java @@ -53,8 +53,8 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadPoolExecutor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -62,10 +62,9 @@ import org.springframework.stereotype.Service; * Master scheduler thread, this thread will consume the commands from database and trigger processInstance executed. 
*/ @Service +@Slf4j public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCloseable { - private static final Logger logger = LoggerFactory.getLogger(MasterSchedulerBootstrap.class); - @Autowired private ProcessService processService; @@ -130,16 +129,16 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl @Override public synchronized void start() { - logger.info("Master schedule bootstrap starting.."); + log.info("Master schedule bootstrap starting.."); super.start(); workflowEventLooper.start(); - logger.info("Master schedule bootstrap started..."); + log.info("Master schedule bootstrap started..."); } @Override public void close() { - logger.info("Master schedule bootstrap stopping..."); - logger.info("Master schedule bootstrap stopped..."); + log.info("Master schedule bootstrap stopping..."); + log.info("Master schedule bootstrap stopped..."); } /** @@ -151,7 +150,7 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl try { if (!ServerLifeCycleManager.isRunning()) { // the current server is not at running status, cannot consume command. 
- logger.warn("The current server {} is not at running status, cannot consumes commands.", + log.warn("The current server {} is not at running status, cannot consumes commands.", this.masterAddress); Thread.sleep(Constants.SLEEP_TIME_MILLIS); } @@ -159,7 +158,7 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl boolean isOverload = OSUtils.isOverload(masterConfig.getMaxCpuLoadAvg(), masterConfig.getReservedMemory()); if (isOverload) { - logger.warn("The current server {} is overload, cannot consumes commands.", this.masterAddress); + log.warn("The current server {} is overload, cannot consumes commands.", this.masterAddress); MasterServerMetrics.incMasterOverload(); Thread.sleep(Constants.SLEEP_TIME_MILLIS); continue; @@ -183,7 +182,7 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl try { LoggerUtils.setWorkflowInstanceIdMDC(processInstance.getId()); if (processInstanceExecCacheManager.contains(processInstance.getId())) { - logger.error( + log.error( "The workflow instance is already been cached, this case shouldn't be happened"); } WorkflowExecuteRunnable workflowRunnable = new WorkflowExecuteRunnable(processInstance, @@ -205,11 +204,11 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl } }); } catch (InterruptedException interruptedException) { - logger.warn("Master schedule bootstrap interrupted, close the loop", interruptedException); + log.warn("Master schedule bootstrap interrupted, close the loop", interruptedException); Thread.currentThread().interrupt(); break; } catch (Exception e) { - logger.error("Master schedule workflow error", e); + log.error("Master schedule workflow error", e); // sleep for 1s here to avoid the database down cause the exception boom ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } @@ -218,7 +217,7 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl private List command2ProcessInstance(List commands) 
throws InterruptedException { long commandTransformStartTime = System.currentTimeMillis(); - logger.info("Master schedule bootstrap transforming command to ProcessInstance, commandSize: {}", + log.info("Master schedule bootstrap transforming command to ProcessInstance, commandSize: {}", commands.size()); List processInstances = Collections.synchronizedList(new ArrayList<>(commands.size())); CountDownLatch latch = new CountDownLatch(commands.size()); @@ -231,18 +230,18 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl // by only one master SlotCheckState slotCheckState = slotCheck(command); if (slotCheckState.equals(SlotCheckState.CHANGE) || slotCheckState.equals(SlotCheckState.INJECT)) { - logger.info("Master handle command {} skip, slot check state: {}", command.getId(), + log.info("Master handle command {} skip, slot check state: {}", command.getId(), slotCheckState); return; } ProcessInstance processInstance = processService.handleCommand(masterAddress, command); if (processInstance != null) { processInstances.add(processInstance); - logger.info("Master handle command {} end, create process instance {}", command.getId(), + log.info("Master handle command {} end, create process instance {}", command.getId(), processInstance.getId()); } } catch (Exception e) { - logger.error("Master handle command {} error ", command.getId(), e); + log.error("Master handle command {} error ", command.getId(), e); commandService.moveToErrorCommand(command, e.toString()); } finally { latch.countDown(); @@ -252,7 +251,7 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl // make sure to finish handling command each time before next scan latch.await(); - logger.info( + log.info( "Master schedule bootstrap transformed command to ProcessInstance, commandSize: {}, processInstanceSize: {}", commands.size(), processInstances.size()); ProcessInstanceMetrics @@ -266,7 +265,7 @@ public class MasterSchedulerBootstrap extends 
BaseDaemonThread implements AutoCl int thisMasterSlot = serverNodeManager.getSlot(); int masterCount = serverNodeManager.getMasterSize(); if (masterCount <= 0) { - logger.warn("Master count: {} is invalid, the current slot: {}", masterCount, thisMasterSlot); + log.warn("Master count: {} is invalid, the current slot: {}", masterCount, thisMasterSlot); return Collections.emptyList(); } int pageSize = masterConfig.getFetchCommandNum(); @@ -274,7 +273,7 @@ public class MasterSchedulerBootstrap extends BaseDaemonThread implements AutoCl commandService.findCommandPageBySlot(pageSize, masterCount, thisMasterSlot); if (CollectionUtils.isNotEmpty(result)) { long cost = System.currentTimeMillis() - scheduleStartTime; - logger.info( + log.info( "Master schedule bootstrap loop command success, fetch command size: {}, cost: {}ms, current slot: {}, total slot size: {}", result.size(), cost, thisMasterSlot, masterCount); ProcessInstanceMetrics.recordCommandQueryTime(cost); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StateWheelExecuteThread.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StateWheelExecuteThread.java index 94fefea634..1b2c603b8e 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StateWheelExecuteThread.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StateWheelExecuteThread.java @@ -41,9 +41,8 @@ import java.util.concurrent.ConcurrentLinkedQueue; import javax.annotation.PostConstruct; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Component; @@ -56,10 +55,9 @@ import org.springframework.stereotype.Component; * 4. 
timeout process check */ @Component +@Slf4j public class StateWheelExecuteThread extends BaseDaemonThread { - private static final Logger logger = LoggerFactory.getLogger(StateWheelExecuteThread.class); - /** * ProcessInstance timeout check list, element is the processInstanceId. */ @@ -109,12 +107,12 @@ public class StateWheelExecuteThread extends BaseDaemonThread { checkTask4State(); checkProcess4Timeout(); } catch (Exception e) { - logger.error("state wheel thread check error:", e); + log.error("state wheel thread check error:", e); } try { Thread.sleep(checkInterval); } catch (InterruptedException e) { - logger.error("state wheel thread sleep error, will close the loop", e); + log.error("state wheel thread sleep error, will close the loop", e); Thread.currentThread().interrupt(); break; } @@ -123,13 +121,13 @@ public class StateWheelExecuteThread extends BaseDaemonThread { public void addProcess4TimeoutCheck(ProcessInstance processInstance) { processInstanceTimeoutCheckList.add(processInstance.getId()); - logger.info("Success add workflow instance {} into timeout check list", processInstance.getId()); + log.info("Success add workflow instance {} into timeout check list", processInstance.getId()); } public void removeProcess4TimeoutCheck(int processInstanceId) { boolean removeFlag = processInstanceTimeoutCheckList.remove(processInstanceId); if (removeFlag) { - logger.info("Success remove workflow instance {} from timeout check list", processInstanceId); + log.info("Success remove workflow instance {} from timeout check list", processInstanceId); } } @@ -143,27 +141,27 @@ public class StateWheelExecuteThread extends BaseDaemonThread { WorkflowExecuteRunnable workflowExecuteThread = processInstanceExecCacheManager.getByProcessInstanceId( processInstanceId); if (workflowExecuteThread == null) { - logger.warn( + log.warn( "Check workflow timeout failed, can not find workflowExecuteThread from cache manager, will remove this workflowInstance from check list"); 
processInstanceTimeoutCheckList.remove(processInstanceId); continue; } ProcessInstance processInstance = workflowExecuteThread.getProcessInstance(); if (processInstance == null) { - logger.warn("Check workflow timeout failed, the workflowInstance is null"); + log.warn("Check workflow timeout failed, the workflowInstance is null"); continue; } long timeRemain = DateUtils.getRemainTime(processInstance.getStartTime(), (long) processInstance.getTimeout() * Constants.SEC_2_MINUTES_TIME_UNIT); if (timeRemain < 0) { - logger.info("Workflow instance {} timeout, adding timeout event", processInstance.getId()); + log.info("Workflow instance {} timeout, adding timeout event", processInstance.getId()); addProcessTimeoutEvent(processInstance); processInstanceTimeoutCheckList.remove(processInstance.getId()); - logger.info("Workflow instance timeout, added timeout event"); + log.info("Workflow instance timeout, added timeout event"); } } catch (Exception ex) { - logger.error("Check workflow instance timeout error"); + log.error("Check workflow instance timeout error"); } finally { LoggerUtils.removeWorkflowInstanceIdMDC(); } @@ -172,72 +170,72 @@ public class StateWheelExecuteThread extends BaseDaemonThread { public void addTask4TimeoutCheck(@NonNull ProcessInstance processInstance, @NonNull TaskInstance taskInstance) { TaskInstanceKey taskInstanceKey = TaskInstanceKey.getTaskInstanceKey(processInstance, taskInstance); - logger.info("Adding task instance into timeout check list"); + log.info("Adding task instance into timeout check list"); if (taskInstanceTimeoutCheckList.contains(taskInstanceKey)) { - logger.warn("Task instance is already in timeout check list"); + log.warn("Task instance is already in timeout check list"); return; } TaskDefinition taskDefinition = taskInstance.getTaskDefine(); if (taskDefinition == null) { - logger.error("Failed to add task instance into timeout check list, taskDefinition is null"); + log.error("Failed to add task instance into timeout check 
list, taskDefinition is null"); return; } if (TimeoutFlag.OPEN == taskDefinition.getTimeoutFlag()) { taskInstanceTimeoutCheckList.add(taskInstanceKey); - logger.info("Timeout flag is open, added task instance into timeout check list"); + log.info("Timeout flag is open, added task instance into timeout check list"); } if (taskInstance.isDependTask() || taskInstance.isSubProcess()) { taskInstanceTimeoutCheckList.add(taskInstanceKey); - logger.info("task instance is dependTask orSubProcess, added task instance into timeout check list"); + log.info("task instance is dependTask orSubProcess, added task instance into timeout check list"); } } public void removeTask4TimeoutCheck(@NonNull ProcessInstance processInstance, @NonNull TaskInstance taskInstance) { TaskInstanceKey taskInstanceKey = TaskInstanceKey.getTaskInstanceKey(processInstance, taskInstance); taskInstanceTimeoutCheckList.remove(taskInstanceKey); - logger.info("remove task instance from timeout check list"); + log.info("remove task instance from timeout check list"); } public void addTask4RetryCheck(@NonNull ProcessInstance processInstance, @NonNull TaskInstance taskInstance) { - logger.info("Adding task instance into retry check list"); + log.info("Adding task instance into retry check list"); TaskInstanceKey taskInstanceKey = TaskInstanceKey.getTaskInstanceKey(processInstance, taskInstance); if (taskInstanceRetryCheckList.contains(taskInstanceKey)) { - logger.warn("Task instance is already in retry check list"); + log.warn("Task instance is already in retry check list"); return; } TaskDefinition taskDefinition = taskInstance.getTaskDefine(); if (taskDefinition == null) { - logger.error("Add task instance into retry check list error, taskDefinition is null"); + log.error("Add task instance into retry check list error, taskDefinition is null"); return; } taskInstanceRetryCheckList.add(taskInstanceKey); - logger.info("[WorkflowInstance-{}][TaskInstanceKey-{}:{}] Added task instance into retry check list", + 
log.info("[WorkflowInstance-{}][TaskInstanceKey-{}:{}] Added task instance into retry check list", processInstance.getId(), taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion()); } public void removeTask4RetryCheck(@NonNull ProcessInstance processInstance, @NonNull TaskInstance taskInstance) { TaskInstanceKey taskInstanceKey = TaskInstanceKey.getTaskInstanceKey(processInstance, taskInstance); taskInstanceRetryCheckList.remove(taskInstanceKey); - logger.info("remove task instance from retry check list"); + log.info("remove task instance from retry check list"); } public void addTask4StateCheck(@NonNull ProcessInstance processInstance, @NonNull TaskInstance taskInstance) { - logger.info("Adding task instance into state check list"); + log.info("Adding task instance into state check list"); TaskInstanceKey taskInstanceKey = TaskInstanceKey.getTaskInstanceKey(processInstance, taskInstance); if (taskInstanceStateCheckList.contains(taskInstanceKey)) { - logger.warn("Task instance is already in state check list"); + log.warn("Task instance is already in state check list"); return; } if (taskInstance.isDependTask() || taskInstance.isSubProcess()) { taskInstanceStateCheckList.add(taskInstanceKey); - logger.info("Added task instance into state check list"); + log.info("Added task instance into state check list"); } } public void removeTask4StateCheck(@NonNull ProcessInstance processInstance, @NonNull TaskInstance taskInstance) { TaskInstanceKey taskInstanceKey = TaskInstanceKey.getTaskInstanceKey(processInstance, taskInstance); taskInstanceStateCheckList.remove(taskInstanceKey); - logger.info("Removed task instance from state check list"); + log.info("Removed task instance from state check list"); } public void clearAllTasks() { @@ -260,7 +258,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { WorkflowExecuteRunnable workflowExecuteThread = processInstanceExecCacheManager.getByProcessInstanceId(processInstanceId); if (workflowExecuteThread == 
null) { - logger.warn( + log.warn( "Check task instance timeout failed, can not find workflowExecuteThread from cache manager, will remove this check task"); taskInstanceTimeoutCheckList.remove(taskInstanceKey); continue; @@ -268,7 +266,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { Optional taskInstanceOptional = workflowExecuteThread.getActiveTaskInstanceByTaskCode(taskCode); if (!taskInstanceOptional.isPresent()) { - logger.warn( + log.warn( "Check task instance timeout failed, can not get taskInstance from workflowExecuteThread, taskCode: {}" + "will remove this check task", taskCode); @@ -281,13 +279,13 @@ public class StateWheelExecuteThread extends BaseDaemonThread { (long) taskInstance.getTaskDefine().getTimeout() * Constants.SEC_2_MINUTES_TIME_UNIT); if (timeRemain < 0) { - logger.info("Task instance is timeout, adding task timeout event and remove the check"); + log.info("Task instance is timeout, adding task timeout event and remove the check"); addTaskTimeoutEvent(taskInstance); taskInstanceTimeoutCheckList.remove(taskInstanceKey); } } } catch (Exception ex) { - logger.error("Check task timeout error, taskInstanceKey: {}", taskInstanceKey, ex); + log.error("Check task timeout error, taskInstanceKey: {}", taskInstanceKey, ex); } finally { LoggerUtils.removeWorkflowInstanceIdMDC(); } @@ -309,7 +307,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { processInstanceExecCacheManager.getByProcessInstanceId(processInstanceId); if (workflowExecuteThread == null) { - logger.warn( + log.warn( "Task instance retry check failed, can not find workflowExecuteThread from cache manager, " + "will remove this check task"); taskInstanceRetryCheckList.remove(taskInstanceKey); @@ -321,7 +319,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { ProcessInstance processInstance = workflowExecuteThread.getProcessInstance(); if (processInstance.getState().isReadyStop()) { - logger.warn( + log.warn( "The process instance is 
ready to stop, will send process stop event and remove the check task"); addProcessStopEvent(processInstance); taskInstanceRetryCheckList.remove(taskInstanceKey); @@ -329,7 +327,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { } if (!taskInstanceOptional.isPresent()) { - logger.warn( + log.warn( "Task instance retry check failed, can not find taskInstance from workflowExecuteThread, will remove this check"); taskInstanceRetryCheckList.remove(taskInstanceKey); continue; @@ -344,7 +342,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { // reset taskInstance endTime and state // todo relative function: TaskInstance.retryTaskIntervalOverTime, // WorkflowExecuteThread.cloneRetryTaskInstance - logger.info("[TaskInstanceKey-{}:{}]The task instance can retry, will retry this task instance", + log.info("[TaskInstanceKey-{}:{}]The task instance can retry, will retry this task instance", taskInstance.getTaskCode(), taskInstance.getTaskDefinitionVersion()); taskInstance.setEndTime(null); taskInstance.setState(TaskExecutionStatus.SUBMITTED_SUCCESS); @@ -353,7 +351,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { taskInstanceRetryCheckList.remove(taskInstanceKey); } } catch (Exception ex) { - logger.error("Check task retry error, taskInstanceKey: {}", taskInstanceKey, ex); + log.error("Check task retry error, taskInstanceKey: {}", taskInstanceKey, ex); } finally { LoggerUtils.removeWorkflowInstanceIdMDC(); } @@ -373,7 +371,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { WorkflowExecuteRunnable workflowExecuteThread = processInstanceExecCacheManager.getByProcessInstanceId(processInstanceId); if (workflowExecuteThread == null) { - logger.warn( + log.warn( "Task instance state check failed, can not find workflowExecuteThread from cache manager, will remove this check task"); taskInstanceStateCheckList.remove(taskInstanceKey); continue; @@ -381,7 +379,7 @@ public class StateWheelExecuteThread extends 
BaseDaemonThread { Optional taskInstanceOptional = workflowExecuteThread.getActiveTaskInstanceByTaskCode(taskCode); if (!taskInstanceOptional.isPresent()) { - logger.warn( + log.warn( "Task instance state check failed, can not find taskInstance from workflowExecuteThread, will remove this check event"); taskInstanceStateCheckList.remove(taskInstanceKey); continue; @@ -392,7 +390,7 @@ public class StateWheelExecuteThread extends BaseDaemonThread { } addTaskStateChangeEvent(taskInstance); } catch (Exception ex) { - logger.error("Task state check error, taskInstanceKey: {}", taskInstanceKey, ex); + log.error("Task state check error, taskInstanceKey: {}", taskInstanceKey, ex); } finally { LoggerUtils.removeWorkflowInstanceIdMDC(); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java index d4761b04a9..0d6eabfad1 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteRunnable.java @@ -75,17 +75,14 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.NonNull; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * stream task execute */ +@Slf4j public class StreamTaskExecuteRunnable implements Runnable { - private static final Logger logger = LoggerFactory.getLogger(StreamTaskExecuteRunnable.class); - protected MasterConfig masterConfig; protected ProcessService processService; @@ -168,13 +165,13 @@ public class StreamTaskExecuteRunnable implements Runnable { dispatcher.dispatch(executionContext); dispatchSuccess = true; } catch (ExecuteException e) { - logger.error("Master dispatch task to worker error, 
taskInstanceId: {}, worker: {}", + log.error("Master dispatch task to worker error, taskInstanceId: {}, worker: {}", taskInstance.getId(), executionContext.getHost(), e); } if (!dispatchSuccess) { - logger.info("Master failed to dispatch task to worker, taskInstanceId: {}, worker: {}", + log.info("Master failed to dispatch task to worker, taskInstanceId: {}, worker: {}", taskInstance.getId(), executionContext.getHost()); @@ -187,7 +184,7 @@ public class StreamTaskExecuteRunnable implements Runnable { // set started flag taskRunnableStatus = TaskRunnableStatus.STARTED; - logger.info("Master success dispatch task to worker, taskInstanceId: {}, worker: {}", + log.info("Master success dispatch task to worker, taskInstanceId: {}, worker: {}", taskInstance.getId(), executionContext.getHost()); } @@ -198,7 +195,7 @@ public class StreamTaskExecuteRunnable implements Runnable { public boolean addTaskEvent(TaskEvent taskEvent) { if (taskInstance.getId() != taskEvent.getTaskInstanceId()) { - logger.info("state event would be abounded, taskInstanceId:{}, eventType:{}, state:{}", + log.info("state event would be abounded, taskInstanceId:{}, eventType:{}, state:{}", taskEvent.getTaskInstanceId(), taskEvent.getEvent(), taskEvent.getState()); return false; } @@ -215,7 +212,7 @@ public class StreamTaskExecuteRunnable implements Runnable { */ public void handleEvents() { if (!isStart()) { - logger.info( + log.info( "The stream task instance is not started, will not handle its state event, current state event size: {}", taskEvents.size()); return; @@ -226,23 +223,23 @@ public class StreamTaskExecuteRunnable implements Runnable { taskEvent = this.taskEvents.peek(); LoggerUtils.setTaskInstanceIdMDC(taskEvent.getTaskInstanceId()); - logger.info("Begin to handle state event, {}", taskEvent); + log.info("Begin to handle state event, {}", taskEvent); if (this.handleTaskEvent(taskEvent)) { this.taskEvents.remove(taskEvent); } } catch (StateEventHandleError stateEventHandleError) { - 
logger.error("State event handle error, will remove this event: {}", taskEvent, stateEventHandleError); + log.error("State event handle error, will remove this event: {}", taskEvent, stateEventHandleError); this.taskEvents.remove(taskEvent); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } catch (StateEventHandleException stateEventHandleException) { - logger.error("State event handle error, will retry this event: {}", + log.error("State event handle error, will retry this event: {}", taskEvent, stateEventHandleException); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } catch (Exception e) { // we catch the exception here, since if the state event handle failed, the state event will still keep // in the stateEvents queue. - logger.error("State event handle error, get a unknown exception, will retry this event: {}", + log.error("State event handle error, get a unknown exception, will retry this event: {}", taskEvent, e); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); @@ -339,7 +336,7 @@ public class StreamTaskExecuteRunnable implements Runnable { // verify tenant is null if (tenant == null) { - logger.error("tenant not exists,task instance id : {}", taskInstance.getId()); + log.error("tenant not exists,task instance id : {}", taskInstance.getId()); return null; } @@ -429,7 +426,7 @@ public class StreamTaskExecuteRunnable implements Runnable { if (taskInstance.getState().isFinished()) { streamTaskInstanceExecCacheManager.removeByTaskInstanceId(taskInstance.getId()); - logger.info("The stream task instance is finish, taskInstanceId:{}, state:{}", taskInstance.getId(), + log.info("The stream task instance is finish, taskInstanceId:{}, state:{}", taskInstance.getId(), taskEvent.getState()); } @@ -439,7 +436,7 @@ public class StreamTaskExecuteRunnable implements Runnable { private void measureTaskState(TaskEvent taskEvent) { if (taskEvent == null || taskEvent.getState() == null) { // the event is broken - logger.warn("The task event is broken..., taskEvent: {}", 
taskEvent); + log.warn("The task event is broken..., taskEvent: {}", taskEvent); return; } if (taskEvent.getState().isFinished()) { diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteThreadPool.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteThreadPool.java index 29977317c4..7c9508f5ed 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteThreadPool.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/StreamTaskExecuteThreadPool.java @@ -22,8 +22,8 @@ import org.apache.dolphinscheduler.service.utils.LoggerUtils; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import org.springframework.stereotype.Component; @@ -34,10 +34,9 @@ import org.springframework.util.concurrent.ListenableFutureCallback; * Used to execute {@link StreamTaskExecuteRunnable}. 
*/ @Component +@Slf4j public class StreamTaskExecuteThreadPool extends ThreadPoolTaskExecutor { - private static final Logger logger = LoggerFactory.getLogger(StreamTaskExecuteThreadPool.class); - @Autowired private MasterConfig masterConfig; @@ -63,14 +62,14 @@ public class StreamTaskExecuteThreadPool extends ThreadPoolTaskExecutor { @Override public void onFailure(Throwable ex) { LoggerUtils.setTaskInstanceIdMDC(taskInstanceId); - logger.error("Stream task instance events handle failed", ex); + log.error("Stream task instance events handle failed", ex); LoggerUtils.removeTaskInstanceIdMDC(); } @Override public void onSuccess(Object result) { LoggerUtils.setTaskInstanceIdMDC(taskInstanceId); - logger.info("Stream task instance is finished."); + log.info("Stream task instance is finished."); LoggerUtils.removeTaskInstanceIdMDC(); } }); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowEventLooper.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowEventLooper.java index 3466b92bc3..eb978bc766 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowEventLooper.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowEventLooper.java @@ -35,16 +35,15 @@ import java.util.Map; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component +@Slf4j public class WorkflowEventLooper extends BaseDaemonThread { - private final Logger logger = LoggerFactory.getLogger(WorkflowEventLooper.class); - @Autowired private WorkflowEventQueue workflowEventQueue; @@ -66,9 +65,9 @@ public class WorkflowEventLooper extends BaseDaemonThread { @Override public synchronized void start() { - 
logger.info("WorkflowEventLooper thread starting"); + log.info("WorkflowEventLooper thread starting"); super.start(); - logger.info("WorkflowEventLooper thread started"); + log.info("WorkflowEventLooper thread started"); } public void run() { @@ -77,25 +76,25 @@ public class WorkflowEventLooper extends BaseDaemonThread { try { workflowEvent = workflowEventQueue.poolEvent(); LoggerUtils.setWorkflowInstanceIdMDC(workflowEvent.getWorkflowInstanceId()); - logger.info("Workflow event looper receive a workflow event: {}, will handle this", workflowEvent); + log.info("Workflow event looper receive a workflow event: {}, will handle this", workflowEvent); WorkflowEventHandler workflowEventHandler = workflowEventHandlerMap.get(workflowEvent.getWorkflowEventType()); workflowEventHandler.handleWorkflowEvent(workflowEvent); } catch (InterruptedException e) { - logger.warn("WorkflowEventLooper thread is interrupted, will close this loop", e); + log.warn("WorkflowEventLooper thread is interrupted, will close this loop", e); Thread.currentThread().interrupt(); break; } catch (WorkflowEventHandleException workflowEventHandleException) { - logger.error("Handle workflow event failed, will add this event to event queue again, event: {}", + log.error("Handle workflow event failed, will add this event to event queue again, event: {}", workflowEvent, workflowEventHandleException); workflowEventQueue.addEvent(workflowEvent); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } catch (WorkflowEventHandleError workflowEventHandleError) { - logger.error("Handle workflow event error, will drop this event, event: {}", + log.error("Handle workflow event error, will drop this event, event: {}", workflowEvent, workflowEventHandleError); } catch (Exception unknownException) { - logger.error( + log.error( "Handle workflow event failed, get a unknown exception, will add this event to event queue again, event: {}", workflowEvent, unknownException); workflowEventQueue.addEvent(workflowEvent); diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java index 7c75a6517f..5ebbf8c8ff 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteRunnable.java @@ -119,9 +119,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.BeanUtils; import com.google.common.collect.Lists; @@ -130,10 +129,9 @@ import com.google.common.collect.Sets; /** * Workflow execute task, used to execute a workflow instance. */ +@Slf4j public class WorkflowExecuteRunnable implements Callable { - private static final Logger logger = LoggerFactory.getLogger(WorkflowExecuteRunnable.class); - private final ProcessService processService; private final CommandService commandService; @@ -290,7 +288,7 @@ public class WorkflowExecuteRunnable implements Callable { */ public void handleEvents() { if (!isStart()) { - logger.info( + log.info( "The workflow instance is not started, will not handle its state event, current state event size: {}", stateEvents); return; @@ -312,21 +310,21 @@ public class WorkflowExecuteRunnable implements Callable { StateEventHandlerManager.getStateEventHandler(stateEvent.getType()) .orElseThrow(() -> new StateEventHandleError( "Cannot find handler for the given state event")); - logger.info("Begin to handle state event, {}", stateEvent); + log.info("Begin to handle state event, {}", stateEvent); if (stateEventHandler.handleStateEvent(this, stateEvent)) { this.stateEvents.remove(stateEvent); } } catch (StateEventHandleError 
stateEventHandleError) { - logger.error("State event handle error, will remove this event: {}", stateEvent, stateEventHandleError); + log.error("State event handle error, will remove this event: {}", stateEvent, stateEventHandleError); this.stateEvents.remove(stateEvent); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } catch (StateEventHandleException stateEventHandleException) { - logger.error("State event handle error, will retry this event: {}", + log.error("State event handle error, will retry this event: {}", stateEvent, stateEventHandleException); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } catch (StateEventHandleFailure stateEventHandleFailure) { - logger.error("State event handle failed, will move event to the tail: {}", + log.error("State event handle failed, will move event to the tail: {}", stateEvent, stateEventHandleFailure); this.stateEvents.remove(stateEvent); @@ -335,7 +333,7 @@ public class WorkflowExecuteRunnable implements Callable { } catch (Exception e) { // we catch the exception here, since if the state event handle failed, the state event will still keep // in the stateEvents queue. 
- logger.error("State event handle error, get a unknown exception, will retry this event: {}", + log.error("State event handle error, get a unknown exception, will retry this event: {}", stateEvent, e); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); @@ -359,7 +357,7 @@ public class WorkflowExecuteRunnable implements Callable { public boolean addStateEvent(StateEvent stateEvent) { if (processInstance.getId() != stateEvent.getProcessInstanceId()) { - logger.info("state event would be abounded :{}", stateEvent); + log.info("state event would be abounded :{}", stateEvent); return false; } this.stateEvents.add(stateEvent); @@ -377,29 +375,29 @@ public class WorkflowExecuteRunnable implements Callable { public boolean checkForceStartAndWakeUp(StateEvent stateEvent) { TaskGroupQueue taskGroupQueue = this.processService.loadTaskGroupQueue(stateEvent.getTaskInstanceId()); if (taskGroupQueue.getForceStart() == Flag.YES.getCode()) { - logger.info("Begin to force start taskGroupQueue: {}", taskGroupQueue.getId()); + log.info("Begin to force start taskGroupQueue: {}", taskGroupQueue.getId()); TaskInstance taskInstance = this.taskInstanceDao.findTaskInstanceById(stateEvent.getTaskInstanceId()); ITaskProcessor taskProcessor = activeTaskProcessorMaps.get(taskInstance.getTaskCode()); taskProcessor.action(TaskAction.DISPATCH); this.processService.updateTaskGroupQueueStatus(taskGroupQueue.getTaskId(), TaskGroupQueueStatus.ACQUIRE_SUCCESS.getCode()); - logger.info("Success force start taskGroupQueue: {}", taskGroupQueue.getId()); + log.info("Success force start taskGroupQueue: {}", taskGroupQueue.getId()); return true; } if (taskGroupQueue.getInQueue() == Flag.YES.getCode()) { - logger.info("Begin to wake up taskGroupQueue: {}", taskGroupQueue.getId()); + log.info("Begin to wake up taskGroupQueue: {}", taskGroupQueue.getId()); boolean acquireTaskGroup = processService.robTaskGroupResource(taskGroupQueue); if (acquireTaskGroup) { TaskInstance taskInstance = 
this.taskInstanceDao.findTaskInstanceById(stateEvent.getTaskInstanceId()); ITaskProcessor taskProcessor = activeTaskProcessorMaps.get(taskInstance.getTaskCode()); taskProcessor.action(TaskAction.DISPATCH); - logger.info("Success wake up taskGroupQueue: {}", taskGroupQueue.getId()); + log.info("Success wake up taskGroupQueue: {}", taskGroupQueue.getId()); return true; } - logger.warn("Failed to wake up taskGroupQueue, taskGroupQueueId: {}", taskGroupQueue.getId()); + log.warn("Failed to wake up taskGroupQueue, taskGroupQueueId: {}", taskGroupQueue.getId()); return false; } else { - logger.info( + log.info( "Failed to wake up the taskGroupQueue: {}, since the taskGroupQueue is not in queue, will no need to wake up.", taskGroupQueue); return true; @@ -417,7 +415,7 @@ public class WorkflowExecuteRunnable implements Callable { } public void taskFinished(TaskInstance taskInstance) throws StateEventHandleException { - logger.info("TaskInstance finished task code:{} state:{}", taskInstance.getTaskCode(), taskInstance.getState()); + log.info("TaskInstance finished task code:{} state:{}", taskInstance.getTaskCode(), taskInstance.getState()); try { activeTaskProcessorMaps.remove(taskInstance.getTaskCode()); @@ -439,7 +437,7 @@ public class WorkflowExecuteRunnable implements Callable { } } else if (taskInstance.taskCanRetry() && !processInstance.getState().isReadyStop()) { // retry task - logger.info("Retry taskInstance taskInstance state: {}", taskInstance.getState()); + log.info("Retry taskInstance taskInstance state: {}", taskInstance.getState()); retryTaskInstance(taskInstance); } else if (taskInstance.getState().isFailure()) { completeTaskMap.put(taskInstance.getTaskCode(), taskInstance.getId()); @@ -458,12 +456,12 @@ public class WorkflowExecuteRunnable implements Callable { // todo: when the task instance type is pause, then it should not in completeTaskMap completeTaskMap.put(taskInstance.getTaskCode(), taskInstance.getId()); } - logger.info("TaskInstance finished will 
try to update the workflow instance state, task code:{} state:{}", + log.info("TaskInstance finished will try to update the workflow instance state, task code:{} state:{}", taskInstance.getTaskCode(), taskInstance.getState()); this.updateProcessInstanceState(); } catch (Exception ex) { - logger.error("Task finish failed, get a exception, will remove this taskInstance from completeTaskMap", ex); + log.error("Task finish failed, get a exception, will remove this taskInstance from completeTaskMap", ex); // remove the task from complete map, so that we can finish in the next time. completeTaskMap.remove(taskInstance.getTaskCode()); throw ex; @@ -505,14 +503,14 @@ public class WorkflowExecuteRunnable implements Callable { } TaskInstance newTaskInstance = cloneRetryTaskInstance(taskInstance); if (newTaskInstance == null) { - logger.error("Retry task fail because new taskInstance is null, task code:{}, task id:{}", + log.error("Retry task fail because new taskInstance is null, task code:{}, task id:{}", taskInstance.getTaskCode(), taskInstance.getId()); return; } waitToRetryTaskInstanceMap.put(newTaskInstance.getTaskCode(), newTaskInstance); if (!taskInstance.retryTaskIntervalOverTime()) { - logger.info( + log.info( "Failure task will be submitted, process id: {}, task instance code: {}, state: {}, retry times: {} / {}, interval: {}", processInstance.getId(), newTaskInstance.getTaskCode(), newTaskInstance.getState(), newTaskInstance.getRetryTimes(), newTaskInstance.getMaxRetryTimes(), @@ -530,7 +528,7 @@ public class WorkflowExecuteRunnable implements Callable { * update process instance */ public void refreshProcessInstance(int processInstanceId) { - logger.info("process instance update: {}", processInstanceId); + log.info("process instance update: {}", processInstanceId); ProcessInstance newProcessInstance = processService.findProcessInstanceById(processInstanceId); // just update the processInstance field(this is soft copy) BeanUtils.copyProperties(newProcessInstance, 
processInstance); @@ -544,10 +542,10 @@ public class WorkflowExecuteRunnable implements Callable { * update task instance */ public void refreshTaskInstance(int taskInstanceId) { - logger.info("task instance update: {} ", taskInstanceId); + log.info("task instance update: {} ", taskInstanceId); TaskInstance taskInstance = taskInstanceDao.findTaskInstanceById(taskInstanceId); if (taskInstance == null) { - logger.error("can not find task instance, id:{}", taskInstanceId); + log.error("can not find task instance, id:{}", taskInstanceId); return; } processService.packageTaskInstance(taskInstance, processInstance); @@ -628,7 +626,7 @@ public class WorkflowExecuteRunnable implements Callable { public void processBlock() { ProjectUser projectUser = processService.queryProjectWithUserByProcessInstanceId(processInstance.getId()); processAlertManager.sendProcessBlockingAlert(processInstance, projectUser); - logger.info("processInstance {} block alert send successful!", processInstance.getId()); + log.info("processInstance {} block alert send successful!", processInstance.getId()); } public boolean processComplementData() { @@ -648,23 +646,23 @@ public class WorkflowExecuteRunnable implements Callable { } else if (processInstance.getState().isFinished()) { endProcess(); if (complementListDate.isEmpty()) { - logger.info("process complement end. process id:{}", processInstance.getId()); + log.info("process complement end. process id:{}", processInstance.getId()); return true; } int index = complementListDate.indexOf(scheduleDate); if (index >= complementListDate.size() - 1 || !processInstance.getState().isSuccess()) { - logger.info("process complement end. process id:{}", processInstance.getId()); + log.info("process complement end. process id:{}", processInstance.getId()); // complement data ends || no success return true; } - logger.info("process complement continue. process id:{}, schedule time:{} complementListDate:{}", + log.info("process complement continue. 
process id:{}, schedule time:{} complementListDate:{}", processInstance.getId(), processInstance.getScheduleTime(), complementListDate); scheduleDate = complementListDate.get(index + 1); } // the next process complement int create = this.createComplementDataCommand(scheduleDate); if (create > 0) { - logger.info("create complement data command successfully."); + log.info("create complement data command successfully."); } return true; } @@ -719,7 +717,7 @@ public class WorkflowExecuteRunnable implements Callable { public WorkflowSubmitStatue call() { if (isStart()) { // This case should not been happened - logger.warn("[WorkflowInstance-{}] The workflow has already been started", processInstance.getId()); + log.warn("[WorkflowInstance-{}] The workflow has already been started", processInstance.getId()); return WorkflowSubmitStatue.DUPLICATED_SUBMITTED; } @@ -728,21 +726,21 @@ public class WorkflowExecuteRunnable implements Callable { if (workflowRunnableStatus == WorkflowRunnableStatus.CREATED) { buildFlowDag(); workflowRunnableStatus = WorkflowRunnableStatus.INITIALIZE_DAG; - logger.info("workflowStatue changed to :{}", workflowRunnableStatus); + log.info("workflowStatue changed to :{}", workflowRunnableStatus); } if (workflowRunnableStatus == WorkflowRunnableStatus.INITIALIZE_DAG) { initTaskQueue(); workflowRunnableStatus = WorkflowRunnableStatus.INITIALIZE_QUEUE; - logger.info("workflowStatue changed to :{}", workflowRunnableStatus); + log.info("workflowStatue changed to :{}", workflowRunnableStatus); } if (workflowRunnableStatus == WorkflowRunnableStatus.INITIALIZE_QUEUE) { submitPostNode(null); workflowRunnableStatus = WorkflowRunnableStatus.STARTED; - logger.info("workflowStatue changed to :{}", workflowRunnableStatus); + log.info("workflowStatue changed to :{}", workflowRunnableStatus); } return WorkflowSubmitStatue.SUCCESS; } catch (Exception e) { - logger.error("Start workflow error", e); + log.error("Start workflow error", e); return 
WorkflowSubmitStatue.FAILED; } finally { LoggerUtils.removeWorkflowInstanceIdMDC(); @@ -834,12 +832,12 @@ public class WorkflowExecuteRunnable implements Callable { ProcessDag processDag = generateFlowDag(taskNodeList, startNodeNameList, recoveryNodeCodeList, processInstance.getTaskDependType()); if (processDag == null) { - logger.error("ProcessDag is null"); + log.error("ProcessDag is null"); return; } // generate process dag dag = DagHelper.buildDagGraph(processDag); - logger.info("Build dag success, dag: {}", dag); + log.info("Build dag success, dag: {}", dag); } /** @@ -854,7 +852,7 @@ public class WorkflowExecuteRunnable implements Callable { errorTaskMap.clear(); if (!isNewProcessInstance()) { - logger.info("The workflowInstance is not a newly running instance, runtimes: {}, recover flag: {}", + log.info("The workflowInstance is not a newly running instance, runtimes: {}, recover flag: {}", processInstance.getRunTimes(), processInstance.getRecovery()); List validTaskInstanceList = @@ -863,12 +861,12 @@ public class WorkflowExecuteRunnable implements Callable { for (TaskInstance task : validTaskInstanceList) { try { LoggerUtils.setWorkflowAndTaskInstanceIDMDC(task.getProcessInstanceId(), task.getId()); - logger.info( + log.info( "Check the taskInstance from a exist workflowInstance, existTaskInstanceCode: {}, taskInstanceStatus: {}", task.getTaskCode(), task.getState()); if (validTaskMap.containsKey(task.getTaskCode())) { - logger.warn( + log.warn( "Have same taskCode taskInstance when init task queue, need to check taskExecutionStatus, taskCode:{}", task.getTaskCode()); int oldTaskInstanceId = validTaskMap.get(task.getTaskCode()); @@ -884,7 +882,7 @@ public class WorkflowExecuteRunnable implements Callable { taskInstanceMap.put(task.getId(), task); if (task.isTaskComplete()) { - logger.info("TaskInstance is already complete."); + log.info("TaskInstance is already complete."); completeTaskMap.put(task.getTaskCode(), task.getId()); continue; } @@ -894,7 +892,7 
@@ public class WorkflowExecuteRunnable implements Callable { } if (task.taskCanRetry()) { if (task.getState().isNeedFaultTolerance()) { - logger.info("TaskInstance needs fault tolerance, will be added to standby list."); + log.info("TaskInstance needs fault tolerance, will be added to standby list."); task.setFlag(Flag.NO); taskInstanceDao.updateTaskInstance(task); @@ -902,7 +900,7 @@ public class WorkflowExecuteRunnable implements Callable { TaskInstance tolerantTaskInstance = cloneTolerantTaskInstance(task); addTaskToStandByList(tolerantTaskInstance); } else { - logger.info("Retry taskInstance, taskState: {}", task.getState()); + log.info("Retry taskInstance, taskState: {}", task.getState()); retryTaskInstance(task); } continue; @@ -916,7 +914,7 @@ public class WorkflowExecuteRunnable implements Callable { } clearDataIfExecuteTask(); } else { - logger.info("The current workflowInstance is a newly running workflowInstance"); + log.info("The current workflowInstance is a newly running workflowInstance"); } if (processInstance.isComplementData() && complementListDate.isEmpty()) { @@ -941,7 +939,7 @@ public class WorkflowExecuteRunnable implements Callable { if (cmdParam.containsKey(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST)) { complementListDate = CronUtils.getSelfScheduleDateList(cmdParam); } - logger.info(" process definition code:{} complement data: {}", + log.info(" process definition code:{} complement data: {}", processInstance.getProcessDefinitionCode(), complementListDate); if (!complementListDate.isEmpty() && Flag.NO == processInstance.getIsSubProcess()) { @@ -958,7 +956,7 @@ public class WorkflowExecuteRunnable implements Callable { } } } - logger.info("Initialize task queue, dependFailedTaskSet: {}, completeTaskMap: {}, errorTaskMap: {}", + log.info("Initialize task queue, dependFailedTaskSet: {}, completeTaskMap: {}, errorTaskMap: {}", dependFailedTaskSet, completeTaskMap, errorTaskMap); @@ -985,7 +983,7 @@ public class WorkflowExecuteRunnable 
implements Callable { boolean submit = taskProcessor.action(TaskAction.SUBMIT); if (!submit) { - logger.error("Submit standby task failed!, taskCode: {}, taskName: {}", + log.error("Submit standby task failed!, taskCode: {}, taskName: {}", taskInstance.getTaskCode(), taskInstance.getName()); return Optional.empty(); @@ -1019,7 +1017,7 @@ public class WorkflowExecuteRunnable implements Callable { taskInstance.getProcessInstanceId(), taskInstance.getTaskGroupPriority()); if (!acquireTaskGroup) { - logger.info( + log.info( "Submitted task will not be dispatch right now because the first time to try to acquire" + " task group failed, taskInstanceName: {}, taskGroupId: {}", taskInstance.getName(), taskGroupId); @@ -1029,7 +1027,7 @@ public class WorkflowExecuteRunnable implements Callable { boolean dispatchSuccess = taskProcessor.action(TaskAction.DISPATCH); if (!dispatchSuccess) { - logger.error("Dispatch standby process {} task {} failed", processInstance.getName(), + log.error("Dispatch standby process {} task {} failed", processInstance.getName(), taskInstance.getName()); return Optional.empty(); } @@ -1058,7 +1056,7 @@ public class WorkflowExecuteRunnable implements Callable { } return Optional.of(taskInstance); } catch (Exception e) { - logger.error("Submit standby task {} error, taskCode: {}", taskInstance.getName(), + log.error("Submit standby task {} error, taskCode: {}", taskInstance.getName(), taskInstance.getTaskCode(), e); return Optional.empty(); } finally { @@ -1079,7 +1077,7 @@ public class WorkflowExecuteRunnable implements Callable { nettyExecutorManager.doExecute(host, hostUpdateCommand.convert2Command()); } catch (Exception e) { // Do we need to catch this exception? 
- logger.error("notify process host update", e); + log.error("notify process host update", e); } } @@ -1125,7 +1123,7 @@ public class WorkflowExecuteRunnable implements Callable { public TaskInstance cloneRetryTaskInstance(TaskInstance taskInstance) { TaskNode taskNode = dag.getNode(Long.toString(taskInstance.getTaskCode())); if (taskNode == null) { - logger.error("Clone retry taskInstance error because taskNode is null, taskCode:{}", + log.error("Clone retry taskInstance error because taskNode is null, taskCode:{}", taskInstance.getTaskCode()); return null; } @@ -1153,7 +1151,7 @@ public class WorkflowExecuteRunnable implements Callable { public TaskInstance cloneTolerantTaskInstance(TaskInstance taskInstance) { TaskNode taskNode = dag.getNode(Long.toString(taskInstance.getTaskCode())); if (taskNode == null) { - logger.error("Clone tolerant taskInstance error because taskNode is null, taskCode:{}", + log.error("Clone tolerant taskInstance error because taskNode is null, taskCode:{}", taskInstance.getTaskCode()); return null; } @@ -1339,7 +1337,7 @@ public class WorkflowExecuteRunnable implements Callable { Integer taskInstanceId = entry.getValue(); TaskInstance taskInstance = taskInstanceMap.get(taskInstanceId); if (taskInstance == null) { - logger.warn("Cannot find the taskInstance from taskInstanceMap, taskInstanceId: {}, taskConde: {}", + log.warn("Cannot find the taskInstance from taskInstanceMap, taskInstanceId: {}, taskConde: {}", taskInstanceId, taskConde); // This case will happen when we submit to db failed, then the taskInstanceId is 0 @@ -1397,16 +1395,16 @@ public class WorkflowExecuteRunnable implements Callable { for (TaskInstance task : taskInstances) { if (readyToSubmitTaskQueue.contains(task)) { - logger.warn("Task is already at submit queue, taskInstanceId: {}", task.getId()); + log.warn("Task is already at submit queue, taskInstanceId: {}", task.getId()); continue; } if (task.getId() != null && completeTaskMap.containsKey(task.getTaskCode())) { 
- logger.info("Task has already run success, taskName: {}", task.getName()); + log.info("Task has already run success, taskName: {}", task.getName()); continue; } if (task.getState().isKill()) { - logger.info("Task is be stopped, the state is {}, taskInstanceId: {}", task.getState(), task.getId()); + log.info("Task is be stopped, the state is {}, taskInstanceId: {}", task.getState(), task.getId()); continue; } @@ -1458,7 +1456,7 @@ public class WorkflowExecuteRunnable implements Callable { } } } - logger.info("The dependTasks of task all success, currentTaskCode: {}, dependTaskCodes: {}", + log.info("The dependTasks of task all success, currentTaskCode: {}, dependTaskCodes: {}", taskCode, Arrays.toString(completeTaskMap.keySet().toArray())); return DependResult.SUCCESS; } @@ -1491,7 +1489,7 @@ public class WorkflowExecuteRunnable implements Callable { List nextTaskList = DagHelper.parseConditionTask(dependNodeName, skipTaskNodeMap, dag, getCompleteTaskInstanceMap()); if (!nextTaskList.contains(nextNodeName)) { - logger.info( + log.info( "DependTask is a condition task, and its next condition branch does not hava current task, " + "dependTaskCode: {}, currentTaskCode: {}", dependNodeName, nextNodeName); @@ -1563,7 +1561,7 @@ public class WorkflowExecuteRunnable implements Callable { */ private boolean processFailed() { if (hasFailedTask()) { - logger.info("The current process has failed task, the current process failed"); + log.info("The current process has failed task, the current process failed"); if (processInstance.getFailureStrategy() == FailureStrategy.END) { return true; } @@ -1632,21 +1630,21 @@ public class WorkflowExecuteRunnable implements Callable { if (activeTaskProcessorMaps.size() > 0 || hasRetryTaskInStandBy()) { // active task and retry task exists WorkflowExecutionStatus executionStatus = runningState(state); - logger.info("The workflowInstance has task running, the workflowInstance status is {}", executionStatus); + log.info("The workflowInstance 
has task running, the workflowInstance status is {}", executionStatus); return executionStatus; } // block if (state == WorkflowExecutionStatus.READY_BLOCK) { WorkflowExecutionStatus executionStatus = processReadyBlock(); - logger.info("The workflowInstance is ready to block, the workflowInstance status is {}", executionStatus); + log.info("The workflowInstance is ready to block, the workflowInstance status is {}", executionStatus); return executionStatus; } // pause if (state == WorkflowExecutionStatus.READY_PAUSE) { WorkflowExecutionStatus executionStatus = processReadyPause(); - logger.info("The workflowInstance is ready to pause, the workflow status is {}", executionStatus); + log.info("The workflowInstance is ready to pause, the workflow status is {}", executionStatus); return executionStatus; } @@ -1660,13 +1658,13 @@ public class WorkflowExecuteRunnable implements Callable { } else { executionStatus = WorkflowExecutionStatus.SUCCESS; } - logger.info("The workflowInstance is ready to stop, the workflow status is {}", executionStatus); + log.info("The workflowInstance is ready to stop, the workflow status is {}", executionStatus); return executionStatus; } // process failure if (processFailed()) { - logger.info("The workflowInstance is failed, the workflow status is {}", WorkflowExecutionStatus.FAILURE); + log.info("The workflowInstance is failed, the workflow status is {}", WorkflowExecutionStatus.FAILURE); return WorkflowExecutionStatus.FAILURE; } @@ -1710,7 +1708,7 @@ public class WorkflowExecuteRunnable implements Callable { private void updateProcessInstanceState() throws StateEventHandleException { WorkflowExecutionStatus state = getProcessInstanceState(processInstance); if (processInstance.getState() != state) { - logger.info("Update workflowInstance states, origin state: {}, target state: {}", + log.info("Update workflowInstance states, origin state: {}, target state: {}", processInstance.getState(), state); updateWorkflowInstanceStatesToDB(state); @@ 
-1723,7 +1721,7 @@ public class WorkflowExecuteRunnable implements Callable { // replace with `stateEvents`, make sure `WorkflowExecuteThread` can be deleted to avoid memory leaks this.stateEvents.add(stateEvent); } else { - logger.info("There is no need to update the workflow instance state, origin state: {}, target state: {}", + log.info("There is no need to update the workflow instance state, origin state: {}, target state: {}", processInstance.getState(), state); } @@ -1740,7 +1738,7 @@ public class WorkflowExecuteRunnable implements Callable { private void updateWorkflowInstanceStatesToDB(WorkflowExecutionStatus newStates) throws StateEventHandleException { WorkflowExecutionStatus originStates = processInstance.getState(); if (originStates != newStates) { - logger.info("Begin to update workflow instance state , state will change from {} to {}", + log.info("Begin to update workflow instance state , state will change from {} to {}", originStates, newStates); @@ -1776,11 +1774,11 @@ public class WorkflowExecuteRunnable implements Callable { */ public void addTaskToStandByList(TaskInstance taskInstance) { if (readyToSubmitTaskQueue.contains(taskInstance)) { - logger.warn("Task already exists in ready submit queue, no need to add again, task code:{}", + log.warn("Task already exists in ready submit queue, no need to add again, task code:{}", taskInstance.getTaskCode()); return; } - logger.info("Add task to stand by list, task name:{}, task id:{}, task code:{}", + log.info("Add task to stand by list, task name:{}, task id:{}, task code:{}", taskInstance.getName(), taskInstance.getId(), taskInstance.getTaskCode()); @@ -1815,7 +1813,7 @@ public class WorkflowExecuteRunnable implements Callable { * close the on going tasks */ public void killAllTasks() { - logger.info("kill called on process instance id: {}, num: {}", + log.info("kill called on process instance id: {}, num: {}", processInstance.getId(), activeTaskProcessorMaps.size()); @@ -1870,7 +1868,7 @@ public 
class WorkflowExecuteRunnable implements Callable { TaskInstance retryTask = taskInstanceDao.findTaskInstanceById(task.getId()); if (retryTask != null && retryTask.getState().isForceSuccess()) { task.setState(retryTask.getState()); - logger.info( + log.info( "Task {} has been forced success, put it into complete task list and stop retrying, taskInstanceId: {}", task.getName(), task.getId()); removeTaskFromStandbyList(task); @@ -1890,13 +1888,13 @@ public class WorkflowExecuteRunnable implements Callable { } DependResult dependResult = getDependResultForTask(task); if (DependResult.SUCCESS == dependResult) { - logger.info("The dependResult of task {} is success, so ready to submit to execute", task.getName()); + log.info("The dependResult of task {} is success, so ready to submit to execute", task.getName()); Optional taskInstanceOptional = submitTaskExec(task); if (!taskInstanceOptional.isPresent()) { this.taskFailedSubmit = true; // Remove and add to complete map and error map if (!removeTaskFromStandbyList(task)) { - logger.error( + log.error( "Task submit failed, remove from standby list failed, workflowInstanceId: {}, taskCode: {}", processInstance.getId(), task.getTaskCode()); @@ -1905,7 +1903,7 @@ public class WorkflowExecuteRunnable implements Callable { taskInstanceMap.put(task.getId(), task); errorTaskMap.put(task.getTaskCode(), task.getId()); activeTaskProcessorMaps.remove(task.getTaskCode()); - logger.error("Task submitted failed, workflowInstanceId: {}, taskInstanceId: {}, taskCode: {}", + log.error("Task submitted failed, workflowInstanceId: {}, taskInstanceId: {}, taskCode: {}", task.getProcessInstanceId(), task.getId(), task.getTaskCode()); @@ -1916,12 +1914,12 @@ public class WorkflowExecuteRunnable implements Callable { // if the dependency fails, the current node is not submitted and the state changes to failure. 
dependFailedTaskSet.add(task.getTaskCode()); removeTaskFromStandbyList(task); - logger.info("Task dependent result is failed, taskInstanceId:{} depend result : {}", task.getId(), + log.info("Task dependent result is failed, taskInstanceId:{} depend result : {}", task.getId(), dependResult); } else if (DependResult.NON_EXEC == dependResult) { // for some reasons(depend task pause/stop) this task would not be submit removeTaskFromStandbyList(task); - logger.info("Remove task due to depend result not executed, taskInstanceId:{} depend result : {}", + log.info("Remove task due to depend result not executed, taskInstanceId:{} depend result : {}", task.getId(), dependResult); } } @@ -2019,7 +2017,7 @@ public class WorkflowExecuteRunnable implements Callable { */ private boolean isNewProcessInstance() { if (Flag.YES.equals(processInstance.getRecovery())) { - logger.info("This workInstance will be recover by this execution"); + log.info("This workInstance will be recover by this execution"); return false; } @@ -2027,7 +2025,7 @@ public class WorkflowExecuteRunnable implements Callable { && processInstance.getRunTimes() == 1) { return true; } - logger.info( + log.info( "The workflowInstance has been executed before, this execution is to reRun, processInstance status: {}, runTimes: {}", processInstance.getState(), processInstance.getRunTimes()); @@ -2038,7 +2036,7 @@ public class WorkflowExecuteRunnable implements Callable { ITaskProcessor taskProcessor = activeTaskProcessorMaps.get(taskCode); if (taskProcessor != null) { taskProcessor.action(TaskAction.RESUBMIT); - logger.debug("RESUBMIT: task code:{}", taskCode); + log.debug("RESUBMIT: task code:{}", taskCode); } else { throw new Exception("resubmit error, taskProcessor is null, task code: " + taskCode); } @@ -2169,7 +2167,7 @@ public class WorkflowExecuteRunnable implements Callable { try { taskInstanceDao.updateTaskInstance(taskInstance); } catch (Exception e) { - logger.error("update task instance cache key failed", e); 
+ log.error("update task instance cache key failed", e); } } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThreadPool.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThreadPool.java index be4d55eeed..e2194d6e50 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThreadPool.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/WorkflowExecuteThreadPool.java @@ -39,9 +39,8 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.PostConstruct; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import org.springframework.stereotype.Component; @@ -54,10 +53,9 @@ import com.google.common.base.Strings; * Used to execute {@link WorkflowExecuteRunnable}. 
*/ @Component +@Slf4j public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor { - private static final Logger logger = LoggerFactory.getLogger(WorkflowExecuteThreadPool.class); - @Autowired private MasterConfig masterConfig; @@ -93,12 +91,12 @@ public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor { WorkflowExecuteRunnable workflowExecuteThread = processInstanceExecCacheManager.getByProcessInstanceId(stateEvent.getProcessInstanceId()); if (workflowExecuteThread == null) { - logger.warn("Submit state event error, cannot from workflowExecuteThread from cache manager, stateEvent:{}", + log.warn("Submit state event error, cannot from workflowExecuteThread from cache manager, stateEvent:{}", stateEvent); return; } workflowExecuteThread.addStateEvent(stateEvent); - logger.info("Submit state event success, stateEvent: {}", stateEvent); + log.info("Submit state event success, stateEvent: {}", stateEvent); } /** @@ -109,7 +107,7 @@ public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor { return; } if (multiThreadFilterMap.containsKey(workflowExecuteThread.getKey())) { - logger.debug("The workflow has been executed by another thread"); + log.debug("The workflow has been executed by another thread"); return; } multiThreadFilterMap.put(workflowExecuteThread.getKey(), workflowExecuteThread); @@ -121,7 +119,7 @@ public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor { public void onFailure(Throwable ex) { LoggerUtils.setWorkflowInstanceIdMDC(processInstanceId); try { - logger.error("Workflow instance events handle failed", ex); + log.error("Workflow instance events handle failed", ex); notifyProcessChanged(workflowExecuteThread.getProcessInstance()); multiThreadFilterMap.remove(workflowExecuteThread.getKey()); } finally { @@ -138,10 +136,10 @@ public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor { .removeProcess4TimeoutCheck(workflowExecuteThread.getProcessInstance().getId()); 
processInstanceExecCacheManager.removeByProcessInstanceId(processInstanceId); notifyProcessChanged(workflowExecuteThread.getProcessInstance()); - logger.info("Workflow instance is finished."); + log.info("Workflow instance is finished."); } } catch (Exception e) { - logger.error("Workflow instance is finished, but notify changed error", e); + log.error("Workflow instance is finished, but notify changed error", e); } finally { // make sure the process has been removed from multiThreadFilterMap multiThreadFilterMap.remove(workflowExecuteThread.getKey()); @@ -166,10 +164,10 @@ public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor { try { LoggerUtils.setWorkflowAndTaskInstanceIDMDC(processInstance.getId(), taskInstance.getId()); if (processInstance.getHost().equalsIgnoreCase(address)) { - logger.info("Process host is local master, will notify it"); + log.info("Process host is local master, will notify it"); this.notifyMyself(processInstance, taskInstance); } else { - logger.info("Process host is remote master, will notify it"); + log.info("Process host is remote master, will notify it"); this.notifyProcess(finishProcessInstance, processInstance, taskInstance); } } finally { @@ -183,7 +181,7 @@ public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor { */ private void notifyMyself(@NonNull ProcessInstance processInstance, @NonNull TaskInstance taskInstance) { if (!processInstanceExecCacheManager.contains(processInstance.getId())) { - logger.warn("The execute cache manager doesn't contains this workflow instance"); + log.warn("The execute cache manager doesn't contains this workflow instance"); return; } TaskStateEvent stateEvent = TaskStateEvent.builder() @@ -202,7 +200,7 @@ public class WorkflowExecuteThreadPool extends ThreadPoolTaskExecutor { TaskInstance taskInstance) { String processInstanceHost = processInstance.getHost(); if (Strings.isNullOrEmpty(processInstanceHost)) { - logger.error("Process {} host is empty, cannot notify task {} 
now, taskId: {}", processInstance.getName(), + log.error("Process {} host is empty, cannot notify task {} now, taskId: {}", processInstance.getName(), taskInstance.getName(), taskInstance.getId()); return; } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java index 524585794c..757d479b9e 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java @@ -101,7 +101,7 @@ import com.zaxxer.hikari.HikariDataSource; public abstract class BaseTaskProcessor implements ITaskProcessor { - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); protected boolean killed = false; @@ -218,7 +218,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { result = resubmit(); break; default: - logger.error("unknown task action: {}", taskAction); + log.error("unknown task action: {}", taskAction); } return result; } finally { @@ -284,7 +284,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { } /** - * set master task running logger. + * set master task running log. 
*/ public void setTaskExecutionLogger() { threadLoggerInfoName = LoggerUtils.buildTaskId(taskInstance.getFirstSubmitTime(), @@ -307,7 +307,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { // verify tenant is null if (verifyTenantIsNull(tenant, taskInstance)) { - logger.info("Task state changes to {}", TaskExecutionStatus.FAILURE); + log.info("Task state changes to {}", TaskExecutionStatus.FAILURE); taskInstance.setState(TaskExecutionStatus.FAILURE); taskInstanceDao.upsertTaskInstance(taskInstance); return null; @@ -424,7 +424,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { int ruleId = dataQualityParameters.getRuleId(); DqRule dqRule = processService.getDqRule(ruleId); if (dqRule == null) { - logger.error("Can not get dataQuality rule by id {}", ruleId); + log.error("Can not get dataQuality rule by id {}", ruleId); return; } @@ -434,7 +434,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { List ruleInputEntryList = processService.getRuleInputEntry(ruleId); if (CollectionUtils.isEmpty(ruleInputEntryList)) { - logger.error("Rule input entry list is empty, ruleId: {}", ruleId); + log.error("Rule input entry list is empty, ruleId: {}", ruleId); return; } List executeSqlList = processService.getDqExecuteSql(ruleId); @@ -609,7 +609,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { */ protected boolean verifyTenantIsNull(Tenant tenant, TaskInstance taskInstance) { if (tenant == null) { - logger.error("Tenant does not exists"); + log.error("Tenant does not exists"); return true; } return false; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BlockingTaskProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BlockingTaskProcessor.java index 0b4848b1df..9f11f4b26d 100644 --- 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BlockingTaskProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BlockingTaskProcessor.java @@ -88,7 +88,7 @@ public class BlockingTaskProcessor extends BaseTaskProcessor { taskInstance.setState(TaskExecutionStatus.PAUSE); taskInstance.setEndTime(new Date()); taskInstanceDao.upsertTaskInstance(taskInstance); - logger.info("blocking task has been paused"); + log.info("blocking task has been paused"); return true; } @@ -97,7 +97,7 @@ public class BlockingTaskProcessor extends BaseTaskProcessor { taskInstance.setState(TaskExecutionStatus.KILL); taskInstance.setEndTime(new Date()); taskInstanceDao.upsertTaskInstance(taskInstance); - logger.info("blocking task has been killed"); + log.info("blocking task has been killed"); return true; } @@ -114,13 +114,13 @@ public class BlockingTaskProcessor extends BaseTaskProcessor { return false; } this.setTaskExecutionLogger(); - logger.info("blocking task submit success"); + log.info("blocking task submit success"); return true; } @Override protected boolean runTask() { - logger.info("blocking task starting"); + log.info("blocking task starting"); initTaskParameters(); if (conditionResult.equals(DependResult.WAITING)) { setConditionResult(); @@ -128,7 +128,7 @@ public class BlockingTaskProcessor extends BaseTaskProcessor { } else { endTask(); } - logger.info("blocking task finished"); + log.info("blocking task finished"); return true; } @@ -154,17 +154,17 @@ public class BlockingTaskProcessor extends BaseTaskProcessor { DependResult dependResult = DependResult.SUCCESS; if (!completeTaskList.containsKey(item.getDepTaskCode())) { - logger.info("depend item: {} have not completed yet.", item.getDepTaskCode()); + log.info("depend item: {} have not completed yet.", item.getDepTaskCode()); dependResult = DependResult.FAILED; return dependResult; } TaskExecutionStatus executionStatus = 
completeTaskList.get(item.getDepTaskCode()); if (executionStatus != item.getStatus()) { - logger.info("depend item : {} expect status: {}, actual status: {}", item.getDepTaskCode(), + log.info("depend item : {} expect status: {}, actual status: {}", item.getDepTaskCode(), item.getStatus(), executionStatus); dependResult = DependResult.FAILED; } - logger.info("dependent item complete {} {},{}", + log.info("dependent item complete {} {},{}", Constants.DEPENDENT_SPLIT, item.getDepTaskCode(), dependResult); return dependResult; } @@ -188,7 +188,7 @@ public class BlockingTaskProcessor extends BaseTaskProcessor { tempResultList.add(tempResult); } conditionResult = DependentUtils.getDependResultForRelation(dependentParameters.getRelation(), tempResultList); - logger.info("the blocking task depend result : {}", conditionResult); + log.info("the blocking task depend result : {}", conditionResult); } private void endTask() { @@ -197,7 +197,7 @@ public class BlockingTaskProcessor extends BaseTaskProcessor { ? 
DependResult.SUCCESS : DependResult.FAILED; boolean isBlocked = (expected == this.conditionResult); - logger.info("blocking opportunity: expected-->{}, actual-->{}", expected, this.conditionResult); + log.info("blocking opportunity: expected-->{}, actual-->{}", expected, this.conditionResult); processInstance.setBlocked(isBlocked); if (isBlocked) { processInstance.setStateWithDesc(WorkflowExecutionStatus.READY_BLOCK, "ready block"); @@ -205,6 +205,6 @@ public class BlockingTaskProcessor extends BaseTaskProcessor { taskInstance.setState(TaskExecutionStatus.SUCCESS); taskInstance.setEndTime(new Date()); taskInstanceDao.updateTaskInstance(taskInstance); - logger.info("blocking task execute complete, blocking:{}", isBlocked); + log.info("blocking task execute complete, blocking:{}", isBlocked); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessor.java index 09eaa2ed17..4b63fb3ea7 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/CommonTaskProcessor.java @@ -103,18 +103,18 @@ public class CommonTaskProcessor extends BaseTaskProcessor { this.initQueue(); } if (taskInstance.getState().isFinished()) { - logger.info("Task {} has already finished, no need to submit to task queue, taskState: {}", + log.info("Task {} has already finished, no need to submit to task queue, taskState: {}", taskInstance.getName(), taskInstance.getState()); return true; } // task cannot be submitted because its execution state is RUNNING or DELAY. 
if (taskInstance.getState() == TaskExecutionStatus.RUNNING_EXECUTION || taskInstance.getState() == TaskExecutionStatus.DELAY_EXECUTION) { - logger.info("Task {} is already running or delayed, no need to submit to task queue, taskState: {}", + log.info("Task {} is already running or delayed, no need to submit to task queue, taskState: {}", taskInstance.getName(), taskInstance.getState()); return true; } - logger.info("Task {} is ready to dispatch to worker", taskInstance.getName()); + log.info("Task {} is ready to dispatch to worker", taskInstance.getName()); TaskPriority taskPriority = new TaskPriority(processInstance.getProcessInstancePriority().getCode(), processInstance.getId(), taskInstance.getProcessInstancePriority().getCode(), @@ -123,17 +123,17 @@ public class CommonTaskProcessor extends BaseTaskProcessor { TaskExecutionContext taskExecutionContext = getTaskExecutionContext(taskInstance); if (taskExecutionContext == null) { - logger.error("Get taskExecutionContext fail, task: {}", taskInstance); + log.error("Get taskExecutionContext fail, task: {}", taskInstance); return false; } taskPriority.setTaskExecutionContext(taskExecutionContext); taskUpdateQueue.put(taskPriority); - logger.info("Task {} is submitted to priority queue success by master", taskInstance.getName()); + log.info("Task {} is submitted to priority queue success by master", taskInstance.getName()); return true; } catch (Exception e) { - logger.error("Task {} is submitted to priority queue error", taskInstance.getName(), e); + log.error("Task {} is submitted to priority queue error", taskInstance.getName(), e); return false; } } @@ -146,13 +146,13 @@ public class CommonTaskProcessor extends BaseTaskProcessor { public boolean killTask() { try { - logger.info("Begin to kill task: {}", taskInstance.getName()); + log.info("Begin to kill task: {}", taskInstance.getName()); if (taskInstance == null) { - logger.warn("Kill task failed, the task instance is not exist"); + log.warn("Kill task failed, 
the task instance is not exist"); return true; } if (taskInstance.getState().isFinished()) { - logger.warn("Kill task failed, the task instance is already finished"); + log.warn("Kill task failed, the task instance is already finished"); return true; } // we don't wait the kill response @@ -163,12 +163,12 @@ public class CommonTaskProcessor extends BaseTaskProcessor { killRemoteTask(); } } catch (Exception e) { - logger.error("Master kill task: {} error, taskInstance id: {}", taskInstance.getName(), + log.error("Master kill task: {} error, taskInstance id: {}", taskInstance.getName(), taskInstance.getId(), e); return false; } - logger.info("Master success kill task: {}, taskInstanceId: {}", taskInstance.getName(), taskInstance.getId()); + log.info("Master success kill task: {}, taskInstanceId: {}", taskInstance.getName(), taskInstance.getId()); return true; } @@ -202,9 +202,9 @@ public class CommonTaskProcessor extends BaseTaskProcessor { taskInstance.getTaskDefine().setTaskParams(JSONUtils.toJsonString(taskDefinitionParams)); taskInstance.setTaskParams(JSONUtils.toJsonString(taskInstanceParams)); if (null == testDataSourceId) { - logger.warn("task name :{}, test data source replacement failed", taskInstance.getName()); + log.warn("task name :{}, test data source replacement failed", taskInstance.getName()); } else { - logger.info("task name :{}, test data source replacement succeeded", taskInstance.getName()); + log.info("task name :{}, test data source replacement succeeded", taskInstance.getName()); } } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ConditionTaskProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ConditionTaskProcessor.java index 23e0e65071..f3435a21a6 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ConditionTaskProcessor.java +++ 
b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/ConditionTaskProcessor.java @@ -67,21 +67,21 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { return false; } this.setTaskExecutionLogger(); - logger.info("condition task submit success"); + log.info("condition task submit success"); return true; } @Override public boolean runTask() { initTaskParameters(); - logger.info("condition task start"); + log.info("condition task start"); if (conditionResult.equals(DependResult.WAITING)) { setConditionResult(); endTask(); } else { endTask(); } - logger.info("condition task finished"); + log.info("condition task finished"); return true; } @@ -109,7 +109,7 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { if (taskTimeoutStrategy == TaskTimeoutStrategy.WARN) { return true; } - logger.info("condition task {} timeout, strategy {} ", + log.info("condition task {} timeout, strategy {} ", taskInstance.getId(), taskTimeoutStrategy.getDescp()); conditionResult = DependResult.FAILED; endTask(); @@ -161,7 +161,7 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { modelResultList.add(modelResult); } conditionResult = DependentUtils.getDependResultForRelation(dependentParameters.getRelation(), modelResultList); - logger.info("the conditions task depend result : {}", conditionResult); + log.info("the conditions task depend result : {}", conditionResult); } /** @@ -171,17 +171,17 @@ public class ConditionTaskProcessor extends BaseTaskProcessor { DependResult dependResult = DependResult.SUCCESS; if (!completeTaskList.containsKey(item.getDepTaskCode())) { - logger.info("depend item: {} have not completed yet.", item.getDepTaskCode()); + log.info("depend item: {} have not completed yet.", item.getDepTaskCode()); dependResult = DependResult.FAILED; return dependResult; } TaskExecutionStatus executionStatus = completeTaskList.get(item.getDepTaskCode()); if (executionStatus != item.getStatus()) { - 
logger.info("depend item : {} expect status: {}, actual status: {}", item.getDepTaskCode(), + log.info("depend item : {} expect status: {}, actual status: {}", item.getDepTaskCode(), item.getStatus(), executionStatus); dependResult = DependResult.FAILED; } - logger.info("dependent item complete, dependentTaskCode: {}, dependResult: {}", item.getDepTaskCode(), + log.info("dependent item complete, dependentTaskCode: {}, dependResult: {}", item.getDepTaskCode(), dependResult); return dependResult; } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/DependentTaskProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/DependentTaskProcessor.java index 2edd0e97ea..84fb55a14d 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/DependentTaskProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/DependentTaskProcessor.java @@ -105,7 +105,7 @@ public class DependentTaskProcessor extends BaseTaskProcessor { return false; } this.setTaskExecutionLogger(); - logger.info("Dependent task submit success"); + log.info("Dependent task submit success"); taskInstance.setLogPath(LogUtils.getTaskLogPath(taskInstance.getFirstSubmitTime(), processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion(), @@ -116,10 +116,10 @@ public class DependentTaskProcessor extends BaseTaskProcessor { taskInstance.setStartTime(new Date()); taskInstanceDao.updateTaskInstance(taskInstance); initDependParameters(); - logger.info("Success initialize dependent task parameters, the dependent data is: {}", dependentDate); + log.info("Success initialize dependent task parameters, the dependent data is: {}", dependentDate); return true; } catch (Exception ex) { - logger.error("Submit/Initialize dependent task error", ex); + log.error("Submit/Initialize dependent task 
error", ex); return false; } } @@ -153,7 +153,7 @@ public class DependentTaskProcessor extends BaseTaskProcessor { && TaskTimeoutStrategy.WARNFAILED != taskTimeoutStrategy) { return true; } - logger.info("dependent taskInstanceId: {} timeout, taskName: {}, strategy: {} ", + log.info("dependent taskInstanceId: {} timeout, taskName: {}, strategy: {} ", taskInstance.getId(), taskInstance.getName(), taskTimeoutStrategy.getDescp()); result = DependResult.FAILED; endTask(); @@ -191,34 +191,34 @@ public class DependentTaskProcessor extends BaseTaskProcessor { .collect(Collectors.toMap(TaskDefinition::getCode, Function.identity())); for (DependentTaskModel taskModel : dependentParameters.getDependTaskList()) { - logger.info("Add sub dependent check tasks, dependent relation: {}", taskModel.getRelation()); + log.info("Add sub dependent check tasks, dependent relation: {}", taskModel.getRelation()); for (DependentItem dependentItem : taskModel.getDependItemList()) { Project project = projectCodeMap.get(dependentItem.getProjectCode()); if (project == null) { - logger.error("The dependent task's project is not exist, dependentItem: {}", dependentItem); + log.error("The dependent task's project is not exist, dependentItem: {}", dependentItem); throw new RuntimeException( "The dependent task's project is not exist, dependentItem: " + dependentItem); } ProcessDefinition processDefinition = processDefinitionMap.get(dependentItem.getDefinitionCode()); if (processDefinition == null) { - logger.error("The dependent task's workflow is not exist, dependentItem: {}", dependentItem); + log.error("The dependent task's workflow is not exist, dependentItem: {}", dependentItem); throw new RuntimeException( "The dependent task's workflow is not exist, dependentItem: " + dependentItem); } if (dependentItem.getDepTaskCode() == Constants.DEPENDENT_ALL_TASK_CODE) { - logger.info( + log.info( "Add dependent task: projectName: {}, workflowName: {}, taskName: ALL, dependentKey: {}", 
project.getName(), processDefinition.getName(), dependentItem.getKey()); } else { TaskDefinition taskDefinition = taskDefinitionMap.get(dependentItem.getDepTaskCode()); if (taskDefinition == null) { - logger.error("The dependent task's taskDefinition is not exist, dependentItem: {}", + log.error("The dependent task's taskDefinition is not exist, dependentItem: {}", dependentItem); throw new RuntimeException( "The dependent task's taskDefinition is not exist, dependentItem: " + dependentItem); } - logger.info("Add dependent task: projectName: {}, workflowName: {}, taskName: {}, dependentKey: {}", + log.info("Add dependent task: projectName: {}, workflowName: {}, taskName: {}, dependentKey: {}", project.getName(), processDefinition.getName(), taskDefinition.getName(), dependentItem.getKey()); } @@ -255,7 +255,7 @@ public class DependentTaskProcessor extends BaseTaskProcessor { if (!dependResultMap.containsKey(entry.getKey())) { dependResultMap.put(entry.getKey(), entry.getValue()); // save depend result to log - logger.info("dependent item complete, dependentKey: {}, result: {}, dependentDate: {}", + log.info("dependent item complete, dependentKey: {}, result: {}, dependentDate: {}", entry.getKey(), entry.getValue(), dependentDate); } } @@ -278,7 +278,7 @@ public class DependentTaskProcessor extends BaseTaskProcessor { dependResultList.add(dependResult); } result = DependentUtils.getDependResultForRelation(this.dependentParameters.getRelation(), dependResultList); - logger.info("Dependent task completed, dependent result: {}", result); + log.info("Dependent task completed, dependent result: {}", result); return result; } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SubTaskProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SubTaskProcessor.java index 1828e269ea..7ef817f5b0 100644 --- 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SubTaskProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SubTaskProcessor.java @@ -88,7 +88,7 @@ public class SubTaskProcessor extends BaseTaskProcessor { updateTaskState(); } } catch (Exception e) { - logger.error("work flow {} sub task {} exceptions", + log.error("work flow {} sub task {} exceptions", this.processInstance.getId(), this.taskInstance.getId(), e); @@ -115,7 +115,7 @@ public class SubTaskProcessor extends BaseTaskProcessor { && TaskTimeoutStrategy.WARNFAILED != taskTimeoutStrategy) { return true; } - logger.info("sub process task {} timeout, strategy {} ", + log.info("sub process task {} timeout, strategy {} ", taskInstance.getId(), taskTimeoutStrategy.getDescp()); killTask(); return true; @@ -123,7 +123,7 @@ public class SubTaskProcessor extends BaseTaskProcessor { private void updateTaskState() { subProcessInstance = processService.findSubProcessInstance(processInstance.getId(), taskInstance.getId()); - logger.info("work flow {} task {}, sub work flow: {} state: {}", + log.info("work flow {} task {}, sub work flow: {} state: {}", this.processInstance.getId(), this.taskInstance.getId(), subProcessInstance.getId(), @@ -188,7 +188,7 @@ public class SubTaskProcessor extends BaseTaskProcessor { } private boolean setSubWorkFlow() { - logger.info("set work flow {} task {} running", + log.info("set work flow {} task {} running", this.processInstance.getId(), this.taskInstance.getId()); if (this.subProcessInstance != null) { @@ -202,7 +202,7 @@ public class SubTaskProcessor extends BaseTaskProcessor { taskInstance.setState(TaskExecutionStatus.RUNNING_EXECUTION); taskInstance.setStartTime(new Date()); taskInstanceDao.updateTaskInstance(taskInstance); - logger.info("set sub work flow {} task {} state: {}", + log.info("set sub work flow {} task {} state: {}", processInstance.getId(), taskInstance.getId(), 
taskInstance.getState()); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SwitchTaskProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SwitchTaskProcessor.java index dfd5a02fb7..4893f44731 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SwitchTaskProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/SwitchTaskProcessor.java @@ -65,13 +65,13 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { return false; } this.setTaskExecutionLogger(); - logger.info("switch task submit success"); + log.info("switch task submit success"); return true; } @Override public boolean runTask() { - logger.info("switch task starting"); + log.info("switch task starting"); taskInstance.setLogPath( LogUtils.getTaskLogPath(taskInstance.getFirstSubmitTime(), processInstance.getProcessDefinitionCode(), processInstance.getProcessDefinitionVersion(), @@ -86,7 +86,7 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { setSwitchResult(); } endTaskState(); - logger.info("switch task finished"); + log.info("switch task finished"); return true; } @@ -142,23 +142,23 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { int i = 0; conditionResult = DependResult.SUCCESS; for (SwitchResultVo info : switchResultVos) { - logger.info("the {} execution ", (i + 1)); - logger.info("original condition sentence:{}", info.getCondition()); + log.info("the {} execution ", (i + 1)); + log.info("original condition sentence:{}", info.getCondition()); if (StringUtils.isEmpty(info.getCondition())) { finalConditionLocation = i; break; } String content = setTaskParams(info.getCondition().replaceAll("'", "\""), rgex); - logger.info("format condition sentence::{}", content); + log.info("format condition sentence::{}", content); Boolean result = null; try { result = 
SwitchTaskUtils.evaluate(content); } catch (Exception e) { - logger.info("error sentence : {}", content); + log.info("error sentence : {}", content); conditionResult = DependResult.FAILED; break; } - logger.info("condition result : {}", result); + log.info("condition result : {}", result); if (result) { finalConditionLocation = i; break; @@ -171,12 +171,12 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { if (!isValidSwitchResult(switchResultVos.get(finalConditionLocation))) { conditionResult = DependResult.FAILED; - logger.error("the switch task depend result is invalid, result:{}, switch branch:{}", conditionResult, + log.error("the switch task depend result is invalid, result:{}, switch branch:{}", conditionResult, finalConditionLocation); return true; } - logger.info("the switch task depend result:{}, switch branch:{}", conditionResult, finalConditionLocation); + log.info("the switch task depend result:{}, switch branch:{}", conditionResult, finalConditionLocation); return true; } @@ -216,7 +216,7 @@ public class SwitchTaskProcessor extends BaseTaskProcessor { if (!org.apache.commons.lang3.math.NumberUtils.isCreatable(value)) { value = "\"" + value + "\""; } - logger.info("paramName:{},paramValue:{}", paramName, value); + log.info("paramName:{},paramValue:{}", paramName, value); content = content.replace("${" + paramName + "}", value); } return content; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactory.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactory.java index 3d532c1ff4..6da322167a 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactory.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/TaskProcessorFactory.java @@ -29,18 +29,15 @@ import java.util.Map; import 
java.util.concurrent.ConcurrentHashMap; import lombok.experimental.UtilityClass; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * the factory to create task processor */ @UtilityClass +@Slf4j public final class TaskProcessorFactory { - private static final Logger logger = LoggerFactory.getLogger(TaskProcessorFactory.class); - private static final Map> PROCESS_MAP = new ConcurrentHashMap<>(); private static final String DEFAULT_PROCESSOR = COMMON_TASK_TYPE; @@ -49,10 +46,10 @@ public final class TaskProcessorFactory { PrioritySPIFactory prioritySPIFactory = new PrioritySPIFactory<>(ITaskProcessor.class); for (Map.Entry entry : prioritySPIFactory.getSPIMap().entrySet()) { try { - logger.info("Registering task processor: {} - {}", entry.getKey(), entry.getValue().getClass()); + log.info("Registering task processor: {} - {}", entry.getKey(), entry.getValue().getClass()); PROCESS_MAP.put(entry.getKey(), (Constructor) entry.getValue().getClass().getConstructor()); - logger.info("Registered task processor: {} - {}", entry.getKey(), entry.getValue().getClass()); + log.info("Registered task processor: {} - {}", entry.getKey(), entry.getValue().getClass()); } catch (NoSuchMethodException e) { throw new IllegalArgumentException( String.format("The task processor: %s should has a no args constructor", entry.getKey())); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/ExecutingService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/ExecutingService.java index 90be9f6d3c..91b8edb127 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/ExecutingService.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/ExecutingService.java @@ -21,7 +21,6 @@ import org.apache.dolphinscheduler.dao.entity.TaskInstance; import 
org.apache.dolphinscheduler.remote.dto.TaskInstanceExecuteDto; import org.apache.dolphinscheduler.remote.dto.WorkflowExecuteDto; import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; -import org.apache.dolphinscheduler.server.master.controller.WorkflowExecuteController; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteRunnable; import org.apache.commons.beanutils.BeanUtils; @@ -32,8 +31,8 @@ import java.lang.reflect.InvocationTargetException; import java.util.List; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -41,10 +40,9 @@ import org.springframework.stereotype.Component; * executing service, to query executing data from memory, such workflow instance */ @Component +@Slf4j public class ExecutingService { - private static final Logger logger = LoggerFactory.getLogger(WorkflowExecuteController.class); - @Autowired private ProcessInstanceExecCacheManager processInstanceExecCacheManager; @@ -52,7 +50,7 @@ public class ExecutingService { WorkflowExecuteRunnable workflowExecuteRunnable = processInstanceExecCacheManager.getByProcessInstanceId(processInstanceId); if (workflowExecuteRunnable == null) { - logger.info("workflow execute data not found, maybe it has finished, workflow id:{}", processInstanceId); + log.info("workflow execute data not found, maybe it has finished, workflow id:{}", processInstanceId); return Optional.empty(); } try { @@ -69,7 +67,7 @@ public class ExecutingService { workflowExecuteDto.setTaskInstances(taskInstanceList); return Optional.of(workflowExecuteDto); } catch (IllegalAccessException | InvocationTargetException e) { - logger.error("query workflow execute data fail, workflow id:{}", processInstanceId, e); + log.error("query workflow execute data fail, workflow id:{}", processInstanceId, e); } 
return Optional.empty(); } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/FailoverService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/FailoverService.java index fece6392f3..46188f1719 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/FailoverService.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/FailoverService.java @@ -20,19 +20,17 @@ package org.apache.dolphinscheduler.server.master.service; import org.apache.dolphinscheduler.common.enums.NodeType; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; /** * failover service */ @Component +@Slf4j public class FailoverService { - private static final Logger LOGGER = LoggerFactory.getLogger(FailoverService.class); - private final MasterFailoverService masterFailoverService; private final WorkerFailoverService workerFailoverService; @@ -51,14 +49,14 @@ public class FailoverService { public void failoverServerWhenDown(String serverHost, NodeType nodeType) { switch (nodeType) { case MASTER: - LOGGER.info("Master failover starting, masterServer: {}", serverHost); + log.info("Master failover starting, masterServer: {}", serverHost); masterFailoverService.failoverMaster(serverHost); - LOGGER.info("Master failover finished, masterServer: {}", serverHost); + log.info("Master failover finished, masterServer: {}", serverHost); break; case WORKER: - LOGGER.info("Worker failover starting, workerServer: {}", serverHost); + log.info("Worker failover starting, workerServer: {}", serverHost); workerFailoverService.failoverWorker(serverHost); - LOGGER.info("Worker failover finished, workerServer: {}", serverHost); + log.info("Worker failover finished, workerServer: {}", serverHost); break; default: break; diff --git 
a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/MasterFailoverService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/MasterFailoverService.java index b3caa4709b..3979401425 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/MasterFailoverService.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/MasterFailoverService.java @@ -56,9 +56,8 @@ import java.util.function.Function; import java.util.stream.Collectors; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -66,9 +65,9 @@ import io.micrometer.core.annotation.Counted; import io.micrometer.core.annotation.Timed; @Service +@Slf4j public class MasterFailoverService { - private static final Logger LOGGER = LoggerFactory.getLogger(MasterFailoverService.class); private final RegistryClient registryClient; private final MasterConfig masterConfig; private final ProcessService processService; @@ -117,7 +116,7 @@ public class MasterFailoverService { if (CollectionUtils.isEmpty(needFailoverMasterHosts)) { return; } - LOGGER.info("Master failover service {} begin to failover hosts:{}", localAddress, needFailoverMasterHosts); + log.info("Master failover service {} begin to failover hosts:{}", localAddress, needFailoverMasterHosts); for (String needFailoverMasterHost : needFailoverMasterHosts) { failoverMaster(needFailoverMasterHost); @@ -130,7 +129,7 @@ public class MasterFailoverService { registryClient.getLock(failoverPath); doFailoverMaster(masterHost); } catch (Exception e) { - LOGGER.error("Master server failover failed, host:{}", masterHost, e); + log.error("Master server failover failed, host:{}", masterHost, e); } finally { 
registryClient.releaseLock(failoverPath); } @@ -154,7 +153,7 @@ public class MasterFailoverService { return; } - LOGGER.info( + log.info( "Master[{}] failover starting there are {} workflowInstance may need to failover, will do a deep check, workflowInstanceIds: {}", masterHost, needFailoverProcessInstanceList.size(), @@ -169,9 +168,9 @@ public class MasterFailoverService { for (ProcessInstance processInstance : needFailoverProcessInstanceList) { try { LoggerUtils.setWorkflowInstanceIdMDC(processInstance.getId()); - LOGGER.info("WorkflowInstance failover starting"); + log.info("WorkflowInstance failover starting"); if (!checkProcessInstanceNeedFailover(masterStartupTimeOptional, processInstance)) { - LOGGER.info("WorkflowInstance doesn't need to failover"); + log.info("WorkflowInstance doesn't need to failover"); continue; } ProcessDefinition processDefinition = codeDefinitionMap.get(processInstance.getProcessDefinitionCode()); @@ -182,13 +181,13 @@ public class MasterFailoverService { for (TaskInstance taskInstance : taskInstanceList) { try { LoggerUtils.setTaskInstanceIdMDC(taskInstance.getId()); - LOGGER.info("TaskInstance failover starting"); + log.info("TaskInstance failover starting"); if (!checkTaskInstanceNeedFailover(taskInstance)) { - LOGGER.info("The taskInstance doesn't need to failover"); + log.info("The taskInstance doesn't need to failover"); continue; } failoverTaskInstance(processInstance, taskInstance); - LOGGER.info("TaskInstance failover finished"); + log.info("TaskInstance failover finished"); } finally { LoggerUtils.removeTaskInstanceIdMDC(); } @@ -199,14 +198,14 @@ public class MasterFailoverService { // and insert a failover command processInstance.setHost(Constants.NULL); processService.processNeedFailoverProcessInstances(processInstance); - LOGGER.info("WorkflowInstance failover finished"); + log.info("WorkflowInstance failover finished"); } finally { LoggerUtils.removeWorkflowInstanceIdMDC(); } } failoverTimeCost.stop(); - 
LOGGER.info("Master[{}] failover finished, useTime:{}ms", + log.info("Master[{}] failover finished, useTime:{}ms", masterHost, failoverTimeCost.getTime(TimeUnit.MILLISECONDS)); } @@ -242,7 +241,7 @@ public class MasterFailoverService { taskInstance.setProcessInstance(processInstance); if (!isMasterTask) { - LOGGER.info("The failover taskInstance is not master task"); + log.info("The failover taskInstance is not master task"); TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get() .buildTaskInstanceRelatedInfo(taskInstance) .buildProcessInstanceRelatedInfo(processInstance) @@ -251,7 +250,7 @@ public class MasterFailoverService { if (masterConfig.isKillYarnJobWhenTaskFailover()) { // only kill yarn job if exists , the local thread has exited - LOGGER.info("TaskInstance failover begin kill the task related yarn job"); + log.info("TaskInstance failover begin kill the task related yarn job"); ProcessUtils.killYarnJob(logClient, taskExecutionContext); } // kill worker task, When the master failover and worker failover happened in the same time, @@ -259,7 +258,7 @@ public class MasterFailoverService { // This can be improved if we can load all task when cache a workflowInstance in memory sendKillCommandToWorker(taskInstance); } else { - LOGGER.info("The failover taskInstance is a master task"); + log.info("The failover taskInstance is a master task"); } taskInstance.setState(TaskExecutionStatus.NEED_FAULT_TOLERANCE); @@ -274,9 +273,9 @@ public class MasterFailoverService { TaskKillRequestCommand killCommand = new TaskKillRequestCommand(taskInstance.getId()); Host workerHost = Host.of(taskInstance.getHost()); nettyExecutorManager.doExecute(workerHost, killCommand.convert2Command()); - LOGGER.info("Failover task success, has killed the task in worker: {}", taskInstance.getHost()); + log.info("Failover task success, has killed the task in worker: {}", taskInstance.getHost()); } catch (ExecuteException e) { - LOGGER.error("Kill task failed", e); + 
log.error("Kill task failed", e); } } diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/WorkerFailoverService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/WorkerFailoverService.java index e3ed6daa01..928580fb82 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/WorkerFailoverService.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/service/WorkerFailoverService.java @@ -56,16 +56,14 @@ import java.util.stream.Collectors; import javax.annotation.Nullable; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; @Service +@Slf4j public class WorkerFailoverService { - private static final Logger LOGGER = LoggerFactory.getLogger(WorkerFailoverService.class); - private final RegistryClient registryClient; private final MasterConfig masterConfig; private final ProcessService processService; @@ -102,7 +100,7 @@ public class WorkerFailoverService { * @param workerHost worker host */ public void failoverWorker(@NonNull String workerHost) { - LOGGER.info("Worker[{}] failover starting", workerHost); + log.info("Worker[{}] failover starting", workerHost); final StopWatch failoverTimeCost = StopWatch.createStarted(); // we query the task instance from cache, so that we can directly update the cache @@ -111,10 +109,10 @@ public class WorkerFailoverService { final List needFailoverTaskInstanceList = getNeedFailoverTaskInstance(workerHost); if (CollectionUtils.isEmpty(needFailoverTaskInstanceList)) { - LOGGER.info("Worker[{}] failover finished there are no taskInstance need to failover", workerHost); + log.info("Worker[{}] failover finished there are no taskInstance need to failover", workerHost); return; } - LOGGER.info( + log.info( "Worker[{}] failover there are {} taskInstance may 
need to failover, will do a deep check, taskInstanceIds: {}", workerHost, needFailoverTaskInstanceList.size(), @@ -133,22 +131,22 @@ public class WorkerFailoverService { return workflowExecuteRunnable.getProcessInstance(); }); if (!checkTaskInstanceNeedFailover(needFailoverWorkerStartTime, processInstance, taskInstance)) { - LOGGER.info("Worker[{}] the current taskInstance doesn't need to failover", workerHost); + log.info("Worker[{}] the current taskInstance doesn't need to failover", workerHost); continue; } - LOGGER.info( + log.info( "Worker[{}] failover: begin to failover taskInstance, will set the status to NEED_FAULT_TOLERANCE", workerHost); failoverTaskInstance(processInstance, taskInstance); - LOGGER.info("Worker[{}] failover: Finish failover taskInstance", workerHost); + log.info("Worker[{}] failover: Finish failover taskInstance", workerHost); } catch (Exception ex) { - LOGGER.info("Worker[{}] failover taskInstance occur exception", workerHost, ex); + log.info("Worker[{}] failover taskInstance occur exception", workerHost, ex); } finally { LoggerUtils.removeWorkflowAndTaskInstanceIdMDC(); } } failoverTimeCost.stop(); - LOGGER.info("Worker[{}] failover finished, useTime:{}ms", + log.info("Worker[{}] failover finished, useTime:{}ms", workerHost, failoverTimeCost.getTime(TimeUnit.MILLISECONDS)); } @@ -170,7 +168,7 @@ public class WorkerFailoverService { taskInstance.setProcessInstance(processInstance); if (!isMasterTask) { - LOGGER.info("The failover taskInstance is not master task"); + log.info("The failover taskInstance is not master task"); TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get() .buildTaskInstanceRelatedInfo(taskInstance) .buildProcessInstanceRelatedInfo(processInstance) @@ -179,11 +177,11 @@ public class WorkerFailoverService { if (masterConfig.isKillYarnJobWhenTaskFailover()) { // only kill yarn job if exists , the local thread has exited - LOGGER.info("TaskInstance failover begin kill the task related yarn job"); 
+ log.info("TaskInstance failover begin kill the task related yarn job"); ProcessUtils.killYarnJob(logClient, taskExecutionContext); } } else { - LOGGER.info("The failover taskInstance is a master task"); + log.info("The failover taskInstance is a master task"); } taskInstance.setState(TaskExecutionStatus.NEED_FAULT_TOLERANCE); @@ -209,18 +207,18 @@ public class WorkerFailoverService { TaskInstance taskInstance) { if (processInstance == null) { // This case should be happened. - LOGGER.error( + log.error( "Failover task instance error, cannot find the related processInstance form memory, this case shouldn't happened"); return false; } if (taskInstance == null) { // This case should be happened. - LOGGER.error("Master failover task instance error, taskInstance is null, this case shouldn't happened"); + log.error("Master failover task instance error, taskInstance is null, this case shouldn't happened"); return false; } // only failover the task owned myself if worker down. if (!StringUtils.equalsIgnoreCase(processInstance.getHost(), localAddress)) { - LOGGER.error( + log.error( "Master failover task instance error, the taskInstance's processInstance's host: {} is not the current master: {}", processInstance.getHost(), localAddress); @@ -228,7 +226,7 @@ public class WorkerFailoverService { } if (taskInstance.getState() != null && taskInstance.getState().isFinished()) { // The taskInstance is already finished, doesn't need to failover - LOGGER.info("The task is already finished, doesn't need to failover"); + log.info("The task is already finished, doesn't need to failover"); return false; } if (!needFailoverWorkerStartTime.isPresent()) { @@ -238,7 +236,7 @@ public class WorkerFailoverService { // The worker is active, may already send some new task to it if (taskInstance.getSubmitTime() != null && taskInstance.getSubmitTime() .after(needFailoverWorkerStartTime.get())) { - LOGGER.info( + log.info( "The taskInstance's submitTime: {} is after the need failover worker's 
start time: {}, the taskInstance is newly submit, it doesn't need to failover", taskInstance.getSubmitTime(), needFailoverWorkerStartTime.get()); diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DataQualityResultOperator.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DataQualityResultOperator.java index b45833424f..5a7a793afc 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DataQualityResultOperator.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DataQualityResultOperator.java @@ -33,8 +33,8 @@ import org.apache.dolphinscheduler.service.process.ProcessService; import java.math.BigDecimal; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -42,10 +42,9 @@ import org.springframework.stereotype.Component; * DataQualityResultOperator */ @Component +@Slf4j public class DataQualityResultOperator { - private final Logger logger = LoggerFactory.getLogger(DataQualityResultOperator.class); - @Autowired private ProcessService processService; @@ -101,11 +100,11 @@ public class DataQualityResultOperator { sendDqTaskResultAlert(dqExecuteResult, processInstance); switch (dqFailureStrategy) { case ALERT: - logger.info("task is failure, continue and alert"); + log.info("task is failure, continue and alert"); break; case BLOCK: taskResponseEvent.setState(TaskExecutionStatus.FAILURE); - logger.info("task is failure, end and alert"); + log.info("task is failure, end and alert"); break; default: break; diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DependentExecute.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DependentExecute.java index 
9124a6f1a7..bdf6a2957f 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DependentExecute.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/utils/DependentExecute.java @@ -36,9 +36,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * dependent item execute */ @@ -71,11 +68,6 @@ public class DependentExecute { */ private Map dependResultMap = new HashMap<>(); - /** - * logger - */ - private Logger logger = LoggerFactory.getLogger(DependentExecute.class); - /** * constructor * diff --git a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java index 0b958345ce..7a8e7d32ef 100644 --- a/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java +++ b/dolphinscheduler-microbench/src/main/java/org/apache/dolphinscheduler/microbench/base/AbstractBaseBenchmark.java @@ -20,6 +20,8 @@ package org.apache.dolphinscheduler.microbench.base; import java.io.File; import java.io.IOException; +import lombok.extern.slf4j.Slf4j; + import org.junit.jupiter.api.Test; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; @@ -30,8 +32,6 @@ import org.openjdk.jmh.results.format.ResultFormatType; import org.openjdk.jmh.runner.Runner; import org.openjdk.jmh.runner.options.ChainedOptionsBuilder; import org.openjdk.jmh.runner.options.OptionsBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * BaseBenchMark @@ -41,6 +41,7 @@ import org.slf4j.LoggerFactory; @Measurement(iterations = AbstractBaseBenchmark.DEFAULT_MEASURE_ITERATIONS) @State(Scope.Thread) @Fork(AbstractBaseBenchmark.DEFAULT_FORKS) +@Slf4j public abstract class 
AbstractBaseBenchmark { static final int DEFAULT_WARMUP_ITERATIONS = 10; @@ -49,8 +50,6 @@ public abstract class AbstractBaseBenchmark { static final int DEFAULT_FORKS = 2; - private static Logger logger = LoggerFactory.getLogger(AbstractBaseBenchmark.class); - private ChainedOptionsBuilder newOptionsBuilder() { String className = getClass().getSimpleName(); @@ -85,7 +84,7 @@ public abstract class AbstractBaseBenchmark { try { writeFileStatus = file.createNewFile(); } catch (IOException e) { - logger.warn("jmh test create file error" + e); + log.warn("jmh test create file error" + e); } } if (writeFileStatus) { @@ -125,7 +124,7 @@ public abstract class AbstractBaseBenchmark { try { return Integer.parseInt(forkCount); } catch (NumberFormatException e) { - logger.error("fail to convert forkCount into int", e); + log.error("fail to convert forkCount into int", e); } return -1; diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryClient.java b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryClient.java index 2ef7d74c59..3153b4c1f0 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryClient.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-api/src/main/java/org/apache/dolphinscheduler/registry/api/RegistryClient.java @@ -43,18 +43,16 @@ import java.util.Set; import javax.annotation.PostConstruct; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.stereotype.Component; import com.google.common.base.Strings; @Component +@Slf4j public class RegistryClient { - private static final Logger logger = LoggerFactory.getLogger(RegistryClient.class); - private static final String EMPTY = ""; private IStoppable stoppable; @@ -81,7 +79,7 @@ public class 
RegistryClient { childrenList = getChildrenKeys(rootNodePath(NodeType.MASTER)); } } catch (Exception e) { - logger.error("getActiveMasterNum error", e); + log.error("getActiveMasterNum error", e); } return childrenList.size(); } @@ -95,7 +93,7 @@ public class RegistryClient { String serverPath = entry.getKey(); String heartBeatJson = entry.getValue(); if (StringUtils.isEmpty(heartBeatJson)) { - logger.error("The heartBeatJson is empty, serverPath: {}", serverPath); + log.error("The heartBeatJson is empty, serverPath: {}", serverPath); continue; } Server server = new Server(); @@ -139,7 +137,7 @@ public class RegistryClient { serverMap.putIfAbsent(server, get(path + Constants.SINGLE_SLASH + server)); } } catch (Exception e) { - logger.error("get server list failed", e); + log.error("get server list failed", e); } return serverMap; diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistry.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistry.java index 55be08784f..ff6afdeea4 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistry.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-etcd/src/main/java/org/apache/dolphinscheduler/plugin/registry/etcd/EtcdRegistry.java @@ -38,9 +38,8 @@ import java.util.stream.Collectors; import javax.annotation.PostConstruct; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; import org.springframework.util.StringUtils; @@ -68,9 +67,9 @@ 
import io.etcd.jetcd.watch.WatchEvent; */ @Component @ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "etcd") +@Slf4j public class EtcdRegistry implements Registry { - private static Logger LOGGER = LoggerFactory.getLogger(EtcdRegistry.class); private final Client client; private EtcdConnectionStateListener etcdConnectionStateListener; public static final String FOLDER_SEPARATOR = "/"; @@ -102,7 +101,7 @@ public class EtcdRegistry implements Registry { clientBuilder.authority(registryProperties.getAuthority()); } client = clientBuilder.build(); - LOGGER.info("Started Etcd Registry..."); + log.info("Started Etcd Registry..."); etcdConnectionStateListener = new EtcdConnectionStateListener(client); } @@ -111,9 +110,9 @@ public class EtcdRegistry implements Registry { */ @PostConstruct public void start() { - LOGGER.info("Starting Etcd ConnectionListener..."); + log.info("Starting Etcd ConnectionListener..."); etcdConnectionStateListener.start(); - LOGGER.info("Started Etcd ConnectionListener..."); + log.info("Started Etcd ConnectionListener..."); } @Override diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/MysqlOperator.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/MysqlOperator.java index a770fe75c7..2b8e7730e3 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/MysqlOperator.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/MysqlOperator.java @@ -32,8 +32,6 @@ import java.util.Collection; import java.util.List; import java.util.stream.Collectors; -import org.slf4j.Logger; -import 
org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; @@ -42,8 +40,6 @@ import org.springframework.stereotype.Component; @ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "mysql") public class MysqlOperator { - private static final Logger logger = LoggerFactory.getLogger(MysqlOperator.class); - @Autowired private MysqlRegistryDataMapper mysqlRegistryDataMapper; @Autowired diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/MysqlRegistry.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/MysqlRegistry.java index 5a72e80f58..a1d57ddb09 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/MysqlRegistry.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/MysqlRegistry.java @@ -33,9 +33,8 @@ import java.util.Collection; import javax.annotation.PostConstruct; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; @@ -45,10 +44,9 @@ import org.springframework.stereotype.Component; */ @Component @ConditionalOnProperty(prefix = "registry", name = "type", havingValue = "mysql") +@Slf4j public class MysqlRegistry implements Registry { - private static Logger LOGGER = LoggerFactory.getLogger(MysqlRegistry.class); - private final MysqlRegistryProperties 
mysqlRegistryProperties; private final EphemeralDateManager ephemeralDateManager; private final SubscribeDataManager subscribeDataManager; @@ -64,17 +62,17 @@ public class MysqlRegistry implements Registry { this.ephemeralDateManager = new EphemeralDateManager(mysqlRegistryProperties, mysqlOperator); this.subscribeDataManager = new SubscribeDataManager(mysqlRegistryProperties, mysqlOperator); this.registryLockManager = new RegistryLockManager(mysqlRegistryProperties, mysqlOperator); - LOGGER.info("Initialize Mysql Registry..."); + log.info("Initialize Mysql Registry..."); } @PostConstruct public void start() { - LOGGER.info("Starting Mysql Registry..."); + log.info("Starting Mysql Registry..."); // start a mysql connect check ephemeralDateManager.start(); subscribeDataManager.start(); registryLockManager.start(); - LOGGER.info("Started Mysql Registry..."); + log.info("Started Mysql Registry..."); } @Override @@ -183,15 +181,15 @@ public class MysqlRegistry implements Registry { @Override public void close() { - LOGGER.info("Closing Mysql Registry..."); + log.info("Closing Mysql Registry..."); // remove the current Ephemeral node, if can connect to mysql try ( EphemeralDateManager closed1 = ephemeralDateManager; SubscribeDataManager close2 = subscribeDataManager; RegistryLockManager close3 = registryLockManager) { } catch (Exception e) { - LOGGER.error("Close Mysql Registry error", e); + log.error("Close Mysql Registry error", e); } - LOGGER.info("Closed Mysql Registry..."); + log.info("Closed Mysql Registry..."); } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/EphemeralDateManager.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/EphemeralDateManager.java index dc4a55797f..0378637945 100644 --- 
a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/EphemeralDateManager.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/EphemeralDateManager.java @@ -34,18 +34,16 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.util.concurrent.ThreadFactoryBuilder; /** * This thread is used to check the connect state to mysql. */ +@Slf4j public class EphemeralDateManager implements AutoCloseable { - private static final Logger LOGGER = LoggerFactory.getLogger(EphemeralDateManager.class); - private ConnectionState connectionState; private final MysqlOperator mysqlOperator; private final MysqlRegistryProperties registryProperties; @@ -132,7 +130,7 @@ public class EphemeralDateManager implements AutoCloseable { triggerListener(connectionState); } } catch (Exception e) { - LOGGER.error("Mysql Registry connect state check task execute failed", e); + log.error("Mysql Registry connect state check task execute failed", e); connectionState = ConnectionState.DISCONNECTED; triggerListener(ConnectionState.DISCONNECTED); } @@ -148,14 +146,14 @@ public class EphemeralDateManager implements AutoCloseable { mysqlOperator.clearExpireEphemeralDate(); return ConnectionState.CONNECTED; } catch (Exception ex) { - LOGGER.error("Get connection state error, meet an unknown exception", ex); + log.error("Get connection state error, meet an unknown exception", ex); return ConnectionState.DISCONNECTED; } } private void updateEphemeralDateTerm() throws SQLException { if (!mysqlOperator.updateEphemeralDataTerm(ephemeralDateIds)) { - LOGGER.warn("Update mysql registry ephemeral 
data: {} term error", ephemeralDateIds); + log.warn("Update mysql registry ephemeral data: {} term error", ephemeralDateIds); } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/RegistryLockManager.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/RegistryLockManager.java index 9c6ca6eeeb..9ef88b64e0 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/RegistryLockManager.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/RegistryLockManager.java @@ -35,16 +35,13 @@ import java.util.stream.Collectors; import lombok.AccessLevel; import lombok.RequiredArgsConstructor; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.util.concurrent.ThreadFactoryBuilder; +@Slf4j public class RegistryLockManager implements AutoCloseable { - private static final Logger logger = LoggerFactory.getLogger(RegistryLockManager.class); - private final MysqlOperator mysqlOperator; private final MysqlRegistryProperties registryProperties; private final Map lockHoldMap; @@ -75,7 +72,7 @@ public class RegistryLockManager implements AutoCloseable { MysqlRegistryLock mysqlRegistryLock; try { while ((mysqlRegistryLock = mysqlOperator.tryToAcquireLock(lockKey)) == null) { - logger.debug("Acquire the lock {} failed try again", key); + log.debug("Acquire the lock {} failed try again", key); // acquire failed, wait and try again ThreadUtils.sleep(MysqlRegistryConstant.LOCK_ACQUIRE_INTERVAL); } @@ -126,11 +123,11 @@ public class RegistryLockManager 
implements AutoCloseable { .map(MysqlRegistryLock::getId) .collect(Collectors.toList()); if (!mysqlOperator.updateLockTerm(lockIds)) { - logger.warn("Update the lock: {} term failed.", lockIds); + log.warn("Update the lock: {} term failed.", lockIds); } mysqlOperator.clearExpireLock(); } catch (Exception e) { - logger.error("Update lock term error", e); + log.error("Update lock term error", e); } } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/SubscribeDataManager.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/SubscribeDataManager.java index bbcc25d7be..58df4072a1 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/SubscribeDataManager.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-mysql/src/main/java/org/apache/dolphinscheduler/plugin/registry/mysql/task/SubscribeDataManager.java @@ -34,19 +34,16 @@ import java.util.function.Function; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.util.concurrent.ThreadFactoryBuilder; /** * Used to refresh if the subscribe path has been changed. 
*/ +@Slf4j public class SubscribeDataManager implements AutoCloseable { - private static final Logger LOGGER = LoggerFactory.getLogger(SubscribeDataManager.class); - private final MysqlOperator mysqlOperator; private final MysqlRegistryProperties registryProperties; private final Map> dataSubScribeMap = new ConcurrentHashMap<>(); @@ -137,7 +134,7 @@ public class SubscribeDataManager implements AutoCloseable { triggerListener(updatedData, subscribeKey, subscribeListeners, Event.Type.UPDATE); } } catch (Exception e) { - LOGGER.error("Query data from mysql registry error"); + log.error("Query data from mysql registry error"); } } diff --git a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java index ca22f68bf2..380df4d021 100644 --- a/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java +++ b/dolphinscheduler-registry/dolphinscheduler-registry-plugins/dolphinscheduler-registry-zookeeper/src/main/java/org/apache/dolphinscheduler/plugin/registry/zookeeper/ZookeeperConnectionStateListener.java @@ -23,13 +23,11 @@ import org.apache.dolphinscheduler.registry.api.ConnectionState; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.state.ConnectionStateListener; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public final class ZookeeperConnectionStateListener implements ConnectionStateListener { - private static final Logger logger = 
LoggerFactory.getLogger(ZookeeperConnectionStateListener.class); - private final ConnectionListener listener; public ZookeeperConnectionStateListener(ConnectionListener listener) { @@ -41,15 +39,15 @@ public final class ZookeeperConnectionStateListener implements ConnectionStateLi org.apache.curator.framework.state.ConnectionState newState) { switch (newState) { case LOST: - logger.warn("Registry disconnected"); + log.warn("Registry disconnected"); listener.onUpdate(ConnectionState.DISCONNECTED); break; case RECONNECTED: - logger.info("Registry reconnected"); + log.info("Registry reconnected"); listener.onUpdate(ConnectionState.RECONNECTED); break; case SUSPENDED: - logger.warn("Registry suspended"); + log.warn("Registry suspended"); listener.onUpdate(ConnectionState.SUSPENDED); break; default: diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java index 5ac5918044..d1f081617c 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingClient.java @@ -47,9 +47,7 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.bootstrap.Bootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; @@ -62,10 +60,9 @@ import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.SocketChannel; import io.netty.handler.timeout.IdleStateHandler; +@Slf4j public class NettyRemotingClient implements AutoCloseable { - private final Logger logger = LoggerFactory.getLogger(NettyRemotingClient.class); - private final Bootstrap bootstrap = new Bootstrap(); private 
final NettyEncoder encoder = new NettyEncoder(); @@ -183,7 +180,7 @@ public class NettyRemotingClient implements AutoCloseable { try { responseFuture.executeInvokeCallback(); } catch (Exception ex) { - logger.error("execute callback error", ex); + log.error("execute callback error", ex); } finally { responseFuture.release(); } @@ -225,7 +222,7 @@ public class NettyRemotingClient implements AutoCloseable { } responseFuture.setCause(future.cause()); responseFuture.putResponse(null); - logger.error("send command {} to host {} failed", command, host); + log.error("send command {} to host {} failed", command, host); }); /* * sync wait for result @@ -255,16 +252,16 @@ public class NettyRemotingClient implements AutoCloseable { try { ChannelFuture future = channel.writeAndFlush(command).await(); if (future.isSuccess()) { - logger.debug("send command : {} , to : {} successfully.", command, host.getAddress()); + log.debug("send command : {} , to : {} successfully.", command, host.getAddress()); } else { String msg = String.format("send command : %s , to :%s failed", command, host.getAddress()); - logger.error(msg, future.cause()); + log.error(msg, future.cause()); throw new RemotingException(msg); } } catch (RemotingException remotingException) { throw remotingException; } catch (Exception e) { - logger.error("Send command {} to address {} encounter error.", command, host.getAddress()); + log.error("Send command {} to address {} encounter error.", command, host.getAddress()); throw new RemotingException( String.format("Send command : %s , to :%s encounter error", command, host.getAddress()), e); } @@ -325,7 +322,7 @@ public class NettyRemotingClient implements AutoCloseable { return channel; } } catch (Exception ex) { - logger.warn(String.format("connect to %s error", host), ex); + log.warn(String.format("connect to %s error", host), ex); } return null; } @@ -344,9 +341,9 @@ public class NettyRemotingClient implements AutoCloseable { if (this.responseFutureExecutor != null) 
{ this.responseFutureExecutor.shutdownNow(); } - logger.info("netty client closed"); + log.info("netty client closed"); } catch (Exception ex) { - logger.error("netty client close exception", ex); + log.error("netty client close exception", ex); } } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java index 63c1392a06..8480dbd301 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java @@ -33,8 +33,7 @@ import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -52,10 +51,9 @@ import io.netty.handler.timeout.IdleStateHandler; /** * remoting netty server */ +@Slf4j public class NettyRemotingServer { - private final Logger logger = LoggerFactory.getLogger(NettyRemotingServer.class); - /** * server bootstrap */ @@ -142,11 +140,11 @@ public class NettyRemotingServer { try { future = serverBootstrap.bind(serverConfig.getListenPort()).sync(); } catch (Exception e) { - logger.error("NettyRemotingServer bind fail {}, exit", e.getMessage(), e); + log.error("NettyRemotingServer bind fail {}, exit", e.getMessage(), e); throw new RemoteException(String.format(NETTY_BIND_FAILURE_MSG, serverConfig.getListenPort())); } if (future.isSuccess()) { - logger.info("NettyRemotingServer bind success at port : {}", serverConfig.getListenPort()); + log.info("NettyRemotingServer bind success at port : {}", serverConfig.getListenPort()); } else if (future.cause() != null) { throw new RemoteException(String.format(NETTY_BIND_FAILURE_MSG, 
serverConfig.getListenPort()), future.cause()); @@ -212,9 +210,9 @@ public class NettyRemotingServer { } defaultExecutor.shutdown(); } catch (Exception ex) { - logger.error("netty server close exception", ex); + log.error("netty server close exception", ex); } - logger.info("netty server closed"); + log.info("netty server closed"); } } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java index 4066b6ed10..2881d3c91c 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java @@ -24,9 +24,7 @@ import org.apache.dolphinscheduler.remote.command.CommandType; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.ReplayingDecoder; @@ -34,10 +32,9 @@ import io.netty.handler.codec.ReplayingDecoder; /** * netty decoder */ +@Slf4j public class NettyDecoder extends ReplayingDecoder { - private static final Logger logger = LoggerFactory.getLogger(NettyDecoder.class); - public NettyDecoder() { super(State.MAGIC); } @@ -98,7 +95,7 @@ public class NettyDecoder extends ReplayingDecoder { checkpoint(State.MAGIC); break; default: - logger.warn("unknown decoder state {}", state()); + log.warn("unknown decoder state {}", state()); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/factory/NettyRemotingClientFactory.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/factory/NettyRemotingClientFactory.java index 122979cb30..2364f97e06 100644 --- 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/factory/NettyRemotingClientFactory.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/factory/NettyRemotingClientFactory.java @@ -21,18 +21,15 @@ import org.apache.dolphinscheduler.remote.NettyRemotingClient; import org.apache.dolphinscheduler.remote.config.NettyClientConfig; import lombok.experimental.UtilityClass; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; @UtilityClass +@Slf4j public class NettyRemotingClientFactory { - private final Logger logger = LoggerFactory.getLogger(NettyRemotingClientFactory.class); - public NettyRemotingClient buildNettyRemotingClient() { NettyClientConfig nettyClientConfig = new NettyClientConfig(); - logger.info("NettyRemotingClient initialized with config: {}", nettyClientConfig); + log.info("NettyRemotingClient initialized with config: {}", nettyClientConfig); return new NettyRemotingClient(nettyClientConfig); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java index 4e4ae1456c..c21669cbcf 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/future/ResponseFuture.java @@ -27,16 +27,14 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * response future */ +@Slf4j public class ResponseFuture { - private static final Logger LOGGER = LoggerFactory.getLogger(ResponseFuture.class); - private static final ConcurrentHashMap FUTURE_TABLE = new ConcurrentHashMap<>(256); /** @@ -190,7 +188,7 @@ public class 
ResponseFuture { if ((future.getBeginTimestamp() + future.getTimeoutMillis() + 1000) <= System.currentTimeMillis()) { futureList.add(future); it.remove(); - LOGGER.warn("remove timeout request : {}", future); + log.warn("remove timeout request : {}", future); } } for (ResponseFuture future : futureList) { @@ -198,7 +196,7 @@ public class ResponseFuture { future.release(); future.executeInvokeCallback(); } catch (Exception ex) { - LOGGER.warn("scanFutureTable, execute callback error", ex); + log.warn("scanFutureTable, execute callback error", ex); } } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java index 56c9fc70e4..b1b9229c58 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyClientHandler.java @@ -31,9 +31,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.RejectedExecutionException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.channel.Channel; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandler; @@ -45,10 +43,9 @@ import io.netty.handler.timeout.IdleStateEvent; * netty client request handler */ @ChannelHandler.Sharable +@Slf4j public class NettyClientHandler extends ChannelInboundHandlerAdapter { - private final Logger logger = LoggerFactory.getLogger(NettyClientHandler.class); - /** * netty client */ @@ -154,17 +151,17 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { try { pair.getLeft().process(channel, command); } catch (Exception e) { - logger.error(String.format("process command %s exception", command), e); + 
log.error(String.format("process command %s exception", command), e); } }; try { pair.getRight().submit(run); } catch (RejectedExecutionException e) { - logger.warn("thread pool is full, discard command {} from {}", command, + log.warn("thread pool is full, discard command {} from {}", command, ChannelUtils.getRemoteAddress(channel)); } } else { - logger.warn("receive response {}, but not matched any request ", command); + log.warn("receive response {}, but not matched any request ", command); } } @@ -176,7 +173,7 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { */ @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { - logger.error("exceptionCaught : {}", cause.getMessage(), cause); + log.error("exceptionCaught : {}", cause.getMessage(), cause); nettyRemotingClient.closeChannel(ChannelUtils.toAddress(ctx.channel())); ctx.channel().close(); } @@ -189,8 +186,8 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { heartBeat.setBody(heartBeatData); ctx.channel().writeAndFlush(heartBeat) .addListener(ChannelFutureListener.CLOSE_ON_FAILURE); - if (logger.isDebugEnabled()) { - logger.debug("Client send heart beat to: {}", ChannelUtils.getRemoteAddress(ctx.channel())); + if (log.isDebugEnabled()) { + log.debug("Client send heart beat to: {}", ChannelUtils.getRemoteAddress(ctx.channel())); } } else { super.userEventTriggered(ctx, evt); diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java index 8ae04a70b6..103e27dc89 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/handler/NettyServerHandler.java @@ -28,9 +28,7 @@ import java.util.concurrent.ConcurrentHashMap; import 
java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.channel.Channel; import io.netty.channel.ChannelConfig; import io.netty.channel.ChannelHandler; @@ -42,10 +40,9 @@ import io.netty.handler.timeout.IdleStateEvent; * netty server request handler */ @ChannelHandler.Sharable +@Slf4j public class NettyServerHandler extends ChannelInboundHandlerAdapter { - private final Logger logger = LoggerFactory.getLogger(NettyServerHandler.class); - /** * netty remote server */ @@ -118,8 +115,8 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { private void processReceived(final Channel channel, final Command msg) { final CommandType commandType = msg.getType(); if (CommandType.HEART_BEAT.equals(commandType)) { - if (logger.isDebugEnabled()) { - logger.debug("server receive heart beat from: host: {}", ChannelUtils.getRemoteAddress(channel)); + if (log.isDebugEnabled()) { + log.debug("server receive heart beat from: host: {}", ChannelUtils.getRemoteAddress(channel)); } return; } @@ -129,16 +126,16 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { try { pair.getLeft().process(channel, msg); } catch (Exception ex) { - logger.error("process msg {} error", msg, ex); + log.error("process msg {} error", msg, ex); } }; try { pair.getRight().submit(r); } catch (RejectedExecutionException e) { - logger.warn("thread pool is full, discard msg {} from {}", msg, ChannelUtils.getRemoteAddress(channel)); + log.warn("thread pool is full, discard msg {} from {}", msg, ChannelUtils.getRemoteAddress(channel)); } } else { - logger.warn("commandType {} not support", commandType); + log.warn("commandType {} not support", commandType); } } @@ -150,7 +147,7 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { */ @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable 
cause) throws Exception { - logger.error("exceptionCaught : {}", cause.getMessage(), cause); + log.error("exceptionCaught : {}", cause.getMessage(), cause); ctx.channel().close(); } @@ -165,15 +162,15 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { ChannelConfig config = ch.config(); if (!ch.isWritable()) { - if (logger.isWarnEnabled()) { - logger.warn("{} is not writable, over high water level : {}", + if (log.isWarnEnabled()) { + log.warn("{} is not writable, over high water level : {}", ch, config.getWriteBufferHighWaterMark()); } config.setAutoRead(false); } else { - if (logger.isWarnEnabled()) { - logger.warn("{} is writable, to low water : {}", + if (log.isWarnEnabled()) { + log.warn("{} is writable, to low water : {}", ch, config.getWriteBufferLowWaterMark()); } config.setAutoRead(true); diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/LoggerRequestProcessor.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/LoggerRequestProcessor.java index 8e8eb7bd4e..38807bf18a 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/LoggerRequestProcessor.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/LoggerRequestProcessor.java @@ -49,8 +49,8 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Component; import io.netty.channel.Channel; @@ -59,13 +59,12 @@ import io.netty.channel.Channel; * logger request process logic */ @Component +@Slf4j public class LoggerRequestProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(LoggerRequestProcessor.class); - @Override public void process(Channel channel, Command command) { - logger.info("received command : {}", 
command); + log.info("received command : {}", command); // request task log command type final CommandType commandType = command.getType(); @@ -168,7 +167,7 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { } return bos.toByteArray(); } catch (IOException e) { - logger.error("get file bytes error", e); + log.error("get file bytes error", e); } return new byte[0]; } @@ -189,10 +188,10 @@ public class LoggerRequestProcessor implements NettyRequestProcessor { try (Stream stream = Files.lines(Paths.get(filePath))) { return stream.skip(skipLine).limit(limit).collect(Collectors.toList()); } catch (IOException e) { - logger.error("read file error", e); + log.error("read file error", e); } } else { - logger.info("file path: {} not exists", filePath); + log.info("file path: {} not exists", filePath); } return Collections.emptyList(); } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/StateEventCallbackService.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/StateEventCallbackService.java index ace3a9d3d7..3ecd1d51e1 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/StateEventCallbackService.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/processor/StateEventCallbackService.java @@ -29,8 +29,8 @@ import org.apache.dolphinscheduler.remote.utils.Host; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Service; import io.netty.channel.Channel; @@ -39,9 +39,9 @@ import io.netty.channel.Channel; * task callback service */ @Service +@Slf4j public class StateEventCallbackService { - private final Logger logger = LoggerFactory.getLogger(StateEventCallbackService.class); private static final int[] RETRY_BACKOFF = {1, 2, 3, 5, 10, 20, 40, 
100, 100, 100, 100, 200, 200, 200}; /** @@ -119,7 +119,7 @@ public class StateEventCallbackService { * @param command command need to send */ public void sendResult(Host host, Command command) { - logger.info("send result, host:{}, command:{}", host.getAddress(), command.toString()); + log.info("send result, host:{}, command:{}", host.getAddress(), command.toString()); newRemoteChannel(host).ifPresent(nettyRemoteChannel -> { nettyRemoteChannel.writeAndFlush(command); }); @@ -137,10 +137,10 @@ public class StateEventCallbackService { try { return this.nettyRemotingClient.sendSync(host, requestCommand, HTTP_CONNECTION_REQUEST_TIMEOUT); } catch (InterruptedException e) { - logger.error("send sync fail, host:{}, command:{}", host, requestCommand, e); + log.error("send sync fail, host:{}, command:{}", host, requestCommand, e); Thread.currentThread().interrupt(); } catch (RemotingException e) { - logger.error("send sync fail, host:{}, command:{}", host, requestCommand, e); + log.error("send sync fail, host:{}, command:{}", host, requestCommand, e); } finally { this.nettyRemotingClient.closeChannel(host); } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/CallerThreadExecutePolicy.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/CallerThreadExecutePolicy.java index 318e20d967..981b1d8c24 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/CallerThreadExecutePolicy.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/CallerThreadExecutePolicy.java @@ -20,19 +20,17 @@ package org.apache.dolphinscheduler.remote.utils; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ThreadPoolExecutor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * caller thread execute */ +@Slf4j public class CallerThreadExecutePolicy implements 
RejectedExecutionHandler { - private final Logger logger = LoggerFactory.getLogger(CallerThreadExecutePolicy.class); - @Override public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { - logger.warn("queue is full, trigger caller thread execute"); + log.warn("queue is full, trigger caller thread execute"); r.run(); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java index b4177ec25d..66f1794fdb 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java @@ -21,18 +21,15 @@ import org.apache.dolphinscheduler.common.utils.NetUtils; import java.net.InetSocketAddress; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.channel.Channel; /** * channel utils */ +@Slf4j public class ChannelUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(ChannelUtils.class); - private ChannelUtils() { throw new IllegalStateException(ChannelUtils.class.getName()); } @@ -67,7 +64,7 @@ public class ChannelUtils { InetSocketAddress socketAddress = ((InetSocketAddress) channel.remoteAddress()); if (socketAddress == null) { // the remote channel already closed - LOGGER.warn("The channel is already closed"); + log.warn("The channel is already closed"); return Host.EMPTY; } return new Host(NetUtils.getHost(socketAddress.getAddress()), socketAddress.getPort()); diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/JsonSerializer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/JsonSerializer.java index e860a72b3b..193c8ae605 100644 --- 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/JsonSerializer.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/JsonSerializer.java @@ -20,8 +20,7 @@ package org.apache.dolphinscheduler.remote.utils; import java.io.IOException; import java.nio.charset.StandardCharsets; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -29,11 +28,10 @@ import com.fasterxml.jackson.databind.ObjectMapper; /** * json serialize or deserialize */ +@Slf4j public class JsonSerializer { private static final ObjectMapper objectMapper = new ObjectMapper(); - private static final Logger logger = LoggerFactory.getLogger(JsonSerializer.class); - private JsonSerializer() { } @@ -50,7 +48,7 @@ public class JsonSerializer { try { json = objectMapper.writeValueAsString(obj); } catch (JsonProcessingException e) { - logger.error("serializeToString exception!", e); + log.error("serializeToString exception!", e); } return json.getBytes(Constants.UTF8); @@ -68,7 +66,7 @@ public class JsonSerializer { try { json = objectMapper.writeValueAsString(obj); } catch (JsonProcessingException e) { - logger.error("serializeToString exception!", e); + log.error("serializeToString exception!", e); } return json; @@ -88,7 +86,7 @@ public class JsonSerializer { try { return objectMapper.readValue(json, clazz); } catch (IOException e) { - logger.error("deserialize exception!", e); + log.error("deserialize exception!", e); return null; } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/client/ConsumerInterceptor.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/client/ConsumerInterceptor.java index d35edcdaea..4456d54996 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/client/ConsumerInterceptor.java 
+++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/client/ConsumerInterceptor.java @@ -35,15 +35,11 @@ import net.bytebuddy.implementation.bind.annotation.AllArguments; import net.bytebuddy.implementation.bind.annotation.Origin; import net.bytebuddy.implementation.bind.annotation.RuntimeType; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * ConsumerInterceptor */ public class ConsumerInterceptor { - private static final Logger logger = LoggerFactory.getLogger(ConsumerInterceptor.class); private Host host; private NettyClient nettyClient = NettyClient.getInstance(); diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/config/ServiceBean.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/config/ServiceBean.java index 9cfc189eb3..e1746d8c84 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/config/ServiceBean.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/config/ServiceBean.java @@ -25,17 +25,16 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; +import lombok.extern.slf4j.Slf4j; + import org.reflections.Reflections; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * ServiceBean find all rpcService */ +@Slf4j public class ServiceBean { - private static final Logger logger = LoggerFactory.getLogger(ServiceBean.class); - private static Map serviceMap = new HashMap<>(); private static AtomicBoolean initialized = new AtomicBoolean(false); @@ -54,7 +53,7 @@ public class ServiceBean { list.forEach(rpcClass -> { RpcService rpcService = rpcClass.getAnnotation(RpcService.class); serviceMap.put(rpcService.value(), rpcClass); - logger.info("load rpc service {}", rpcService.value()); + log.info("load rpc service {}", rpcService.value()); }); initialized.set(true); } diff --git 
a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyClient.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyClient.java index 633efa21d6..23c3ee8b35 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyClient.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyClient.java @@ -37,9 +37,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.bootstrap.Bootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; @@ -57,6 +55,7 @@ import io.netty.handler.timeout.IdleStateHandler; /** * NettyClient */ +@Slf4j public class NettyClient { public static NettyClient getInstance() { @@ -68,8 +67,6 @@ public class NettyClient { private static final NettyClient INSTANCE = new NettyClient(new NettyClientConfig()); } - private final Logger logger = LoggerFactory.getLogger(NettyClient.class); - /** * worker group */ @@ -128,7 +125,7 @@ public class NettyClient { return channel; } } catch (Exception ex) { - logger.warn(String.format("connect to %s error", host), ex); + log.warn(String.format("connect to %s error", host), ex); } return null; } @@ -223,7 +220,7 @@ public class NettyClient { assert future != null; result = future.get(); } catch (InterruptedException e) { - logger.error("send msg error,service name is {}", serviceName, e); + log.error("send msg error,service name is {}", serviceName, e); Thread.currentThread().interrupt(); } return result; @@ -240,9 +237,9 @@ public class NettyClient { this.workerGroup.shutdownGracefully(); } } catch (Exception ex) { - logger.error("netty client close exception", ex); + log.error("netty client close exception", ex); } - logger.info("netty client closed"); + 
log.info("netty client closed"); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyClientHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyClientHandler.java index c8baec2074..cbef395cd6 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyClientHandler.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyClientHandler.java @@ -30,9 +30,7 @@ import org.apache.dolphinscheduler.rpc.protocol.RpcProtocol; import java.lang.reflect.InvocationTargetException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; @@ -42,10 +40,9 @@ import io.netty.handler.timeout.IdleStateEvent; * NettyClientHandler */ @ChannelHandler.Sharable +@Slf4j public class NettyClientHandler extends ChannelInboundHandlerAdapter { - private static final Logger logger = LoggerFactory.getLogger(NettyClientHandler.class); - private static final ThreadPoolManager threadPoolManager = ThreadPoolManager.INSTANCE; @Override @@ -62,7 +59,7 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { RpcRequestCache rpcRequest = RpcRequestTable.get(reqId); if (null == rpcRequest) { - logger.warn("rpc read error,this request does not exist"); + log.warn("rpc read error,this request does not exist"); return; } threadPoolManager.addExecuteTask(() -> readHandler(rsp, rpcRequest, reqId)); @@ -88,10 +85,10 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { consumerConfig.getServiceCallBackClass().getDeclaredConstructor().newInstance().run(rsp.getResult()); } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { - logger.error("rpc service call back error,serviceName 
{},rsp {}", serviceName, rsp); + log.error("rpc service call back error,serviceName {},rsp {}", serviceName, rsp); } } else { - logger.error("rpc response error ,serviceName {},rsp {}", serviceName, rsp); + log.error("rpc response error ,serviceName {},rsp {}", serviceName, rsp); } } @@ -105,7 +102,7 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { messageHeader.setEventType(EventType.HEARTBEAT.getType()); rpcProtocol.setMsgHeader(messageHeader); ctx.channel().writeAndFlush(rpcProtocol); - logger.debug("send heart beat msg..."); + log.debug("send heart beat msg..."); } else { super.userEventTriggered(ctx, evt); } @@ -113,7 +110,7 @@ public class NettyClientHandler extends ChannelInboundHandlerAdapter { @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { - logger.error("exceptionCaught : {}", cause.getMessage(), cause); + log.error("exceptionCaught : {}", cause.getMessage(), cause); ctx.channel().close(); } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyServer.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyServer.java index 559eb90e46..e55ad79537 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyServer.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyServer.java @@ -29,9 +29,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.bootstrap.ServerBootstrap; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelInitializer; @@ -48,10 +46,9 @@ import io.netty.handler.timeout.IdleStateHandler; /** * NettyServer */ +@Slf4j public class NettyServer { - private static final Logger logger = 
LoggerFactory.getLogger(NettyServer.class); - /** * boss group */ @@ -157,12 +154,12 @@ public class NettyServer { try { future = serverBootstrap.bind(serverConfig.getListenPort()).sync(); } catch (Exception e) { - logger.error("NettyRemotingServer bind fail {}, exit", e.getMessage(), e); + log.error("NettyRemotingServer bind fail {}, exit", e.getMessage(), e); throw new RuntimeException( String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort())); } if (future.isSuccess()) { - logger.info("NettyRemotingServer bind success at port : {}", serverConfig.getListenPort()); + log.info("NettyRemotingServer bind success at port : {}", serverConfig.getListenPort()); } else if (future.cause() != null) { throw new RuntimeException( String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()), @@ -199,9 +196,9 @@ public class NettyServer { } } catch (Exception ex) { - logger.error("netty server close exception", ex); + log.error("netty server close exception", ex); } - logger.info("netty server closed"); + log.info("netty server closed"); } } diff --git a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyServerHandler.java b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyServerHandler.java index 9dd4315a16..ba3a8b5fcf 100644 --- a/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyServerHandler.java +++ b/dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/rpc/remote/NettyServerHandler.java @@ -26,9 +26,7 @@ import org.apache.dolphinscheduler.rpc.protocol.RpcProtocol; import java.lang.reflect.Method; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInboundHandlerAdapter; import io.netty.handler.timeout.IdleStateEvent; @@ -36,21 +34,20 @@ import io.netty.handler.timeout.IdleStateEvent; /** * 
NettyServerHandler */ +@Slf4j public class NettyServerHandler extends ChannelInboundHandlerAdapter { - private static final Logger logger = LoggerFactory.getLogger(NettyServerHandler.class); - private static final ThreadPoolManager threadPoolManager = ThreadPoolManager.INSTANCE; @Override public void channelInactive(ChannelHandlerContext ctx) { - logger.info("channel close"); + log.info("channel close"); ctx.channel().close(); } @Override public void channelActive(ChannelHandlerContext ctx) { - logger.info("client connect success !" + ctx.channel().remoteAddress()); + log.info("client connect success !" + ctx.channel().remoteAddress()); } @Override @@ -58,7 +55,7 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { public void channelRead(ChannelHandlerContext ctx, Object msg) { RpcProtocol rpcProtocol = (RpcProtocol) msg; if (rpcProtocol.getMsgHeader().getEventType() == EventType.HEARTBEAT.getType()) { - logger.info("heart beat"); + log.info("heart beat"); return; } threadPoolManager.addExecuteTask(() -> readHandler(ctx, rpcProtocol)); @@ -87,7 +84,7 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { result = method.invoke(object, arguments); } catch (Exception e) { - logger.error("netty server execute error,service name :{} method name :{} ", classname + methodName, e); + log.error("netty server execute error,service name :{} method name :{} ", classname + methodName, e); response.setStatus((byte) -1); } @@ -100,7 +97,7 @@ public class NettyServerHandler extends ChannelInboundHandlerAdapter { @Override public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { if (evt instanceof IdleStateEvent) { - logger.debug("IdleStateEvent triggered, send heartbeat to channel " + ctx.channel()); + log.debug("IdleStateEvent triggered, send heartbeat to channel " + ctx.channel()); } else { super.userEventTriggered(ctx, evt); } @@ -108,7 +105,7 @@ public class NettyServerHandler extends 
ChannelInboundHandlerAdapter { @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { - logger.error("exceptionCaught : {}", cause.getMessage(), cause); + log.error("exceptionCaught : {}", cause.getMessage(), cause); ctx.channel().close(); } } diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java index 90c4644338..b0d0bb39ff 100644 --- a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/ProcessScheduleTask.java @@ -31,22 +31,21 @@ import org.apache.commons.lang3.StringUtils; import java.util.Date; +import lombok.extern.slf4j.Slf4j; + import org.quartz.JobDataMap; import org.quartz.JobExecutionContext; import org.quartz.JobKey; import org.quartz.Scheduler; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.quartz.QuartzJobBean; import io.micrometer.core.annotation.Counted; import io.micrometer.core.annotation.Timed; +@Slf4j public class ProcessScheduleTask extends QuartzJobBean { - private static final Logger logger = LoggerFactory.getLogger(ProcessScheduleTask.class); - @Autowired private ProcessService processService; @@ -66,12 +65,12 @@ public class ProcessScheduleTask extends QuartzJobBean { Date fireTime = context.getFireTime(); - logger.info("scheduled fire time :{}, fire time :{}, scheduleId :{}", scheduledFireTime, fireTime, scheduleId); + log.info("scheduled fire time :{}, fire time :{}, scheduleId :{}", scheduledFireTime, 
fireTime, scheduleId); // query schedule Schedule schedule = processService.querySchedule(scheduleId); if (schedule == null || ReleaseState.OFFLINE == schedule.getReleaseState()) { - logger.warn( + log.warn( "process schedule does not exist in db or process schedule offline,delete schedule job in quartz, projectId:{}, scheduleId:{}", projectId, scheduleId); deleteJob(context, projectId, scheduleId); @@ -83,7 +82,7 @@ public class ProcessScheduleTask extends QuartzJobBean { // release state : online/offline ReleaseState releaseState = processDefinition.getReleaseState(); if (releaseState == ReleaseState.OFFLINE) { - logger.warn( + log.warn( "process definition does not exist in db or offline,need not to create command, projectId:{}, processDefinitionId:{}", projectId, processDefinition.getId()); return; @@ -113,11 +112,11 @@ public class ProcessScheduleTask extends QuartzJobBean { JobKey jobKey = QuartzTaskUtils.getJobKey(scheduleId, projectId); try { if (scheduler.checkExists(jobKey)) { - logger.info("Try to delete job: {}, projectId: {}, schedulerId", projectId, scheduleId); + log.info("Try to delete job: {}, projectId: {}, schedulerId", projectId, scheduleId); scheduler.deleteJob(jobKey); } } catch (Exception e) { - logger.error("Failed to delete job: {}", jobKey); + log.error("Failed to delete job: {}", jobKey); } } } diff --git a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzScheduler.java b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzScheduler.java index bf970c0491..7d0f1ce73c 100644 --- a/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzScheduler.java +++ b/dolphinscheduler-scheduler-plugin/dolphinscheduler-scheduler-quartz/src/main/java/org/apache/dolphinscheduler/scheduler/quartz/QuartzScheduler.java 
@@ -32,21 +32,20 @@ import java.util.Map; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import lombok.extern.slf4j.Slf4j; + import org.quartz.CronTrigger; import org.quartz.JobDetail; import org.quartz.JobKey; import org.quartz.Scheduler; import org.quartz.TriggerKey; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import com.google.common.base.Strings; +@Slf4j public class QuartzScheduler implements SchedulerApi { - private static final Logger logger = LoggerFactory.getLogger(QuartzScheduler.class); - @Autowired private Scheduler scheduler; @@ -102,7 +101,7 @@ public class QuartzScheduler implements SchedulerApi { scheduler.addJob(jobDetail, false, true); - logger.info("Add job, job name: {}, group name: {}", jobKey.getName(), jobKey.getGroup()); + log.info("Add job, job name: {}, group name: {}", jobKey.getName(), jobKey.getGroup()); } TriggerKey triggerKey = new TriggerKey(jobKey.getName(), jobKey.getGroup()); @@ -129,19 +128,19 @@ public class QuartzScheduler implements SchedulerApi { if (!Strings.nullToEmpty(cronExpression).equalsIgnoreCase(Strings.nullToEmpty(oldCronExpression))) { // reschedule job trigger scheduler.rescheduleJob(triggerKey, cronTrigger); - logger.info( + log.info( "reschedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", triggerKey.getName(), triggerKey.getGroup(), cronExpression, startDate, endDate); } } else { scheduler.scheduleJob(cronTrigger); - logger.info( + log.info( "schedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", triggerKey.getName(), triggerKey.getGroup(), cronExpression, startDate, endDate); } } catch (Exception e) { - logger.error("Failed to add scheduler task, projectId: {}, scheduler: {}", projectId, schedule, e); + log.error("Failed to add scheduler task, projectId: {}, 
scheduler: {}", projectId, schedule, e); throw new SchedulerException("Add schedule job failed", e); } finally { lock.writeLock().unlock(); @@ -153,11 +152,11 @@ public class QuartzScheduler implements SchedulerApi { JobKey jobKey = QuartzTaskUtils.getJobKey(scheduleId, projectId); try { if (scheduler.checkExists(jobKey)) { - logger.info("Try to delete scheduler task, projectId: {}, schedulerId: {}", projectId, scheduleId); + log.info("Try to delete scheduler task, projectId: {}, schedulerId: {}", projectId, scheduleId); scheduler.deleteJob(jobKey); } } catch (Exception e) { - logger.error("Failed to delete scheduler task, projectId: {}, schedulerId: {}", projectId, scheduleId, e); + log.error("Failed to delete scheduler task, projectId: {}, schedulerId: {}", projectId, scheduleId, e); throw new SchedulerException("Failed to delete scheduler task"); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/AlertClientService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/AlertClientService.java index 4f915a97fd..8bfb3dd3c3 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/AlertClientService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/AlertClientService.java @@ -27,13 +27,11 @@ import org.apache.dolphinscheduler.remote.utils.JsonSerializer; import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class AlertClientService implements AutoCloseable { - private static final Logger logger = LoggerFactory.getLogger(AlertClientService.class); - private final NettyRemotingClient client; private final AtomicBoolean isRunning; @@ -70,13 +68,13 @@ public class AlertClientService implements AutoCloseable { @Override public void close() { if (isRunning.compareAndSet(true, false)) { - logger.warn("Alert client 
is already closed"); + log.warn("Alert client is already closed"); return; } - logger.info("Alter client closing"); + log.info("Alter client closing"); this.client.close(); - logger.info("Alter client closed"); + log.info("Alter client closed"); } /** @@ -101,7 +99,7 @@ public class AlertClientService implements AutoCloseable { */ public AlertSendResponseCommand sendAlert(String host, int port, int groupId, String title, String content, int strategy) { - logger.info("sync alert send, host : {}, port : {}, groupId : {}, title : {} , strategy : {} ", host, port, + log.info("sync alert send, host : {}, port : {}, groupId : {}, title : {} , strategy : {} ", host, port, groupId, title, strategy); AlertSendRequestCommand request = new AlertSendRequestCommand(groupId, title, content, strategy); final Host address = new Host(host, port); @@ -112,7 +110,7 @@ public class AlertClientService implements AutoCloseable { return JsonSerializer.deserialize(response.getBody(), AlertSendResponseCommand.class); } } catch (Exception e) { - logger.error("sync alert send error", e); + log.error("sync alert send error", e); } finally { this.client.closeChannel(address); } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java index c568343931..02dcdfa962 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java @@ -41,8 +41,8 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -50,13 +50,9 @@ import 
org.springframework.stereotype.Component; * process alert manager */ @Component +@Slf4j public class ProcessAlertManager { - /** - * logger of AlertManager - */ - private static final Logger logger = LoggerFactory.getLogger(ProcessAlertManager.class); - /** * alert dao */ @@ -204,7 +200,7 @@ public class ProcessAlertManager { alertDao.addAlert(alert); } catch (Exception e) { - logger.error("send alert failed:{} ", e.getMessage()); + log.error("send alert failed:{} ", e.getMessage()); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheNotifyServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheNotifyServiceImpl.java index 8063946b4c..b6f3e4ec96 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheNotifyServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cache/impl/CacheNotifyServiceImpl.java @@ -32,8 +32,8 @@ import org.apache.commons.collections4.CollectionUtils; import java.util.List; import java.util.concurrent.ConcurrentHashMap; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @@ -43,10 +43,9 @@ import io.netty.channel.Channel; * cache notify service */ @Service +@Slf4j public class CacheNotifyServiceImpl implements CacheNotifyService { - private final Logger logger = LoggerFactory.getLogger(CacheNotifyServiceImpl.class); - @Autowired private RegistryClient registryClient; @@ -113,7 +112,7 @@ public class CacheNotifyServiceImpl implements CacheNotifyService { */ @Override public void notifyMaster(Command command) { - logger.info("send result, command:{}", command.toString()); + log.info("send result, command:{}", command.toString()); try { List serverList = 
registryClient.getServerList(NodeType.MASTER); if (CollectionUtils.isEmpty(serverList)) { @@ -129,7 +128,7 @@ public class CacheNotifyServiceImpl implements CacheNotifyService { nettyRemoteChannel.writeAndFlush(command); } } catch (Exception e) { - logger.error("notify master error", e); + log.error("notify master error", e); } } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java index 546cece17b..483899446b 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/command/CommandServiceImpl.java @@ -50,9 +50,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import lombok.extern.slf4j.Slf4j; + import org.jetbrains.annotations.NotNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -64,10 +64,9 @@ import io.micrometer.core.annotation.Counted; * Command Service implementation */ @Component +@Slf4j public class CommandServiceImpl implements CommandService { - private final Logger logger = LoggerFactory.getLogger(CommandServiceImpl.class); - @Autowired private ErrorCommandMapper errorCommandMapper; @@ -218,7 +217,7 @@ public class CommandServiceImpl implements CommandService { Long.parseLong( String.valueOf(subProcessParam.get(CMD_PARAM_SUB_PROCESS_DEFINE_CODE))); } catch (NumberFormatException nfe) { - logger.error("processDefinitionCode is not a number", nfe); + log.error("processDefinitionCode is not a number", nfe); return null; } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cron/CronUtils.java 
b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cron/CronUtils.java index 39012b3673..0ffb483a1a 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cron/CronUtils.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/cron/CronUtils.java @@ -49,9 +49,7 @@ import java.util.Optional; import java.util.stream.Collectors; import lombok.NonNull; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.cronutils.model.Cron; import com.cronutils.model.definition.CronDefinitionBuilder; @@ -62,14 +60,13 @@ import com.cronutils.parser.CronParser; * // todo: this utils is heavy, it rely on quartz and corn-utils. * cron utils */ +@Slf4j public class CronUtils { private CronUtils() { throw new IllegalStateException("CronUtils class"); } - private static final Logger logger = LoggerFactory.getLogger(CronUtils.class); - private static final CronParser QUARTZ_CRON_PARSER = new CronParser(CronDefinitionBuilder.instanceDefinitionFor(QUARTZ)); @@ -261,12 +258,12 @@ public class CronUtils { calendar.add(Calendar.DATE, 1); break; default: - logger.error("Dependent process definition's cycleEnum is {},not support!!", cycleEnum); + log.error("Dependent process definition's cycleEnum is {},not support!!", cycleEnum); break; } maxExpirationTime = calendar.getTime(); } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } return DateUtils.compare(startTimeMax, maxExpirationTime) ? 
maxExpirationTime : startTimeMax; } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClient.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClient.java index 114f5c1640..dc208c2763 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClient.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/log/LogClient.java @@ -46,16 +46,14 @@ import java.util.List; import javax.annotation.Nullable; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; @Service +@Slf4j public class LogClient implements AutoCloseable { - private static final Logger logger = LoggerFactory.getLogger(LogClient.class); - private static final byte[] EMPTY_BYTE_ARRAY = new byte[0]; private final NettyRemotingClient client; @@ -77,7 +75,7 @@ public class LogClient implements AutoCloseable { * @return log content */ public String rollViewLog(String host, int port, String path, int skipLineNum, int limit) { - logger.info("Roll view log from host : {}, port : {}, path {}, skipLineNum {} ,limit {}", host, port, path, + log.info("Roll view log from host : {}, port : {}, path {}, skipLineNum {} ,limit {}", host, port, path, skipLineNum, limit); RollViewLogRequestCommand request = new RollViewLogRequestCommand(path, skipLineNum, limit); final Host address = new Host(host, port); @@ -92,12 +90,12 @@ public class LogClient implements AutoCloseable { return "Roll view log response is null"; } catch (InterruptedException ex) { Thread.currentThread().interrupt(); - logger.error( + log.error( "Roll view log from host : {}, port : {}, path {}, skipLineNum {} ,limit {} error, the current thread has been interrupted", host, port, path, skipLineNum, limit, ex); return "Roll view log error: " + ex.getMessage(); } catch (Exception e) { - 
logger.error("Roll view log from host : {}, port : {}, path {}, skipLineNum {} ,limit {} error", host, port, + log.error("Roll view log from host : {}, port : {}, path {}, skipLineNum {} ,limit {} error", host, port, path, skipLineNum, limit, e); return "Roll view log error: " + e.getMessage(); } @@ -112,7 +110,7 @@ public class LogClient implements AutoCloseable { * @return log content */ public String viewLog(String host, int port, String path) { - logger.info("View log from host: {}, port: {}, logPath: {}", host, port, path); + log.info("View log from host: {}, port: {}, logPath: {}", host, port, path); ViewLogRequestCommand request = new ViewLogRequestCommand(path); final Host address = new Host(host, port); try { @@ -130,11 +128,11 @@ public class LogClient implements AutoCloseable { } } catch (InterruptedException ex) { Thread.currentThread().interrupt(); - logger.error("View log from host: {}, port: {}, logPath: {} error, the current thread has been interrupted", + log.error("View log from host: {}, port: {}, logPath: {} error, the current thread has been interrupted", host, port, path, ex); return "View log error: " + ex.getMessage(); } catch (Exception e) { - logger.error("View log from host: {}, port: {}, logPath: {} error", host, port, path, e); + log.error("View log from host: {}, port: {}, logPath: {} error", host, port, path, e); return "View log error: " + e.getMessage(); } } @@ -148,7 +146,7 @@ public class LogClient implements AutoCloseable { * @return log content bytes */ public byte[] getLogBytes(String host, int port, String path) { - logger.info("Get log bytes from host: {}, port: {}, logPath {}", host, port, path); + log.info("Get log bytes from host: {}, port: {}, logPath {}", host, port, path); GetLogBytesRequestCommand request = new GetLogBytesRequestCommand(path); final Host address = new Host(host, port); try { @@ -162,12 +160,12 @@ public class LogClient implements AutoCloseable { return EMPTY_BYTE_ARRAY; } catch (InterruptedException 
ex) { Thread.currentThread().interrupt(); - logger.error( + log.error( "Get logSize from host: {}, port: {}, logPath: {} error, the current thread has been interrupted", host, port, path, ex); return EMPTY_BYTE_ARRAY; } catch (Exception e) { - logger.error("Get logSize from host: {}, port: {}, logPath: {} error", host, port, path, e); + log.error("Get logSize from host: {}, port: {}, logPath: {} error", host, port, path, e); return EMPTY_BYTE_ARRAY; } } @@ -179,42 +177,42 @@ public class LogClient implements AutoCloseable { * @param path path */ public void removeTaskLog(@NonNull Host host, String path) { - logger.info("Begin remove task log from host: {} logPath {}", host, path); + log.info("Begin remove task log from host: {} logPath {}", host, path); RemoveTaskLogRequestCommand request = new RemoveTaskLogRequestCommand(path); try { Command command = request.convert2Command(); client.sendAsync(host, command, LOG_REQUEST_TIMEOUT, responseFuture -> { if (responseFuture.getCause() != null) { - logger.error("Remove task log from host: {} logPath {} error, meet an unknown exception", host, + log.error("Remove task log from host: {} logPath {} error, meet an unknown exception", host, path, responseFuture.getCause()); return; } Command response = responseFuture.getResponseCommand(); if (response == null) { - logger.error("Remove task log from host: {} logPath {} error, response is null", host, path); + log.error("Remove task log from host: {} logPath {} error, response is null", host, path); return; } RemoveTaskLogResponseCommand removeTaskLogResponse = JSONUtils.parseObject(response.getBody(), RemoveTaskLogResponseCommand.class); if (removeTaskLogResponse.getStatus()) { - logger.info("Success remove task log from host: {} logPath {}", host, path); + log.info("Success remove task log from host: {} logPath {}", host, path); } else { - logger.error("Remove task log from host: {} logPath {} error", host, path); + log.error("Remove task log from host: {} logPath {} error", 
host, path); } }); } catch (InterruptedException interruptedException) { Thread.currentThread().interrupt(); - logger.error("Remove task log from host: {} logPath {} error, the current thread has been interrupted", + log.error("Remove task log from host: {} logPath {} error, the current thread has been interrupted", host, path, interruptedException); } catch (Exception e) { - logger.error("Remove task log from host: {}, logPath: {} error", host, path, e); + log.error("Remove task log from host: {}, logPath: {} error", host, path, e); } } public @Nullable List getAppIds(@NonNull String host, int port, @NonNull String taskLogFilePath, @NonNull String taskAppInfoPath) throws RemotingException, InterruptedException { - logger.info("Begin to get appIds from worker: {}:{} taskLogPath: {}, taskAppInfoPath: {}", host, port, + log.info("Begin to get appIds from worker: {}:{} taskLogPath: {}, taskAppInfoPath: {}", host, port, taskLogFilePath, taskAppInfoPath); final Host workerAddress = new Host(host, port); List appIds = null; @@ -230,7 +228,7 @@ public class LogClient implements AutoCloseable { appIds = responseCommand.getAppIds(); } } - logger.info("Get appIds: {} from worker: {}:{} taskLogPath: {}, taskAppInfoPath: {}", appIds, host, port, + log.info("Get appIds: {} from worker: {}:{} taskLogPath: {}, taskAppInfoPath: {}", appIds, host, port, taskLogFilePath, taskAppInfoPath); return appIds; } @@ -238,7 +236,7 @@ public class LogClient implements AutoCloseable { @Override public void close() { this.client.close(); - logger.info("LogClientService closed"); + log.info("LogClientService closed"); } } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java index dbb579f984..2863274708 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java 
+++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessServiceImpl.java @@ -163,8 +163,8 @@ import java.util.stream.Collectors; import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; @@ -180,10 +180,9 @@ import com.google.common.collect.Lists; * process relative dao that some mappers in this. */ @Component +@Slf4j public class ProcessServiceImpl implements ProcessService { - private final Logger logger = LoggerFactory.getLogger(getClass()); - @Autowired private UserMapper userMapper; @@ -324,7 +323,7 @@ public class ProcessServiceImpl implements ProcessService { ProcessInstance processInstance = constructProcessInstance(command, host); // cannot construct process instance, return null if (processInstance == null) { - logger.error("scan command, command parameter is error: {}", command); + log.error("scan command, command parameter is error: {}", command); commandService.moveToErrorCommand(command, "process instance is null"); return null; } @@ -404,7 +403,7 @@ public class ProcessServiceImpl implements ProcessService { Host host = new Host(info.getHost()); stateEventCallbackService.sendResult(host, workflowStateEventChangeCommand.convert2Command()); } catch (Exception e) { - logger.error("sendResultError", e); + log.error("sendResultError", e); } } } @@ -571,7 +570,7 @@ public class ProcessServiceImpl implements ProcessService { if (CollectionUtils.isNotEmpty(complementDateList)) { scheduleTime = complementDateList.get(0); } else { - logger.error("set scheduler time error: complement date list is empty, command: {}", + log.error("set scheduler time error: complement date list is empty, command: {}", command.toString()); } } @@ -743,7 +742,7 @@ public class ProcessServiceImpl 
implements ProcessService { if (cmdParam == null || !cmdParam.containsKey(CommandKeyConstants.CMD_PARAM_START_NODES) || cmdParam.get(CommandKeyConstants.CMD_PARAM_START_NODES).isEmpty()) { - logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType()); + log.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType()); return false; } } @@ -766,7 +765,7 @@ public class ProcessServiceImpl implements ProcessService { processDefinition = this.findProcessDefinition(command.getProcessDefinitionCode(), command.getProcessDefinitionVersion()); if (processDefinition == null) { - logger.error("cannot find the work process define! define code : {}", command.getProcessDefinitionCode()); + log.error("cannot find the work process define! define code : {}", command.getProcessDefinitionCode()); throw new IllegalArgumentException("Cannot find the process definition for this workflowInstance"); } Map cmdParam = JSONUtils.toMap(command.getCommandParam()); @@ -816,7 +815,7 @@ public class ProcessServiceImpl implements ProcessService { processInstance.setCommandParam(command.getCommandParam()); } if (Boolean.FALSE.equals(checkCmdParam(command, cmdParam))) { - logger.error("command parameter check failed!"); + log.error("command parameter check failed!"); return null; } if (command.getScheduleTime() != null) { @@ -1050,7 +1049,7 @@ public class ProcessServiceImpl implements ProcessService { .setVarPool(joinVarPool(parentInstance.getVarPool(), subProcessInstance.getVarPool())); processInstanceDao.upsertProcessInstance(subProcessInstance); } else { - logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam); + log.error("sub process command params error, cannot find parent instance: {} ", cmdParam); } } ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class); @@ -1145,13 +1144,13 @@ public class ProcessServiceImpl implements ProcessService 
{ if (task != null && task.getId() != null) { break; } - logger.error( + log.error( "task commit to db failed , taskCode: {} has already retry {} times, please check the database", taskInstance.getTaskCode(), retryTimes); Thread.sleep(commitInterval); } catch (Exception e) { - logger.error("task commit to db failed", e); + log.error("task commit to db failed", e); } finally { retryTimes += 1; } @@ -1171,14 +1170,14 @@ public class ProcessServiceImpl implements ProcessService { @Override @Transactional public TaskInstance submitTask(ProcessInstance processInstance, TaskInstance taskInstance) { - logger.info("Start save taskInstance to database : {}, processInstance id:{}, state: {}", + log.info("Start save taskInstance to database : {}, processInstance id:{}, state: {}", taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState()); // submit to db TaskInstance task = taskInstanceDao.submitTaskInstanceToDB(taskInstance, processInstance); if (task == null) { - logger.error("Save taskInstance to db error, task name:{}, process id:{} state: {} ", + log.error("Save taskInstance to db error, task name:{}, process id:{} state: {} ", taskInstance.getName(), taskInstance.getProcessInstance().getId(), processInstance.getState()); @@ -1189,7 +1188,7 @@ public class ProcessServiceImpl implements ProcessService { createSubWorkProcess(processInstance, task); } - logger.info( + log.info( "End save taskInstance to db successfully:{}, taskInstanceName: {}, taskInstance state:{}, processInstanceId:{}, processInstanceState: {}", task.getId(), task.getName(), @@ -1255,7 +1254,7 @@ public class ProcessServiceImpl implements ProcessService { } } } - logger.info("sub process instance is not found,parent task:{},parent instance:{}", + log.info("sub process instance is not found,parent task:{},parent instance:{}", parentTask.getId(), parentProcessInstance.getId()); return null; } @@ -1286,19 +1285,19 @@ public class ProcessServiceImpl implements ProcessService { 
} if (childInstance != null && childInstance.getState() == WorkflowExecutionStatus.SUCCESS && CommandType.START_FAILURE_TASK_PROCESS == parentProcessInstance.getCommandType()) { - logger.info("sub process instance {} status is success, so skip creating command", childInstance.getId()); + log.info("sub process instance {} status is success, so skip creating command", childInstance.getId()); return; } Command subProcessCommand = commandService.createSubProcessCommand(parentProcessInstance, childInstance, instanceMap, task); if (subProcessCommand == null) { - logger.error("create sub process command failed, so skip creating command"); + log.error("create sub process command failed, so skip creating command"); return; } updateSubProcessDefinitionByParent(parentProcessInstance, subProcessCommand.getProcessDefinitionCode()); initSubInstanceState(childInstance); commandService.createCommand(subProcessCommand); - logger.info("sub process command created: {} ", subProcessCommand); + log.info("sub process command created: {} ", subProcessCommand); } /** @@ -1411,7 +1410,7 @@ public class ProcessServiceImpl implements ProcessService { if (res != null) { String resourceFullName = res.getResourceName(); if (StringUtils.isBlank(resourceFullName)) { - logger.error("invalid resource full name, {}", resourceFullName); + log.error("invalid resource full name, {}", resourceFullName); return new ResourceInfo(); } resourceInfo = new ResourceInfo(); @@ -1422,7 +1421,7 @@ public class ProcessServiceImpl implements ProcessService { resourceInfo.setRes(res.getRes()); resourceInfo.setResourceName(resourceFullName); } - logger.info("updated resource info {}", + log.info("updated resource info {}", JSONUtils.toJsonString(resourceInfo)); } return resourceInfo; @@ -2453,7 +2452,7 @@ public class ProcessServiceImpl implements ProcessService { taskGroupPriority, TaskGroupQueueStatus.WAIT_QUEUE); } else { - logger.info("The task queue is already exist, taskId: {}", taskInstanceId); + log.info("The 
task queue is already exist, taskId: {}", taskInstanceId); if (taskGroupQueue.getStatus() == TaskGroupQueueStatus.ACQUIRE_SUCCESS) { return true; } @@ -2472,10 +2471,10 @@ public class ProcessServiceImpl implements ProcessService { // try to get taskGroup int count = taskGroupMapper.selectAvailableCountById(taskGroupId); if (count == 1 && robTaskGroupResource(taskGroupQueue)) { - logger.info("Success acquire taskGroup, taskInstanceId: {}, taskGroupId: {}", taskInstanceId, taskGroupId); + log.info("Success acquire taskGroup, taskInstanceId: {}, taskGroupId: {}", taskInstanceId, taskGroupId); return true; } - logger.info("Failed to acquire taskGroup, taskInstanceId: {}, taskGroupId: {}", taskInstanceId, taskGroupId); + log.info("Failed to acquire taskGroup, taskInstanceId: {}, taskGroupId: {}", taskInstanceId, taskGroupId); this.taskGroupQueueMapper.updateInQueue(Flag.NO.getCode(), taskGroupQueue.getId()); return false; } @@ -2489,7 +2488,7 @@ public class ProcessServiceImpl implements ProcessService { for (int i = 0; i < 10; i++) { TaskGroup taskGroup = taskGroupMapper.selectById(taskGroupQueue.getGroupId()); if (taskGroup.getGroupSize() <= taskGroup.getUseSize()) { - logger.info("The current task Group is full, taskGroup: {}", taskGroup); + log.info("The current task Group is full, taskGroup: {}", taskGroup); return false; } int affectedCount = taskGroupMapper.robTaskGroupResource(taskGroup.getId(), @@ -2497,7 +2496,7 @@ public class ProcessServiceImpl implements ProcessService { taskGroupQueue.getId(), TaskGroupQueueStatus.WAIT_QUEUE.getCode()); if (affectedCount > 0) { - logger.info("Success rob taskGroup, taskInstanceId: {}, taskGroupId: {}", taskGroupQueue.getTaskId(), + log.info("Success rob taskGroup, taskInstanceId: {}, taskGroupId: {}", taskGroupQueue.getTaskId(), taskGroupQueue.getId()); taskGroupQueue.setStatus(TaskGroupQueueStatus.ACQUIRE_SUCCESS); this.taskGroupQueueMapper.updateById(taskGroupQueue); @@ -2505,7 +2504,7 @@ public class ProcessServiceImpl 
implements ProcessService { return true; } } - logger.info("Failed to rob taskGroup, taskGroupQueue: {}", taskGroupQueue); + log.info("Failed to rob taskGroup, taskGroupQueue: {}", taskGroupQueue); return false; } @@ -2528,21 +2527,21 @@ public class ProcessServiceImpl implements ProcessService { TaskGroup taskGroup; TaskGroupQueue thisTaskGroupQueue; - logger.info("Begin to release task group: {}", taskInstance.getTaskGroupId()); + log.info("Begin to release task group: {}", taskInstance.getTaskGroupId()); try { do { taskGroup = taskGroupMapper.selectById(taskInstance.getTaskGroupId()); if (taskGroup == null) { - logger.error("The taskGroup is null, taskGroupId: {}", taskInstance.getTaskGroupId()); + log.error("The taskGroup is null, taskGroupId: {}", taskInstance.getTaskGroupId()); return null; } thisTaskGroupQueue = this.taskGroupQueueMapper.queryByTaskId(taskInstance.getId()); if (thisTaskGroupQueue.getStatus() == TaskGroupQueueStatus.RELEASE) { - logger.info("The taskGroupQueue's status is release, taskInstanceId: {}", taskInstance.getId()); + log.info("The taskGroupQueue's status is release, taskInstanceId: {}", taskInstance.getId()); return null; } if (thisTaskGroupQueue.getStatus() == TaskGroupQueueStatus.WAIT_QUEUE) { - logger.info("The taskGroupQueue's status is in waiting, will not need to release task group"); + log.info("The taskGroupQueue's status is in waiting, will not need to release task group"); break; } } while (thisTaskGroupQueue.getForceStart() == Flag.NO.getCode() @@ -2551,12 +2550,12 @@ public class ProcessServiceImpl implements ProcessService { thisTaskGroupQueue.getId(), TaskGroupQueueStatus.ACQUIRE_SUCCESS.getCode()) != 1); } catch (Exception e) { - logger.error("release the task group error", e); + log.error("release the task group error", e); return null; } - logger.info("Finished to release task group, taskGroupId: {}", taskInstance.getTaskGroupId()); + log.info("Finished to release task group, taskGroupId: {}", 
taskInstance.getTaskGroupId()); - logger.info("Begin to release task group queue, taskGroupId: {}", taskInstance.getTaskGroupId()); + log.info("Begin to release task group queue, taskGroupId: {}", taskInstance.getTaskGroupId()); changeTaskGroupQueueStatus(taskInstance.getId(), TaskGroupQueueStatus.RELEASE); TaskGroupQueue taskGroupQueue; do { @@ -2565,13 +2564,13 @@ public class ProcessServiceImpl implements ProcessService { Flag.NO.getCode(), Flag.NO.getCode()); if (taskGroupQueue == null) { - logger.info("The taskGroupQueue is null, taskGroup: {}", taskGroup.getId()); + log.info("The taskGroupQueue is null, taskGroup: {}", taskGroup.getId()); return null; } } while (this.taskGroupQueueMapper.updateInQueueCAS(Flag.NO.getCode(), Flag.YES.getCode(), taskGroupQueue.getId()) != 1); - logger.info("Finished to release task group queue: taskGroupId: {}, taskGroupQueueId: {}", + log.info("Finished to release task group queue: taskGroupId: {}, taskGroupQueueId: {}", taskInstance.getTaskGroupId(), taskGroupQueue.getId()); return this.taskInstanceMapper.selectById(taskGroupQueue.getTaskId()); } @@ -2637,7 +2636,7 @@ public class ProcessServiceImpl implements ProcessService { processInstance.getId(), taskId); Host host = new Host(processInstance.getHost()); stateEventCallbackService.sendResult(host, taskEventChangeCommand.convert2Command(taskType)); - logger.info("Success send command to master: {}, command: {}", host, taskEventChangeCommand); + log.info("Success send command to master: {}, command: {}", host, taskEventChangeCommand); } @Override diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/CommonUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/CommonUtils.java index 3781716685..e00212823a 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/CommonUtils.java +++ 
b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/CommonUtils.java @@ -27,16 +27,14 @@ import org.apache.commons.lang3.StringUtils; import java.net.URL; import java.nio.charset.StandardCharsets; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * common utils */ +@Slf4j public class CommonUtils { - private static final Logger logger = LoggerFactory.getLogger(CommonUtils.class); - private static final Base64 BASE64 = new Base64(); protected CommonUtils() { @@ -53,7 +51,7 @@ public class CommonUtils { if (envDefaultPath != null) { envPath = envDefaultPath.getPath(); - logger.debug("env path :{}", envPath); + log.debug("env path :{}", envPath); } else { envPath = "/etc/profile"; } @@ -101,7 +99,7 @@ public class CommonUtils { DataSourceConstants.DATASOURCE_ENCRYPTION_SALT_DEFAULT); String passwordWithSalt = new String(BASE64.decode(password), StandardCharsets.UTF_8); if (!passwordWithSalt.startsWith(salt)) { - logger.warn("There is a password and salt mismatch: {} ", password); + log.warn("There is a password and salt mismatch: {} ", password); return password; } return new String(BASE64.decode(passwordWithSalt.substring(salt.length())), StandardCharsets.UTF_8); diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java index 403e212ee6..0f47466153 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/DagHelper.java @@ -42,16 +42,14 @@ import java.util.Map; import java.util.Optional; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * dag tools */ +@Slf4j public class DagHelper { - private static final Logger logger = 
LoggerFactory.getLogger(DagHelper.class); - /** * generate flow node relation list by task node list; * Edges that are not in the task Node List will not be added to the result @@ -92,7 +90,7 @@ public class DagHelper { List startNodeList = startNodeNameList; if (taskDependType != TaskDependType.TASK_POST && CollectionUtils.isEmpty(startNodeList)) { - logger.error("start node list is empty! cannot continue run the process "); + log.error("start node list is empty! cannot continue run the process "); return destFlowNodeList; } @@ -112,7 +110,7 @@ public class DagHelper { TaskNode startNode = findNodeByCode(taskNodeList, startNodeCode); List childNodeList = new ArrayList<>(); if (startNode == null) { - logger.error("start node name [{}] is not in task node list [{}] ", + log.error("start node name [{}] is not in task node list [{}] ", startNodeCode, taskNodeList); continue; @@ -316,7 +314,7 @@ public class DagHelper { for (String subsequent : startVertexes) { TaskNode taskNode = dag.getNode(subsequent); if (taskNode == null) { - logger.error("taskNode {} is null, please check dag", subsequent); + log.error("taskNode {} is null, please check dag", subsequent); continue; } if (isTaskNodeNeedSkip(taskNode, skipTaskNodeList)) { diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/LoggerUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/LoggerUtils.java index af96dc97fa..b8ac5647c2 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/LoggerUtils.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/LoggerUtils.java @@ -29,9 +29,8 @@ import java.io.InputStreamReader; import java.util.Date; import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.slf4j.MDC; /** @@ -39,10 +38,9 @@ import org.slf4j.MDC; */ @Deprecated 
@UtilityClass +@Slf4j public class LoggerUtils { - private static final Logger logger = LoggerFactory.getLogger(LoggerUtils.class); - public static String buildTaskId(Date firstSubmitTime, Long processDefineCode, int processDefineVersion, @@ -70,7 +68,7 @@ public class LoggerUtils { } return sb.toString(); } catch (IOException e) { - logger.error("read file error", e); + log.error("read file error", e); } return ""; } diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ProcessUtils.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ProcessUtils.java index 52d4fd6148..78f5cb44a6 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ProcessUtils.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/utils/ProcessUtils.java @@ -40,17 +40,16 @@ import java.util.regex.Pattern; import javax.annotation.Nullable; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * mainly used to get the start command line of a process. 
*/ +@Slf4j public class ProcessUtils { - private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); - /** * Initialization regularization, solve the problem of pre-compilation performance, * avoid the thread safety problem of multi-thread operation @@ -66,7 +65,7 @@ public class ProcessUtils { * get kerberos init command */ static String getKerberosInitCommand() { - logger.info("get kerberos init command"); + log.info("get kerberos init command"); StringBuilder kerberosCommandBuilder = new StringBuilder(); boolean hadoopKerberosState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false); @@ -78,7 +77,7 @@ public class ProcessUtils { PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH), PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME))) .append("\n\n"); - logger.info("kerberos init command: {}", kerberosCommandBuilder); + log.info("kerberos init command: {}", kerberosCommandBuilder); } return kerberosCommandBuilder.toString(); } @@ -114,10 +113,10 @@ public class ProcessUtils { String runCmd = String.format("%s %s", Constants.SH, commandFile); runCmd = OSUtils.getSudoCmd(tenantCode, runCmd); - logger.info("kill cmd:{}", runCmd); + log.info("kill cmd:{}", runCmd); OSUtils.exeCmd(runCmd); } catch (Exception e) { - logger.error(String.format("Kill yarn application app id [%s] failed: [%s]", appId, e.getMessage())); + log.error(String.format("Kill yarn application app id [%s] failed: [%s]", appId, e.getMessage())); } } @@ -184,16 +183,16 @@ public class ProcessUtils { taskExecutionContext.getTaskInstanceId())); } FileUtils.createWorkDirIfAbsent(taskExecutionContext.getExecutePath()); - org.apache.dolphinscheduler.plugin.task.api.utils.ProcessUtils.cancelApplication(appIds, logger, + org.apache.dolphinscheduler.plugin.task.api.utils.ProcessUtils.cancelApplication(appIds, log, taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); return appIds; } else { - 
logger.info("The current appId is empty, don't need to kill the yarn job, taskInstanceId: {}", + log.info("The current appId is empty, don't need to kill the yarn job, taskInstanceId: {}", taskExecutionContext.getTaskInstanceId()); } } catch (Exception e) { - logger.error("Kill yarn job failure, taskInstanceId: {}", taskExecutionContext.getTaskInstanceId(), e); + log.error("Kill yarn job failure, taskInstanceId: {}", taskExecutionContext.getTaskInstanceId(), e); } return Collections.emptyList(); } diff --git a/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/StandaloneServer.java b/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/StandaloneServer.java index 1a95ff53ce..e6f0bc5ee5 100644 --- a/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/StandaloneServer.java +++ b/dolphinscheduler-standalone-server/src/main/java/org/apache/dolphinscheduler/StandaloneServer.java @@ -22,9 +22,8 @@ import org.apache.curator.test.TestingServer; import java.io.IOException; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.context.event.ApplicationFailedEvent; @@ -33,10 +32,9 @@ import org.springframework.context.ApplicationListener; import org.springframework.context.event.ContextClosedEvent; @SpringBootApplication +@Slf4j public class StandaloneServer implements ApplicationListener { - private static final Logger logger = LoggerFactory.getLogger(StandaloneServer.class); - private static TestingServer zookeeperServer; public static void main(String[] args) throws Exception { @@ -50,9 +48,9 @@ public class StandaloneServer implements ApplicationListener { if (event instanceof ApplicationFailedEvent || event instanceof ContextClosedEvent) { try (TestingServer closedServer = 
zookeeperServer) { // close the zookeeper server - logger.info("Receive spring context close event: {}, will closed zookeeper server", event); + log.info("Receive spring context close event: {}, will closed zookeeper server", event); } catch (IOException e) { - logger.error("Close zookeeper server error", e); + log.error("Close zookeeper server error", e); } } } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperator.java index f38565af5d..80edb15455 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-hdfs/src/main/java/org/apache/dolphinscheduler/plugin/storage/hdfs/HdfsStorageOperator.java @@ -65,8 +65,7 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.base.Joiner; @@ -74,9 +73,9 @@ import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; +@Slf4j public class HdfsStorageOperator implements Closeable, StorageOperate { - private static final Logger logger = LoggerFactory.getLogger(HdfsStorageOperator.class); private static HdfsStorageProperties hdfsProperties; private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY"; @@ -122,7 +121,7 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { fs.mkdirs(path); } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } } @@ -146,13 
+145,13 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { configuration.set(Constants.HDFS_DEFAULT_FS, defaultFS); fsRelatedProps.forEach((key, value) -> configuration.set(key, value)); } else { - logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULT_FS); + log.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULT_FS); throw new NullPointerException( String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULT_FS)); } if (!defaultFS.startsWith("file")) { - logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULT_FS, + log.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULT_FS, defaultFS); } @@ -163,12 +162,12 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { return true; }); } else { - logger.warn("resource.hdfs.root.user is not set value!"); + log.warn("resource.hdfs.root.user is not set value!"); fs = FileSystem.get(configuration); } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } } @@ -209,7 +208,7 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { if (StringUtils.isBlank(appUrl)) { throw new BaseException("yarn application url generation failed"); } - logger.debug("yarn application url:{}, applicationId:{}", appUrl, applicationId); + log.debug("yarn application url:{}, applicationId:{}", appUrl, applicationId); return String.format(appUrl, hdfsProperties.getHadoopResourceManagerHttpAddressPort(), applicationId); } @@ -229,7 +228,7 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { public byte[] catFile(String hdfsFilePath) throws IOException { if (StringUtils.isBlank(hdfsFilePath)) { - logger.error("hdfs file path:{} is blank", hdfsFilePath); + log.error("hdfs file path:{} is blank", hdfsFilePath); return new byte[0]; } @@ -250,7 +249,7 @@ public class HdfsStorageOperator 
implements Closeable, StorageOperate { public List catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException { if (StringUtils.isBlank(hdfsFilePath)) { - logger.error("hdfs file path:{} is blank", hdfsFilePath); + log.error("hdfs file path:{} is blank", hdfsFilePath); return Collections.emptyList(); } @@ -402,7 +401,7 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { Files.delete(dstPath.toPath()); } } else { - logger.error("destination file must be a file"); + log.error("destination file must be a file"); } } @@ -741,7 +740,7 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { try { fs.close(); } catch (IOException e) { - logger.error("Close HadoopUtils instance failed", e); + log.error("Close HadoopUtils instance failed", e); throw new IOException("Close HadoopUtils instance failed", e); } } @@ -779,7 +778,7 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { } } catch (Exception e) { - logger.error("yarn ha application url generation failed, message:{}", e.getMessage()); + log.error("yarn ha application url generation failed, message:{}", e.getMessage()); } return null; } @@ -850,11 +849,11 @@ public class HdfsStorageOperator implements Closeable, StorageOperate { storageEntityList.addAll(tempList); } catch (FileNotFoundException e) { - logger.error("Resource path: {}", pathToExplore, e); + log.error("Resource path: {}", pathToExplore, e); // return the resources fetched before error occurs. return storageEntityList; } catch (IOException e) { - logger.error("Resource path: {}", pathToExplore, e); + log.error("Resource path: {}", pathToExplore, e); // return the resources fetched before error occurs. 
return storageEntityList; } diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperator.java index dcb7d32567..523b021358 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-oss/src/main/java/org/apache/dolphinscheduler/plugin/storage/oss/OssStorageOperator.java @@ -54,9 +54,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.Data; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.aliyun.oss.OSS; import com.aliyun.oss.OSSException; @@ -71,10 +69,9 @@ import com.aliyun.oss.model.ObjectMetadata; import com.aliyun.oss.model.PutObjectRequest; @Data +@Slf4j public class OssStorageOperator implements Closeable, StorageOperate { - private static final Logger logger = LoggerFactory.getLogger(OssStorageOperator.class); - private String accessKeyId; private String accessKeySecret; @@ -208,7 +205,7 @@ public class OssStorageOperator implements Closeable, StorageOperate { try { ossClient.deleteObjects(deleteObjectsRequest); } catch (Exception e) { - logger.error("delete objects error", e); + log.error("delete objects error", e); return false; } @@ -236,7 +233,7 @@ public class OssStorageOperator implements Closeable, StorageOperate { } catch (OSSException e) { throw new IOException(e); } catch (FileNotFoundException e) { - logger.error("cannot fin the destination file {}", dstFilePath); + log.error("cannot fin the destination file {}", dstFilePath); throw e; } } @@ -252,7 +249,7 @@ public class OssStorageOperator implements Closeable, StorageOperate { 
ossClient.deleteObject(bucketName, filePath); return true; } catch (OSSException e) { - logger.error("fail to delete the object, the resource path is {}", filePath, e); + log.error("fail to delete the object, the resource path is {}", filePath, e); return false; } } @@ -283,7 +280,7 @@ public class OssStorageOperator implements Closeable, StorageOperate { ossClient.putObject(bucketName, dstPath, new File(srcFile)); return true; } catch (OSSException e) { - logger.error("upload failed, the bucketName is {}, the filePath is {}", bucketName, dstPath, e); + log.error("upload failed, the bucketName is {}, the filePath is {}", bucketName, dstPath, e); return false; } } @@ -291,7 +288,7 @@ public class OssStorageOperator implements Closeable, StorageOperate { @Override public List vimFile(String tenantCode, String filePath, int skipLineNums, int limit) throws IOException { if (StringUtils.isBlank(filePath)) { - logger.error("file path:{} is empty", filePath); + log.error("file path:{} is empty", filePath); return Collections.emptyList(); } OSSObject ossObject = ossClient.getObject(bucketName, filePath); @@ -316,7 +313,7 @@ public class OssStorageOperator implements Closeable, StorageOperate { try { initialEntity = getFileStatus(path, defaultPath, tenantCode, type); } catch (Exception e) { - logger.error("error while listing files status recursively, path: {}", path, e); + log.error("error while listing files status recursively, path: {}", path, e); return storageEntityList; } foldersToFetch.add(initialEntity); @@ -332,7 +329,7 @@ public class OssStorageOperator implements Closeable, StorageOperate { } storageEntityList.addAll(tempList); } catch (Exception e) { - logger.error("error while listing files status recursively, path: {}", pathToExplore, e); + log.error("error while listing files status recursively, path: {}", pathToExplore, e); } } @@ -518,7 +515,7 @@ public class OssStorageOperator implements Closeable, StorageOperate { "bucketName: " + bucketName + " does 
not exist, you need to create them by yourself"); }); - logger.info("bucketName: {} has been found, the current regionName is {}", existsBucket.getName(), region); + log.info("bucketName: {} has been found, the current regionName is {}", existsBucket.getName(), region); } protected void deleteDir(String directoryName) { diff --git a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperator.java b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperator.java index b68ce663a3..e9600a0737 100644 --- a/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperator.java +++ b/dolphinscheduler-storage-plugin/dolphinscheduler-storage-s3/src/main/java/org/apache/dolphinscheduler/plugin/storage/s3/S3StorageOperator.java @@ -53,8 +53,7 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.amazonaws.AmazonServiceException; import com.amazonaws.auth.AWSStaticCredentialsProvider; @@ -78,10 +77,9 @@ import com.amazonaws.services.s3.transfer.TransferManager; import com.amazonaws.services.s3.transfer.TransferManagerBuilder; import com.google.common.base.Joiner; +@Slf4j public class S3StorageOperator implements Closeable, StorageOperate { - private static final Logger logger = LoggerFactory.getLogger(S3StorageOperator.class); - // todo: move to s3 private static final String ACCESS_KEY_ID = PropertyUtils.getString(TaskConstants.AWS_ACCESS_KEY_ID); @@ -205,7 +203,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { } catch (AmazonServiceException e) { throw new IOException(e.getMessage()); } catch (FileNotFoundException e) { - logger.error("the destination file {} not found", 
dstFilePath); + log.error("the destination file {} not found", dstFilePath); throw e; } } @@ -221,7 +219,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { s3Client.deleteObject(BUCKET_NAME, fullName); return true; } catch (AmazonServiceException e) { - logger.error("delete the object error,the resource path is {}", fullName); + log.error("delete the object error,the resource path is {}", fullName); return false; } } @@ -236,7 +234,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { try { s3Client.deleteObjects(deleteObjectsRequest); } catch (AmazonServiceException e) { - logger.error("delete objects error", e); + log.error("delete objects error", e); return false; } @@ -270,7 +268,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { s3Client.putObject(BUCKET_NAME, dstPath, new File(srcFile)); return true; } catch (AmazonServiceException e) { - logger.error("upload failed,the bucketName is {},the filePath is {}", BUCKET_NAME, dstPath); + log.error("upload failed,the bucketName is {},the filePath is {}", BUCKET_NAME, dstPath); return false; } } @@ -278,7 +276,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { @Override public List vimFile(String tenantCode, String filePath, int skipLineNums, int limit) throws IOException { if (StringUtils.isBlank(filePath)) { - logger.error("file path:{} is blank", filePath); + log.error("file path:{} is blank", filePath); return Collections.emptyList(); } S3Object s3Object = s3Client.getObject(BUCKET_NAME, filePath); @@ -366,7 +364,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { tm.downloadDirectory(BUCKET_NAME, tenantCode + FOLDER_SEPARATOR + keyPrefix, new File(srcPath)); download.waitForCompletion(); } catch (AmazonS3Exception | InterruptedException e) { - logger.error("download the directory failed with the bucketName is {} and the keyPrefix is {}", BUCKET_NAME, + log.error("download the directory failed with 
the bucketName is {} and the keyPrefix is {}", BUCKET_NAME, tenantCode + FOLDER_SEPARATOR + keyPrefix); Thread.currentThread().interrupt(); } finally { @@ -389,7 +387,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { "bucketName: " + bucketName + " is not exists, you need to create them by yourself"); }); - logger.info("bucketName: {} has been found, the current regionName is {}", existsBucket.getName(), + log.info("bucketName: {} has been found, the current regionName is {}", existsBucket.getName(), s3Client.getRegionName()); } @@ -417,7 +415,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { try { initialEntity = getFileStatus(path, defaultPath, tenantCode, type); } catch (Exception e) { - logger.error("error while listing files status recursively, path: {}", path, e); + log.error("error while listing files status recursively, path: {}", path, e); return storageEntityList; } foldersToFetch.add(initialEntity); @@ -433,7 +431,7 @@ public class S3StorageOperator implements Closeable, StorageOperate { } storageEntityList.addAll(tempList); } catch (Exception e) { - logger.error("error while listing files status recursively, path: {}", pathToExplore, e); + log.error("error while listing files status recursively, path: {}", pathToExplore, e); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractShell.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractShell.java index 98e33761b6..0f5fba1a19 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractShell.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractShell.java @@ -28,8 +28,7 @@ import java.util.TimerTask; import java.util.concurrent.ConcurrentHashMap; 
import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * A base class for running a Unix command. @@ -38,10 +37,9 @@ import org.slf4j.LoggerFactory; * df. It also offers facilities to gate commands by * time-intervals. */ +@Slf4j public abstract class AbstractShell { - private static final Logger logger = LoggerFactory.getLogger(AbstractShell.class); - /** * Time after which the executing script would be timedout */ @@ -173,7 +171,7 @@ public abstract class AbstractShell { line = errReader.readLine(); } } catch (IOException ioe) { - logger.warn("Error reading the error stream", ioe); + log.warn("Error reading the error stream", ioe); } } }; @@ -184,7 +182,7 @@ public abstract class AbstractShell { try { parseExecResult(inReader); } catch (IOException ioe) { - logger.warn("Error reading the in stream", ioe); + log.warn("Error reading the in stream", ioe); } super.run(); } @@ -193,7 +191,7 @@ public abstract class AbstractShell { errThread.start(); inThread.start(); } catch (IllegalStateException e) { - logger.error(" read error and input streams start error", e); + log.error(" read error and input streams start error", e); } try { // parse the output @@ -203,7 +201,7 @@ public abstract class AbstractShell { errThread.join(); inThread.join(); } catch (InterruptedException ie) { - logger.warn("Interrupted while reading the error and in stream", ie); + log.warn("Interrupted while reading the error and in stream", ie); } completed.compareAndSet(false, true); // the timeout thread handling @@ -221,7 +219,7 @@ public abstract class AbstractShell { try { inReader.close(); } catch (IOException ioe) { - logger.warn("Error while closing the input stream", ioe); + log.warn("Error while closing the input stream", ioe); } if (!completed.get()) { errThread.interrupt(); @@ -229,7 +227,7 @@ public abstract class AbstractShell { try { errReader.close(); } catch (IOException ioe) { - 
logger.warn("Error while closing the error stream", ioe); + log.warn("Error while closing the error stream", ioe); } ProcessContainer.removeProcess(process); process.destroy(); @@ -353,11 +351,11 @@ public abstract class AbstractShell { try { entry.getValue().destroy(); } catch (Exception e) { - logger.error("Destroy All Processes error", e); + log.error("Destroy All Processes error", e); } } - logger.info("close " + set.size() + " executing process tasks"); + log.info("close " + set.size() + " executing process tasks"); } } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTask.java index 89b80a4f83..b012a5fff3 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractTask.java @@ -40,7 +40,7 @@ public abstract class AbstractTask { public static final Marker FINALIZE_SESSION_MARKER = MarkerFactory.getMarker("FINALIZE_SESSION"); - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); public String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*"; @@ -199,13 +199,13 @@ public abstract class AbstractTask { public void logHandle(LinkedBlockingQueue logs) { // note that the "new line" is added here to facilitate log parsing if (logs.contains(FINALIZE_SESSION_MARKER.toString())) { - logger.info(FINALIZE_SESSION_MARKER, FINALIZE_SESSION_MARKER.toString()); + log.info(FINALIZE_SESSION_MARKER, FINALIZE_SESSION_MARKER.toString()); } else { StringJoiner joiner = new StringJoiner("\n\t"); while (!logs.isEmpty()) { joiner.add(logs.poll()); } - logger.info(" -> 
{}", joiner); + log.info(" -> {}", joiner); } } @@ -232,14 +232,14 @@ public abstract class AbstractTask { Property prop = paramsPropsMap.get(paramName); if (prop == null) { - logger.error( + log.error( "setSqlParamsMap: No Property with paramName: {} is found in paramsPropsMap of task instance" + " with id: {}. So couldn't put Property in sqlParamsMap.", paramName, taskInstanceId); } else { sqlParamsMap.put(index, prop); index++; - logger.info( + log.info( "setSqlParamsMap: Property with paramName: {} put in sqlParamsMap of content {} successfully.", paramName, content); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractYarnTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractYarnTask.java index 8ef0925825..68993dc9dc 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractYarnTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/AbstractYarnTask.java @@ -52,7 +52,7 @@ public abstract class AbstractYarnTask extends AbstractRemoteTask { super(taskRequest); this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskRequest, - logger); + log); } // todo split handle to submit and track @@ -67,11 +67,11 @@ public abstract class AbstractYarnTask extends AbstractRemoteTask { setProcessId(response.getProcessId()); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); - logger.info("The current yarn task has been interrupted", ex); + log.info("The current yarn task has been interrupted", ex); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); throw new TaskException("The current yarn task has been interrupted", ex); } catch (Exception e) { - logger.error("yarn process failure", e); + log.error("yarn process failure", e); 
exitStatusCode = -1; throw new TaskException("Execute task failed", e); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManager.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManager.java index 72ff327cf7..d16121f150 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManager.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/TaskPluginManager.java @@ -33,15 +33,14 @@ import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Component; @Component +@Slf4j public class TaskPluginManager { - private static final Logger logger = LoggerFactory.getLogger(TaskPluginManager.class); - private final Map taskChannelFactoryMap = new HashMap<>(); private final Map taskChannelMap = new HashMap<>(); @@ -52,7 +51,7 @@ public class TaskPluginManager { */ public void loadPlugin() { if (!loadedFlag.compareAndSet(false, true)) { - logger.warn("The task plugin has already been loaded"); + log.warn("The task plugin has already been loaded"); return; } PrioritySPIFactory prioritySPIFactory = new PrioritySPIFactory<>(TaskChannelFactory.class); @@ -60,12 +59,12 @@ public class TaskPluginManager { String factoryName = entry.getKey(); TaskChannelFactory factory = entry.getValue(); - logger.info("Registering task plugin: {} - {}", factoryName, factory.getClass()); + log.info("Registering task plugin: {} - {}", factoryName, factory.getClass()); taskChannelFactoryMap.put(factoryName, factory); taskChannelMap.put(factoryName, factory.create()); - logger.info("Registered task plugin: {} - {}", 
factoryName, factory.getClass()); + log.info("Registered task plugin: {} - {}", factoryName, factory.getClass()); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTask.java index 9cab4ed419..c4df23cadf 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTask.java @@ -37,7 +37,7 @@ public abstract class AbstractK8sTask extends AbstractRemoteTask { */ protected AbstractK8sTask(TaskExecutionContext taskRequest) { super(taskRequest); - this.abstractK8sTaskExecutor = new K8sTaskExecutor(logger, taskRequest); + this.abstractK8sTaskExecutor = new K8sTaskExecutor(log, taskRequest); } // todo split handle to submit and track diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTaskExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTaskExecutor.java index d38064a07d..c7bc47db0a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTaskExecutor.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/AbstractK8sTaskExecutor.java @@ -28,13 +28,13 @@ import org.slf4j.Logger; public abstract class AbstractK8sTaskExecutor { - protected Logger logger; + protected Logger log; protected TaskExecutionContext taskRequest; protected K8sUtils k8sUtils; protected StringBuilder logStringBuffer; - 
protected AbstractK8sTaskExecutor(Logger logger, TaskExecutionContext taskRequest) { - this.logger = logger; + protected AbstractK8sTaskExecutor(Logger log, TaskExecutionContext taskRequest) { + this.log = log; this.taskRequest = taskRequest; this.k8sUtils = new K8sUtils(); this.logStringBuffer = new StringBuilder(); @@ -52,9 +52,9 @@ public abstract class AbstractK8sTaskExecutor { public void flushLog(TaskResponse taskResponse) { if (logStringBuffer.length() != 0 && taskResponse.getExitStatusCode() == EXIT_CODE_FAILURE) { - logger.error(logStringBuffer.toString()); + log.error(logStringBuffer.toString()); } else if (logStringBuffer.length() != 0) { - logger.info(logStringBuffer.toString()); + log.info(logStringBuffer.toString()); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/impl/K8sTaskExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/impl/K8sTaskExecutor.java index 27a31818f4..5b82f83f94 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/impl/K8sTaskExecutor.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/k8s/impl/K8sTaskExecutor.java @@ -155,10 +155,10 @@ public class K8sTaskExecutor extends AbstractK8sTaskExecutor { @Override public void eventReceived(Action action, Job job) { - logger.info("event received : job:{} action:{}", job.getMetadata().getName(), action); + log.info("event received : job:{} action:{}", job.getMetadata().getName(), action); if (action != Action.ADDED) { int jobStatus = getK8sJobStatus(job); - logger.info("job {} status {}", job.getMetadata().getName(), jobStatus); + log.info("job {} status {}", job.getMetadata().getName(), jobStatus); if (jobStatus == TaskConstants.RUNNING_CODE) { return; } @@ -188,11 +188,11 @@ 
public class K8sTaskExecutor extends AbstractK8sTaskExecutor { } flushLog(taskResponse); } catch (InterruptedException e) { - logger.error("job failed in k8s: {}", e.getMessage(), e); + log.error("job failed in k8s: {}", e.getMessage(), e); Thread.currentThread().interrupt(); taskResponse.setExitStatusCode(EXIT_CODE_FAILURE); } catch (Exception e) { - logger.error("job failed in k8s: {}", e.getMessage(), e); + log.error("job failed in k8s: {}", e.getMessage(), e); taskResponse.setExitStatusCode(EXIT_CODE_FAILURE); } finally { if (watch != null) { @@ -243,14 +243,14 @@ public class K8sTaskExecutor extends AbstractK8sTaskExecutor { K8sTaskMainParameters k8STaskMainParameters = JSONUtils.parseObject(k8sParameterStr, K8sTaskMainParameters.class); try { - logger.info("[K8sJobExecutor-{}-{}] start to submit job", taskName, taskInstanceId); + log.info("[K8sJobExecutor-{}-{}] start to submit job", taskName, taskInstanceId); job = buildK8sJob(k8STaskMainParameters); stopJobOnK8s(k8sParameterStr); String namespaceName = k8STaskMainParameters.getNamespaceName(); k8sUtils.createJob(namespaceName, job); - logger.info("[K8sJobExecutor-{}-{}] submitted job successfully", taskName, taskInstanceId); + log.info("[K8sJobExecutor-{}-{}] submitted job successfully", taskName, taskInstanceId); } catch (Exception e) { - logger.error("[K8sJobExecutor-{}-{}] fail to submit job", taskName, taskInstanceId); + log.error("[K8sJobExecutor-{}-{}] fail to submit job", taskName, taskInstanceId); throw new TaskException("K8sJobExecutor fail to submit job", e); } } @@ -266,7 +266,7 @@ public class K8sTaskExecutor extends AbstractK8sTaskExecutor { k8sUtils.deleteJob(jobName, namespaceName); } } catch (Exception e) { - logger.error("[K8sJobExecutor-{}] fail to stop job", jobName); + log.error("[K8sJobExecutor-{}] fail to stop job", jobName); throw new TaskException("K8sJobExecutor fail to stop job", e); } } diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskLogDiscriminator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskLogDiscriminator.java index 7a8db804e4..503d9baf0f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskLogDiscriminator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskLogDiscriminator.java @@ -19,19 +19,16 @@ package org.apache.dolphinscheduler.plugin.task.api.log; import org.apache.dolphinscheduler.common.constants.Constants; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.sift.AbstractDiscriminator; /** * Task Log Discriminator */ +@Slf4j public class TaskLogDiscriminator extends AbstractDiscriminator { - private static Logger logger = LoggerFactory.getLogger(TaskLogDiscriminator.class); - /** * key */ @@ -43,7 +40,7 @@ public class TaskLogDiscriminator extends AbstractDiscriminator { private String logBase; /** - * logger name should be like: + * log name should be like: * Task Logger name should be like: Task-{processDefinitionId}-{processInstanceId}-{taskInstanceId} */ @Override @@ -61,7 +58,7 @@ public class TaskLogDiscriminator extends AbstractDiscriminator { key = part1.substring(prefix.length()).replaceFirst("-", "/"); } } - logger.debug("task log discriminator end, key is:{}, thread name:{}, loggerName:{}", key, event.getThreadName(), + log.debug("task log discriminator end, key is:{}, thread name:{}, loggerName:{}", key, event.getThreadName(), event.getLoggerName()); return key; } diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskLogFilter.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskLogFilter.java index 652bcfe5c5..f6724ea19e 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskLogFilter.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/log/TaskLogFilter.java @@ -19,9 +19,7 @@ package org.apache.dolphinscheduler.plugin.task.api.log; import org.apache.dolphinscheduler.plugin.task.api.TaskConstants; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.filter.Filter; @@ -30,10 +28,9 @@ import ch.qos.logback.core.spi.FilterReply; /** * task log filter */ +@Slf4j public class TaskLogFilter extends Filter { - private static Logger logger = LoggerFactory.getLogger(TaskLogFilter.class); - /** * level */ @@ -57,7 +54,7 @@ public class TaskLogFilter extends Filter { || event.getLevel().isGreaterOrEqual(level)) { filterReply = FilterReply.ACCEPT; } - logger.debug("task log filter, thread name:{}, loggerName:{}, filterReply:{}, level:{}", event.getThreadName(), + log.debug("task log filter, thread name:{}, loggerName:{}, filterReply:{}, level:{}", event.getThreadName(), event.getLoggerName(), filterReply.name(), level); return filterReply; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/BaseLoopTaskExecutor.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/BaseLoopTaskExecutor.java index 36fcef97c3..7b75def685 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/BaseLoopTaskExecutor.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/loop/BaseLoopTaskExecutor.java @@ -73,24 +73,24 @@ public abstract class BaseLoopTaskExecutor extends AbstractRemoteTask { } if (loopTaskInstanceStatus != null && loopTaskInstanceStatus.isSuccess()) { setExitStatusCode(TaskConstants.EXIT_CODE_SUCCESS); - logger.info("The task instance: {} execute successfully.", appIds); + log.info("The task instance: {} execute successfully.", appIds); } else { setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); - logger.info("The task instance: {} is execute failure.", appIds); + log.info("The task instance: {} is execute failure.", appIds); } } catch (InterruptedException e) { setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); - logger.error("The current loop thread has been interrupted", e); + log.error("The current loop thread has been interrupted", e); Thread.currentThread().interrupt(); throw new TaskException("The current loop thread has been interrupted"); } catch (TaskException ex) { - // print the error message with task logger. - logger.error("Loop task execute error", ex); + // print the error message with task log. 
+ log.error("Loop task execute error", ex); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); throw ex; } catch (Exception ex) { setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); - logger.error("Loop task execute error", ex); + log.error("Loop task execute error", ex); throw new TaskException("Loop task execute error", ex); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/DataQualityParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/DataQualityParameters.java index c6475c9631..273cf57193 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/DataQualityParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parameters/dataquality/DataQualityParameters.java @@ -26,16 +26,14 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * DataQualityParameters */ +@Slf4j public class DataQualityParameters extends AbstractParameters { - private static final Logger logger = LoggerFactory.getLogger(DataQualityParameters.class); - /** * rule id */ @@ -74,12 +72,12 @@ public class DataQualityParameters extends AbstractParameters { public boolean checkParameters() { if (ruleId == 0) { - logger.error("rule id is null"); + log.error("rule id is null"); return false; } if (MapUtils.isEmpty(ruleInputParameter)) { - logger.error("rule input parameter is empty"); + log.error("rule input parameter is empty"); return false; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/ParameterUtils.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/ParameterUtils.java index 85bed24a89..aca4ed267a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/ParameterUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/ParameterUtils.java @@ -37,16 +37,11 @@ import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * parameter parse utils */ public class ParameterUtils { - private static final Logger logger = LoggerFactory.getLogger(ParameterUtils.class); - private static final Pattern DATE_PARSE_PATTERN = Pattern.compile("\\$\\[([^\\$\\]]+)]"); private static final Pattern DATE_START_PATTERN = Pattern.compile("^[0-9]"); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/PlaceholderUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/PlaceholderUtils.java index 3820646364..c19b7ea456 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/PlaceholderUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/PlaceholderUtils.java @@ -19,16 +19,14 @@ package org.apache.dolphinscheduler.plugin.task.api.parser; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * placeholder utils */ +@Slf4j public class PlaceholderUtils { - private static final Logger logger = LoggerFactory.getLogger(PlaceholderUtils.class); - /** * Prefix of the position to be replaced */ @@ -96,7 +94,7 
@@ public class PlaceholderUtils { try { return paramsMap.get(placeholderName); } catch (Exception ex) { - logger.error("resolve placeholder '{}' in [ {} ]", placeholderName, value, ex); + log.error("resolve placeholder '{}' in [ {} ]", placeholderName, value, ex); return null; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/PropertyPlaceholderHelper.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/PropertyPlaceholderHelper.java index 98b6e01dc8..e10657d084 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/PropertyPlaceholderHelper.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/PropertyPlaceholderHelper.java @@ -23,8 +23,7 @@ import java.util.Map; import java.util.Properties; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * Utility class for working with Strings that have placeholder values in them. A placeholder takes the form @@ -36,10 +35,9 @@ import org.slf4j.LoggerFactory; * @author Rob Harrop * @since 3.0 */ +@Slf4j public class PropertyPlaceholderHelper { - private static final Logger logger = LoggerFactory.getLogger(PropertyPlaceholderHelper.class); - private static final Map wellKnownSimplePrefixes = new HashMap(4); static { @@ -160,8 +158,8 @@ public class PropertyPlaceholderHelper { // previously resolved placeholder value. 
propVal = parseStringValue(propVal, placeholderResolver, visitedPlaceholders); result.replace(startIndex, endIndex + this.placeholderSuffix.length(), propVal); - if (logger.isTraceEnabled()) { - logger.trace("Resolved placeholder '" + placeholder + "'"); + if (log.isTraceEnabled()) { + log.trace("Resolved placeholder '" + placeholder + "'"); } startIndex = result.indexOf(this.placeholderPrefix, startIndex + propVal.length()); } else if (this.ignoreUnresolvablePlaceholders) { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/TimePlaceholderUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/TimePlaceholderUtils.java index 74b4441b84..c4b0fc84ef 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/TimePlaceholderUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/parser/TimePlaceholderUtils.java @@ -63,16 +63,14 @@ import java.util.List; import java.util.Map; import java.util.Stack; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * time place holder utils */ +@Slf4j public class TimePlaceholderUtils { - private static final Logger logger = LoggerFactory.getLogger(TimePlaceholderUtils.class); - /** * Prefix of the position to be replaced */ @@ -325,7 +323,7 @@ public class TimePlaceholderUtils { try { return calculateTime(placeholderName, date); } catch (Exception ex) { - logger.error("resolve placeholder '{}' in [ {} ]", placeholderName, value, ex); + log.error("resolve placeholder '{}' in [ {} ]", placeholderName, value, ex); return null; } } @@ -376,7 +374,7 @@ public class TimePlaceholderUtils { value = DateUtils.format(entry.getKey(), entry.getValue()); } } catch (Exception e) { - 
logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw e; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/K8sUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/K8sUtils.java index c50596bb02..bf398630ff 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/K8sUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/K8sUtils.java @@ -24,9 +24,7 @@ import org.apache.dolphinscheduler.plugin.task.api.TaskException; import java.util.List; import java.util.Optional; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +import lombok.extern.slf4j.Slf4j; import io.fabric8.kubernetes.api.model.Pod; import io.fabric8.kubernetes.api.model.batch.v1.Job; import io.fabric8.kubernetes.api.model.batch.v1.JobList; @@ -36,9 +34,9 @@ import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.Watch; import io.fabric8.kubernetes.client.Watcher; +@Slf4j public class K8sUtils { - private static final Logger log = LoggerFactory.getLogger(K8sUtils.class); private KubernetesClient client; public void createJob(String namespace, Job job) { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ProcessUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ProcessUtils.java index 21015c34dc..abaa688c9a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ProcessUtils.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/src/main/java/org/apache/dolphinscheduler/plugin/task/api/utils/ProcessUtils.java @@ -37,16 +37,15 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import com.fasterxml.jackson.databind.node.ObjectNode; +@Slf4j public final class ProcessUtils { - private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class); - private ProcessUtils() { throw new IllegalStateException("Utility class"); } @@ -74,22 +73,22 @@ public final class ProcessUtils { */ public static boolean kill(@NonNull TaskExecutionContext request) { try { - logger.info("Begin kill task instance, processId: {}", request.getProcessId()); + log.info("Begin kill task instance, processId: {}", request.getProcessId()); int processId = request.getProcessId(); if (processId == 0) { - logger.error("Task instance kill failed, processId is not exist"); + log.error("Task instance kill failed, processId is not exist"); return false; } String cmd = String.format("kill -9 %s", getPidsStr(processId)); cmd = OSUtils.getSudoCmd(request.getTenantCode(), cmd); - logger.info("process id:{}, cmd:{}", processId, cmd); + log.info("process id:{}, cmd:{}", processId, cmd); OSUtils.exeCmd(cmd); - logger.info("Success kill task instance, processId: {}", request.getProcessId()); + log.info("Success kill task instance, processId: {}", request.getProcessId()); return true; } catch (Exception e) { - logger.error("Kill task instance error, processId: {}", request.getProcessId(), e); + log.error("Kill task instance error, processId: {}", request.getProcessId(), e); return false; } } @@ -147,7 +146,7 @@ public final class ProcessUtils { execYarnKillCommand(logger, tenantCode, appId, commandFile, cmd); } } catch (Exception e) { - logger.error("Get yarn application app id [{}}] status failed", appId, e); + log.error("Get yarn 
application app id [{}}] status failed", appId, e); } } } @@ -165,7 +164,7 @@ public final class ProcessUtils { String result; String applicationUrl = getApplicationUrl(applicationId); - logger.debug("generate yarn application url, applicationUrl={}", applicationUrl); + log.debug("generate yarn application url, applicationUrl={}", applicationUrl); String responseContent = Boolean.TRUE .equals(PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) @@ -181,7 +180,7 @@ public final class ProcessUtils { } else { // may be in job history String jobHistoryUrl = getJobHistoryUrl(applicationId); - logger.debug("generate yarn job history application url, jobHistoryUrl={}", jobHistoryUrl); + log.debug("generate yarn job history application url, jobHistoryUrl={}", jobHistoryUrl); responseContent = Boolean.TRUE .equals(PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) ? KerberosHttpClient.get(jobHistoryUrl) @@ -217,7 +216,7 @@ public final class ProcessUtils { if (StringUtils.isBlank(appUrl)) { throw new BaseException("yarn application url generation failed"); } - logger.debug("yarn application url:{}, applicationId:{}", appUrl, applicationId); + log.debug("yarn application url:{}, applicationId:{}", appUrl, applicationId); return String.format(appUrl, HADOOP_RESOURCE_MANAGER_HTTP_ADDRESS_PORT_VALUE, applicationId); } @@ -256,10 +255,10 @@ public final class ProcessUtils { String runCmd = String.format("%s %s", Constants.SH, commandFile); runCmd = org.apache.dolphinscheduler.common.utils.OSUtils.getSudoCmd(tenantCode, runCmd); - logger.info("kill cmd:{}", runCmd); + log.info("kill cmd:{}", runCmd); org.apache.dolphinscheduler.common.utils.OSUtils.exeCmd(runCmd); } catch (Exception e) { - logger.error(String.format("Kill yarn application app id [%s] failed: [%s]", appId, e.getMessage())); + log.error(String.format("Kill yarn application app id [%s] failed: [%s]", appId, e.getMessage())); } } @@ -321,7 
+320,7 @@ public final class ProcessUtils { * get kerberos init command */ private static String getKerberosInitCommand() { - logger.info("get kerberos init command"); + log.info("get kerberos init command"); StringBuilder kerberosCommandBuilder = new StringBuilder(); boolean hadoopKerberosState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false); @@ -333,7 +332,7 @@ public final class ProcessUtils { PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH), PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME))) .append("\n\n"); - logger.info("kerberos init command: {}", kerberosCommandBuilder); + log.info("kerberos init command: {}", kerberosCommandBuilder); } return kerberosCommandBuilder.toString(); } @@ -370,7 +369,7 @@ public final class ProcessUtils { } } catch (Exception e) { - logger.error("yarn ha application url generation failed, message:{}", e.getMessage()); + log.error("yarn ha application url generation failed, message:{}", e.getMessage()); } return null; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTask.java index 2e4e8792bc..6a832488c4 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-chunjun/src/main/java/org/apache/dolphinscheduler/plugin/task/chunjun/ChunJunTask.java @@ -86,7 +86,7 @@ public class ChunJunTask extends AbstractTask { this.taskExecutionContext = taskExecutionContext; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, - taskExecutionContext, logger); + taskExecutionContext, log); } /** @@ -95,7 +95,7 @@ public class ChunJunTask extends AbstractTask { @Override public void init() 
{ chunJunParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), ChunJunParameters.class); - logger.info("Initialize chunjun task params {}", + log.info("Initialize chunjun task params {}", JSONUtils.toPrettyJsonString(taskExecutionContext.getTaskParams())); if (!chunJunParameters.checkParameters()) { @@ -124,11 +124,11 @@ public class ChunJunTask extends AbstractTask { setProcessId(commandExecuteResult.getProcessId()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current ChunJun Task has been interrupted", e); + log.error("The current ChunJun Task has been interrupted", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("The current ChunJun Task has been interrupted", e); } catch (Exception e) { - logger.error("chunjun task failed.", e); + log.error("chunjun task failed.", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("Execute chunjun task failed", e); } @@ -161,7 +161,7 @@ public class ChunJunTask extends AbstractTask { // replace placeholder json = ParameterUtils.convertParameterPlaceholders(json, ParamUtils.convert(paramsMap)); - logger.debug("chunjun job json : {}", json); + log.debug("chunjun job json : {}", json); // create chunjun json file FileUtils.writeStringToFile(new File(fileName), json, StandardCharsets.UTF_8); @@ -219,7 +219,7 @@ public class ChunJunTask extends AbstractTask { // replace placeholder String chunjunCommand = ParameterUtils.convertParameterPlaceholders(command, ParamUtils.convert(paramsMap)); - logger.info("raw script : {}", chunjunCommand); + log.info("raw script : {}", chunjunCommand); // create shell command file Set perms = PosixFilePermissions.fromString(RWXR_XR_X); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryHook.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryHook.java index 763024169d..8467af86f9 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryHook.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryHook.java @@ -46,7 +46,7 @@ public class DatafactoryHook { public static DatafactoryStatus[] taskFinishFlags = {DatafactoryStatus.Failed, DatafactoryStatus.Succeeded, DatafactoryStatus.Cancelled}; - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); private final int QUERY_INTERVAL = PropertyUtils.getInt(TaskConstants.QUERY_INTERVAL, 10000); private DataFactoryManager client; @@ -55,7 +55,7 @@ public class DatafactoryHook { private String runId; public DatafactoryHook() { - logger.info("initDatafactoryClient ......"); + log.info("initDatafactoryClient ......"); client = createClient(); } @@ -76,12 +76,12 @@ public class DatafactoryHook { } public Boolean startDatafactoryTask(DatafactoryParameters parameters) { - logger.info("initDatafactoryTask ......"); + log.info("initDatafactoryTask ......"); PipelineResource pipelineResource = getPipelineResource(parameters); if (pipelineResource == null) { return false; } - logger.info("startDatafactoryTask ......"); + log.info("startDatafactoryTask ......"); CreateRunResponse run = pipelineResource.createRun(); if (StringUtils.isEmpty(run.runId())) { return false; @@ -92,26 +92,26 @@ public class DatafactoryHook { } public Boolean cancelDatafactoryTask(DatafactoryParameters parameters) { - logger.info("cancelTask ......"); + log.info("cancelTask ......"); PipelineRuns pipelineRuns = client.pipelineRuns(); try { 
pipelineRuns.cancel(parameters.getResourceGroupName(), parameters.getFactoryName(), runId); } catch (RuntimeException e) { - logger.error("failed to cancel datafactory task: " + e.getMessage()); + log.error("failed to cancel datafactory task: " + e.getMessage()); return false; } return true; } public DatafactoryStatus queryDatafactoryTaskStatus(DatafactoryParameters parameters) { - logger.info("queryDatafactoryTaskStatus ......"); + log.info("queryDatafactoryTaskStatus ......"); PipelineRuns pipelineRuns = client.pipelineRuns(); PipelineRun pipelineRun = pipelineRuns.get(parameters.getResourceGroupName(), parameters.getFactoryName(), runId); if (pipelineRun != null) { - logger.info("queryDatafactoryTaskStatus ......{}", pipelineRun.status()); + log.info("queryDatafactoryTaskStatus ......{}", pipelineRun.status()); return DatafactoryStatus.valueOf(pipelineRun.status()); } return null; @@ -140,7 +140,7 @@ public class DatafactoryHook { } return false; } - logger.debug("wait {}ms to recheck finish status....", QUERY_INTERVAL); + log.debug("wait {}ms to recheck finish status....", QUERY_INTERVAL); Thread.sleep(QUERY_INTERVAL); } return false; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTask.java index b8a5db4371..e36f0659d3 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datafactory/src/main/java/org/apache/dolphinscheduler/plugin/task/datafactory/DatafactoryTask.java @@ -52,7 +52,7 @@ public class DatafactoryTask extends AbstractRemoteTask { @Override public void init() { parameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), 
DatafactoryParameters.class); - logger.info("Initialize Datafactory task params {}", JSONUtils.toPrettyJsonString(parameters)); + log.info("Initialize Datafactory task params {}", JSONUtils.toPrettyJsonString(parameters)); hook = new DatafactoryHook(); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java index c1360eca94..54b88c5977 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java @@ -85,10 +85,10 @@ public class DataQualityTask extends AbstractYarnTask { dataQualityParameters = JSONUtils.parseObject(dqTaskExecutionContext.getTaskParams(), DataQualityParameters.class); - logger.info("Initialize data quality task params {}", JSONUtils.toPrettyJsonString(dataQualityParameters)); + log.info("Initialize data quality task params {}", JSONUtils.toPrettyJsonString(dataQualityParameters)); if (null == dataQualityParameters) { - logger.error("data quality params is null"); + log.error("data quality params is null"); return; } @@ -175,7 +175,7 @@ public class DataQualityTask extends AbstractYarnTask { Map paramsMap = dqTaskExecutionContext.getPrepareParamsMap(); String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", args), ParamUtils.convert(paramsMap)); - logger.info("data quality task command: {}", command); + log.info("data quality task command: {}", command); return command; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5Utils.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5Utils.java index d03a3ecd97..7a60a08ee3 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5Utils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/Md5Utils.java @@ -21,16 +21,14 @@ import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.util.Base64; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * Md5Utils */ +@Slf4j public class Md5Utils { - private static final Logger logger = LoggerFactory.getLogger(Md5Utils.class); - private Md5Utils() { throw new IllegalStateException("Utility class"); } @@ -42,7 +40,7 @@ public class Md5Utils { Base64.Encoder encoder = Base64.getEncoder(); md5 = encoder.encodeToString(md.digest(src.getBytes(StandardCharsets.UTF_8))); } catch (Exception e) { - logger.error("get md5 error: {}", e.getMessage()); + log.error("get md5 error: {}", e.getMessage()); } if (isUpper) { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncHook.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncHook.java index 5e91082a68..f8586179d2 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncHook.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncHook.java @@ -62,7 +62,7 @@ public class DatasyncHook { public static TaskExecutionStatus[] doneStatus = {TaskExecutionStatus.ERROR, TaskExecutionStatus.SUCCESS, 
TaskExecutionStatus.UNKNOWN_TO_SDK_VERSION}; public static TaskStatus[] taskFinishFlags = {TaskStatus.UNAVAILABLE, TaskStatus.UNKNOWN_TO_SDK_VERSION}; - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); private DataSyncClient client; private String taskArn; @@ -86,7 +86,7 @@ public class DatasyncHook { } public Boolean createDatasyncTask(DatasyncParameters parameters) { - logger.info("createDatasyncTask ......"); + log.info("createDatasyncTask ......"); CreateTaskRequest.Builder builder = CreateTaskRequest.builder() .name(parameters.getName()) .sourceLocationArn(parameters.getSourceLocationArn()) @@ -102,12 +102,12 @@ public class DatasyncHook { if (task.sdkHttpResponse().isSuccessful()) { taskArn = task.taskArn(); } - logger.info("finished createDatasyncTask ......"); + log.info("finished createDatasyncTask ......"); return doubleCheckTaskStatus(TaskStatus.AVAILABLE, taskFinishFlags); } public Boolean startDatasyncTask() { - logger.info("startDatasyncTask ......"); + log.info("startDatasyncTask ......"); StartTaskExecutionRequest start = StartTaskExecutionRequest.builder().taskArn(taskArn).build(); StartTaskExecutionResponse response = client.startTaskExecution(start); if (response.sdkHttpResponse().isSuccessful()) { @@ -117,7 +117,7 @@ public class DatasyncHook { } public Boolean cancelDatasyncTask() { - logger.info("cancelTask ......"); + log.info("cancelTask ......"); CancelTaskExecutionRequest cancel = CancelTaskExecutionRequest.builder().taskExecutionArn(taskExecArn).build(); CancelTaskExecutionResponse response = client.cancelTaskExecution(cancel); if (response.sdkHttpResponse().isSuccessful()) { @@ -127,26 +127,26 @@ public class DatasyncHook { } public TaskStatus queryDatasyncTaskStatus() { - logger.info("queryDatasyncTaskStatus ......"); + log.info("queryDatasyncTaskStatus ......"); DescribeTaskRequest request = 
DescribeTaskRequest.builder().taskArn(taskArn).build(); DescribeTaskResponse describe = client.describeTask(request); if (describe.sdkHttpResponse().isSuccessful()) { - logger.info("queryDatasyncTaskStatus ......{}", describe.statusAsString()); + log.info("queryDatasyncTaskStatus ......{}", describe.statusAsString()); return describe.status(); } return null; } public TaskExecutionStatus queryDatasyncTaskExecStatus() { - logger.info("queryDatasyncTaskExecStatus ......"); + log.info("queryDatasyncTaskExecStatus ......"); DescribeTaskExecutionRequest request = DescribeTaskExecutionRequest.builder().taskExecutionArn(taskExecArn).build(); DescribeTaskExecutionResponse describe = client.describeTaskExecution(request); if (describe.sdkHttpResponse().isSuccessful()) { - logger.info("queryDatasyncTaskExecStatus ......{}", describe.statusAsString()); + log.info("queryDatasyncTaskExecStatus ......{}", describe.statusAsString()); return describe.status(); } return null; @@ -165,13 +165,13 @@ public class DatasyncHook { } if (exceptStatus.equals(status)) { - logger.info("double check success"); + log.info("double check success"); return true; } else if (stopStatusSet.contains(status)) { break; } } - logger.warn("double check error"); + log.warn("double check error"); return false; } @@ -188,13 +188,13 @@ public class DatasyncHook { } if (exceptStatus.equals(status)) { - logger.info("double check success"); + log.info("double check success"); return true; } else if (stopStatusSet.contains(status)) { break; } } - logger.warn("double check error"); + log.warn("double check error"); return false; } @@ -210,15 +210,15 @@ public class DatasyncHook { } if (expectStatus.equals(status)) { - logger.info("double check finish status success"); + log.info("double check finish status success"); return true; } else if (stopStatusSet.contains(status)) { break; } - logger.debug("wait 10s to recheck finish status...."); + log.debug("wait 10s to recheck finish status...."); Thread.sleep(10000); } 
- logger.warn("double check error"); + log.warn("double check error"); return false; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTask.java index 16a6827879..c9e3afd29b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-datasync/src/main/java/org/apache/dolphinscheduler/plugin/task/datasync/DatasyncTask.java @@ -70,7 +70,7 @@ public class DatasyncTask extends AbstractRemoteTask { public void init() { parameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), DatasyncParameters.class); - logger.info("Initialize Datasync task params {}", JSONUtils.toPrettyJsonString(parameters)); + log.info("Initialize Datasync task params {}", JSONUtils.toPrettyJsonString(parameters)); initParams(); hook = new DatasyncHook(); @@ -86,7 +86,7 @@ public class DatasyncTask extends AbstractRemoteTask { } catch (JsonProcessingException e) { throw new TaskException("Convert json to task params failed", e); } - logger.info("Success convert json to task params {}", JSONUtils.toPrettyJsonString(parameters)); + log.info("Success convert json to task params {}", JSONUtils.toPrettyJsonString(parameters)); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java index 1caf9cfa4f..722cace03c 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-datax/src/main/java/org/apache/dolphinscheduler/plugin/task/datax/DataxTask.java @@ -126,7 +126,7 @@ public class DataxTask extends AbstractTask { this.taskExecutionContext = taskExecutionContext; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, - taskExecutionContext, logger); + taskExecutionContext, log); } /** @@ -135,7 +135,7 @@ public class DataxTask extends AbstractTask { @Override public void init() { dataXParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), DataxParameters.class); - logger.info("Initialize datax task params {}", JSONUtils.toPrettyJsonString(dataXParameters)); + log.info("Initialize datax task params {}", JSONUtils.toPrettyJsonString(dataXParameters)); if (dataXParameters == null || !dataXParameters.checkParameters()) { throw new RuntimeException("datax task params is not valid"); @@ -165,11 +165,11 @@ public class DataxTask extends AbstractTask { setProcessId(commandExecuteResult.getProcessId()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current DataX task has been interrupted", e); + log.error("The current DataX task has been interrupted", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("The current DataX task has been interrupted", e); } catch (Exception e) { - logger.error("datax task error", e); + log.error("datax task error", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("Execute DataX task failed", e); } @@ -224,7 +224,7 @@ public class DataxTask extends AbstractTask { // replace placeholder json = ParameterUtils.convertParameterPlaceholders(json, ParamUtils.convert(paramsMap)); - logger.debug("datax job json : {}", json); + log.debug("datax job json : {}", json); // create datax json file FileUtils.writeStringToFile(new File(fileName), json, StandardCharsets.UTF_8); @@ -406,7 +406,7 @@ public class DataxTask extends AbstractTask { // replace placeholder 
String dataxCommand = ParameterUtils.convertParameterPlaceholders(sbr, ParamUtils.convert(paramsMap)); - logger.debug("raw script : {}", dataxCommand); + log.debug("raw script : {}", dataxCommand); // create shell command file Set perms = PosixFilePermissions.fromString(RWXR_XR_X); @@ -473,7 +473,7 @@ public class DataxTask extends AbstractTask { String[] columnNames = tryGrammaticalAnalysisSqlColumnNames(sourceType, sql); if (columnNames == null || columnNames.length == 0) { - logger.info("try to execute sql analysis query column name"); + log.info("try to execute sql analysis query column name"); columnNames = tryExecuteSqlResolveColumnNames(sourceType, dataSourceCfg, sql); } @@ -496,7 +496,7 @@ public class DataxTask extends AbstractTask { try { SQLStatementParser parser = DataxUtils.getSqlStatementParser(dbType, sql); if (parser == null) { - logger.warn("database driver [{}] is not support grammatical analysis sql", dbType); + log.warn("database driver [{}] is not support grammatical analysis sql", dbType); return new String[0]; } @@ -546,7 +546,7 @@ public class DataxTask extends AbstractTask { columnNames[i] = columnName; } } catch (Exception e) { - logger.warn(e.getMessage(), e); + log.warn(e.getMessage(), e); return new String[0]; } @@ -578,7 +578,7 @@ public class DataxTask extends AbstractTask { columnNames[i - 1] = md.getColumnName(i).replace("t.", ""); } } catch (SQLException | ExecutionException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); return null; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java index f4b4b1eb0d..fdaf70add4 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-dinky/src/main/java/org/apache/dolphinscheduler/plugin/task/dinky/DinkyTask.java @@ -81,7 +81,7 @@ public class DinkyTask extends AbstractRemoteTask { public void init() { final String taskParams = taskExecutionContext.getTaskParams(); this.dinkyParameters = JSONUtils.parseObject(taskParams, DinkyParameters.class); - logger.info("Initialize dinky task params: {}", JSONUtils.toPrettyJsonString(dinkyParameters)); + log.info("Initialize dinky task params: {}", JSONUtils.toPrettyJsonString(dinkyParameters)); if (this.dinkyParameters == null || !this.dinkyParameters.checkParameters()) { throw new DinkyTaskException("dinky task params is not valid"); } @@ -120,7 +120,7 @@ public class DinkyTask extends AbstractRemoteTask { // Use address-taskId as app id setAppIds(String.format("%s-%s", address, taskId)); setExitStatusCode(exitStatusCode); - logger.info("dinky task finished with results: {}", + log.info("dinky task finished with results: {}", result.get(DinkyTaskConstants.API_RESULT_DATAS)); finishFlag = true; break; @@ -138,7 +138,7 @@ public class DinkyTask extends AbstractRemoteTask { } } catch (InterruptedException ex) { Thread.currentThread().interrupt(); - logger.error("Execute dinkyTask failed", ex); + log.error("Execute dinkyTask failed", ex); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("Execute dinkyTask failed", ex); } @@ -181,7 +181,7 @@ public class DinkyTask extends AbstractRemoteTask { private void errorHandle(Object msg) { setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); - logger.error("dinky task submit failed with error: {}", msg); + log.error("dinky task submit failed with error: {}", msg); } @Override @@ -193,12 +193,12 @@ public class DinkyTask extends AbstractRemoteTask { public void cancelApplication() throws TaskException { String address = this.dinkyParameters.getAddress(); String taskId = this.dinkyParameters.getTaskId(); - logger.info("trying terminate dinky task, taskId: 
{}, address: {}, taskId: {}", + log.info("trying terminate dinky task, taskId: {}, address: {}, taskId: {}", this.taskExecutionContext.getTaskInstanceId(), address, taskId); cancelTask(address, taskId); - logger.warn("dinky task terminated, taskId: {}, address: {}, taskId: {}", + log.warn("dinky task terminated, taskId: {}, address: {}, taskId: {}", this.taskExecutionContext.getTaskInstanceId(), address, taskId); @@ -235,7 +235,7 @@ public class DinkyTask extends AbstractRemoteTask { try { result = mapper.readTree(res); } catch (JsonProcessingException e) { - logger.error("dinky task submit failed with error", e); + log.error("dinky task submit failed with error", e); } return result; } @@ -253,18 +253,18 @@ public class DinkyTask extends AbstractRemoteTask { } URI uri = uriBuilder.build(); httpGet = new HttpGet(uri); - logger.info("access url: {}", uri); + log.info("access url: {}", uri); HttpResponse response = httpClient.execute(httpGet); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { result = EntityUtils.toString(response.getEntity()); - logger.info("dinky task succeed with results: {}", result); + log.info("dinky task succeed with results: {}", result); } else { - logger.error("dinky task terminated,response: {}", response); + log.error("dinky task terminated,response: {}", response); } } catch (IllegalArgumentException ie) { - logger.error("dinky task terminated: {}", ie.getMessage()); + log.error("dinky task terminated: {}", ie.getMessage()); } catch (Exception e) { - logger.error("dinky task terminated: ", e); + log.error("dinky task terminated: ", e); } finally { if (null != httpGet) { httpGet.releaseConnection(); @@ -286,14 +286,14 @@ public class DinkyTask extends AbstractRemoteTask { HttpResponse response = httpClient.execute(httpPost); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { result = EntityUtils.toString(response.getEntity()); - logger.info("dinky task succeed with results: {}", result); + 
log.info("dinky task succeed with results: {}", result); } else { - logger.error("dinky task terminated,response: {}", response); + log.error("dinky task terminated,response: {}", response); } } catch (IllegalArgumentException ie) { - logger.error("dinky task terminated: {}", ie.getMessage()); + log.error("dinky task terminated: {}", ie.getMessage()); } catch (Exception he) { - logger.error("dinky task terminated: ", he); + log.error("dinky task terminated: ", he); } return result; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java index d3197429b9..dfa1d3830b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsHook.java @@ -63,7 +63,7 @@ import com.amazonaws.services.databasemigrationservice.model.TestConnectionReque @Data public class DmsHook { - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); private AWSDatabaseMigrationService client; private String replicationTaskIdentifier; @@ -101,7 +101,7 @@ public class DmsHook { } public Boolean createReplicationTask() throws Exception { - logger.info("createReplicationTask ......"); + log.info("createReplicationTask ......"); CreateReplicationTaskRequest request = new CreateReplicationTaskRequest() .withReplicationTaskIdentifier(replicationTaskIdentifier) .withSourceEndpointArn(sourceEndpointArn) @@ -123,13 +123,13 @@ public class DmsHook { CreateReplicationTaskResult result = client.createReplicationTask(request); replicationTaskIdentifier = 
result.getReplicationTask().getReplicationTaskIdentifier(); replicationTaskArn = result.getReplicationTask().getReplicationTaskArn(); - logger.info("replicationTaskIdentifier: {}, replicationTaskArn: {}", replicationTaskIdentifier, + log.info("replicationTaskIdentifier: {}, replicationTaskArn: {}", replicationTaskIdentifier, replicationTaskArn); return awaitReplicationTaskStatus(STATUS.READY); } public Boolean startReplicationTask() { - logger.info("startReplicationTask ......"); + log.info("startReplicationTask ......"); StartReplicationTaskRequest request = new StartReplicationTaskRequest() .withReplicationTaskArn(replicationTaskArn) .withStartReplicationTaskType(startReplicationTaskType) @@ -142,14 +142,14 @@ public class DmsHook { } public Boolean checkFinishedReplicationTask() { - logger.info("checkFinishedReplicationTask ......"); + log.info("checkFinishedReplicationTask ......"); awaitReplicationTaskStatus(STATUS.STOPPED); String stopReason = describeReplicationTasks().getStopReason(); return stopReason.endsWith(STATUS.FINISH_END_TOKEN); } public void stopReplicationTask() { - logger.info("stopReplicationTask ......"); + log.info("stopReplicationTask ......"); if (replicationTaskArn == null) { return; } @@ -160,7 +160,7 @@ public class DmsHook { } public Boolean deleteReplicationTask() { - logger.info("deleteReplicationTask ......"); + log.info("deleteReplicationTask ......"); DeleteReplicationTaskRequest request = new DeleteReplicationTaskRequest() .withReplicationTaskArn(replicationTaskArn); client.deleteReplicationTask(request); @@ -179,13 +179,13 @@ public class DmsHook { } public Boolean testConnection(String replicationInstanceArn, String endpointArn) { - logger.info("Test connect replication instance: {} and endpoint: {}", replicationInstanceArn, endpointArn); + log.info("Test connect replication instance: {} and endpoint: {}", replicationInstanceArn, endpointArn); TestConnectionRequest request = new 
TestConnectionRequest().withReplicationInstanceArn(replicationInstanceArn) .withEndpointArn(endpointArn); try { client.testConnection(request); } catch (InvalidResourceStateException e) { - logger.info(e.getErrorMessage()); + log.info(e.getErrorMessage()); } return awaitConnectSuccess(replicationInstanceArn, endpointArn); @@ -203,13 +203,13 @@ public class DmsHook { DescribeConnectionsResult response = client.describeConnections(request); String status = response.getConnections().get(0).getStatus(); if (status.equals(STATUS.SUCCESSFUL)) { - logger.info("Connect successful"); + log.info("Connect successful"); return true; } else if (!status.equals(STATUS.TESTING)) { break; } } - logger.info("Connect error"); + log.info("Connect error"); return false; } @@ -258,19 +258,19 @@ public class DmsHook { } if (!lastPercent.equals(percent)) { String runningMessage = String.format("fullLoadProgressPercent: %s ", percent); - logger.info(runningMessage); + log.info(runningMessage); } lastPercent = percent; } if (exceptStatus.equals(status)) { - logger.info("success"); + log.info("success"); return true; } else if (stopStatusSet.contains(status)) { break; } } - logger.info("error"); + log.info("error"); return false; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTask.java index ef60e67f58..5b29d54a46 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dms/src/main/java/org/apache/dolphinscheduler/plugin/task/dms/DmsTask.java @@ -72,7 +72,7 @@ public class DmsTask extends AbstractRemoteTask { @Override public void init() throws TaskException { parameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), 
DmsParameters.class); - logger.info("Initialize Dms task params {}", JSONUtils.toPrettyJsonString(parameters)); + log.info("Initialize Dms task params {}", JSONUtils.toPrettyJsonString(parameters)); initDmsHook(); } @@ -105,7 +105,7 @@ public class DmsTask extends AbstractRemoteTask { dmsHook.setReplicationTaskArn(appId.getReplicationTaskArn()); // if CdcStopPosition is not set, the task will not continue to check the running status if (isStopTaskWhenCdc()) { - logger.info( + log.info( "This is a cdc task and cdcStopPosition is not set, the task will not continue to check the running status"); exitStatusCode = TaskConstants.EXIT_CODE_SUCCESS; return; @@ -168,7 +168,7 @@ public class DmsTask extends AbstractRemoteTask { try { isStartSuccessfully = dmsHook.startReplicationTask(); } catch (InvalidResourceStateException e) { - logger.error("Failed to start a task, error message: {}", e.getErrorMessage()); + log.error("Failed to start a task, error message: {}", e.getErrorMessage()); // Only restart task when the error contains "Test connection", means instance can not connect to source or // target @@ -176,7 +176,7 @@ public class DmsTask extends AbstractRemoteTask { return TaskConstants.EXIT_CODE_FAILURE; } - logger.info("restart replication task"); + log.info("restart replication task"); // if only restart task, run dmsHook.describeReplicationTasks to get replication task arn if (parameters.getIsRestartTask()) { dmsHook.describeReplicationTasks(); @@ -244,7 +244,7 @@ public class DmsTask extends AbstractRemoteTask { parameters = objectMapper.readValue(jsonData, DmsParameters.class); parameters.setIsRestartTask(isRestartTask); } catch (Exception e) { - logger.error("Failed to convert json data to DmsParameters object.", e); + log.error("Failed to convert json data to DmsParameters object.", e); throw new TaskException(e.getMessage()); } } diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTask.java index af87f87e27..32d1b48f75 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dvc/src/main/java/org/apache/dolphinscheduler/plugin/task/dvc/DvcTask.java @@ -60,14 +60,14 @@ public class DvcTask extends AbstractTask { super(taskExecutionContext); this.taskExecutionContext = taskExecutionContext; - this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, logger); + this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, log); } @Override public void init() { parameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), DvcParameters.class); - logger.info("Initialize dvc task params {}", JSONUtils.toPrettyJsonString(parameters)); + log.info("Initialize dvc task params {}", JSONUtils.toPrettyJsonString(parameters)); if (parameters == null || !parameters.checkParameters()) { throw new TaskException("dvc task params is not valid"); @@ -85,11 +85,11 @@ public class DvcTask extends AbstractTask { parameters.dealOutParam(shellCommandExecutor.getVarPool()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current DvcTask has been interrupted", e); + log.error("The current DvcTask has been interrupted", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("The current DvcTask has been interrupted", e); } catch (Exception e) { - logger.error("dvc task error", e); + log.error("dvc task error", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("Execute dvc task failed", e); } @@ -115,7 +115,7 @@ public class DvcTask extends 
AbstractTask { } else if (taskType.equals(DvcConstants.DVC_TASK_TYPE.INIT)) { command = buildInitDvcCommond(); } - logger.info("Run DVC task with command: \n{}", command); + log.info("Run DVC task with command: \n{}", command); return command; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java index e3f8a8d73d..b4603c4605 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/AbstractEmrTask.java @@ -79,7 +79,7 @@ public abstract class AbstractEmrTask extends AbstractRemoteTask { public void init() { final String taskParams = taskExecutionContext.getTaskParams(); emrParameters = JSONUtils.parseObject(taskParams, EmrParameters.class); - logger.info("Initialize emr task params:{}", JSONUtils.toPrettyJsonString(taskParams)); + log.info("Initialize emr task params:{}", JSONUtils.toPrettyJsonString(taskParams)); if (emrParameters == null || !emrParameters.checkParameters()) { throw new EmrTaskException("emr task params is not valid"); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java index 4fd2c9e067..ff3331d530 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrAddStepsTask.java @@ -85,12 
+85,12 @@ public class EmrAddStepsTask extends AbstractEmrTask { stepStatus = getStepStatus(); } catch (EmrTaskException | SdkBaseException e) { - logger.error("emr task submit failed with error", e); + log.error("emr task submit failed with error", e); throw new TaskException("emr task submit fail", e); } finally { final int exitStatusCode = calculateExitStatusCode(stepStatus); setExitStatusCode(exitStatusCode); - logger.info("emr task finished with step status : {}", stepStatus); + log.info("emr task finished with step status : {}", stepStatus); } } @@ -104,14 +104,14 @@ public class EmrAddStepsTask extends AbstractEmrTask { stepStatus = getStepStatus(); } } catch (EmrTaskException | SdkBaseException e) { - logger.error("emr task failed with error", e); + log.error("emr task failed with error", e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new TaskException("Execute emr task failed", e); } finally { final int exitStatusCode = calculateExitStatusCode(stepStatus); setExitStatusCode(exitStatusCode); - logger.info("emr task finished with step status : {}", stepStatus); + log.info("emr task finished with step status : {}", stepStatus); } } @@ -172,14 +172,14 @@ public class EmrAddStepsTask extends AbstractEmrTask { throw new EmrTaskException("fetch step status failed"); } StepStatus stepStatus = result.getStep().getStatus(); - logger.info("emr step [clusterId:{}, stepId:{}] running with status:{}", clusterId, stepId, stepStatus); + log.info("emr step [clusterId:{}, stepId:{}] running with status:{}", clusterId, stepId, stepStatus); return stepStatus; } @Override public void cancelApplication() throws TaskException { - logger.info("trying cancel emr step, taskId:{}, clusterId:{}, stepId:{}", + log.info("trying cancel emr step, taskId:{}, clusterId:{}, stepId:{}", this.taskExecutionContext.getTaskInstanceId(), clusterId, stepId); CancelStepsRequest cancelStepsRequest = new 
CancelStepsRequest().withClusterId(clusterId).withStepIds(stepId); CancelStepsResult cancelStepsResult = emrClient.cancelSteps(cancelStepsRequest); @@ -198,7 +198,7 @@ public class EmrAddStepsTask extends AbstractEmrTask { throw new EmrTaskException("cancel emr step failed, message:" + cancelEmrStepInfo.getReason()); } - logger.info("the result of cancel emr step is:{}", cancelStepsResult); + log.info("the result of cancel emr step is:{}", cancelStepsResult); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java index 53d879f58f..9abff26818 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-emr/src/main/java/org/apache/dolphinscheduler/plugin/task/emr/EmrJobFlowTask.java @@ -78,12 +78,12 @@ public class EmrJobFlowTask extends AbstractEmrTask { clusterStatus = getClusterStatus(); } catch (EmrTaskException | SdkBaseException e) { - logger.error("emr task submit failed with error", e); + log.error("emr task submit failed with error", e); throw new TaskException("emr task submit failed", e); } finally { final int exitStatusCode = calculateExitStatusCode(clusterStatus); setExitStatusCode(exitStatusCode); - logger.info("emr task finished with cluster status : {}", clusterStatus); + log.info("emr task finished with cluster status : {}", clusterStatus); } } @@ -98,14 +98,14 @@ public class EmrJobFlowTask extends AbstractEmrTask { clusterStatus = getClusterStatus(); } } catch (EmrTaskException | SdkBaseException e) { - logger.error("emr task failed with error", e); + log.error("emr task failed with error", e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new 
TaskException("Execute emr task failed", e); } finally { final int exitStatusCode = calculateExitStatusCode(clusterStatus); setExitStatusCode(exitStatusCode); - logger.info("emr task finished with cluster status : {}", clusterStatus); + log.info("emr task finished with cluster status : {}", clusterStatus); } } @@ -165,18 +165,18 @@ public class EmrJobFlowTask extends AbstractEmrTask { throw new EmrTaskException("fetch cluster status failed"); } ClusterStatus clusterStatus = result.getCluster().getStatus(); - logger.info("emr cluster [clusterId:{}] running with status:{}", clusterId, clusterStatus); + log.info("emr cluster [clusterId:{}] running with status:{}", clusterId, clusterStatus); return clusterStatus; } @Override public void cancelApplication() throws TaskException { - logger.info("trying terminate job flow, taskId:{}, clusterId:{}", this.taskExecutionContext.getTaskInstanceId(), + log.info("trying terminate job flow, taskId:{}, clusterId:{}", this.taskExecutionContext.getTaskInstanceId(), clusterId); TerminateJobFlowsRequest terminateJobFlowsRequest = new TerminateJobFlowsRequest().withJobFlowIds(clusterId); TerminateJobFlowsResult terminateJobFlowsResult = emrClient.terminateJobFlows(terminateJobFlowsRequest); - logger.info("the result of terminate job flow is:{}", terminateJobFlowsResult); + log.info("the result of terminate job flow is:{}", terminateJobFlowsResult); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTask.java index 6608d701f3..750711502f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTask.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink-stream/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkStreamTask.java @@ -52,7 +52,7 @@ public class FlinkStreamTask extends FlinkTask implements StreamTask { public void init() { flinkParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), FlinkStreamParameters.class); - logger.info("Initialize Flink task params {}", JSONUtils.toPrettyJsonString(flinkParameters)); + log.info("Initialize Flink task params {}", JSONUtils.toPrettyJsonString(flinkParameters)); if (flinkParameters == null || !flinkParameters.checkParameters()) { throw new RuntimeException("flink task params is not valid"); @@ -76,7 +76,7 @@ public class FlinkStreamTask extends FlinkTask implements StreamTask { String command = ParameterUtils .convertParameterPlaceholders(String.join(" ", args), taskExecutionContext.getDefinedParams()); - logger.info("flink task command : {}", command); + log.info("flink task command : {}", command); return command; } @@ -97,13 +97,13 @@ public class FlinkStreamTask extends FlinkTask implements StreamTask { public void cancelApplication() throws TaskException { List appIds = getApplicationIds(); if (CollectionUtils.isEmpty(appIds)) { - logger.error("can not get appId, taskInstanceId:{}", taskExecutionContext.getTaskInstanceId()); + log.error("can not get appId, taskInstanceId:{}", taskExecutionContext.getTaskInstanceId()); return; } taskExecutionContext.setAppIds(String.join(TaskConstants.COMMA, appIds)); List args = FlinkArgsUtils.buildCancelCommandLine(taskExecutionContext); - logger.info("cancel application args:{}", args); + log.info("cancel application args:{}", args); ProcessBuilder processBuilder = new ProcessBuilder(); processBuilder.command(args); @@ -118,13 +118,13 @@ public class FlinkStreamTask extends FlinkTask implements StreamTask { public void savePoint() throws Exception { List appIds = getApplicationIds(); if (CollectionUtils.isEmpty(appIds)) { - 
logger.warn("can not get appId, taskInstanceId:{}", taskExecutionContext.getTaskInstanceId()); + log.warn("can not get appId, taskInstanceId:{}", taskExecutionContext.getTaskInstanceId()); return; } taskExecutionContext.setAppIds(String.join(TaskConstants.COMMA, appIds)); List args = FlinkArgsUtils.buildSavePointCommandLine(taskExecutionContext); - logger.info("savepoint args:{}", args); + log.info("savepoint args:{}", args); ProcessBuilder processBuilder = new ProcessBuilder(); processBuilder.command(args); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FileUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FileUtils.java index c932609162..70a1bbb43c 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FileUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FileUtils.java @@ -35,12 +35,11 @@ import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class FileUtils { - private static final Logger LOGGER = LoggerFactory.getLogger(FileUtils.class); private FileUtils() { } @@ -99,8 +98,8 @@ public class FileUtils { private static void writeStringToFile(File file, String content, StandardOpenOption standardOpenOption) { try { - LOGGER.info("Writing content: " + content); - LOGGER.info("To file: " + file.getAbsolutePath()); + log.info("Writing content: " + content); + log.info("To file: " + file.getAbsolutePath()); Files.write(file.getAbsoluteFile().toPath(), content.getBytes(StandardCharsets.UTF_8), standardOpenOption); } catch (IOException e) { throw new RuntimeException("Error 
writing file: " + file.getAbsoluteFile(), e); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTask.java index 115c5954a5..2e58ae0534 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-flink/src/main/java/org/apache/dolphinscheduler/plugin/task/flink/FlinkTask.java @@ -55,7 +55,7 @@ public class FlinkTask extends AbstractYarnTask { public void init() { flinkParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), FlinkParameters.class); - logger.info("Initialize flink task params {}", JSONUtils.toPrettyJsonString(flinkParameters)); + log.info("Initialize flink task params {}", JSONUtils.toPrettyJsonString(flinkParameters)); if (flinkParameters == null || !flinkParameters.checkParameters()) { throw new RuntimeException("flink task params is not valid"); @@ -79,14 +79,14 @@ public class FlinkTask extends AbstractYarnTask { String command = ParameterUtils .convertParameterPlaceholders(String.join(" ", args), taskExecutionContext.getDefinedParams()); - logger.info("flink task command : {}", command); + log.info("flink task command : {}", command); return command; } @Override protected void setMainJarName() { if (flinkParameters.getProgramType() == ProgramType.SQL) { - logger.info("The current flink job type is SQL, will no need to set main jar"); + log.info("The current flink job type is SQL, will no need to set main jar"); return; } @@ -94,7 +94,7 @@ public class FlinkTask extends AbstractYarnTask { String resourceName = getResourceNameOfMainJar(mainJar); mainJar.setRes(resourceName); flinkParameters.setMainJar(mainJar); - logger.info("Success set flink jar: {}", resourceName); + 
log.info("Success set flink jar: {}", resourceName); } @Override diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-hivecli/src/main/java/org/apache/dolphinscheduler/plugin/task/hivecli/HiveCliTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-hivecli/src/main/java/org/apache/dolphinscheduler/plugin/task/hivecli/HiveCliTask.java index 1a4c674853..efd12b682a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-hivecli/src/main/java/org/apache/dolphinscheduler/plugin/task/hivecli/HiveCliTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-hivecli/src/main/java/org/apache/dolphinscheduler/plugin/task/hivecli/HiveCliTask.java @@ -53,7 +53,7 @@ public class HiveCliTask extends AbstractRemoteTask { this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, - logger); + log); } @Override @@ -63,7 +63,7 @@ public class HiveCliTask extends AbstractRemoteTask { @Override public void init() { - logger.info("hiveCli task params {}", taskExecutionContext.getTaskParams()); + log.info("hiveCli task params {}", taskExecutionContext.getTaskParams()); hiveCliParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), HiveCliParameters.class); @@ -83,11 +83,11 @@ public class HiveCliTask extends AbstractRemoteTask { setVarPool(shellCommandExecutor.getVarPool()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current HiveCLI Task has been interrupted", e); + log.error("The current HiveCLI Task has been interrupted", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("The current HiveCLI Task has been interrupted", e); } catch (Exception e) { - logger.error("hiveCli task failure", e); + log.error("hiveCli task failure", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("run hiveCli task error", e); } @@ -114,7 +114,7 @@ public class HiveCliTask extends AbstractRemoteTask { 
args.add(HiveCliConstants.HIVE_CLI_EXECUTE_FILE); final List resourceInfos = hiveCliParameters.getResourceList(); if (resourceInfos.size() > 1) { - logger.warn("more than 1 files detected, use the first one by default"); + log.warn("more than 1 files detected, use the first one by default"); } args.add(StringUtils.stripStart(resourceInfos.get(0).getResourceName(), "/")); @@ -132,7 +132,7 @@ public class HiveCliTask extends AbstractRemoteTask { final String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", args), ParamUtils.convert(paramsMap)); - logger.info("hiveCli task command: {}", command); + log.info("hiveCli task command: {}", command); return command; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java index 735c4a4830..a866e09876 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-http/src/main/java/org/apache/dolphinscheduler/plugin/task/http/HttpTask.java @@ -81,7 +81,7 @@ public class HttpTask extends AbstractTask { @Override public void init() { this.httpParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), HttpParameters.class); - logger.info("Initialize http task params {}", JSONUtils.toPrettyJsonString(httpParameters)); + log.info("Initialize http task params {}", JSONUtils.toPrettyJsonString(httpParameters)); if (httpParameters == null || !httpParameters.checkParameters()) { throw new RuntimeException("http task params is not valid"); @@ -102,14 +102,14 @@ public class HttpTask extends AbstractTask { body = getResponseBody(response); exitStatusCode = validResponse(body, statusCode); long costTime = System.currentTimeMillis() - startTime; - 
logger.info( + log.info( "startTime: {}, httpUrl: {}, httpMethod: {}, costTime : {} milliseconds, statusCode : {}, body : {}, log : {}", formatTimeStamp, httpParameters.getUrl(), httpParameters.getHttpMethod(), costTime, statusCode, body, output); } catch (Exception e) { appendMessage(e.toString()); exitStatusCode = -1; - logger.error("httpUrl[" + httpParameters.getUrl() + "] connection failed:" + output, e); + log.error("httpUrl[" + httpParameters.getUrl() + "] connection failed:" + output, e); throw new TaskException("Execute http task failed", e); } @@ -138,7 +138,7 @@ public class HttpTask extends AbstractTask { for (HttpProperty httpProperty : httpParameters.getHttpParams()) { String jsonObject = JSONUtils.toJsonString(httpProperty); String params = ParameterUtils.convertParameterPlaceholders(jsonObject, ParamUtils.convert(paramsMap)); - logger.info("http request params:{}", params); + log.info("http request params:{}", params); httpPropertyList.add(JSONUtils.parseObject(params, HttpProperty.class)); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-java/src/main/java/org/apache/dolphinscheduler/plugin/task/java/JavaTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-java/src/main/java/org/apache/dolphinscheduler/plugin/task/java/JavaTask.java index 8ac5cf8ae1..19083de50f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-java/src/main/java/org/apache/dolphinscheduler/plugin/task/java/JavaTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-java/src/main/java/org/apache/dolphinscheduler/plugin/task/java/JavaTask.java @@ -80,7 +80,7 @@ public class JavaTask extends AbstractTask { this.taskRequest = taskRequest; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskRequest, - logger); + log); } /** @@ -96,7 +96,7 @@ public class JavaTask extends AbstractTask { if (javaParameters.getRunType().equals(JavaConstants.RUN_TYPE_JAR)) { setMainJarName(); } - logger.info("Initialize java task 
params {}", JSONUtils.toPrettyJsonString(javaParameters)); + log.info("Initialize java task params {}", JSONUtils.toPrettyJsonString(javaParameters)); } /** @@ -110,7 +110,7 @@ public class JavaTask extends AbstractTask { try { rawJavaScript = convertJavaSourceCodePlaceholders(rawJavaScript); } catch (StringIndexOutOfBoundsException e) { - logger.error("setShareVar field format error, raw java script: {}", rawJavaScript); + log.error("setShareVar field format error, raw java script: {}", rawJavaScript); } return rawJavaScript; } @@ -143,21 +143,21 @@ public class JavaTask extends AbstractTask { } Preconditions.checkNotNull(command, "command not be null."); TaskResponse taskResponse = shellCommandExecutor.run(command); - logger.info("java task run result: {}", taskResponse); + log.info("java task run result: {}", taskResponse); setExitStatusCode(taskResponse.getExitStatusCode()); setAppIds(taskResponse.getAppIds()); setProcessId(taskResponse.getProcessId()); setVarPool(shellCommandExecutor.getVarPool()); } catch (InterruptedException e) { - logger.error("java task interrupted ", e); + log.error("java task interrupted ", e); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); Thread.currentThread().interrupt(); } catch (RunTypeNotFoundException e) { - logger.error(e.getMessage()); + log.error(e.getMessage()); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); throw e; } catch (Exception e) { - logger.error("java task failed ", e); + log.error("java task failed ", e); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); throw new TaskException("run java task error", e); } @@ -279,9 +279,9 @@ public class JavaTask extends AbstractTask { * @return String **/ protected void createJavaSourceFileIfNotExists(String sourceCode, String fileName) throws IOException { - logger.info("tenantCode: {}, task dir:{}", taskRequest.getTenantCode(), taskRequest.getExecutePath()); + log.info("tenantCode: {}, task dir:{}", taskRequest.getTenantCode(), taskRequest.getExecutePath()); if 
(!Files.exists(Paths.get(fileName))) { - logger.info("the java source code:{}, will be write to the file: {}", fileName, sourceCode); + log.info("the java source code:{}, will be write to the file: {}", fileName, sourceCode); // write data to file FileUtils.writeStringToFile(new File(fileName), sourceCode, @@ -360,7 +360,7 @@ public class JavaTask extends AbstractTask { if (MapUtils.isNotEmpty(taskRequest.getParamsMap())) { paramsMap.putAll(taskRequest.getParamsMap()); } - logger.info("The current java source code will begin to replace the placeholder: {}", rawJavaScript); + log.info("The current java source code will begin to replace the placeholder: {}", rawJavaScript); rawJavaScript = ParameterUtils.convertParameterPlaceholders(rawJavaScript, ParamUtils.convert(paramsMap)); return rawJavaScript; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-jupyter/src/main/java/org/apache/dolphinscheduler/plugin/task/jupyter/JupyterTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-jupyter/src/main/java/org/apache/dolphinscheduler/plugin/task/jupyter/JupyterTask.java index f3155a933a..61fc07094b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-jupyter/src/main/java/org/apache/dolphinscheduler/plugin/task/jupyter/JupyterTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-jupyter/src/main/java/org/apache/dolphinscheduler/plugin/task/jupyter/JupyterTask.java @@ -55,7 +55,7 @@ public class JupyterTask extends AbstractRemoteTask { this.taskExecutionContext = taskExecutionContext; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, - logger); + log); } @Override @@ -67,10 +67,10 @@ public class JupyterTask extends AbstractRemoteTask { public void init() { jupyterParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), JupyterParameters.class); - logger.info("Initialize jupyter task params {}", JSONUtils.toPrettyJsonString(jupyterParameters)); + log.info("Initialize 
jupyter task params {}", JSONUtils.toPrettyJsonString(jupyterParameters)); if (null == jupyterParameters) { - logger.error("jupyter params is null"); + log.error("jupyter params is null"); return; } @@ -89,11 +89,11 @@ public class JupyterTask extends AbstractRemoteTask { setProcessId(response.getProcessId()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current Jupyter task has been interrupted", e); + log.error("The current Jupyter task has been interrupted", e); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); throw new TaskException("The current Jupyter task has been interrupted", e); } catch (Exception e) { - logger.error("jupyter task execution failure", e); + log.error("jupyter task execution failure", e); exitStatusCode = -1; throw new TaskException("Execute jupyter task failed", e); } @@ -153,7 +153,7 @@ public class JupyterTask extends AbstractRemoteTask { String command = ParameterUtils .convertParameterPlaceholders(String.join(" ", args), ParamUtils.convert(paramsMap)); - logger.info("jupyter task command: {}", command); + log.info("jupyter task command: {}", command); return command; } @@ -177,7 +177,7 @@ public class JupyterTask extends AbstractRemoteTask { } } catch (IOException e) { - logger.error("fail to parse jupyter parameterization", e); + log.error("fail to parse jupyter parameterization", e); throw e; } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-k8s/src/main/java/org/apache/dolphinscheduler/plugin/task/k8s/K8sTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-k8s/src/main/java/org/apache/dolphinscheduler/plugin/task/k8s/K8sTask.java index cd756c76b3..6a180d7d05 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-k8s/src/main/java/org/apache/dolphinscheduler/plugin/task/k8s/K8sTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-k8s/src/main/java/org/apache/dolphinscheduler/plugin/task/k8s/K8sTask.java @@ -53,7 +53,7 @@ public class 
K8sTask extends AbstractK8sTask { super(taskRequest); this.taskExecutionContext = taskRequest; this.k8sTaskParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), K8sTaskParameters.class); - logger.info("Initialize k8s task parameters {}", JSONUtils.toPrettyJsonString(k8sTaskParameters)); + log.info("Initialize k8s task parameters {}", JSONUtils.toPrettyJsonString(k8sTaskParameters)); if (k8sTaskParameters == null || !k8sTaskParameters.checkParameters()) { throw new TaskException("K8S task params is not valid"); } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-kubeflow/src/main/java/org/apache/dolphinscheduler/plugin/kubeflow/KubeflowHelper.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-kubeflow/src/main/java/org/apache/dolphinscheduler/plugin/kubeflow/KubeflowHelper.java index 5dd111369d..302f23700b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-kubeflow/src/main/java/org/apache/dolphinscheduler/plugin/kubeflow/KubeflowHelper.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-kubeflow/src/main/java/org/apache/dolphinscheduler/plugin/kubeflow/KubeflowHelper.java @@ -38,7 +38,7 @@ import com.google.common.collect.Sets; public class KubeflowHelper { - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); private final String clusterConfigPath; @@ -83,7 +83,7 @@ public class KubeflowHelper { for (int x = messageIndex; x < conditions.size(); x = x + 1) { JsonNode condition = conditions.get(x); String stepMessage = condition.toString(); - logger.info(stepMessage); + log.info(stepMessage); } messageIndex = conditions.size(); JsonNode lastCondition = conditions.get(conditions.size() - 1); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-kubeflow/src/main/java/org/apache/dolphinscheduler/plugin/kubeflow/KubeflowTask.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-kubeflow/src/main/java/org/apache/dolphinscheduler/plugin/kubeflow/KubeflowTask.java index 5355ae1d24..beea17b0bc 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-kubeflow/src/main/java/org/apache/dolphinscheduler/plugin/kubeflow/KubeflowTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-kubeflow/src/main/java/org/apache/dolphinscheduler/plugin/kubeflow/KubeflowTask.java @@ -54,7 +54,7 @@ public class KubeflowTask extends AbstractRemoteTask { @Override public void init() throws TaskException { kubeflowParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), KubeflowParameters.class); - logger.info("Initialize Kubeflow task params {}", taskExecutionContext.getTaskParams()); + log.info("Initialize Kubeflow task params {}", taskExecutionContext.getTaskParams()); kubeflowParameters.setClusterYAML(taskExecutionContext.getK8sTaskExecutionContext().getConfigYaml()); if (!kubeflowParameters.checkParameters()) { @@ -68,9 +68,9 @@ public class KubeflowTask extends AbstractRemoteTask { @Override public void submitApplication() throws TaskException { String command = kubeflowHelper.buildSubmitCommand(yamlPath.toString()); - logger.info("Kubeflow task submit command: \n{}", command); + log.info("Kubeflow task submit command: \n{}", command); String message = runCommand(command); - logger.info("Kubeflow task submit result: \n{}", message); + log.info("Kubeflow task submit result: \n{}", message); KubeflowHelper.ApplicationIds applicationIds = new KubeflowHelper.ApplicationIds(); applicationIds.setAlreadySubmitted(true); @@ -85,18 +85,18 @@ public class KubeflowTask extends AbstractRemoteTask { @Override public void trackApplicationStatus() throws TaskException { String command = kubeflowHelper.buildGetCommand(yamlPath.toString()); - logger.info("Kubeflow task get command: \n{}", command); + log.info("Kubeflow task get command: \n{}", command); do { 
ThreadUtils.sleep(KubeflowHelper.CONSTANTS.TRACK_INTERVAL); String message = runCommand(command); String phase = kubeflowHelper.parseGetMessage(message); if (KubeflowHelper.STATUS.FAILED_SET.contains(phase)) { exitStatusCode = TaskConstants.EXIT_CODE_FAILURE; - logger.info("Kubeflow task get Failed result: \n{}", message); + log.info("Kubeflow task get Failed result: \n{}", message); break; } else if (KubeflowHelper.STATUS.SUCCESS_SET.contains(phase)) { exitStatusCode = TaskConstants.EXIT_CODE_SUCCESS; - logger.info("Kubeflow task get Succeeded result: \n{}", message); + log.info("Kubeflow task get Succeeded result: \n{}", message); break; } } while (true); @@ -106,9 +106,9 @@ public class KubeflowTask extends AbstractRemoteTask { @Override public void cancelApplication() throws TaskException { String command = kubeflowHelper.buildDeleteCommand(yamlPath.toString()); - logger.info("Kubeflow task delete command: \n{}", command); + log.info("Kubeflow task delete command: \n{}", command); String message = runCommand(command); - logger.info("Kubeflow task delete result: \n{}", message); + log.info("Kubeflow task delete result: \n{}", message); exitStatusCode = TaskConstants.EXIT_CODE_KILL; } @@ -138,7 +138,7 @@ public class KubeflowTask extends AbstractRemoteTask { clusterYAMLPath = Paths.get(taskExecutionContext.getExecutePath(), KubeflowHelper.CONSTANTS.CLUSTER_CONFIG_PATH); - logger.info("Kubeflow task yaml content: \n{}", yamlContent); + log.info("Kubeflow task yaml content: \n{}", yamlContent); try { Files.write(yamlPath, yamlContent.getBytes(), StandardOpenOption.CREATE); Files.write(clusterYAMLPath, clusterYAML.getBytes(), StandardOpenOption.CREATE); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-linkis/src/main/java/org/apache/dolphinscheduler/plugin/task/linkis/LinkisTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-linkis/src/main/java/org/apache/dolphinscheduler/plugin/task/linkis/LinkisTask.java index 9e4a2944f3..d6a1b8243d 
100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-linkis/src/main/java/org/apache/dolphinscheduler/plugin/task/linkis/LinkisTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-linkis/src/main/java/org/apache/dolphinscheduler/plugin/task/linkis/LinkisTask.java @@ -79,7 +79,7 @@ public class LinkisTask extends AbstractRemoteTask { this.taskExecutionContext = taskExecutionContext; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, - logger); + log); } @Override @@ -90,7 +90,7 @@ public class LinkisTask extends AbstractRemoteTask { @Override public void init() { linkisParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), LinkisParameters.class); - logger.info("Initialize Linkis task params {}", JSONUtils.toPrettyJsonString(linkisParameters)); + log.info("Initialize Linkis task params {}", JSONUtils.toPrettyJsonString(linkisParameters)); if (!linkisParameters.checkParameters()) { throw new RuntimeException("Linkis task params is not valid"); @@ -109,11 +109,11 @@ public class LinkisTask extends AbstractRemoteTask { linkisParameters.dealOutParam(shellCommandExecutor.getVarPool()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current Linkis task has been interrupted", e); + log.error("The current Linkis task has been interrupted", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("The current Linkis task has been interrupted", e); } catch (Exception e) { - logger.error("Linkis task error", e); + log.error("Linkis task error", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("Execute Linkis task failed", e); } @@ -144,7 +144,7 @@ public class LinkisTask extends AbstractRemoteTask { } } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current Linkis task has been interrupted", e); + log.error("The current Linkis task has been interrupted", e); throw new 
TaskException("The current Linkis task has been interrupted", e); } catch (Exception e) { throw new TaskException("track linkis status error", e); @@ -165,7 +165,7 @@ public class LinkisTask extends AbstractRemoteTask { setExitStatusCode(EXIT_CODE_KILL); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current Linkis task has been interrupted", e); + log.error("The current Linkis task has been interrupted", e); throw new TaskException("The current Linkis task has been interrupted", e); } catch (Exception e) { throw new TaskException("cancel linkis task error", e); @@ -178,7 +178,7 @@ public class LinkisTask extends AbstractRemoteTask { args.addAll(buildOptions()); String command = String.join(Constants.SPACE, args); - logger.info("Linkis task command: {}", command); + log.info("Linkis task command: {}", command); return command; } @@ -196,14 +196,14 @@ public class LinkisTask extends AbstractRemoteTask { } private String buildCustomConfigContent() { - logger.info("raw custom config content : {}", linkisParameters.getRawScript()); + log.info("raw custom config content : {}", linkisParameters.getRawScript()); String script = linkisParameters.getRawScript().replaceAll("\\r\\n", "\n"); script = parseScript(script); return script; } private String buildParamConfigContent() { - logger.info("raw param config content : {}", linkisParameters.getParamScript()); + log.info("raw param config content : {}", linkisParameters.getParamScript()); String script = ""; List paramList = linkisParameters.getParamScript(); for (LinkisParameters.Param param : paramList) { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-mlflow/src/main/java/org/apache/dolphinscheduler/plugin/task/mlflow/MlflowTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-mlflow/src/main/java/org/apache/dolphinscheduler/plugin/task/mlflow/MlflowTask.java index 3800cd846c..7b252c8fd5 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-mlflow/src/main/java/org/apache/dolphinscheduler/plugin/task/mlflow/MlflowTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-mlflow/src/main/java/org/apache/dolphinscheduler/plugin/task/mlflow/MlflowTask.java @@ -69,7 +69,7 @@ public class MlflowTask extends AbstractTask { super(taskExecutionContext); this.taskExecutionContext = taskExecutionContext; - this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, logger); + this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, log); } static public String getPresetRepository() { @@ -105,7 +105,7 @@ public class MlflowTask extends AbstractTask { mlflowParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), MlflowParameters.class); - logger.info("Initialize MLFlow task params {}", JSONUtils.toPrettyJsonString(mlflowParameters)); + log.info("Initialize MLFlow task params {}", JSONUtils.toPrettyJsonString(mlflowParameters)); if (mlflowParameters == null || !mlflowParameters.checkParameters()) { throw new RuntimeException("MLFlow task params is not valid"); } @@ -128,11 +128,11 @@ public class MlflowTask extends AbstractTask { mlflowParameters.dealOutParam(shellCommandExecutor.getVarPool()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current Mlflow task has been interrupted", e); + log.error("The current Mlflow task has been interrupted", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("The current Mlflow task has been interrupted", e); } catch (Exception e) { - logger.error("Mlflow task error", e); + log.error("Mlflow task error", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("Execute Mlflow task failed", e); } @@ -155,7 +155,7 @@ public class MlflowTask extends AbstractTask { } else if (mlflowParameters.getMlflowTaskType().equals(MlflowConstants.MLFLOW_TASK_TYPE_MODELS)) { command = 
buildCommandForMlflowModels(); } - logger.info("mlflow task command: \n{}", command); + log.info("mlflow task command: \n{}", command); return command; } @@ -259,7 +259,7 @@ public class MlflowTask extends AbstractTask { } public int checkDockerHealth() { - logger.info("checking container healthy ... "); + log.info("checking container healthy ... "); int exitCode = -1; String[] command = {"sh", "-c", String.format(MlflowConstants.DOCKER_HEALTH_CHECK, mlflowParameters.getContainerName())}; @@ -270,20 +270,20 @@ public class MlflowTask extends AbstractTask { } catch (Exception e) { status = String.format("error --- %s", e.getMessage()); } - logger.info("container healthy status: {}", status); + log.info("container healthy status: {}", status); if (status.equals("healthy")) { exitCode = 0; - logger.info("container is healthy"); + log.info("container is healthy"); return exitCode; } else { - logger.info("The health check has been running for {} seconds", + log.info("The health check has been running for {} seconds", x * MlflowConstants.DOCKER_HEALTH_CHECK_INTERVAL / 1000); ThreadUtils.sleep(MlflowConstants.DOCKER_HEALTH_CHECK_INTERVAL); } } - logger.info("health check fail"); + log.info("health check fail"); return exitCode; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTask.java index 563d55346d..8d9befd2d0 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-mr/src/main/java/org/apache/dolphinscheduler/plugin/task/mr/MapReduceTask.java @@ -87,7 +87,7 @@ public class MapReduceTask extends AbstractYarnTask { ParamUtils.convert(paramsMap)); mapreduceParameters.setOthers(others); } - logger.info("Initialize 
mapreduce task params {}", JSONUtils.toPrettyJsonString(mapreduceParameters)); + log.info("Initialize mapreduce task params {}", JSONUtils.toPrettyJsonString(mapreduceParameters)); } /** @@ -105,7 +105,7 @@ public class MapReduceTask extends AbstractYarnTask { String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", args), taskExecutionContext.getDefinedParams()); - logger.info("mapreduce task command: {}", command); + log.info("mapreduce task command: {}", command); return command; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-openmldb/src/main/java/org/apache/dolphinscheduler/plugin/task/openmldb/OpenmldbTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-openmldb/src/main/java/org/apache/dolphinscheduler/plugin/task/openmldb/OpenmldbTask.java index a3559cd93f..75e6b3bce3 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-openmldb/src/main/java/org/apache/dolphinscheduler/plugin/task/openmldb/OpenmldbTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-openmldb/src/main/java/org/apache/dolphinscheduler/plugin/task/openmldb/OpenmldbTask.java @@ -64,7 +64,7 @@ public class OpenmldbTask extends PythonTask { public void init() { pythonParameters = JSONUtils.parseObject(taskRequest.getTaskParams(), OpenmldbParameters.class); - logger.info("Initialize openmldb task params {}", JSONUtils.toPrettyJsonString(pythonParameters)); + log.info("Initialize openmldb task params {}", JSONUtils.toPrettyJsonString(pythonParameters)); if (pythonParameters == null || !pythonParameters.checkParameters()) { throw new TaskException("openmldb task params is not valid"); } @@ -94,7 +94,7 @@ public class OpenmldbTask extends PythonTask { @Override protected String buildPythonScriptContent() { OpenmldbParameters openmldbParameters = (OpenmldbParameters) pythonParameters; - logger.info("raw sql script : {}", openmldbParameters.getSql()); + log.info("raw sql script : {}", openmldbParameters.getSql()); String 
rawSQLScript = openmldbParameters.getSql().replaceAll("[\\r]?\\n", "\n"); Map paramsMap = mergeParamsWithContext(openmldbParameters); @@ -102,7 +102,7 @@ public class OpenmldbTask extends PythonTask { // convert sql to python script String pythonScript = buildPythonScriptsFromSql(rawSQLScript); - logger.info("rendered python script : {}", pythonScript); + log.info("rendered python script : {}", pythonScript); return pythonScript; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonParameters.java index 7d5abe293d..f2b9aaf3eb 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonParameters.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonParameters.java @@ -25,15 +25,14 @@ import org.apache.commons.lang3.StringUtils; import java.util.Collections; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * TIS parameter */ +@Slf4j public class PigeonParameters extends AbstractParameters { - private static final Logger logger = LoggerFactory.getLogger(PigeonParameters.class); /** * Pigeon target job name */ @@ -50,7 +49,7 @@ public class PigeonParameters extends AbstractParameters { @Override public boolean checkParameters() { if (StringUtils.isBlank(this.targetJobName)) { - logger.error("checkParameters faild targetJobName can not be null"); + log.error("checkParameters faild targetJobName can not be null"); return false; } return true; diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java index a2ff351b88..8bab12133e 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTask.java @@ -76,7 +76,7 @@ public class PigeonTask extends AbstractRemoteTask { public void init() throws TaskException { super.init(); parameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), PigeonParameters.class); - logger.info("Initialize PIGEON task params {}", JSONUtils.toPrettyJsonString(parameters)); + log.info("Initialize PIGEON task params {}", JSONUtils.toPrettyJsonString(parameters)); if (parameters == null || !parameters.checkParameters()) { throw new TaskException("datax task params is not valid"); } @@ -86,7 +86,7 @@ public class PigeonTask extends AbstractRemoteTask { @Override public void handle(TaskCallBack taskCallBack) throws TaskException { // Trigger PIGEON DataX pipeline - logger.info("start execute PIGEON task"); + log.info("start execute PIGEON task"); long startTime = System.currentTimeMillis(); String targetJobName = this.parameters.getTargetJobName(); String host = getHost(); @@ -146,18 +146,18 @@ public class PigeonTask extends AbstractRemoteTask { try { webSocket.close(); } catch (Throwable e) { - logger.warn(e.getMessage(), e); + log.warn(e.getMessage(), e); } } } long costTime = System.currentTimeMillis() - startTime; - logger.info("PIGEON task: {},taskId:{} costTime : {} milliseconds, statusCode : {}", + log.info("PIGEON task: {},taskId:{} costTime : {} milliseconds, statusCode : {}", targetJobName, taskId, costTime, (execState == ExecResult.SUCCESS) ? "'success'" : "'failure'"); setExitStatusCode((execState == ExecResult.SUCCESS) ? 
TaskConstants.EXIT_CODE_SUCCESS : TaskConstants.EXIT_CODE_FAILURE); } catch (Exception e) { - logger.error("execute PIGEON dataX faild,PIGEON task name:" + targetJobName, e); + log.error("execute PIGEON dataX faild,PIGEON task name:" + targetJobName, e); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); @@ -182,9 +182,9 @@ public class PigeonTask extends AbstractRemoteTask { @Override public void cancelApplication() throws TaskException { - logger.info("start to cancelApplication"); + log.info("start to cancelApplication"); Objects.requireNonNull(triggerResult, "triggerResult can not be null"); - logger.info("start to cancelApplication taskId:{}", triggerResult.getTaskId()); + log.info("start to cancelApplication taskId:{}", triggerResult.getTaskId()); final String triggerUrl = getTriggerUrl(); StringEntity entity = @@ -229,28 +229,28 @@ public class PigeonTask extends AbstractRemoteTask { private WebSocketClient receiveRealtimeLog(final String tisHost, String dataXName, int taskId) throws Exception { final String applyURI = config.getJobLogsFetchUrl(tisHost, dataXName, taskId); - logger.info("apply ws connection,uri:{}", applyURI); + log.info("apply ws connection,uri:{}", applyURI); WebSocketClient webSocketClient = new WebSocketClient(new URI(applyURI)) { @Override public void onOpen(ServerHandshake handshakedata) { - logger.info("start to receive remote execute log"); + log.info("start to receive remote execute log"); } @Override public void onMessage(String message) { ExecLog execLog = JSONUtils.parseObject(message, ExecLog.class); - logger.info(execLog.getMsg()); + log.info(execLog.getMsg()); } @Override public void onClose(int code, String reason, boolean remote) { - logger.info("stop to receive remote log,reason:{},taskId:{}", reason, taskId); + log.info("stop to receive remote log,reason:{},taskId:{}", reason, taskId); } @Override public void onError(Exception t) { - 
logger.error(t.getMessage(), t); + log.error(t.getMessage(), t); } }; webSocketClient.connect(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskChannel.java index 20ca18fcc0..9a1c20fcd1 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskChannel.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-pigeon/src/main/java/org/apache/dolphinscheduler/plugin/task/pigeon/PigeonTaskChannel.java @@ -25,16 +25,14 @@ import org.apache.dolphinscheduler.plugin.task.api.parameters.AbstractParameters import org.apache.dolphinscheduler.plugin.task.api.parameters.ParametersNode; import org.apache.dolphinscheduler.plugin.task.api.parameters.resource.ResourceParametersHelper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class PigeonTaskChannel implements TaskChannel { - private static final Logger logger = LoggerFactory.getLogger(PigeonTaskChannel.class); - @Override public void cancelApplication(boolean status) { - logger.info("pigeon task cancel"); + log.info("pigeon task cancel"); } @Override diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java index 688374ba62..c0fecc2513 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-procedure/src/main/java/org/apache/dolphinscheduler/plugin/task/procedure/ProcedureTask.java @@ -76,7 +76,7 @@ public class ProcedureTask extends AbstractTask { this.procedureParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), ProcedureParameters.class); - logger.info("Initialize procedure task params {}", JSONUtils.toPrettyJsonString(procedureParameters)); + log.info("Initialize procedure task params {}", JSONUtils.toPrettyJsonString(procedureParameters)); // check parameters if (procedureParameters == null || !procedureParameters.checkParameters()) { @@ -89,7 +89,7 @@ public class ProcedureTask extends AbstractTask { @Override public void handle(TaskCallBack taskCallBack) throws TaskException { - logger.info("procedure type : {}, datasource : {}, method : {} , localParams : {}", + log.info("procedure type : {}, datasource : {}, method : {} , localParams : {}", procedureParameters.getType(), procedureParameters.getDatasource(), procedureParameters.getMethod(), @@ -126,7 +126,7 @@ public class ProcedureTask extends AbstractTask { } } catch (Exception e) { setExitStatusCode(EXIT_CODE_FAILURE); - logger.error("procedure task error", e); + log.error("procedure task error", e); throw new TaskException("Execute procedure task failed", e); } } @@ -222,39 +222,39 @@ public class ProcedureTask extends AbstractTask { Object value = null; switch (dataType) { case VARCHAR: - logger.info("out prameter varchar key : {} , value : {}", prop, stmt.getString(index)); + log.info("out prameter varchar key : {} , value : {}", prop, stmt.getString(index)); value = stmt.getString(index); break; case INTEGER: - logger.info("out prameter integer key : {} , value : {}", prop, stmt.getInt(index)); + log.info("out prameter integer key : {} , value : {}", prop, stmt.getInt(index)); value = stmt.getInt(index); break; case LONG: - logger.info("out prameter long key : {} , value : {}", prop, stmt.getLong(index)); + 
log.info("out prameter long key : {} , value : {}", prop, stmt.getLong(index)); value = stmt.getLong(index); break; case FLOAT: - logger.info("out prameter float key : {} , value : {}", prop, stmt.getFloat(index)); + log.info("out prameter float key : {} , value : {}", prop, stmt.getFloat(index)); value = stmt.getFloat(index); break; case DOUBLE: - logger.info("out prameter double key : {} , value : {}", prop, stmt.getDouble(index)); + log.info("out prameter double key : {} , value : {}", prop, stmt.getDouble(index)); value = stmt.getDouble(index); break; case DATE: - logger.info("out prameter date key : {} , value : {}", prop, stmt.getDate(index)); + log.info("out prameter date key : {} , value : {}", prop, stmt.getDate(index)); value = stmt.getDate(index); break; case TIME: - logger.info("out prameter time key : {} , value : {}", prop, stmt.getTime(index)); + log.info("out prameter time key : {} , value : {}", prop, stmt.getTime(index)); value = stmt.getTime(index); break; case TIMESTAMP: - logger.info("out prameter timestamp key : {} , value : {}", prop, stmt.getTimestamp(index)); + log.info("out prameter timestamp key : {} , value : {}", prop, stmt.getTimestamp(index)); value = stmt.getTimestamp(index); break; case BOOLEAN: - logger.info("out prameter boolean key : {} , value : {}", prop, stmt.getBoolean(index)); + log.info("out prameter boolean key : {} , value : {}", prop, stmt.getBoolean(index)); value = stmt.getBoolean(index); break; default: diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java index 13e9277352..c3b33d064d 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java +++ 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-python/src/main/java/org/apache/dolphinscheduler/plugin/task/python/PythonTask.java @@ -73,7 +73,7 @@ public class PythonTask extends AbstractTask { this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskRequest, - logger); + log); } @Override @@ -81,7 +81,7 @@ public class PythonTask extends AbstractTask { pythonParameters = JSONUtils.parseObject(taskRequest.getTaskParams(), PythonParameters.class); - logger.info("Initialize python task params {}", JSONUtils.toPrettyJsonString(pythonParameters)); + log.info("Initialize python task params {}", JSONUtils.toPrettyJsonString(pythonParameters)); if (pythonParameters == null || !pythonParameters.checkParameters()) { throw new TaskException("python task params is not valid"); } @@ -93,7 +93,7 @@ public class PythonTask extends AbstractTask { try { rawPythonScript = convertPythonScriptPlaceholders(rawPythonScript); } catch (StringIndexOutOfBoundsException e) { - logger.error("setShareVar field format error, raw python script : {}", rawPythonScript); + log.error("setShareVar field format error, raw python script : {}", rawPythonScript); } return rawPythonScript; } @@ -116,7 +116,7 @@ public class PythonTask extends AbstractTask { setVarPool(shellCommandExecutor.getVarPool()); pythonParameters.dealOutParam(shellCommandExecutor.getVarPool()); } catch (Exception e) { - logger.error("python task failure", e); + log.error("python task failure", e); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); throw new TaskException("run python task error", e); } @@ -177,17 +177,17 @@ public class PythonTask extends AbstractTask { * @throws IOException io exception */ protected void createPythonCommandFileIfNotExists(String pythonScript, String pythonScriptFile) throws IOException { - logger.info("tenantCode :{}, task dir:{}", taskRequest.getTenantCode(), taskRequest.getExecutePath()); + log.info("tenantCode :{}, task dir:{}", taskRequest.getTenantCode(), 
taskRequest.getExecutePath()); if (!Files.exists(Paths.get(pythonScriptFile))) { - logger.info("generate python script file:{}", pythonScriptFile); + log.info("generate python script file:{}", pythonScriptFile); StringBuilder sb = new StringBuilder(); sb.append("#-*- encoding=utf8 -*-").append(System.lineSeparator()); sb.append(System.lineSeparator()); sb.append(pythonScript); - logger.info(sb.toString()); + log.info(sb.toString()); // write data to file FileUtils.writeStringToFile(new File(pythonScriptFile), @@ -212,7 +212,7 @@ public class PythonTask extends AbstractTask { * @throws Exception exception */ protected String buildPythonScriptContent() throws Exception { - logger.info("raw python script : {}", pythonParameters.getRawScript()); + log.info("raw python script : {}", pythonParameters.getRawScript()); String rawPythonScript = pythonParameters.getRawScript().replaceAll("\\r\\n", System.lineSeparator()); Map paramsMap = mergeParamsWithContext(pythonParameters); return ParameterUtils.convertParameterPlaceholders(rawPythonScript, ParamUtils.convert(paramsMap)); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/GitProjectManager.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/GitProjectManager.java index bd11bb3546..5e4321cb4f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/GitProjectManager.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/GitProjectManager.java @@ -36,7 +36,7 @@ public class GitProjectManager { public static final String GIT_PATH_LOCAL = "GIT_PROJECT"; private static final Pattern GIT_CHECK_PATTERN = Pattern.compile("^(git@|https?://)"); - protected final Logger logger = + protected final Logger log = 
LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); private String path; private String baseDir = "."; @@ -48,7 +48,7 @@ public class GitProjectManager { public void prepareProject() throws Exception { String savePath = Paths.get(baseDir, GIT_PATH_LOCAL).toString(); - logger.info("clone project {} to {}", path, savePath); + log.info("clone project {} to {}", path, savePath); String[] command = {"sh", "-c", String.format("git clone %s %s", getGitUrl(), savePath)}; try { OSUtils.exeShell(command); @@ -57,7 +57,7 @@ public class GitProjectManager { throw e; } } - logger.info("clone project done"); + log.info("clone project done"); } public String getGitUrl() { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/PytorchTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/PytorchTask.java index 31d63e27f3..d48ae11473 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/PytorchTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-pytorch/src/main/java/org/apache/dolphinscheduler/plugin/task/pytorch/PytorchTask.java @@ -47,14 +47,14 @@ public class PytorchTask extends AbstractTask { this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, - logger); + log); } @Override public void init() { pytorchParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), PytorchParameters.class); - logger.info("Initialize pytorch task params {}", JSONUtils.toPrettyJsonString(taskExecutionContext)); + log.info("Initialize pytorch task params {}", JSONUtils.toPrettyJsonString(taskExecutionContext)); if (pytorchParameters == null || !pytorchParameters.checkParameters()) { throw new TaskException("python task params is not valid"); @@ -75,7 +75,7 @@ 
public class PytorchTask extends AbstractTask { setVarPool(shellCommandExecutor.getVarPool()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current Pytorch task has been interrupted", e); + log.error("The current Pytorch task has been interrupted", e); setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); throw new TaskException("The current Pytorch task has been interrupted", e); } catch (Exception e) { diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/PipelineUtils.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/PipelineUtils.java index 10722f3f29..3ffc14d58c 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/PipelineUtils.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/PipelineUtils.java @@ -43,7 +43,7 @@ import com.amazonaws.services.sagemaker.model.StopPipelineExecutionResult; public class PipelineUtils { - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, getClass())); private static final String EXECUTING = "Executing"; private static final String SUCCEEDED = "Succeeded"; @@ -52,7 +52,7 @@ public class PipelineUtils { StartPipelineExecutionResult result = client.startPipelineExecution(request); String pipelineExecutionArn = result.getPipelineExecutionArn(); String clientRequestToken = request.getClientRequestToken(); - logger.info("Start success, pipeline: {}, token: {}", pipelineExecutionArn, clientRequestToken); + log.info("Start success, pipeline: {}, token: {}", pipelineExecutionArn, clientRequestToken); return new PipelineId(pipelineExecutionArn, clientRequestToken); } @@ 
-63,13 +63,13 @@ public class PipelineUtils { request.setClientRequestToken(pipelineId.getClientRequestToken()); StopPipelineExecutionResult result = client.stopPipelineExecution(request); - logger.info("Stop pipeline: {} success", result.getPipelineExecutionArn()); + log.info("Stop pipeline: {} success", result.getPipelineExecutionArn()); } public int checkPipelineExecutionStatus(AmazonSageMaker client, PipelineId pipelineId) { String pipelineStatus = describePipelineExecution(client, pipelineId); while (EXECUTING.equals(pipelineStatus)) { - logger.info("check Pipeline Steps running"); + log.info("check Pipeline Steps running"); listPipelineExecutionSteps(client, pipelineId); ThreadUtils.sleep(SagemakerConstants.CHECK_PIPELINE_EXECUTION_STATUS_INTERVAL); pipelineStatus = describePipelineExecution(client, pipelineId); @@ -79,7 +79,7 @@ public class PipelineUtils { if (SUCCEEDED.equals(pipelineStatus)) { exitStatusCode = TaskConstants.EXIT_CODE_SUCCESS; } - logger.info("PipelineExecutionStatus : {}, exitStatusCode: {}", pipelineStatus, exitStatusCode); + log.info("PipelineExecutionStatus : {}, exitStatusCode: {}", pipelineStatus, exitStatusCode); return exitStatusCode; } @@ -87,7 +87,7 @@ public class PipelineUtils { DescribePipelineExecutionRequest request = new DescribePipelineExecutionRequest(); request.setPipelineExecutionArn(pipelineId.getPipelineExecutionArn()); DescribePipelineExecutionResult result = client.describePipelineExecution(request); - logger.info("PipelineExecutionStatus: {}", result.getPipelineExecutionStatus()); + log.info("PipelineExecutionStatus: {}", result.getPipelineExecutionStatus()); return result.getPipelineExecutionStatus(); } @@ -98,10 +98,10 @@ public class PipelineUtils { ListPipelineExecutionStepsResult result = client.listPipelineExecutionSteps(request); List steps = result.getPipelineExecutionSteps(); Collections.reverse(steps); - logger.info("pipelineStepsStatus: "); + log.info("pipelineStepsStatus: "); for (PipelineExecutionStep 
step : steps) { String stepMessage = step.toString(); - logger.info(stepMessage); + log.info(stepMessage); } } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/SagemakerTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/SagemakerTask.java index e49d1acff3..76109082f8 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/SagemakerTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sagemaker/src/main/java/org/apache/dolphinscheduler/plugin/task/sagemaker/SagemakerTask.java @@ -85,7 +85,7 @@ public class SagemakerTask extends AbstractRemoteTask { parameters = JSONUtils.parseObject(taskRequest.getTaskParams(), SagemakerParameters.class); - logger.info("Initialize Sagemaker task params {}", JSONUtils.toPrettyJsonString(parameters)); + log.info("Initialize Sagemaker task params {}", JSONUtils.toPrettyJsonString(parameters)); if (parameters == null) { throw new SagemakerTaskException("Sagemaker task params is empty"); } @@ -152,11 +152,11 @@ public class SagemakerTask extends AbstractRemoteTask { try { startPipelineRequest = objectMapper.readValue(requestJson, StartPipelineExecutionRequest.class); } catch (Exception e) { - logger.error("can not parse SagemakerRequestJson from json: {}", requestJson); + log.error("can not parse SagemakerRequestJson from json: {}", requestJson); throw new SagemakerTaskException("can not parse SagemakerRequestJson ", e); } - logger.info("Sagemaker task create StartPipelineRequest: {}", startPipelineRequest); + log.info("Sagemaker task create StartPipelineRequest: {}", startPipelineRequest); return startPipelineRequest; } diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-seatunnel/src/main/java/org/apache/dolphinscheduler/plugin/task/seatunnel/SeatunnelTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-seatunnel/src/main/java/org/apache/dolphinscheduler/plugin/task/seatunnel/SeatunnelTask.java index 35d9a42d1b..5b9e1ca2fc 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-seatunnel/src/main/java/org/apache/dolphinscheduler/plugin/task/seatunnel/SeatunnelTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-seatunnel/src/main/java/org/apache/dolphinscheduler/plugin/task/seatunnel/SeatunnelTask.java @@ -77,7 +77,7 @@ public class SeatunnelTask extends AbstractRemoteTask { this.taskExecutionContext = taskExecutionContext; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, - logger); + log); } @Override @@ -87,7 +87,7 @@ public class SeatunnelTask extends AbstractRemoteTask { @Override public void init() { - logger.info("Intialize SeaTunnel task params {}", JSONUtils.toPrettyJsonString(seatunnelParameters)); + log.info("Intialize SeaTunnel task params {}", JSONUtils.toPrettyJsonString(seatunnelParameters)); if (seatunnelParameters == null || !seatunnelParameters.checkParameters()) { throw new TaskException("SeaTunnel task params is not valid"); } @@ -106,11 +106,11 @@ public class SeatunnelTask extends AbstractRemoteTask { seatunnelParameters.dealOutParam(shellCommandExecutor.getVarPool()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current SeaTunnel task has been interrupted", e); + log.error("The current SeaTunnel task has been interrupted", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("The current SeaTunnel task has been interrupted", e); } catch (Exception e) { - logger.error("SeaTunnel task error", e); + log.error("SeaTunnel task error", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("Execute Seatunnel task 
failed", e); } @@ -143,7 +143,7 @@ public class SeatunnelTask extends AbstractRemoteTask { args.addAll(buildOptions()); String command = String.join(" ", args); - logger.info("SeaTunnel task command: {}", command); + log.info("SeaTunnel task command: {}", command); return command; } @@ -172,7 +172,7 @@ public class SeatunnelTask extends AbstractRemoteTask { } private String buildCustomConfigContent() { - logger.info("raw custom config content : {}", seatunnelParameters.getRawScript()); + log.info("raw custom config content : {}", seatunnelParameters.getRawScript()); String script = seatunnelParameters.getRawScript().replaceAll("\\r\\n", System.lineSeparator()); script = parseScript(script); return script; @@ -184,11 +184,11 @@ public class SeatunnelTask extends AbstractRemoteTask { } private void createConfigFileIfNotExists(String script, String scriptFile) throws IOException { - logger.info("tenantCode :{}, task dir:{}", taskExecutionContext.getTenantCode(), + log.info("tenantCode :{}, task dir:{}", taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); if (!Files.exists(Paths.get(scriptFile))) { - logger.info("generate script file:{}", scriptFile); + log.info("generate script file:{}", scriptFile); // write data to file FileUtils.writeStringToFile(new File(scriptFile), script, StandardCharsets.UTF_8); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java index b1b854497a..b5dd987547 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-shell/src/main/java/org/apache/dolphinscheduler/plugin/task/shell/ShellTask.java @@ -71,14 +71,14 @@ public class ShellTask extends 
AbstractTask { this.taskExecutionContext = taskExecutionContext; this.shellCommandExecutor = new ShellCommandExecutor(this::logHandle, taskExecutionContext, - logger); + log); } @Override public void init() { shellParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), ShellParameters.class); - logger.info("Initialize shell task params {}", JSONUtils.toPrettyJsonString(shellParameters)); + log.info("Initialize shell task params {}", JSONUtils.toPrettyJsonString(shellParameters)); if (shellParameters == null || !shellParameters.checkParameters()) { throw new TaskException("shell task params is not valid"); @@ -96,11 +96,11 @@ public class ShellTask extends AbstractTask { shellParameters.dealOutParam(shellCommandExecutor.getVarPool()); } catch (InterruptedException e) { Thread.currentThread().interrupt(); - logger.error("The current Shell task has been interrupted", e); + log.error("The current Shell task has been interrupted", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("The current Shell task has been interrupted", e); } catch (Exception e) { - logger.error("shell task error", e); + log.error("shell task error", e); setExitStatusCode(EXIT_CODE_FAILURE); throw new TaskException("Execute shell task error", e); } @@ -133,7 +133,7 @@ public class ShellTask extends AbstractTask { if (Files.exists(path)) { // this shouldn't happen - logger.warn("The command file: {} is already exist", path); + log.warn("The command file: {} is already exist", path); return fileName; } @@ -141,8 +141,8 @@ public class ShellTask extends AbstractTask { script = parseScript(script); shellParameters.setRawScript(script); - logger.info("raw script : {}", shellParameters.getRawScript()); - logger.info("task execute path : {}", taskExecutionContext.getExecutePath()); + log.info("raw script : {}", shellParameters.getRawScript()); + log.info("task execute path : {}", taskExecutionContext.getExecutePath()); FileUtils.createFileWith755(path); Files.write(path, 
shellParameters.getRawScript().getBytes(), StandardOpenOption.APPEND); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTask.java index 6796357d8a..80d5be232f 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-spark/src/main/java/org/apache/dolphinscheduler/plugin/task/spark/SparkTask.java @@ -68,7 +68,7 @@ public class SparkTask extends AbstractYarnTask { sparkParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SparkParameters.class); if (null == sparkParameters) { - logger.error("Spark params is null"); + log.error("Spark params is null"); return; } @@ -80,7 +80,7 @@ public class SparkTask extends AbstractYarnTask { if (sparkParameters.getProgramType() != ProgramType.SQL) { setMainJarName(); } - logger.info("Initialize spark task params {}", JSONUtils.toPrettyJsonString(sparkParameters)); + log.info("Initialize spark task params {}", JSONUtils.toPrettyJsonString(sparkParameters)); } /** @@ -116,7 +116,7 @@ public class SparkTask extends AbstractYarnTask { String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", args), ParamUtils.convert(paramsMap)); - logger.info("spark task command: {}", command); + log.info("spark task command: {}", command); return command; } @@ -229,8 +229,8 @@ public class SparkTask extends AbstractYarnTask { String script = replaceParam(sparkParameters.getRawScript()); sparkParameters.setRawScript(script); - logger.info("raw script : {}", sparkParameters.getRawScript()); - logger.info("task execute path : {}", taskExecutionContext.getExecutePath()); + log.info("raw script : {}", sparkParameters.getRawScript()); + log.info("task execute 
path : {}", taskExecutionContext.getExecutePath()); Set perms = PosixFilePermissions.fromString(RWXR_XR_X); FileAttribute> attr = PosixFilePermissions.asFileAttribute(perms); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java index 5b4d083be6..f1a885162d 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sql/src/main/java/org/apache/dolphinscheduler/plugin/task/sql/SqlTask.java @@ -110,7 +110,7 @@ public class SqlTask extends AbstractTask { super(taskRequest); this.taskExecutionContext = taskRequest; this.sqlParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SqlParameters.class); - logger.info("Initialize sql task parameter {}", JSONUtils.toPrettyJsonString(sqlParameters)); + log.info("Initialize sql task parameter {}", JSONUtils.toPrettyJsonString(sqlParameters)); if (sqlParameters == null || !sqlParameters.checkParameters()) { throw new TaskException("sql task params is not valid"); } @@ -129,8 +129,8 @@ public class SqlTask extends AbstractTask { @Override public void handle(TaskCallBack taskCallBack) throws TaskException { - logger.info("Full sql parameters: {}", sqlParameters); - logger.info( + log.info("Full sql parameters: {}", sqlParameters); + log.info( "sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {},varPool : {} ,query max result limit {}", sqlParameters.getType(), sqlParameters.getDatasource(), @@ -168,7 +168,7 @@ public class SqlTask extends AbstractTask { .map(this::getSqlAndSqlParamsMap) .collect(Collectors.toList()); - List createFuncs = createFuncs(sqlTaskExecutionContext.getUdfFuncParametersList(), logger); + List 
createFuncs = createFuncs(sqlTaskExecutionContext.getUdfFuncParametersList(), log); // execute sql task executeFuncAndSql(mainStatementSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs); @@ -177,7 +177,7 @@ public class SqlTask extends AbstractTask { } catch (Exception e) { setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); - logger.error("sql task error", e); + log.error("sql task error", e); throw new TaskException("Execute sql task failed", e); } } @@ -255,7 +255,7 @@ public class SqlTask extends AbstractTask { // post execute executeUpdate(connection, postStatementsBinds, "post"); } catch (Exception e) { - logger.error("execute sql error: {}", e.getMessage()); + log.error("execute sql error: {}", e.getMessage()); throw e; } finally { close(connection); @@ -294,7 +294,7 @@ public class SqlTask extends AbstractTask { while (resultSet.next()) { if (rowCount == limit) { - logger.info("sql result limit : {} exceeding results are filtered", limit); + log.info("sql result limit : {} exceeding results are filtered", limit); break; } ObjectNode mapOfColValues = JSONUtils.createObjectNode(); @@ -308,10 +308,10 @@ public class SqlTask extends AbstractTask { int displayRows = sqlParameters.getDisplayRows() > 0 ? sqlParameters.getDisplayRows() : TaskConstants.DEFAULT_DISPLAY_ROWS; displayRows = Math.min(displayRows, rowCount); - logger.info("display sql result {} rows as follows:", displayRows); + log.info("display sql result {} rows as follows:", displayRows); for (int i = 0; i < displayRows; i++) { String row = JSONUtils.toJsonString(resultJSONArray.get(i)); - logger.info("row {} : {}", i + 1, row); + log.info("row {} : {}", i + 1, row); } } @@ -323,7 +323,7 @@ public class SqlTask extends AbstractTask { ? 
sqlParameters.getTitle() : taskExecutionContext.getTaskName() + " query result sets", result); } - logger.debug("execute sql result : {}", result); + log.debug("execute sql result : {}", result); return result; } @@ -336,7 +336,7 @@ public class SqlTask extends AbstractTask { if (resultSet != null) { ResultSetMetaData metaData = resultSet.getMetaData(); int columnsNum = metaData.getColumnCount(); - logger.info("sql query results is empty"); + log.info("sql query results is empty"); for (int i = 1; i <= columnsNum; i++) { emptyOfColValues.set(metaData.getColumnLabel(i), JSONUtils.toJsonNode("")); } @@ -364,7 +364,7 @@ public class SqlTask extends AbstractTask { private String executeQuery(Connection connection, SqlBinds sqlBinds, String handlerType) throws Exception { try (PreparedStatement statement = prepareStatementAndBind(connection, sqlBinds)) { - logger.info("{} statement execute query, for sql: {}", handlerType, sqlBinds.getSql()); + log.info("{} statement execute query, for sql: {}", handlerType, sqlBinds.getSql()); ResultSet resultSet = statement.executeQuery(); return resultProcess(resultSet); } @@ -376,7 +376,7 @@ public class SqlTask extends AbstractTask { for (SqlBinds sqlBind : statementsBinds) { try (PreparedStatement statement = prepareStatementAndBind(connection, sqlBind)) { result = statement.executeUpdate(); - logger.info("{} statement execute update result: {}, for sql: {}", handlerType, result, + log.info("{} statement execute update result: {}, for sql: {}", handlerType, result, sqlBind.getSql()); } } @@ -393,7 +393,7 @@ public class SqlTask extends AbstractTask { List createFuncs) throws Exception { try (Statement funcStmt = connection.createStatement()) { for (String createFunc : createFuncs) { - logger.info("hive create function sql: {}", createFunc); + log.info("hive create function sql: {}", createFunc); funcStmt.execute(createFunc); } } @@ -409,7 +409,7 @@ public class SqlTask extends AbstractTask { try { connection.close(); } catch 
(SQLException e) { - logger.error("close connection error : {}", e.getMessage(), e); + log.error("close connection error : {}", e.getMessage(), e); } } } @@ -438,7 +438,7 @@ public class SqlTask extends AbstractTask { ParameterUtils.setInParameter(entry.getKey(), stmt, prop.getType(), prop.getValue()); } } - logger.info("prepare statement replace sql : {}, sql parameters : {}", sqlBinds.getSql(), + log.info("prepare statement replace sql : {}, sql parameters : {}", sqlBinds.getSql(), sqlBinds.getParamsMap()); return stmt; } catch (Exception exception) { @@ -456,17 +456,17 @@ public class SqlTask extends AbstractTask { */ private void printReplacedSql(String content, String formatSql, String rgex, Map sqlParamsMap) { // parameter print style - logger.info("after replace sql , preparing : {}", formatSql); + log.info("after replace sql , preparing : {}", formatSql); StringBuilder logPrint = new StringBuilder("replaced sql , parameters:"); if (sqlParamsMap == null) { - logger.info("printReplacedSql: sqlParamsMap is null."); + log.info("printReplacedSql: sqlParamsMap is null."); } else { for (int i = 1; i <= sqlParamsMap.size(); i++) { logPrint.append(sqlParamsMap.get(i).getValue()).append("(").append(sqlParamsMap.get(i).getType()) .append(")"); } } - logger.info("Sql Params are {}", logPrint); + log.info("Sql Params are {}", logPrint); } /** @@ -490,7 +490,7 @@ public class SqlTask extends AbstractTask { if (StringUtils.isNotEmpty(sqlParameters.getTitle())) { String title = ParameterUtils.convertParameterPlaceholders(sqlParameters.getTitle(), ParamUtils.convert(paramsMap)); - logger.info("SQL title : {}", title); + log.info("SQL title : {}", title); sqlParameters.setTitle(title); } @@ -531,13 +531,13 @@ public class SqlTask extends AbstractTask { * create function list * * @param udfFuncParameters udfFuncParameters - * @param logger logger + * @param log log * @return */ - private List createFuncs(List udfFuncParameters, Logger logger) { + private List createFuncs(List 
udfFuncParameters, Logger log) { if (CollectionUtils.isEmpty(udfFuncParameters)) { - logger.info("can't find udf function resource"); + log.info("can't find udf function resource"); return null; } // build jar sql diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTask.java index f62c9d466d..72e2eca02d 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/SqoopTask.java @@ -57,7 +57,7 @@ public class SqoopTask extends AbstractYarnTask { public void init() { sqoopParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SqoopParameters.class); - logger.info("Initialize sqoop task params {}", JSONUtils.toPrettyJsonString(sqoopParameters)); + log.info("Initialize sqoop task params {}", JSONUtils.toPrettyJsonString(sqoopParameters)); if (null == sqoopParameters) { throw new TaskException("Sqoop Task params is null"); } @@ -82,7 +82,7 @@ public class SqoopTask extends AbstractYarnTask { Map paramsMap = taskExecutionContext.getPrepareParamsMap(); String resultScripts = ParameterUtils.convertParameterPlaceholders(script, ParamUtils.convert(paramsMap)); - logger.info("sqoop script: {}", resultScripts); + log.info("sqoop script: {}", resultScripts); return resultScripts; } diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java index 0426906361..58255cd5d3 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/CommonGenerator.java @@ -29,16 +29,14 @@ import org.apache.commons.collections4.CollectionUtils; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * common script generator */ +@Slf4j public class CommonGenerator { - private static final Logger logger = LoggerFactory.getLogger(CommonGenerator.class); - public String generate(SqoopParameters sqoopParameters) { StringBuilder commonSb = new StringBuilder(); @@ -85,7 +83,7 @@ public class CommonGenerator { } } } catch (Exception e) { - logger.error(String.format("Sqoop task general param build failed: [%s]", e.getMessage())); + log.error(String.format("Sqoop task general param build failed: [%s]", e.getMessage())); } return commonSb.toString(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HdfsSourceGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HdfsSourceGenerator.java index 7c439e2f1a..3a00bf5d10 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HdfsSourceGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HdfsSourceGenerator.java @@ -28,16 +28,14 @@ import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHdf import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; 
/** * hdfs source generator */ +@Slf4j public class HdfsSourceGenerator implements ISourceGenerator { - private static final Logger logger = LoggerFactory.getLogger(HdfsSourceGenerator.class); - @Override public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { @@ -57,7 +55,7 @@ public class HdfsSourceGenerator implements ISourceGenerator { } } catch (Exception e) { - logger.error(String.format("Sqoop hdfs source parmas build failed: [%s]", e.getMessage())); + log.error(String.format("Sqoop hdfs source parmas build failed: [%s]", e.getMessage())); } return hdfsSourceSb.toString(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HiveSourceGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HiveSourceGenerator.java index d39038d39d..a0449544b2 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HiveSourceGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/HiveSourceGenerator.java @@ -31,16 +31,14 @@ import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.sources.SourceHiv import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * hive source generator */ +@Slf4j public class HiveSourceGenerator implements ISourceGenerator { - private static final Logger logger = LoggerFactory.getLogger(HiveSourceGenerator.class); - @Override public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { @@ -70,7 +68,7 @@ public class HiveSourceGenerator implements ISourceGenerator { } } } catch 
(Exception e) { - logger.error(String.format("Sqoop hive source params build failed: [%s]", e.getMessage())); + log.error(String.format("Sqoop hive source params build failed: [%s]", e.getMessage())); } return hiveSourceSb.toString(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MySQLSourceGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MySQLSourceGenerator.java index 1408f368b3..19c941b2ed 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MySQLSourceGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/sources/MySQLSourceGenerator.java @@ -49,16 +49,14 @@ import org.apache.commons.lang3.StringUtils; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * mysql source generator */ +@Slf4j public class MySQLSourceGenerator implements ISourceGenerator { - private static final Logger logger = LoggerFactory.getLogger(MySQLSourceGenerator.class); - @Override public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { @@ -141,7 +139,7 @@ public class MySQLSourceGenerator implements ISourceGenerator { } } } catch (Exception e) { - logger.error(String.format("Sqoop task mysql source params build failed: [%s]", e.getMessage())); + log.error(String.format("Sqoop task mysql source params build failed: [%s]", e.getMessage())); } return mysqlSourceSb.toString(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HdfsTargetGenerator.java 
b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HdfsTargetGenerator.java index 2ac4246b91..14afb2305a 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HdfsTargetGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HdfsTargetGenerator.java @@ -34,16 +34,14 @@ import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHdf import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * hdfs target generator */ +@Slf4j public class HdfsTargetGenerator implements ITargetGenerator { - private static final Logger logger = LoggerFactory.getLogger(HdfsTargetGenerator.class); - @Override public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { @@ -88,7 +86,7 @@ public class HdfsTargetGenerator implements ITargetGenerator { hdfsTargetSb.append(SPACE).append(FIELD_NULL_PLACEHOLDER); } } catch (Exception e) { - logger.error(String.format("Sqoop hdfs target params build failed: [%s]", e.getMessage())); + log.error(String.format("Sqoop hdfs target params build failed: [%s]", e.getMessage())); } return hdfsTargetSb.toString(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HiveTargetGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HiveTargetGenerator.java index 044a5e0790..55b664b6cd 100644 --- 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HiveTargetGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/HiveTargetGenerator.java @@ -38,16 +38,14 @@ import org.apache.dolphinscheduler.plugin.task.sqoop.parameter.targets.TargetHiv import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * hive target generator */ +@Slf4j public class HiveTargetGenerator implements ITargetGenerator { - private static final Logger logger = LoggerFactory.getLogger(HiveTargetGenerator.class); - @Override public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { @@ -100,7 +98,7 @@ public class HiveTargetGenerator implements ITargetGenerator { } } catch (Exception e) { - logger.error(String.format("Sqoop hive target params build failed: [%s]", e.getMessage())); + log.error(String.format("Sqoop hive target params build failed: [%s]", e.getMessage())); } return hiveTargetSb.toString(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/MySQLTargetGenerator.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/MySQLTargetGenerator.java index 29586be755..5de25d3d5b 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/MySQLTargetGenerator.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-sqoop/src/main/java/org/apache/dolphinscheduler/plugin/task/sqoop/generator/targets/MySQLTargetGenerator.java @@ -42,16 +42,14 @@ import org.apache.dolphinscheduler.spi.enums.DbType; import 
org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; /** * mysql target generator */ +@Slf4j public class MySQLTargetGenerator implements ITargetGenerator { - private static final Logger logger = LoggerFactory.getLogger(MySQLTargetGenerator.class); - @Override public String generate(SqoopParameters sqoopParameters, SqoopTaskExecutionContext sqoopTaskExecutionContext) { @@ -118,7 +116,7 @@ public class MySQLTargetGenerator implements ITargetGenerator { } } } catch (Exception e) { - logger.error(String.format("Sqoop mysql target params build failed: [%s]", e.getMessage())); + log.error(String.format("Sqoop mysql target params build failed: [%s]", e.getMessage())); } return mysqlTargetSb.toString(); diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/task/zeppelin/ZeppelinTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/task/zeppelin/ZeppelinTask.java index c8a65f2478..993d3ca577 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/task/zeppelin/ZeppelinTask.java +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-zeppelin/src/main/java/org/apache/dolphinscheduler/plugin/task/zeppelin/ZeppelinTask.java @@ -75,7 +75,7 @@ public class ZeppelinTask extends AbstractRemoteTask { if (this.zeppelinParameters == null || !this.zeppelinParameters.checkParameters()) { throw new ZeppelinTaskException("zeppelin task params is not valid"); } - logger.info("Initialize zeppelin task params:{}", JSONUtils.toPrettyJsonString(taskParams)); + log.info("Initialize zeppelin task params:{}", JSONUtils.toPrettyJsonString(taskParams)); this.zClient = getZeppelinClient(); } @@ -142,10 +142,10 @@ public class ZeppelinTask extends AbstractRemoteTask { final int exitStatusCode = mapStatusToExitCode(status); 
setAppIds(String.format("%s-%s", noteId, paragraphId)); setExitStatusCode(exitStatusCode); - logger.info("zeppelin task finished with results: {}", resultContent); + log.info("zeppelin task finished with results: {}", resultContent); } catch (Exception e) { setExitStatusCode(TaskConstants.EXIT_CODE_FAILURE); - logger.error("zeppelin task submit failed with error", e); + log.error("zeppelin task submit failed with error", e); throw new TaskException("Execute ZeppelinTask exception"); } } @@ -172,10 +172,10 @@ public class ZeppelinTask extends AbstractRemoteTask { try { zClient = new ZeppelinClient(clientConfig); final String zeppelinVersion = zClient.getVersion(); - logger.info("zeppelin version: {}", zeppelinVersion); + log.info("zeppelin version: {}", zeppelinVersion); } catch (Exception e) { // TODO: complete error handling - logger.error("some error"); + log.error("some error"); } return zClient; } @@ -208,16 +208,16 @@ public class ZeppelinTask extends AbstractRemoteTask { final String noteId = this.zeppelinParameters.getNoteId(); final String paragraphId = this.zeppelinParameters.getParagraphId(); if (paragraphId == null) { - logger.info("trying terminate zeppelin task, taskId: {}, noteId: {}", + log.info("trying terminate zeppelin task, taskId: {}, noteId: {}", this.taskExecutionContext.getTaskInstanceId(), noteId); Unirest.config().defaultBaseUrl(restEndpoint + "/api"); Unirest.delete("/notebook/job/{noteId}").routeParam("noteId", noteId).asJson(); - logger.info("zeppelin task terminated, taskId: {}, noteId: {}", + log.info("zeppelin task terminated, taskId: {}, noteId: {}", this.taskExecutionContext.getTaskInstanceId(), noteId); } else { - logger.info("trying terminate zeppelin task, taskId: {}, noteId: {}, paragraphId: {}", + log.info("trying terminate zeppelin task, taskId: {}, noteId: {}, paragraphId: {}", this.taskExecutionContext.getTaskInstanceId(), noteId, paragraphId); @@ -226,7 +226,7 @@ public class ZeppelinTask extends AbstractRemoteTask { } 
catch (Exception e) { throw new TaskException("cancel paragraph error", e); } - logger.info("zeppelin task terminated, taskId: {}, noteId: {}, paragraphId: {}", + log.info("zeppelin task terminated, taskId: {}, noteId: {}, paragraphId: {}", this.taskExecutionContext.getTaskInstanceId(), noteId, paragraphId); diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/DolphinSchedulerManager.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/DolphinSchedulerManager.java index 74cf447621..04b906beb5 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/DolphinSchedulerManager.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/DolphinSchedulerManager.java @@ -35,15 +35,14 @@ import java.util.stream.Collectors; import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Service; @Service +@Slf4j public class DolphinSchedulerManager { - private static final Logger logger = LoggerFactory.getLogger(DolphinSchedulerManager.class); - private final UpgradeDao upgradeDao; private Map upgraderMap = new HashMap<>(); @@ -82,14 +81,14 @@ public class DolphinSchedulerManager { if (upgradeDao.isExistsTable("t_escheduler_version") || upgradeDao.isExistsTable("t_ds_version") || upgradeDao.isExistsTable("t_escheduler_queue")) { - logger.info("The database has been initialized. Skip the initialization step"); + log.info("The database has been initialized. 
Skip the initialization step"); return true; } return false; } public void initDolphinSchedulerSchema() { - logger.info("Start initializing the DolphinScheduler manager table structure"); + log.info("Start initializing the DolphinScheduler manager table structure"); upgradeDao.initSchema(); } @@ -97,7 +96,7 @@ public class DolphinSchedulerManager { // Gets a list of all upgrades List schemaList = SchemaUtils.getAllSchemaList(); if (schemaList == null || schemaList.size() == 0) { - logger.info("There is no schema to upgrade!"); + log.info("There is no schema to upgrade!"); } else { String version; // Gets the version of the current system @@ -110,7 +109,7 @@ public class DolphinSchedulerManager { } else if (upgradeDao.isExistsTable("t_escheduler_queue")) { version = "1.0.0"; } else { - logger.error("Unable to determine current software version, so cannot upgrade"); + log.error("Unable to determine current software version, so cannot upgrade"); throw new RuntimeException("Unable to determine current software version, so cannot upgrade"); } // The target version of the upgrade @@ -119,8 +118,8 @@ public class DolphinSchedulerManager { for (String schemaDir : schemaList) { schemaVersion = schemaDir.split("_")[0]; if (SchemaUtils.isAGreatVersion(schemaVersion, version)) { - logger.info("upgrade DolphinScheduler metadata version from {} to {}", version, schemaVersion); - logger.info("Begin upgrading DolphinScheduler's table structure"); + log.info("upgrade DolphinScheduler metadata version from {} to {}", version, schemaVersion); + log.info("Begin upgrading DolphinScheduler's table structure"); upgradeDao.upgradeDolphinScheduler(schemaDir); DolphinSchedulerVersion.getVersion(schemaVersion).ifPresent(v -> upgraderMap.get(v).doUpgrade()); version = schemaVersion; diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/InitDolphinScheduler.java 
b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/InitDolphinScheduler.java index f086a815f0..c6aa05190d 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/InitDolphinScheduler.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/InitDolphinScheduler.java @@ -19,8 +19,8 @@ package org.apache.dolphinscheduler.tools.datasource; import org.apache.dolphinscheduler.dao.DaoConfiguration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; @@ -38,10 +38,9 @@ public class InitDolphinScheduler { @Component @Profile("init") + @Slf4j static class InitRunner implements CommandLineRunner { - private static final Logger logger = LoggerFactory.getLogger(InitRunner.class); - private final DolphinSchedulerManager dolphinSchedulerManager; InitRunner(DolphinSchedulerManager dolphinSchedulerManager) { @@ -51,7 +50,7 @@ public class InitDolphinScheduler { @Override public void run(String... 
args) { dolphinSchedulerManager.initDolphinScheduler(); - logger.info("init DolphinScheduler finished"); + log.info("init DolphinScheduler finished"); } } } diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/UpgradeDolphinScheduler.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/UpgradeDolphinScheduler.java index b4039f185b..aa9553a308 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/UpgradeDolphinScheduler.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/UpgradeDolphinScheduler.java @@ -19,8 +19,8 @@ package org.apache.dolphinscheduler.tools.datasource; import org.apache.dolphinscheduler.dao.DaoConfiguration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; @@ -38,10 +38,9 @@ public class UpgradeDolphinScheduler { @Component @Profile("upgrade") + @Slf4j static class UpgradeRunner implements CommandLineRunner { - private static final Logger logger = LoggerFactory.getLogger(UpgradeRunner.class); - private final DolphinSchedulerManager dolphinSchedulerManager; UpgradeRunner(DolphinSchedulerManager dolphinSchedulerManager) { @@ -52,10 +51,10 @@ public class UpgradeDolphinScheduler { public void run(String... 
args) throws Exception { if (dolphinSchedulerManager.schemaIsInitialized()) { dolphinSchedulerManager.upgradeDolphinScheduler(); - logger.info("upgrade DolphinScheduler finished"); + log.info("upgrade DolphinScheduler finished"); } else { dolphinSchedulerManager.initDolphinScheduler(); - logger.info("init DolphinScheduler finished"); + log.info("init DolphinScheduler finished"); } } } diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/MySQLUpgradeDao.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/MySQLUpgradeDao.java index 20651213ec..2ad7940905 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/MySQLUpgradeDao.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/MySQLUpgradeDao.java @@ -25,15 +25,14 @@ import java.sql.SQLException; import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Service; @Service +@Slf4j public class MySQLUpgradeDao extends UpgradeDao { - public static final Logger logger = LoggerFactory.getLogger(MySQLUpgradeDao.class); - private MySQLUpgradeDao(DataSource dataSource) { super(dataSource); } @@ -60,7 +59,7 @@ public class MySQLUpgradeDao extends UpgradeDao { ResultSet rs = conn.getMetaData().getTables(conn.getCatalog(), conn.getSchema(), tableName, null)) { return rs.next(); } catch (SQLException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); } @@ -81,7 +80,7 @@ public class MySQLUpgradeDao extends UpgradeDao { return rs.next(); } catch (SQLException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); } } diff --git 
a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/PostgreSQLUpgradeDao.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/PostgreSQLUpgradeDao.java index d854a6ef7a..352da3e9d6 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/PostgreSQLUpgradeDao.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/PostgreSQLUpgradeDao.java @@ -26,15 +26,14 @@ import java.sql.SQLException; import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Service; @Service +@Slf4j public class PostgreSQLUpgradeDao extends UpgradeDao { - public static final Logger logger = LoggerFactory.getLogger(PostgreSQLUpgradeDao.class); - private PostgreSQLUpgradeDao(DataSource dataSource) { super(dataSource); } @@ -61,7 +60,7 @@ public class PostgreSQLUpgradeDao extends UpgradeDao { } } catch (SQLException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); } return ""; } @@ -79,7 +78,7 @@ public class PostgreSQLUpgradeDao extends UpgradeDao { ResultSet rs = conn.getMetaData().getTables(conn.getCatalog(), getSchema(), tableName, null)) { return rs.next(); } catch (SQLException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); } } @@ -98,7 +97,7 @@ public class PostgreSQLUpgradeDao extends UpgradeDao { ResultSet rs = conn.getMetaData().getColumns(conn.getCatalog(), getSchema(), tableName, columnName)) { return rs.next(); } catch (SQLException e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException(e.getMessage(), e); } } diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/ResourceDao.java 
b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/ResourceDao.java index d542428a11..b6ace04f17 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/ResourceDao.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/ResourceDao.java @@ -26,8 +26,7 @@ import java.util.Arrays; import java.util.HashMap; import java.util.Map; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import com.google.common.base.Joiner; import com.google.common.base.Strings; @@ -35,10 +34,9 @@ import com.google.common.base.Strings; /** * resource dao */ +@Slf4j public class ResourceDao { - public static final Logger logger = LoggerFactory.getLogger(ResourceDao.class); - /** * list all resources by the type * @@ -71,7 +69,7 @@ public class ResourceDao { } } } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } return resourceSizeMap; @@ -95,7 +93,7 @@ public class ResourceDao { } pstmt.executeBatch(); } catch (Exception e) { - logger.error(e.getMessage(), e); + log.error(e.getMessage(), e); throw new RuntimeException("sql: " + sql, e); } } diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/UpgradeDao.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/UpgradeDao.java index 201dd5dfba..343fbc6488 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/UpgradeDao.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/datasource/dao/UpgradeDao.java @@ -31,14 +31,14 @@ import java.sql.SQLException; import javax.sql.DataSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.core.io.ClassPathResource; import 
org.springframework.core.io.Resource; +@Slf4j public abstract class UpgradeDao { - public static final Logger logger = LoggerFactory.getLogger(UpgradeDao.class); private static final String T_VERSION_NAME = "t_escheduler_version"; private static final String T_NEW_VERSION_NAME = "t_ds_version"; @@ -72,7 +72,7 @@ public abstract class UpgradeDao { initScriptRunner.runScript(initSqlReader); } } catch (Exception e) { - logger.error("Execute init sql file: {} error", sqlFile, e); + log.error("Execute init sql file: {} error", sqlFile, e); throw new RuntimeException(String.format("Execute init sql file: %s error", sqlFile), e); } } @@ -93,7 +93,7 @@ public abstract class UpgradeDao { } return version; } catch (SQLException e) { - logger.error("Get current version from database error, sql: {}", sql, e); + log.error("Get current version from database error, sql: {}", sql, e); throw new RuntimeException("Get current version from database error, sql: " + sql, e); } } @@ -114,7 +114,7 @@ public abstract class UpgradeDao { // update the size of the folder that is the type of udf. 
resourceDao.updateResourceFolderSizeByFileType(conn, 1); } catch (Exception ex) { - logger.error("Failed to upgrade because of failing to update the folder's size of resource files."); + log.error("Failed to upgrade because of failing to update the folder's size of resource files."); } } @@ -144,12 +144,12 @@ public abstract class UpgradeDao { } conn.commit(); } - logger.info("Success execute the dml file, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile); + log.info("Success execute the dml file, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile); } catch (FileNotFoundException e) { - logger.error("Cannot find the DDL file, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile, e); + log.error("Cannot find the DDL file, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile, e); throw new RuntimeException("sql file not found ", e); } catch (Exception e) { - logger.error("Execute ddl file failed, meet an unknown exception, schemaDir: {}, ddlScript: {}", schemaDir, + log.error("Execute ddl file failed, meet an unknown exception, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile, e); throw new RuntimeException("Execute ddl file failed, meet an unknown exception", e); } @@ -170,12 +170,12 @@ public abstract class UpgradeDao { try (Reader sqlReader = new InputStreamReader(sqlFilePath.getInputStream())) { scriptRunner.runScript(sqlReader); } - logger.info("Success execute the ddl file, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile); + log.info("Success execute the ddl file, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile); } catch (FileNotFoundException e) { - logger.error("Cannot find the DDL file, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile, e); + log.error("Cannot find the DDL file, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile, e); throw new RuntimeException("sql file not found ", e); } catch (Exception e) { - logger.error("Execute ddl file failed, meet an unknown exception, schemaDir: {}, ddlScript: {}", schemaDir, + 
log.error("Execute ddl file failed, meet an unknown exception, schemaDir: {}, ddlScript: {}", schemaDir, scriptFile, e); throw new RuntimeException("Execute ddl file failed, meet an unknown exception", e); } @@ -200,7 +200,7 @@ public abstract class UpgradeDao { pstmt.executeUpdate(); } catch (SQLException e) { - logger.error("Update version error, sql: {}", upgradeSQL, e); + log.error("Update version error, sql: {}", upgradeSQL, e); throw new RuntimeException("Upgrade version error, sql: " + upgradeSQL, e); } } diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/CreateDemoTenant.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/CreateDemoTenant.java index 7c5c2b27d0..2a5b0cebf3 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/CreateDemoTenant.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/CreateDemoTenant.java @@ -22,13 +22,13 @@ import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import java.util.Date; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; +@Slf4j public class CreateDemoTenant { - private static final Logger logger = LoggerFactory.getLogger(CreateDemoTenant.class); @Autowired private TenantMapper tenantMapper; @@ -46,9 +46,9 @@ public class CreateDemoTenant { tenant.setUpdateTime(now); // save tenantMapper.insert(tenant); - logger.info("create tenant success"); + log.info("create tenant success"); } else { - logger.warn("os tenant code already exists"); + log.warn("os tenant code already exists"); } } } diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/CreateProcessDemo.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/CreateProcessDemo.java index 2535e1f3a7..d071fe2d5c 100644 --- 
a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/CreateProcessDemo.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/CreateProcessDemo.java @@ -17,8 +17,8 @@ package org.apache.dolphinscheduler.tools.demo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.boot.CommandLineRunner; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -41,10 +41,9 @@ public class CreateProcessDemo { @Component @Profile("demo") + @Slf4j static class DemoRunner implements CommandLineRunner { - private static final Logger logger = LoggerFactory.getLogger(DemoRunner.class); - private final ProcessDefinitionDemo processDefinitionDemo; DemoRunner(ProcessDefinitionDemo processDefinitionDemo) { @@ -54,7 +53,7 @@ public class CreateProcessDemo { @Override public void run(String... args) throws Exception { processDefinitionDemo.createProcessDefinitionDemo(); - logger.info("create process definition demo success"); + log.info("create process definition demo success"); } } } diff --git a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/ProcessDefinitionDemo.java b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/ProcessDefinitionDemo.java index 63ac66fa66..f32ee4a6f7 100644 --- a/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/ProcessDefinitionDemo.java +++ b/dolphinscheduler-tools/src/main/java/org/apache/dolphinscheduler/tools/demo/ProcessDefinitionDemo.java @@ -42,17 +42,16 @@ import java.util.Date; import java.util.LinkedHashMap; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import 
org.springframework.stereotype.Component; @Component +@Slf4j public class ProcessDefinitionDemo { - private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionDemo.class); - @Value("${demo.tenant-code}") private String tenantCode; @@ -80,7 +79,7 @@ public class ProcessDefinitionDemo { // create and get demo projectCode Project project = projectMapper.queryByName("demo"); if (project != null) { - logger.warn("Project {} already exists.", project.getName()); + log.warn("Project {} already exists.", project.getName()); } try { project = Project @@ -94,10 +93,10 @@ public class ProcessDefinitionDemo { .updateTime(now) .build(); } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("create project error", e); + log.error("create project error", e); } if (projectMapper.insert(project) > 0) { - logger.info("create project success"); + log.info("create project success"); } else { throw new Exception("create project error"); } @@ -105,7 +104,7 @@ public class ProcessDefinitionDemo { try { projectCode = project.getCode(); } catch (NullPointerException e) { - logger.error("project code is null", e); + log.error("project code is null", e); } // generate access token @@ -121,44 +120,44 @@ public class ProcessDefinitionDemo { int insert = accessTokenMapper.insert(accessToken); if (insert > 0) { - logger.info("create access token success"); + log.info("create access token success"); } else { - logger.info("create access token error"); + log.info("create access token error"); } // creat process definition demo // shell demo ProxyResult shellResult = shellDemo(token, projectCode, tenantCode); - logger.info("create shell demo {}", shellResult.getMsg()); + log.info("create shell demo {}", shellResult.getMsg()); // subprocess demo LinkedHashMap subProcess = (LinkedHashMap) shellResult.getData(); String subProcessCode = String.valueOf(subProcess.get("code")); ProxyResult subProcessResult = subProcessDemo(token, projectCode, tenantCode, subProcessCode); 
- logger.info("create subprocess demo {}", subProcessResult.getMsg()); + log.info("create subprocess demo {}", subProcessResult.getMsg()); // switch demo ProxyResult switchResult = swicthDemo(token, projectCode, tenantCode); - logger.info("create switch demo {}", switchResult.getMsg()); + log.info("create switch demo {}", switchResult.getMsg()); // condition demo ProxyResult conditionResult = conditionDemo(token, projectCode, tenantCode); - logger.info("create condition demo {}", conditionResult.getMsg()); + log.info("create condition demo {}", conditionResult.getMsg()); // dependent demo LinkedHashMap switchProcess = (LinkedHashMap) switchResult.getData(); String switchProcessCode = String.valueOf(switchProcess.get("code")); ProxyResult dependentResult = dependentProxyResultDemo(token, projectCode, tenantCode, subProcessCode, switchProcessCode); - logger.info("create dependent demo {}", dependentResult.getMsg()); + log.info("create dependent demo {}", dependentResult.getMsg()); // parameter context demo ProxyResult parameterContextResult = parameterContextDemo(token, projectCode, tenantCode); - logger.info("create parameter context demo {}", parameterContextResult.getMsg()); + log.info("create parameter context demo {}", parameterContextResult.getMsg()); // clear log demo ProxyResult clearLogResult = clearLogDemo(token, projectCode, tenantCode); - logger.info("create clear log demo {}", clearLogResult.getMsg()); + log.info("create clear log demo {}", clearLogResult.getMsg()); } @@ -171,7 +170,7 @@ public class ProcessDefinitionDemo { taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("task code get error, ", e); + log.error("task code get error, ", e); } String taskCodeFirst = String.valueOf(taskCodes.get(0)).replaceAll("\\[|\\]", ""); String absolutePath = System.getProperty("user.dir"); @@ -246,7 +245,7 @@ public class ProcessDefinitionDemo { 
taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("task code get error, ", e); + log.error("task code get error, ", e); } String taskCodeFirst = String.valueOf(taskCodes.get(0)).replaceAll("\\[|\\]", ""); String taskCodeSecond = String.valueOf(taskCodes.get(1)).replaceAll("\\[|\\]", ""); @@ -338,7 +337,7 @@ public class ProcessDefinitionDemo { taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("task code get error, ", e); + log.error("task code get error, ", e); } String taskCodeFirst = String.valueOf(taskCodes.get(0)).replaceAll("\\[|\\]", ""); String taskCodeSecond = String.valueOf(taskCodes.get(1)).replaceAll("\\[|\\]", ""); @@ -424,7 +423,7 @@ public class ProcessDefinitionDemo { taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("task code get error, ", e); + log.error("task code get error, ", e); } String taskCodeFirst = String.valueOf(taskCodes.get(0)).replaceAll("\\[|\\]", ""); String taskCodeSecond = String.valueOf(taskCodes.get(1)).replaceAll("\\[|\\]", ""); @@ -541,7 +540,7 @@ public class ProcessDefinitionDemo { taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("task code get error, ", e); + log.error("task code get error, ", e); } String taskCodeFirst = String.valueOf(taskCodes.get(0)).replaceAll("\\[|\\]", ""); String taskCodeSecond = String.valueOf(taskCodes.get(1)).replaceAll("\\[|\\]", ""); @@ -660,7 +659,7 @@ public class ProcessDefinitionDemo { taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("task code get error, ", e); + log.error("task code get error, ", e); } String taskCodeFirst = String.valueOf(taskCodes.get(0)).replaceAll("\\[|\\]", ""); String 
taskCodeSecond = String.valueOf(taskCodes.get(1)).replaceAll("\\[|\\]", ""); @@ -759,7 +758,7 @@ public class ProcessDefinitionDemo { taskCodes.add(CodeGenerateUtils.getInstance().genCode()); } } catch (CodeGenerateUtils.CodeGenerateException e) { - logger.error("task code get error, ", e); + log.error("task code get error, ", e); } String taskCode = String.valueOf(taskCodes.get(0)).replaceAll("\\[|\\]", ""); diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java index be093dc3b1..870c5a123d 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/WorkerServer.java @@ -39,8 +39,8 @@ import java.util.Collection; import javax.annotation.PostConstruct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -56,13 +56,9 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; "org.apache.dolphinscheduler.service.queue.*", }) }) +@Slf4j public class WorkerServer implements IStoppable { - /** - * logger - */ - private static final Logger logger = LoggerFactory.getLogger(WorkerServer.class); - @Autowired private WorkerManagerThread workerManagerThread; @@ -122,7 +118,7 @@ public class WorkerServer implements IStoppable { public void close(String cause) { if (!ServerLifeCycleManager.toStopped()) { - logger.warn("WorkerServer is already stopped, current cause: {}", cause); + log.warn("WorkerServer is already stopped, current cause: {}", cause); return; } ThreadUtils.sleep(Constants.SERVER_CLOSE_WAIT_TIME.toMillis()); @@ -130,14 +126,14 
@@ public class WorkerServer implements IStoppable { try ( WorkerRpcServer closedWorkerRpcServer = workerRpcServer; WorkerRegistryClient closedRegistryClient = workerRegistryClient) { - logger.info("Worker server is stopping, current cause : {}", cause); + log.info("Worker server is stopping, current cause : {}", cause); // kill running tasks this.killAllRunningTasks(); } catch (Exception e) { - logger.error("Worker server stop failed, current cause: {}", cause, e); + log.error("Worker server stop failed, current cause: {}", cause, e); return; } - logger.info("Worker server stopped, current cause: {}", cause); + log.info("Worker server stopped, current cause: {}", cause); } @Override @@ -153,7 +149,7 @@ public class WorkerServer implements IStoppable { if (CollectionUtils.isEmpty(taskRequests)) { return; } - logger.info("Worker begin to kill all cache task, task size: {}", taskRequests.size()); + log.info("Worker begin to kill all cache task, task size: {}", taskRequests.size()); int killNumber = 0; for (TaskExecutionContext taskRequest : taskRequests) { // kill task when it's not finished yet @@ -167,7 +163,7 @@ public class WorkerServer implements IStoppable { LogUtils.removeWorkflowAndTaskInstanceIdMDC(); } } - logger.info("Worker after kill all cache task, task size: {}, killed number: {}", taskRequests.size(), + log.info("Worker after kill all cache task, task size: {}, killed number: {}", taskRequests.size(), killNumber); } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java index 02728136de..93b705aa22 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/config/WorkerConfig.java @@ -25,9 +25,8 @@ import 
org.apache.dolphinscheduler.registry.api.ConnectStrategyProperties; import java.time.Duration; import lombok.Data; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.validation.Errors; @@ -38,10 +37,9 @@ import org.springframework.validation.annotation.Validated; @Validated @Configuration @ConfigurationProperties(prefix = "worker") +@Slf4j public class WorkerConfig implements Validator { - private Logger logger = LoggerFactory.getLogger(WorkerConfig.class); - private int listenPort = 1234; private int execThreads = 10; private Duration heartbeatInterval = Duration.ofSeconds(10); @@ -86,19 +84,19 @@ public class WorkerConfig implements Validator { } private void printConfig() { - logger.info("Worker config: listenPort -> {}", listenPort); - logger.info("Worker config: execThreads -> {}", execThreads); - logger.info("Worker config: heartbeatInterval -> {}", heartbeatInterval); - logger.info("Worker config: hostWeight -> {}", hostWeight); - logger.info("Worker config: tenantAutoCreate -> {}", tenantAutoCreate); - logger.info("Worker config: tenantDistributedUser -> {}", tenantDistributedUser); - logger.info("Worker config: maxCpuLoadAvg -> {}", maxCpuLoadAvg); - logger.info("Worker config: reservedMemory -> {}", reservedMemory); - logger.info("Worker config: alertListenHost -> {}", alertListenHost); - logger.info("Worker config: alertListenPort -> {}", alertListenPort); - logger.info("Worker config: registryDisconnectStrategy -> {}", registryDisconnectStrategy); - logger.info("Worker config: workerAddress -> {}", registryDisconnectStrategy); - logger.info("Worker config: workerRegistryPath: {}", workerRegistryPath); - logger.info("Worker config: taskExecuteThreadsFullPolicy: {}", taskExecuteThreadsFullPolicy); + log.info("Worker config: listenPort -> {}", 
listenPort); + log.info("Worker config: execThreads -> {}", execThreads); + log.info("Worker config: heartbeatInterval -> {}", heartbeatInterval); + log.info("Worker config: hostWeight -> {}", hostWeight); + log.info("Worker config: tenantAutoCreate -> {}", tenantAutoCreate); + log.info("Worker config: tenantDistributedUser -> {}", tenantDistributedUser); + log.info("Worker config: maxCpuLoadAvg -> {}", maxCpuLoadAvg); + log.info("Worker config: reservedMemory -> {}", reservedMemory); + log.info("Worker config: alertListenHost -> {}", alertListenHost); + log.info("Worker config: alertListenPort -> {}", alertListenPort); + log.info("Worker config: registryDisconnectStrategy -> {}", registryDisconnectStrategy); + log.info("Worker config: workerAddress -> {}", registryDisconnectStrategy); + log.info("Worker config: workerRegistryPath: {}", workerRegistryPath); + log.info("Worker config: taskExecuteThreadsFullPolicy: {}", taskExecuteThreadsFullPolicy); } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/message/MessageRetryRunner.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/message/MessageRetryRunner.java index 31acf40009..f75767ef27 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/message/MessageRetryRunner.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/message/MessageRetryRunner.java @@ -34,18 +34,16 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import org.springframework.stereotype.Component; @Component +@Slf4j public class MessageRetryRunner extends BaseDaemonThread { - private final Logger logger = 
LoggerFactory.getLogger(MessageRetryRunner.class); - protected MessageRetryRunner() { super("WorkerMessageRetryRunnerThread"); } @@ -62,13 +60,13 @@ public class MessageRetryRunner extends BaseDaemonThread { @Override public synchronized void start() { - logger.info("Message retry runner staring"); + log.info("Message retry runner staring"); messageSenders.forEach(messageSender -> { messageSenderMap.put(messageSender.getMessageType(), messageSender); - logger.info("Injected message sender: {}", messageSender.getClass().getName()); + log.info("Injected message sender: {}", messageSender.getClass().getName()); }); super.start(); - logger.info("Message retry runner started"); + log.info("Message retry runner started"); } public void addRetryMessage(int taskInstanceId, @NonNull CommandType messageType, BaseCommand baseCommand) { @@ -120,25 +118,25 @@ public class MessageRetryRunner extends BaseDaemonThread { CommandType messageType = messageEntry.getKey(); BaseCommand message = messageEntry.getValue(); if (now - message.getMessageSendTime() > MESSAGE_RETRY_WINDOW) { - logger.info("Begin retry send message to master, message: {}", message); + log.info("Begin retry send message to master, message: {}", message); message.setMessageSendTime(now); messageSenderMap.get(messageType).sendMessage(message); - logger.info("Success send message to master, message: {}", message); + log.info("Success send message to master, message: {}", message); } } } catch (Exception e) { - logger.warn("Retry send message to master error", e); + log.warn("Retry send message to master error", e); } finally { LogUtils.removeTaskInstanceIdMDC(); } } Thread.sleep(Constants.SLEEP_TIME_MILLIS); } catch (InterruptedException instance) { - logger.warn("The message retry thread is interrupted, will break this loop", instance); + log.warn("The message retry thread is interrupted, will break this loop", instance); Thread.currentThread().interrupt(); break; } catch (Exception ex) { - logger.error("Retry send 
message failed, get an known exception.", ex); + log.error("Retry send message failed, get an known exception.", ex); } } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java index 0f6a3d093b..d5f226187c 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/HostUpdateProcessor.java @@ -24,8 +24,8 @@ import org.apache.dolphinscheduler.remote.command.HostUpdateCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.worker.message.MessageRetryRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -37,10 +37,9 @@ import io.netty.channel.Channel; * this used when master failover */ @Component +@Slf4j public class HostUpdateProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(HostUpdateProcessor.class); - @Autowired private MessageRetryRunner messageRetryRunner; @@ -50,10 +49,10 @@ public class HostUpdateProcessor implements NettyRequestProcessor { String.format("invalid command type : %s", command.getType())); HostUpdateCommand updateCommand = JSONUtils.parseObject(command.getBody(), HostUpdateCommand.class); if (updateCommand == null) { - logger.error("host update command is null"); + log.error("host update command is null"); return; } - logger.info("received host update command : {}", updateCommand); + log.info("received host update command : {}", updateCommand); messageRetryRunner.updateMessageHost(updateCommand.getTaskInstanceId(), 
updateCommand.getProcessHost()); } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskDispatchProcessor.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskDispatchProcessor.java index dcf6449820..22186a8126 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskDispatchProcessor.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskDispatchProcessor.java @@ -37,8 +37,8 @@ import org.apache.dolphinscheduler.server.worker.runner.WorkerDelayTaskExecuteRu import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread; import org.apache.dolphinscheduler.server.worker.runner.WorkerTaskExecuteRunnableFactoryBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -52,10 +52,9 @@ import io.netty.channel.Channel; * Used to handle {@link CommandType#TASK_DISPATCH_REQUEST} */ @Component +@Slf4j public class TaskDispatchProcessor implements NettyRequestProcessor { - private static final Logger logger = LoggerFactory.getLogger(TaskDispatchProcessor.class); - @Autowired private WorkerConfig workerConfig; @@ -93,16 +92,16 @@ public class TaskDispatchProcessor implements NettyRequestProcessor { TaskDispatchCommand taskDispatchCommand = JSONUtils.parseObject(command.getBody(), TaskDispatchCommand.class); if (taskDispatchCommand == null) { - logger.error("task execute request command content is null"); + log.error("task execute request command content is null"); return; } final String workflowMasterAddress = taskDispatchCommand.getMessageSenderAddress(); - logger.info("Receive task dispatch request, command: {}", taskDispatchCommand); + log.info("Receive task dispatch request, command: {}", 
taskDispatchCommand); TaskExecutionContext taskExecutionContext = taskDispatchCommand.getTaskExecutionContext(); if (taskExecutionContext == null) { - logger.error("task execution context is null"); + log.error("task execution context is null"); return; } try { @@ -119,7 +118,7 @@ public class TaskDispatchProcessor implements NettyRequestProcessor { DateUtils.getRemainTime(DateUtils.timeStampToDate(taskExecutionContext.getFirstSubmitTime()), taskExecutionContext.getDelayTime() * 60L); if (remainTime > 0) { - logger.info("Current taskInstance is choose delay execution, delay time: {}s", remainTime); + log.info("Current taskInstance is choose delay execution, delay time: {}s", remainTime); taskExecutionContext.setCurrentExecutionStatus(TaskExecutionStatus.DELAY_EXECUTION); workerMessageSender.sendMessage(taskExecutionContext, workflowMasterAddress, CommandType.TASK_EXECUTE_RESULT); @@ -138,13 +137,13 @@ public class TaskDispatchProcessor implements NettyRequestProcessor { // submit task to manager boolean offer = workerManager.offer(workerTaskExecuteRunnable); if (!offer) { - logger.warn( + log.warn( "submit task to wait queue error, queue is full, current queue size is {}, will send a task reject message to master", workerManager.getWaitSubmitQueueSize()); workerMessageSender.sendMessageWithRetry(taskExecutionContext, workflowMasterAddress, CommandType.TASK_REJECT); } else { - logger.info("Submit task to wait queue success, current queue size is {}", + log.info("Submit task to wait queue success, current queue size is {}", workerManager.getWaitSubmitQueueSize()); } } finally { diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteResultAckProcessor.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteResultAckProcessor.java index 5b58e22e38..43cb9da9a6 100644 --- 
a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteResultAckProcessor.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteResultAckProcessor.java @@ -25,8 +25,8 @@ import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.worker.message.MessageRetryRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -36,11 +36,10 @@ import io.netty.channel.Channel; /** * task execute running ack, from master to worker */ +@Slf4j @Component public class TaskExecuteResultAckProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskExecuteResultAckProcessor.class); - @Autowired private MessageRetryRunner messageRetryRunner; @@ -53,20 +52,20 @@ public class TaskExecuteResultAckProcessor implements NettyRequestProcessor { TaskExecuteAckCommand.class); if (taskExecuteAckMessage == null) { - logger.error("task execute response ack command is null"); + log.error("task execute response ack command is null"); return; } try { LogUtils.setTaskInstanceIdMDC(taskExecuteAckMessage.getTaskInstanceId()); - logger.info("Receive task execute response ack command : {}", taskExecuteAckMessage); + log.info("Receive task execute response ack command : {}", taskExecuteAckMessage); if (taskExecuteAckMessage.isSuccess()) { messageRetryRunner.removeRetryMessage(taskExecuteAckMessage.getTaskInstanceId(), CommandType.TASK_EXECUTE_RESULT); - logger.debug("remove REMOTE_CHANNELS, task instance id:{}", taskExecuteAckMessage.getTaskInstanceId()); + log.debug("remove REMOTE_CHANNELS, task instance id:{}", taskExecuteAckMessage.getTaskInstanceId()); } else { // 
master handle worker response error, will still retry - logger.error("Receive task execute result ack message, the message status is not success, message: {}", + log.error("Receive task execute result ack message, the message status is not success, message: {}", taskExecuteAckMessage); } } finally { diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteRunningAckProcessor.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteRunningAckProcessor.java index 5698229f90..f49a7c6129 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteRunningAckProcessor.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteRunningAckProcessor.java @@ -25,8 +25,8 @@ import org.apache.dolphinscheduler.remote.command.TaskExecuteRunningAckMessage; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.worker.message.MessageRetryRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -37,10 +37,9 @@ import io.netty.channel.Channel; * task execute running ack processor */ @Component +@Slf4j public class TaskExecuteRunningAckProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskExecuteRunningAckProcessor.class); - @Autowired private MessageRetryRunner messageRetryRunner; @@ -52,12 +51,12 @@ public class TaskExecuteRunningAckProcessor implements NettyRequestProcessor { TaskExecuteRunningAckMessage runningAckCommand = JSONUtils.parseObject(command.getBody(), TaskExecuteRunningAckMessage.class); if (runningAckCommand == null) { - logger.error("task execute running ack command is null"); + 
log.error("task execute running ack command is null"); return; } try { LogUtils.setTaskInstanceIdMDC(runningAckCommand.getTaskInstanceId()); - logger.info("task execute running ack command : {}", runningAckCommand); + log.info("task execute running ack command : {}", runningAckCommand); if (runningAckCommand.isSuccess()) { messageRetryRunner.removeRetryMessage(runningAckCommand.getTaskInstanceId(), diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java index 45fbf9fce4..3c99ea492e 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java @@ -47,8 +47,8 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -63,11 +63,10 @@ import io.netty.channel.ChannelFutureListener; /** * task kill processor */ +@Slf4j @Component public class TaskKillProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskKillProcessor.class); - @Autowired private WorkerManagerThread workerManager; @@ -86,10 +85,10 @@ public class TaskKillProcessor implements NettyRequestProcessor { String.format("invalid command type : %s", command.getType())); TaskKillRequestCommand killCommand = JSONUtils.parseObject(command.getBody(), TaskKillRequestCommand.class); if (killCommand == null) { - logger.error("task kill request command is null"); + log.error("task kill request command is null"); return; } - logger.info("task kill command : {}", killCommand); + 
log.info("task kill command : {}", killCommand); int taskInstanceId = killCommand.getTaskInstanceId(); try { @@ -97,7 +96,7 @@ public class TaskKillProcessor implements NettyRequestProcessor { TaskExecutionContext taskExecutionContext = TaskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId); if (taskExecutionContext == null) { - logger.error("taskRequest cache is null, taskInstanceId: {}", killCommand.getTaskInstanceId()); + log.error("taskRequest cache is null, taskInstanceId: {}", killCommand.getTaskInstanceId()); return; } @@ -108,7 +107,7 @@ public class TaskKillProcessor implements NettyRequestProcessor { taskExecutionContext.setCurrentExecutionStatus(TaskExecutionStatus.KILL); TaskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId); sendTaskKillResponseCommand(channel, taskExecutionContext); - logger.info("the task has not been executed and has been cancelled, task id:{}", taskInstanceId); + log.info("the task has not been executed and has been cancelled, task id:{}", taskInstanceId); return; } @@ -124,7 +123,7 @@ public class TaskKillProcessor implements NettyRequestProcessor { TaskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); messageRetryRunner.removeRetryMessages(taskExecutionContext.getTaskInstanceId()); - logger.info("remove REMOTE_CHANNELS, task instance id:{}", killCommand.getTaskInstanceId()); + log.info("remove REMOTE_CHANNELS, task instance id:{}", killCommand.getTaskInstanceId()); } finally { LogUtils.removeTaskInstanceIdMDC(); } @@ -145,7 +144,7 @@ public class TaskKillProcessor implements NettyRequestProcessor { @Override public void operationComplete(ChannelFuture future) throws Exception { if (!future.isSuccess()) { - logger.error("Submit kill response to master error, kill command: {}", taskKillResponseCommand); + log.error("Submit kill response to master error, kill command: {}", taskKillResponseCommand); } } }); @@ -176,20 +175,20 @@ public class TaskKillProcessor 
implements NettyRequestProcessor { protected void cancelApplication(int taskInstanceId) { WorkerTaskExecuteRunnable workerTaskExecuteRunnable = workerManager.getTaskExecuteThread(taskInstanceId); if (workerTaskExecuteRunnable == null) { - logger.warn("taskExecuteThread not found, taskInstanceId:{}", taskInstanceId); + log.warn("taskExecuteThread not found, taskInstanceId:{}", taskInstanceId); return; } AbstractTask task = workerTaskExecuteRunnable.getTask(); if (task == null) { - logger.warn("task not found, taskInstanceId:{}", taskInstanceId); + log.warn("task not found, taskInstanceId:{}", taskInstanceId); return; } try { task.cancel(); } catch (Exception e) { - logger.error("kill task error", e); + log.error("kill task error", e); } - logger.info("kill task by cancelApplication, task id:{}", taskInstanceId); + log.info("kill task by cancelApplication, task id:{}", taskInstanceId); } /** @@ -207,12 +206,12 @@ public class TaskKillProcessor implements NettyRequestProcessor { if (!Strings.isNullOrEmpty(pidsStr)) { String cmd = String.format("kill -9 %s", pidsStr); cmd = OSUtils.getSudoCmd(tenantCode, cmd); - logger.info("process id:{}, cmd:{}", processId, cmd); + log.info("process id:{}, cmd:{}", processId, cmd); OSUtils.exeCmd(cmd); } } catch (Exception e) { processFlag = false; - logger.error("kill task error", e); + log.error("kill task error", e); } return processFlag; } @@ -232,24 +231,24 @@ public class TaskKillProcessor implements NettyRequestProcessor { String executePath, String tenantCode) { if (logPath == null || appInfoPath == null || executePath == null || tenantCode == null) { - logger.error( + log.error( "Kill yarn job error, the input params is illegal, host: {}, logPath: {}, appInfoPath: {}, executePath: {}, tenantCode: {}", host, logPath, appInfoPath, executePath, tenantCode); return Pair.of(false, Collections.emptyList()); } try { - logger.info("Get appIds from worker {}:{} taskLogPath: {}", host.getIp(), host.getPort(), logPath); + log.info("Get 
appIds from worker {}:{} taskLogPath: {}", host.getIp(), host.getPort(), logPath); List appIds = LogUtils.getAppIds(logPath, appInfoPath, PropertyUtils.getString(APPID_COLLECT, DEFAULT_COLLECT_WAY)); if (CollectionUtils.isEmpty(appIds)) { - logger.info("The appId is empty"); + log.info("The appId is empty"); return Pair.of(true, Collections.emptyList()); } - ProcessUtils.cancelApplication(appIds, logger, tenantCode, executePath); + ProcessUtils.cancelApplication(appIds, log, tenantCode, executePath); return Pair.of(true, appIds); } catch (Exception e) { - logger.error("Kill yarn job error, host: {}, logPath: {}, executePath: {}, tenantCode: {}", host, logPath, + log.error("Kill yarn job error, host: {}, logPath: {}, executePath: {}, tenantCode: {}", host, logPath, executePath, tenantCode, e); } return Pair.of(false, Collections.emptyList()); diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskRejectAckProcessor.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskRejectAckProcessor.java index c4f5330ee4..7bc1267907 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskRejectAckProcessor.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskRejectAckProcessor.java @@ -25,8 +25,8 @@ import org.apache.dolphinscheduler.remote.command.TaskRejectAckCommand; import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.worker.message.MessageRetryRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -34,10 +34,9 @@ import com.google.common.base.Preconditions; import io.netty.channel.Channel; @Component +@Slf4j public class TaskRejectAckProcessor 
implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskRejectAckProcessor.class); - @Autowired private MessageRetryRunner messageRetryRunner; @@ -49,19 +48,19 @@ public class TaskRejectAckProcessor implements NettyRequestProcessor { TaskRejectAckCommand taskRejectAckMessage = JSONUtils.parseObject(command.getBody(), TaskRejectAckCommand.class); if (taskRejectAckMessage == null) { - logger.error("Receive task reject response, the response message is null"); + log.error("Receive task reject response, the response message is null"); return; } try { LogUtils.setTaskInstanceIdMDC(taskRejectAckMessage.getTaskInstanceId()); - logger.info("Receive task reject response ack command: {}", taskRejectAckMessage); + log.info("Receive task reject response ack command: {}", taskRejectAckMessage); if (taskRejectAckMessage.isSuccess()) { messageRetryRunner.removeRetryMessage(taskRejectAckMessage.getTaskInstanceId(), CommandType.TASK_REJECT); - logger.debug("removeRecallCache: task instance id:{}", taskRejectAckMessage.getTaskInstanceId()); + log.debug("removeRecallCache: task instance id:{}", taskRejectAckMessage.getTaskInstanceId()); } else { - logger.error("Receive task reject ack message, the message status is not success, message: {}", + log.error("Receive task reject ack message, the message status is not success, message: {}", taskRejectAckMessage); } } finally { diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskSavePointProcessor.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskSavePointProcessor.java index 3ec5c28376..7d3008a921 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskSavePointProcessor.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskSavePointProcessor.java @@ -31,8 +31,8 @@ import 
org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor; import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread; import org.apache.dolphinscheduler.server.worker.runner.WorkerTaskExecuteRunnable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -46,10 +46,9 @@ import io.netty.channel.ChannelFutureListener; * task save point processor */ @Component +@Slf4j public class TaskSavePointProcessor implements NettyRequestProcessor { - private final Logger logger = LoggerFactory.getLogger(TaskSavePointProcessor.class); - /** * task execute manager */ @@ -69,16 +68,16 @@ public class TaskSavePointProcessor implements NettyRequestProcessor { TaskSavePointRequestCommand taskSavePointRequestCommand = JSONUtils.parseObject(command.getBody(), TaskSavePointRequestCommand.class); if (taskSavePointRequestCommand == null) { - logger.error("task savepoint request command is null"); + log.error("task savepoint request command is null"); return; } - logger.info("Receive task savepoint command : {}", taskSavePointRequestCommand); + log.info("Receive task savepoint command : {}", taskSavePointRequestCommand); int taskInstanceId = taskSavePointRequestCommand.getTaskInstanceId(); TaskExecutionContext taskExecutionContext = TaskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId); if (taskExecutionContext == null) { - logger.error("taskRequest cache is null, taskInstanceId: {}", + log.error("taskRequest cache is null, taskInstanceId: {}", taskSavePointRequestCommand.getTaskInstanceId()); return; } @@ -101,10 +100,10 @@ public class TaskSavePointProcessor implements NettyRequestProcessor { @Override public void operationComplete(ChannelFuture future) throws Exception { if (!future.isSuccess()) { - logger.error("Submit kill response to master error, kill command: {}", + log.error("Submit 
kill response to master error, kill command: {}", taskSavePointResponseCommand); } else - logger.info("Submit kill response to master success, kill command: {}", + log.info("Submit kill response to master success, kill command: {}", taskSavePointResponseCommand); } }); @@ -113,22 +112,22 @@ public class TaskSavePointProcessor implements NettyRequestProcessor { protected void doSavePoint(int taskInstanceId) { WorkerTaskExecuteRunnable workerTaskExecuteRunnable = workerManager.getTaskExecuteThread(taskInstanceId); if (workerTaskExecuteRunnable == null) { - logger.warn("taskExecuteThread not found, taskInstanceId:{}", taskInstanceId); + log.warn("taskExecuteThread not found, taskInstanceId:{}", taskInstanceId); return; } AbstractTask task = workerTaskExecuteRunnable.getTask(); if (task == null) { - logger.warn("task not found, taskInstanceId:{}", taskInstanceId); + log.warn("task not found, taskInstanceId:{}", taskInstanceId); return; } if (!(task instanceof StreamTask)) { - logger.warn("task is not stream task"); + log.warn("task is not stream task"); return; } try { ((StreamTask) task).savePoint(); } catch (Exception e) { - logger.error("task save point error", e); + log.error("task save point error", e); } } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerConnectionStateListener.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerConnectionStateListener.java index 7e549a458c..d070a90b34 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerConnectionStateListener.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerConnectionStateListener.java @@ -23,13 +23,11 @@ import org.apache.dolphinscheduler.registry.api.ConnectionState; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; import lombok.NonNull; +import 
lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - +@Slf4j public class WorkerConnectionStateListener implements ConnectionListener { - private final Logger logger = LoggerFactory.getLogger(WorkerConnectionStateListener.class); private final WorkerConfig workerConfig; private final WorkerConnectStrategy workerConnectStrategy; @@ -41,7 +39,7 @@ public class WorkerConnectionStateListener implements ConnectionListener { @Override public void onUpdate(ConnectionState state) { - logger.info("Worker received a {} event from registry, the current server state is {}", state, + log.info("Worker received a {} event from registry, the current server state is {}", state, ServerLifeCycleManager.getServerStatus()); switch (state) { case CONNECTED: diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerStopStrategy.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerStopStrategy.java index 88433c9465..ee4e960a80 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerStopStrategy.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerStopStrategy.java @@ -21,18 +21,17 @@ import org.apache.dolphinscheduler.registry.api.RegistryClient; import org.apache.dolphinscheduler.registry.api.StrategyType; import org.apache.dolphinscheduler.server.worker.config.WorkerConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Service; @Service @ConditionalOnProperty(prefix = "worker.registry-disconnect-strategy", name = "strategy", havingValue = "stop", matchIfMissing = true) +@Slf4j public class WorkerStopStrategy implements 
WorkerConnectStrategy { - private final Logger logger = LoggerFactory.getLogger(WorkerStopStrategy.class); - @Autowired public RegistryClient registryClient; @Autowired @@ -46,7 +45,7 @@ public class WorkerStopStrategy implements WorkerConnectStrategy { @Override public void reconnect() { - logger.warn("The current connect strategy is stop, so the worker will not reconnect to registry"); + log.warn("The current connect strategy is stop, so the worker will not reconnect to registry"); } @Override diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerWaitingStrategy.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerWaitingStrategy.java index 24a609b74f..bc3563d5a1 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerWaitingStrategy.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerWaitingStrategy.java @@ -30,18 +30,17 @@ import org.apache.dolphinscheduler.server.worker.runner.WorkerManagerThread; import java.time.Duration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Service; @Service @ConditionalOnProperty(prefix = "worker.registry-disconnect-strategy", name = "strategy", havingValue = "waiting") +@Slf4j public class WorkerWaitingStrategy implements WorkerConnectStrategy { - private final Logger logger = LoggerFactory.getLogger(WorkerWaitingStrategy.class); - @Autowired private WorkerConfig workerConfig; @@ -67,7 +66,7 @@ public class WorkerWaitingStrategy implements WorkerConnectStrategy { clearWorkerResource(); Duration maxWaitingTime = workerConfig.getRegistryDisconnectStrategy().getMaxWaitingTime(); try { - 
logger.info("Worker disconnect from registry will try to reconnect in {} s", + log.info("Worker disconnect from registry will try to reconnect in {} s", maxWaitingTime.getSeconds()); registryClient.connectUntilTimeout(maxWaitingTime); } catch (RegistryException ex) { @@ -78,15 +77,15 @@ public class WorkerWaitingStrategy implements WorkerConnectStrategy { String errorMessage = String.format( "Disconnect from registry and change the current status to waiting error, the current server state is %s, will stop the current server", ServerLifeCycleManager.getServerStatus()); - logger.error(errorMessage, e); + log.error(errorMessage, e); registryClient.getStoppable().stop(errorMessage); } catch (RegistryException ex) { String errorMessage = "Disconnect from registry and waiting to reconnect failed, will stop the server"; - logger.error(errorMessage, ex); + log.error(errorMessage, ex); registryClient.getStoppable().stop(errorMessage); } catch (Exception ex) { String errorMessage = "Disconnect from registry and get an unknown exception, will stop the server"; - logger.error(errorMessage, ex); + log.error(errorMessage, ex); registryClient.getStoppable().stop(errorMessage); } } @@ -94,19 +93,19 @@ public class WorkerWaitingStrategy implements WorkerConnectStrategy { @Override public void reconnect() { if (ServerLifeCycleManager.isRunning()) { - logger.info("no need to reconnect, as the current server status is running"); + log.info("no need to reconnect, as the current server status is running"); } else { try { ServerLifeCycleManager.recoverFromWaiting(); reStartWorkerResource(); - logger.info("Recover from waiting success, the current server status is {}", + log.info("Recover from waiting success, the current server status is {}", ServerLifeCycleManager.getServerStatus()); } catch (Exception e) { String errorMessage = String.format( "Recover from waiting failed, the current server status is %s, will stop the server", ServerLifeCycleManager.getServerStatus()); - 
logger.error(errorMessage, e); + log.error(errorMessage, e); registryClient.getStoppable().stop(errorMessage); } } @@ -120,21 +119,21 @@ public class WorkerWaitingStrategy implements WorkerConnectStrategy { private void clearWorkerResource() { // close the worker resource, if close failed should stop the worker server workerRpcServer.close(); - logger.warn("Worker server close the RPC server due to lost connection from registry"); + log.warn("Worker server close the RPC server due to lost connection from registry"); workerRpcClient.close(); - logger.warn("Worker server close the RPC client due to lost connection from registry"); + log.warn("Worker server close the RPC client due to lost connection from registry"); workerManagerThread.clearTask(); - logger.warn("Worker server clear the tasks due to lost connection from registry"); + log.warn("Worker server clear the tasks due to lost connection from registry"); messageRetryRunner.clearMessage(); - logger.warn("Worker server clear the retry message due to lost connection from registry"); + log.warn("Worker server clear the retry message due to lost connection from registry"); } private void reStartWorkerResource() { // reopen the resource, if reopen failed should stop the worker server workerRpcServer.start(); - logger.warn("Worker server restart PRC server due to reconnect to registry"); + log.warn("Worker server restart PRC server due to reconnect to registry"); workerRpcClient.start(); - logger.warn("Worker server restart PRC client due to reconnect to registry"); + log.warn("Worker server restart PRC client due to reconnect to registry"); } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerMessageSender.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerMessageSender.java index 393586b799..d816420493 100644 --- 
a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerMessageSender.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerMessageSender.java @@ -31,17 +31,15 @@ import java.util.Map; import javax.annotation.PostConstruct; import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component +@Slf4j public class WorkerMessageSender { - private final Logger logger = LoggerFactory.getLogger(WorkerMessageSender.class); - @Autowired private MessageRetryRunner messageRetryRunner; @@ -69,7 +67,7 @@ public class WorkerMessageSender { messageRetryRunner.addRetryMessage(taskExecutionContext.getTaskInstanceId(), messageType, baseCommand); messageSender.sendMessage(baseCommand); } catch (RemotingException e) { - logger.error("Send message error, messageType: {}, message: {}", messageType, baseCommand); + log.error("Send message error, messageType: {}, message: {}", messageType, baseCommand); } } @@ -84,7 +82,7 @@ public class WorkerMessageSender { try { messageSender.sendMessage(baseCommand); } catch (RemotingException e) { - logger.error("Send message error, messageType: {}, message: {}", messageType, baseCommand); + log.error("Send message error, messageType: {}, message: {}", messageType, baseCommand); } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerRpcClient.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerRpcClient.java index a01cbaabdf..4380f85485 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerRpcClient.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerRpcClient.java @@ -27,8 +27,8 @@ import 
org.apache.dolphinscheduler.server.worker.processor.TaskExecuteResultAckP import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteRunningAckProcessor; import org.apache.dolphinscheduler.server.worker.processor.TaskRejectAckProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -36,10 +36,9 @@ import org.springframework.stereotype.Component; * This rpc client is only used to send message, will not receive message, all response message should send to {@link WorkerRpcServer}. */ @Component +@Slf4j public class WorkerRpcClient implements AutoCloseable { - private final Logger logger = LoggerFactory.getLogger(WorkerRpcClient.class); - @Autowired private TaskExecuteRunningAckProcessor taskExecuteRunningAckProcessor; @@ -52,7 +51,7 @@ public class WorkerRpcClient implements AutoCloseable { private NettyRemotingClient nettyRemotingClient; public void start() { - logger.info("Worker rpc client starting"); + log.info("Worker rpc client starting"); NettyClientConfig nettyClientConfig = new NettyClientConfig(); this.nettyRemotingClient = new NettyRemotingClient(nettyClientConfig); // we only use the client to handle the ack message, we can optimize this, send ack to the nettyServer. 
@@ -60,7 +59,7 @@ public class WorkerRpcClient implements AutoCloseable { taskExecuteRunningAckProcessor); this.nettyRemotingClient.registerProcessor(CommandType.TASK_EXECUTE_RESULT_ACK, taskExecuteResultAckProcessor); this.nettyRemotingClient.registerProcessor(CommandType.TASK_REJECT_ACK, taskRejectAckProcessor); - logger.info("Worker rpc client started"); + log.info("Worker rpc client started"); } public void send(Host host, Command command) throws RemotingException { @@ -68,8 +67,8 @@ public class WorkerRpcClient implements AutoCloseable { } public void close() { - logger.info("Worker rpc client closing"); + log.info("Worker rpc client closing"); nettyRemotingClient.close(); - logger.info("Worker rpc client closed"); + log.info("Worker rpc client closed"); } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerRpcServer.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerRpcServer.java index 26b2e93166..929e253553 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerRpcServer.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/rpc/WorkerRpcServer.java @@ -32,16 +32,15 @@ import org.apache.dolphinscheduler.server.worker.processor.TaskSavePointProcesso import java.io.Closeable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @Service +@Slf4j public class WorkerRpcServer implements Closeable { - private static final Logger LOGGER = LoggerFactory.getLogger(WorkerRpcServer.class); - @Autowired private TaskDispatchProcessor taskDispatchProcessor; @@ -72,7 +71,7 @@ public class WorkerRpcServer implements Closeable { private NettyRemotingServer nettyRemotingServer; public void start() { - LOGGER.info("Worker rpc server starting"); + 
log.info("Worker rpc server starting"); NettyServerConfig serverConfig = new NettyServerConfig(); serverConfig.setListenPort(workerConfig.getListenPort()); this.nettyRemotingServer = new NettyRemotingServer(serverConfig); @@ -84,21 +83,21 @@ public class WorkerRpcServer implements Closeable { this.nettyRemotingServer.registerProcessor(CommandType.TASK_REJECT_ACK, taskRejectAckProcessor); this.nettyRemotingServer.registerProcessor(CommandType.PROCESS_HOST_UPDATE_REQUEST, hostUpdateProcessor); this.nettyRemotingServer.registerProcessor(CommandType.TASK_SAVEPOINT_REQUEST, taskSavePointProcessor); - // logger server + // log server this.nettyRemotingServer.registerProcessor(CommandType.GET_APP_ID_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.registerProcessor(CommandType.GET_LOG_BYTES_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.registerProcessor(CommandType.ROLL_VIEW_LOG_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.registerProcessor(CommandType.VIEW_WHOLE_LOG_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.registerProcessor(CommandType.REMOVE_TAK_LOG_REQUEST, loggerRequestProcessor); this.nettyRemotingServer.start(); - LOGGER.info("Worker rpc server started"); + log.info("Worker rpc server started"); } @Override public void close() { - LOGGER.info("Worker rpc server closing"); + log.info("Worker rpc server closing"); this.nettyRemotingServer.close(); - LOGGER.info("Worker rpc server closed"); + log.info("Worker rpc server closed"); } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskCallbackImpl.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskCallbackImpl.java index a27d9e9abd..9ee176b3f9 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskCallbackImpl.java +++ 
b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskCallbackImpl.java @@ -33,7 +33,7 @@ import org.slf4j.LoggerFactory; @Builder public class TaskCallbackImpl implements TaskCallBack { - protected final Logger logger = + protected final Logger log = LoggerFactory.getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, TaskCallbackImpl.class)); private final WorkerMessageSender workerMessageSender; @@ -50,12 +50,12 @@ public class TaskCallbackImpl implements TaskCallBack { TaskExecutionContext taskExecutionContext = TaskExecutionContextCacheManager.getByTaskInstanceId(taskInstanceId); if (taskExecutionContext == null) { - logger.error("task execution context is empty, taskInstanceId: {}, applicationInfo:{}", taskInstanceId, + log.error("task execution context is empty, taskInstanceId: {}, applicationInfo:{}", taskInstanceId, applicationInfo); return; } - logger.info("send remote application info {}", applicationInfo); + log.info("send remote application info {}", applicationInfo); taskExecutionContext.setAppIds(applicationInfo.getAppIds()); workerMessageSender.sendMessageWithRetry(taskExecutionContext, masterAddress, CommandType.TASK_EXECUTE_RUNNING); } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerExecService.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerExecService.java index 6fa7122d2a..3238122e88 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerExecService.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerExecService.java @@ -24,8 +24,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadPoolExecutor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; import 
com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; @@ -33,13 +32,9 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; +@Slf4j public class WorkerExecService { - /** - * logger of WorkerExecService - */ - private static final Logger logger = LoggerFactory.getLogger(WorkerExecService.class); - private final ListeningExecutorService listeningExecutorService; /** @@ -72,7 +67,7 @@ public class WorkerExecService { @Override public void onFailure(Throwable throwable) { - logger.error("task execute failed, processInstanceId:{}, taskInstanceId:{}", + log.error("task execute failed, processInstanceId:{}, taskInstanceId:{}", taskExecuteThread.getTaskExecutionContext().getProcessInstanceId(), taskExecuteThread.getTaskExecutionContext().getTaskInstanceId(), throwable); diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerManagerThread.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerManagerThread.java index d30f5a53c1..c6f2125c72 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerManagerThread.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerManagerThread.java @@ -30,18 +30,17 @@ import java.util.concurrent.DelayQueue; import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import lombok.extern.slf4j.Slf4j; + import org.springframework.stereotype.Component; /** * Manage tasks */ @Component +@Slf4j public class WorkerManagerThread implements Runnable { - private final Logger logger = LoggerFactory.getLogger(WorkerManagerThread.class); - private final DelayQueue waitSubmitQueue; private final WorkerExecService workerExecService; private final 
WorkerConfig workerConfig; @@ -102,7 +101,7 @@ public class WorkerManagerThread implements Runnable { } if (waitSubmitQueue.size() > workerExecThreads) { - logger.warn("Wait submit queue is full, will retry submit task later"); + log.warn("Wait submit queue is full, will retry submit task later"); WorkerServerMetrics.incWorkerSubmitQueueIsFullCount(); // if waitSubmitQueue is full, it will wait 1s, then try add ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); @@ -114,11 +113,11 @@ public class WorkerManagerThread implements Runnable { } public void start() { - logger.info("Worker manager thread starting"); + log.info("Worker manager thread starting"); Thread thread = new Thread(this, this.getClass().getName()); thread.setDaemon(true); thread.start(); - logger.info("Worker manager thread started"); + log.info("Worker manager thread started"); } @Override @@ -134,12 +133,12 @@ public class WorkerManagerThread implements Runnable { workerExecService.submit(workerDelayTaskExecuteRunnable); } else { WorkerServerMetrics.incWorkerOverloadCount(); - logger.info("Exec queue is full, waiting submit queue {}, waiting exec queue size {}", + log.info("Exec queue is full, waiting submit queue {}, waiting exec queue size {}", this.getWaitSubmitQueueSize(), this.getThreadPoolQueueSize()); ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS); } } catch (Exception e) { - logger.error("An unexpected interrupt is happened, " + log.error("An unexpected interrupt is happened, " + "the exception will be ignored and this thread will continue to run", e); } } @@ -151,9 +150,9 @@ public class WorkerManagerThread implements Runnable { int taskInstanceId = workerTaskExecuteRunnable.getTaskExecutionContext().getTaskInstanceId(); try { workerTaskExecuteRunnable.cancelTask(); - logger.info("Cancel the taskInstance in worker {}", taskInstanceId); + log.info("Cancel the taskInstance in worker {}", taskInstanceId); } catch (Exception ex) { - logger.error("Cancel the taskInstance error {}", taskInstanceId, 
ex); + log.error("Cancel the taskInstance error {}", taskInstanceId, ex); } finally { TaskExecutionContextCacheManager.removeByTaskInstanceId(taskInstanceId); } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerTaskExecuteRunnable.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerTaskExecuteRunnable.java index da84f91027..631f7298d3 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerTaskExecuteRunnable.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/runner/WorkerTaskExecuteRunnable.java @@ -69,7 +69,7 @@ import com.google.common.base.Strings; public abstract class WorkerTaskExecuteRunnable implements Runnable { - protected final Logger logger = LoggerFactory + protected final Logger log = LoggerFactory .getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, WorkerTaskExecuteRunnable.class)); protected final TaskExecutionContext taskExecutionContext; @@ -104,7 +104,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { taskExecutionContext.getProcessInstanceId(), taskExecutionContext.getTaskInstanceId()); taskExecutionContext.setTaskLogName(taskLogName); - logger.info("Set task logger name: {}", taskLogName); + log.info("Set task log name: {}", taskLogName); } protected abstract void executeTask(TaskCallBack taskCallBack); @@ -118,7 +118,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { sendTaskResult(); TaskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); - logger.info("Remove the current task execute context from worker cache"); + log.info("Remove the current task execute context from worker cache"); clearTaskExecPathIfNeeded(); } @@ -128,7 +128,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { 
taskExecutionContext.setCurrentExecutionStatus(TaskExecutionStatus.FAILURE); taskExecutionContext.setEndTime(System.currentTimeMillis()); workerMessageSender.sendMessageWithRetry(taskExecutionContext, masterAddress, CommandType.TASK_EXECUTE_RESULT); - logger.info( + log.info( "Get a exception when execute the task, will send the task execute result to master, the current task execute result is {}", TaskExecutionStatus.FAILURE); } @@ -142,11 +142,11 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { LogUtils.getAppIds(taskExecutionContext.getLogPath(), taskExecutionContext.getExecutePath(), PropertyUtils.getString(APPID_COLLECT, DEFAULT_COLLECT_WAY)); if (CollectionUtils.isNotEmpty(appIds)) { - ProcessUtils.cancelApplication(appIds, logger, taskExecutionContext.getTenantCode(), + ProcessUtils.cancelApplication(appIds, log, taskExecutionContext.getTenantCode(), taskExecutionContext.getExecutePath()); } } catch (Exception e) { - logger.error( + log.error( "Task execute failed and cancel the application failed, this will not affect the taskInstance status, but you need to check manual", e); } @@ -161,7 +161,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { LogUtils.setWorkflowAndTaskInstanceIDMDC(taskExecutionContext.getProcessInstanceId(), taskExecutionContext.getTaskInstanceId()); - logger.info("Begin to pulling task"); + log.info("Begin to pulling task"); initializeTask(); @@ -171,7 +171,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { TaskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId()); workerMessageSender.sendMessageWithRetry(taskExecutionContext, masterAddress, CommandType.TASK_EXECUTE_RESULT); - logger.info( + log.info( "The current execute mode is dry run, will stop the subsequent process and set the taskInstance status to success"); return; } @@ -185,7 +185,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { 
afterExecute(); } catch (Throwable ex) { - logger.error("Task execute failed, due to meet an exception", ex); + log.error("Task execute failed, due to meet an exception", ex); afterThrowing(ex); } finally { LogUtils.removeWorkflowAndTaskInstanceIdMDC(); @@ -193,33 +193,33 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { } protected void initializeTask() { - logger.info("Begin to initialize task"); + log.info("Begin to initialize task"); long taskStartTime = System.currentTimeMillis(); taskExecutionContext.setStartTime(taskStartTime); - logger.info("Set task startTime: {}", taskStartTime); + log.info("Set task startTime: {}", taskStartTime); String taskAppId = String.format("%s_%s", taskExecutionContext.getProcessInstanceId(), taskExecutionContext.getTaskInstanceId()); taskExecutionContext.setTaskAppId(taskAppId); - logger.info("Set task appId: {}", taskAppId); + log.info("Set task appId: {}", taskAppId); - logger.info("End initialize task {}", JSONUtils.toPrettyJsonString(taskExecutionContext)); + log.info("End initialize task {}", JSONUtils.toPrettyJsonString(taskExecutionContext)); } protected void beforeExecute() { taskExecutionContext.setCurrentExecutionStatus(TaskExecutionStatus.RUNNING_EXECUTION); workerMessageSender.sendMessageWithRetry(taskExecutionContext, masterAddress, CommandType.TASK_EXECUTE_RUNNING); - logger.info("Set task status to {}", TaskExecutionStatus.RUNNING_EXECUTION); + log.info("Set task status to {}", TaskExecutionStatus.RUNNING_EXECUTION); TaskExecutionCheckerUtils.checkTenantExist(workerConfig, taskExecutionContext); - logger.info("TenantCode:{} check success", taskExecutionContext.getTenantCode()); + log.info("TenantCode:{} check success", taskExecutionContext.getTenantCode()); TaskExecutionCheckerUtils.createProcessLocalPathIfAbsent(taskExecutionContext); - logger.info("ProcessExecDir:{} check success", taskExecutionContext.getExecutePath()); + log.info("ProcessExecDir:{} check success", 
taskExecutionContext.getExecutePath()); - TaskExecutionCheckerUtils.downloadResourcesIfNeeded(storageOperate, taskExecutionContext, logger); - logger.info("Resources:{} check success", taskExecutionContext.getResources()); + TaskExecutionCheckerUtils.downloadResourcesIfNeeded(storageOperate, taskExecutionContext, log); + log.info("Resources:{} check success", taskExecutionContext.getResources()); TaskFilesTransferUtils.downloadUpstreamFiles(taskExecutionContext, storageOperate); @@ -233,13 +233,13 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { throw new TaskPluginException(String.format("%s task is null, please check the task plugin is correct", taskExecutionContext.getTaskType())); } - logger.info("Task plugin: {} create success", taskExecutionContext.getTaskType()); + log.info("Task plugin: {} create success", taskExecutionContext.getTaskType()); task.init(); - logger.info("Success initialized task plugin instance success"); + log.info("Success initialized task plugin instance success"); task.getParameters().setVarPool(taskExecutionContext.getVarPool()); - logger.info("Success set taskVarPool: {}", taskExecutionContext.getVarPool()); + log.info("Success set taskVarPool: {}", taskExecutionContext.getVarPool()); } @@ -247,7 +247,7 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { if (!task.getNeedAlert()) { return; } - logger.info("The current task need to send alert, begin to send alert"); + log.info("The current task need to send alert, begin to send alert"); TaskExecutionStatus status = task.getExitStatus(); TaskAlertInfo taskAlertInfo = task.getTaskAlertInfo(); int strategy = @@ -259,9 +259,9 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { strategy); try { workerRpcClient.send(Host.of(workerConfig.getAlertListenHost()), alertCommand.convert2Command()); - logger.info("Success send alert"); + log.info("Success send alert"); } catch (RemotingException e) { - logger.error("Send alert failed, 
alertCommand: {}", alertCommand, e); + log.error("Send alert failed, alertCommand: {}", alertCommand, e); } } @@ -275,40 +275,40 @@ public abstract class WorkerTaskExecuteRunnable implements Runnable { TaskFilesTransferUtils.uploadOutputFiles(taskExecutionContext, storageOperate); workerMessageSender.sendMessageWithRetry(taskExecutionContext, masterAddress, CommandType.TASK_EXECUTE_RESULT); - logger.info("Send task execute result to master, the current task status: {}", + log.info("Send task execute result to master, the current task status: {}", taskExecutionContext.getCurrentExecutionStatus()); } protected void clearTaskExecPathIfNeeded() { String execLocalPath = taskExecutionContext.getExecutePath(); if (!CommonUtils.isDevelopMode()) { - logger.info("The current execute mode isn't develop mode, will clear the task execute file: {}", + log.info("The current execute mode isn't develop mode, will clear the task execute file: {}", execLocalPath); // get exec dir if (Strings.isNullOrEmpty(execLocalPath)) { - logger.warn("The task execute file is {} no need to clear", taskExecutionContext.getTaskName()); + log.warn("The task execute file is {} no need to clear", taskExecutionContext.getTaskName()); return; } if (SINGLE_SLASH.equals(execLocalPath)) { - logger.warn("The task execute file is '/', direct deletion is not allowed"); + log.warn("The task execute file is '/', direct deletion is not allowed"); return; } try { org.apache.commons.io.FileUtils.deleteDirectory(new File(execLocalPath)); - logger.info("Success clear the task execute file: {}", execLocalPath); + log.info("Success clear the task execute file: {}", execLocalPath); } catch (IOException e) { if (e instanceof NoSuchFileException) { // this is expected } else { - logger.error( + log.error( "Delete task execute file: {} failed, this will not affect the task status, but you need to clear this manually", execLocalPath, e); } } } else { - logger.info("The current execute mode is develop mode, will not clear 
the task execute file: {}", + log.info("The current execute mode is develop mode, will not clear the task execute file: {}", execLocalPath); } } diff --git a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/utils/TaskFilesTransferUtils.java b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/utils/TaskFilesTransferUtils.java index 44eeeabe40..364e1d4b67 100644 --- a/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/utils/TaskFilesTransferUtils.java +++ b/dolphinscheduler-worker/src/main/java/org/apache/dolphinscheduler/server/worker/utils/TaskFilesTransferUtils.java @@ -48,7 +48,7 @@ import com.fasterxml.jackson.databind.JsonNode; public class TaskFilesTransferUtils { - protected final static Logger logger = LoggerFactory + protected final static Logger log = LoggerFactory .getLogger(String.format(TaskConstants.TASK_LOG_LOGGER_NAME_FORMAT, TaskFilesTransferUtils.class)); // tmp path in local path for transfer @@ -84,7 +84,7 @@ public class TaskFilesTransferUtils { return; } - logger.info("Upload output files ..."); + log.info("Upload output files ..."); for (Property property : localParamsProperty) { // get local file path String path = String.format("%s/%s", taskExecutionContext.getExecutePath(), property.getValue()); @@ -107,9 +107,9 @@ public class TaskFilesTransferUtils { storageOperate.getResourceFileName(taskExecutionContext.getTenantCode(), resourcePath); String resourceCRCWholePath = storageOperate.getResourceFileName(taskExecutionContext.getTenantCode(), resourceCRCPath); - logger.info("{} --- Local:{} to Remote:{}", property, srcPath, resourceWholePath); + log.info("{} --- Local:{} to Remote:{}", property, srcPath, resourceWholePath); storageOperate.upload(taskExecutionContext.getTenantCode(), srcPath, resourceWholePath, false, true); - logger.info("{} --- Local:{} to Remote:{}", "CRC file", srcCRCPath, resourceCRCWholePath); + log.info("{} --- Local:{} to 
Remote:{}", "CRC file", srcCRCPath, resourceCRCWholePath); storageOperate.upload(taskExecutionContext.getTenantCode(), srcCRCPath, resourceCRCWholePath, false, true); } catch (IOException ex) { @@ -155,11 +155,11 @@ public class TaskFilesTransferUtils { // data path to download packaged data String downloadTmpPath = String.format("%s/%s", executePath, DOWNLOAD_TMP); - logger.info("Download upstream files..."); + log.info("Download upstream files..."); for (Property property : localParamsProperty) { Property inVarPool = varPoolsMap.get(property.getValue()); if (inVarPool == null) { - logger.error("{} not in {}", property.getValue(), varPoolsMap.keySet()); + log.error("{} not in {}", property.getValue(), varPoolsMap.keySet()); throw new TaskException(String.format("Can not find upstream file using %s, please check the key", property.getValue())); } @@ -180,7 +180,7 @@ public class TaskFilesTransferUtils { try { String resourceWholePath = storageOperate.getResourceFileName(taskExecutionContext.getTenantCode(), resourcePath); - logger.info("{} --- Remote:{} to Local:{}", property, resourceWholePath, downloadPath); + log.info("{} --- Remote:{} to Local:{}", property, resourceWholePath, downloadPath); storageOperate.download(taskExecutionContext.getTenantCode(), resourceWholePath, downloadPath, false, true); } catch (IOException ex) { @@ -190,7 +190,7 @@ public class TaskFilesTransferUtils { // unpack if the data is packaged if (isPack) { File downloadFile = new File(downloadPath); - logger.info("Unpack {} to {}", downloadPath, targetPath); + log.info("Unpack {} to {}", downloadPath, targetPath); ZipUtil.unpack(downloadFile, new File(targetPath)); } } @@ -199,7 +199,7 @@ public class TaskFilesTransferUtils { try { org.apache.commons.io.FileUtils.deleteDirectory(new File(downloadTmpPath)); } catch (IOException e) { - logger.error("Delete DownloadTmpPath {} failed, this will not affect the task status", downloadTmpPath, e); + log.error("Delete DownloadTmpPath {} failed, 
this will not affect the task status", downloadTmpPath, e); } } @@ -278,7 +278,7 @@ public class TaskFilesTransferUtils { String newPath; if (file.isDirectory()) { newPath = file.getPath() + PACK_SUFFIX; - logger.info("Pack {} to {}", path, newPath); + log.info("Pack {} to {}", path, newPath); ZipUtil.pack(file, new File(newPath)); } else { newPath = path; diff --git a/dolphinscheduler-worker/src/test/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryClientTest.java b/dolphinscheduler-worker/src/test/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryClientTest.java index bbfeed5a50..9e864a13a7 100644 --- a/dolphinscheduler-worker/src/test/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryClientTest.java +++ b/dolphinscheduler-worker/src/test/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistryClientTest.java @@ -47,7 +47,7 @@ import com.google.common.collect.Sets; @ExtendWith(MockitoExtension.class) public class WorkerRegistryClientTest { - private static final Logger LOGGER = LoggerFactory.getLogger(WorkerRegistryClientTest.class); + private static final Logger log = LoggerFactory.getLogger(WorkerRegistryClientTest.class); private static final String TEST_WORKER_GROUP = "test";