Browse Source

[Improvement-13491] Use lombok @Slf4j annotation to generate logger (#13509)

3.2.0-release
seedscoder 2 years ago committed by GitHub
parent
commit
8d12dc0702
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 17
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java
  2. 12
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java
  3. 10
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java
  4. 14
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java
  5. 9
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java
  6. 17
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java
  7. 9
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java
  8. 5
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java
  9. 8
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java
  10. 19
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java
  11. 12
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java
  12. 8
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java
  13. 10
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java
  14. 5
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java
  15. 17
      dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java
  16. 11
      dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java
  17. 9
      dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java
  18. 29
      dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSenderService.java
  19. 21
      dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java
  20. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java
  21. 11
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java
  22. 7
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java
  23. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java
  24. 19
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java
  25. 20
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java
  26. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java
  27. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java
  28. 25
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java
  29. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java
  30. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java
  31. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java
  32. 11
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java
  33. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java
  34. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java
  35. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java
  36. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java
  37. 11
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/RateLimitInterceptor.java
  38. 10
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/k8s/K8sManager.java
  39. 27
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java
  40. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java
  41. 17
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java
  42. 15
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java
  43. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java
  44. 35
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java
  45. 23
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java
  46. 14
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java
  47. 47
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java
  48. 21
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java
  49. 61
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java
  50. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java
  51. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java
  52. 51
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java
  53. 7
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentWorkerGroupRelationServiceImpl.java
  54. 125
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java
  55. 55
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java
  56. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java
  57. 7
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java
  58. 236
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java
  59. 73
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java
  60. 67
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java
  61. 60
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java
  62. 21
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java
  63. 217
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java
  64. 64
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java
  65. 11
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java
  66. 109
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java
  67. 9
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupQueueServiceImpl.java
  68. 53
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java
  69. 25
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java
  70. 35
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java
  71. 45
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java
  72. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java
  73. 105
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java
  74. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java
  75. 29
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java
  76. 13
      dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java
  77. 12
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java
  78. 22
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java
  79. 9
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java
  80. 8
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java
  81. 11
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java
  82. 14
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java
  83. 20
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java
  84. 22
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java
  85. 10
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java
  86. 15
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java
  87. 38
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java
  88. 20
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java
  89. 24
      dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java
  90. 14
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java
  91. 9
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java
  92. 9
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java
  93. 4
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java
  94. 11
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java
  95. 12
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java
  96. 14
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java
  97. 10
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java
  98. 10
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java
  99. 11
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java
  100. 8
      dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDao.java
  101. Some files were not shown because too many files have changed in this diff. Show More

17
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-dingtalk/src/main/java/org/apache/dolphinscheduler/plugin/alert/dingtalk/DingTalkSender.java

@@ -47,8 +47,7 @@ import java.util.Objects;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* <p>
@@ -56,9 +55,9 @@ import org.slf4j.LoggerFactory;
* https://open.dingtalk.com/document/robots/customize-robot-security-settings
* </p>
*/
@Slf4j
public final class DingTalkSender {
private static final Logger logger = LoggerFactory.getLogger(DingTalkSender.class);
private final String url;
private final String keyword;
private final String secret;
@@ -127,13 +126,13 @@ public final class DingTalkSender {
if (null == result) {
alertResult.setMessage("send ding talk msg error");
logger.info("send ding talk msg error,ding talk server resp is null");
log.info("send ding talk msg error,ding talk server resp is null");
return alertResult;
}
DingTalkSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, DingTalkSendMsgResponse.class);
if (null == sendMsgResponse) {
alertResult.setMessage("send ding talk msg fail");
logger.info("send ding talk msg error,resp error");
log.info("send ding talk msg error,resp error");
return alertResult;
}
if (sendMsgResponse.errcode == 0) {
@@ -142,7 +141,7 @@ public final class DingTalkSender {
return alertResult;
}
alertResult.setMessage(String.format("alert send ding talk msg error : %s", sendMsgResponse.getErrmsg()));
logger.info("alert send ding talk msg error : {}", sendMsgResponse.getErrmsg());
log.info("alert send ding talk msg error : {}", sendMsgResponse.getErrmsg());
return alertResult;
}
@@ -159,7 +158,7 @@ public final class DingTalkSender {
String resp = sendMsg(title, content);
return checkSendDingTalkSendMsgResult(resp);
} catch (Exception e) {
logger.info("send ding talk alert msg exception : {}", e.getMessage());
log.info("send ding talk alert msg exception : {}", e.getMessage());
alertResult = new AlertResult();
alertResult.setStatus("false");
alertResult.setMessage("send ding talk alert fail.");
@@ -193,7 +192,7 @@ public final class DingTalkSender {
} finally {
response.close();
}
logger.info("Ding Talk send msg :{}, resp: {}", msg, resp);
log.info("Ding Talk send msg :{}, resp: {}", msg, resp);
return resp;
} finally {
httpClient.close();
@@ -320,7 +319,7 @@ public final class DingTalkSender {
byte[] signData = mac.doFinal(stringToSign.getBytes("UTF-8"));
sign = URLEncoder.encode(new String(Base64.encodeBase64(signData)), "UTF-8");
} catch (Exception e) {
logger.error("generate sign error, message:{}", e);
log.error("generate sign error, message:{}", e);
}
return url + "&timestamp=" + timestamp + "&sign=" + sign;
}

12
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/EmailAlertChannel.java

@@ -24,13 +24,11 @@ import org.apache.dolphinscheduler.alert.api.AlertResult;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public final class EmailAlertChannel implements AlertChannel {
private static final Logger logger = LoggerFactory.getLogger(EmailAlertChannel.class);
@Override
public AlertResult process(AlertInfo info) {
@@ -48,18 +46,18 @@ public final class EmailAlertChannel implements AlertChannel {
alertResult = new AlertResult();
alertResult.setStatus("false");
alertResult.setMessage("alert send error.");
logger.info("alert send error : {}", alertResult.getMessage());
log.info("alert send error : {}", alertResult.getMessage());
return alertResult;
}
flag = Boolean.parseBoolean(String.valueOf(alertResult.getStatus()));
if (flag) {
logger.info("alert send success");
log.info("alert send success");
alertResult.setMessage("email send success.");
} else {
alertResult.setMessage("alert send error.");
logger.info("alert send error : {}", alertResult.getMessage());
log.info("alert send error : {}", alertResult.getMessage());
}
return alertResult;

10
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/ExcelUtils.java

@@ -35,14 +35,12 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public final class ExcelUtils {
private static final int XLSX_WINDOW_ROW = 10000;
private static final Logger logger = LoggerFactory.getLogger(ExcelUtils.class);
private ExcelUtils() {
throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
}
@@ -57,14 +55,14 @@ public final class ExcelUtils {
public static void genExcelFile(String content, String title, String xlsFilePath) {
File file = new File(xlsFilePath);
if (!file.exists() && !file.mkdirs()) {
logger.error("Create xlsx directory error, path:{}", xlsFilePath);
log.error("Create xlsx directory error, path:{}", xlsFilePath);
throw new AlertEmailException("Create xlsx directory error");
}
List<LinkedHashMap> itemsList = JSONUtils.toList(content, LinkedHashMap.class);
if (CollectionUtils.isEmpty(itemsList)) {
logger.error("itemsList is null");
log.error("itemsList is null");
throw new AlertEmailException("itemsList is null");
}

14
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailSender.java

@@ -54,15 +54,13 @@ import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import javax.mail.internet.MimeUtility;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import com.sun.mail.smtp.SMTPProvider;
@Slf4j
public final class MailSender {
private static final Logger logger = LoggerFactory.getLogger(MailSender.class);
private final List<String> receivers;
private final List<String> receiverCcs;
private final String mailProtocol = "SMTP";
@@ -390,12 +388,12 @@ public final class MailSender {
public void deleteFile(File file) {
if (file.exists()) {
if (file.delete()) {
logger.info("delete success: {}", file.getAbsolutePath());
log.info("delete success: {}", file.getAbsolutePath());
} else {
logger.info("delete fail: {}", file.getAbsolutePath());
log.info("delete fail: {}", file.getAbsolutePath());
}
} else {
logger.info("file not exists: {}", file.getAbsolutePath());
log.info("file not exists: {}", file.getAbsolutePath());
}
}
@@ -403,7 +401,7 @@ public final class MailSender {
* handle exception
*/
private void handleException(AlertResult alertResult, Exception e) {
logger.error("Send email to {} failed", receivers, e);
log.error("Send email to {} failed", receivers, e);
alertResult.setMessage("Send email to {" + String.join(",", receivers) + "} failed," + e.toString());
}

9
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/template/DefaultHTMLTemplate.java

@@ -32,8 +32,8 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.configurationprocessor.json.JSONArray;
import org.springframework.boot.configurationprocessor.json.JSONException;
import org.springframework.boot.configurationprocessor.json.JSONTokener;
@@ -42,10 +42,9 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
@Slf4j
public class DefaultHTMLTemplate implements AlertTemplate {
public static final Logger logger = LoggerFactory.getLogger(DefaultHTMLTemplate.class);
@Override
public String getMessageFromTemplate(String content, ShowType showType, boolean showAll) {
@@ -128,7 +127,7 @@ public class DefaultHTMLTemplate implements AlertTemplate {
content = JSONUtils.toJsonString(Collections.singletonList(jsonNodes));
}
} catch (JSONException e) {
logger.error("alert content is null");
log.error("alert content is null");
}
ArrayNode list = JSONUtils.parseArray(content);
StringBuilder contents = new StringBuilder(100);

17
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-feishu/src/main/java/org/apache/dolphinscheduler/plugin/alert/feishu/FeiShuSender.java

@@ -36,14 +36,13 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import com.fasterxml.jackson.annotation.JsonProperty;
@Slf4j
public final class FeiShuSender {
private static final Logger logger = LoggerFactory.getLogger(FeiShuSender.class);
private final String url;
private final Boolean enableProxy;
@@ -84,14 +83,14 @@ public final class FeiShuSender {
if (org.apache.commons.lang3.StringUtils.isBlank(result)) {
alertResult.setMessage("send fei shu msg error");
logger.info("send fei shu msg error,fei shu server resp is null");
log.info("send fei shu msg error,fei shu server resp is null");
return alertResult;
}
FeiShuSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, FeiShuSendMsgResponse.class);
if (null == sendMsgResponse) {
alertResult.setMessage("send fei shu msg fail");
logger.info("send fei shu msg error,resp error");
log.info("send fei shu msg error,resp error");
return alertResult;
}
if (sendMsgResponse.statusCode == 0) {
@@ -100,7 +99,7 @@ public final class FeiShuSender {
return alertResult;
}
alertResult.setMessage(String.format("alert send fei shu msg error : %s", sendMsgResponse.getStatusMessage()));
logger.info("alert send fei shu msg error : {} ,Extra : {} ", sendMsgResponse.getStatusMessage(),
log.info("alert send fei shu msg error : {} ,Extra : {} ", sendMsgResponse.getStatusMessage(),
sendMsgResponse.getExtra());
return alertResult;
}
@@ -134,7 +133,7 @@ public final class FeiShuSender {
String resp = sendMsg(alertData);
return checkSendFeiShuSendMsgResult(resp);
} catch (Exception e) {
logger.info("send fei shu alert msg exception : {}", e.getMessage());
log.info("send fei shu alert msg exception : {}", e.getMessage());
alertResult = new AlertResult();
alertResult.setStatus("false");
alertResult.setMessage("send fei shu alert fail.");
@@ -157,7 +156,7 @@ public final class FeiShuSender {
int statusCode = response.getStatusLine().getStatusCode();
if (statusCode != HttpStatus.SC_OK) {
logger.error("send feishu message error, return http status code: {} ", statusCode);
log.error("send feishu message error, return http status code: {} ", statusCode);
}
String resp;
try {
@@ -167,7 +166,7 @@ public final class FeiShuSender {
} finally {
response.close();
}
logger.info("Fei Shu send title :{} ,content :{}, resp: {}", alertData.getTitle(), alertData.getContent(),
log.info("Fei Shu send title :{} ,content :{}, resp: {}", alertData.getTitle(), alertData.getContent(),
resp);
return resp;
} finally {

9
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-http/src/main/java/org/apache/dolphinscheduler/plugin/alert/http/HttpSender.java

@@ -39,14 +39,13 @@ import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import com.fasterxml.jackson.databind.node.ObjectNode;
@Slf4j
public final class HttpSender {
private static final Logger logger = LoggerFactory.getLogger(HttpSender.class);
private static final String URL_SPLICE_CHAR = "?";
/**
* request type post
@@ -96,7 +95,7 @@ public final class HttpSender {
alertResult.setStatus("true");
alertResult.setMessage(resp);
} catch (Exception e) {
logger.error("send http alert msg exception : {}", e.getMessage());
log.error("send http alert msg exception : {}", e.getMessage());
alertResult.setStatus("false");
alertResult.setMessage("send http request alert fail.");
}
@@ -170,7 +169,7 @@ public final class HttpSender {
StringEntity entity = new StringEntity(JSONUtils.toJsonString(objectNode), DEFAULT_CHARSET);
((HttpPost) httpRequest).setEntity(entity);
} catch (Exception e) {
logger.error("send http alert msg exception : {}", e.getMessage());
log.error("send http alert msg exception : {}", e.getMessage());
}
}
}

5
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-pagerduty/src/main/java/org/apache/dolphinscheduler/plugin/alert/pagerduty/PagerDutySender.java

@@ -34,14 +34,13 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.slf4j.Logger;
import lombok.extern.slf4j.Slf4j;
import com.google.common.base.Preconditions;
@Slf4j
public final class PagerDutySender {
private static final Logger log = org.slf4j.LoggerFactory.getLogger(PagerDutySender.class);
private final String integrationKey;
public PagerDutySender(Map<String, String> config) {

8
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ProcessUtils.java

@@ -19,13 +19,11 @@ package org.apache.dolphinscheduler.plugin.alert.script;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public final class ProcessUtils {
private static final Logger logger = LoggerFactory.getLogger(ProcessUtils.class);
private ProcessUtils() {
throw new UnsupportedOperationException("This is a utility class and cannot be instantiated");
}
@@ -50,7 +48,7 @@ public final class ProcessUtils {
errorStreamGobbler.start();
return process.waitFor();
} catch (IOException | InterruptedException e) {
logger.error("execute alert script error {}", e.getMessage());
log.error("execute alert script error {}", e.getMessage());
Thread.currentThread().interrupt();
}

19
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/ScriptSender.java

@@ -24,12 +24,11 @@ import org.apache.commons.lang3.StringUtils;
import java.io.File;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public final class ScriptSender {
private static final Logger logger = LoggerFactory.getLogger(ScriptSender.class);
private static final String ALERT_TITLE_OPTION = " -t ";
private static final String ALERT_CONTENT_OPTION = " -c ";
private static final String ALERT_USER_PARAMS_OPTION = " -p ";
@@ -57,7 +56,7 @@ public final class ScriptSender {
// If it is another type of alarm script can be added here, such as python
alertResult.setStatus("false");
logger.error("script type error: {}", scriptType);
log.error("script type error: {}", scriptType);
alertResult.setMessage("script type error : " + scriptType);
return alertResult;
}
@@ -73,30 +72,30 @@ public final class ScriptSender {
File shellScriptFile = new File(scriptPath);
// validate existence
if (!shellScriptFile.exists()) {
logger.error("shell script not exist : {}", scriptPath);
log.error("shell script not exist : {}", scriptPath);
alertResult.setMessage("shell script not exist : " + scriptPath);
return alertResult;
}
// validate is file
if (!shellScriptFile.isFile()) {
logger.error("shell script is not a file : {}", scriptPath);
log.error("shell script is not a file : {}", scriptPath);
alertResult.setMessage("shell script is not a file : " + scriptPath);
return alertResult;
}
// avoid command injection (RCE vulnerability)
if (userParams.contains("'")) {
logger.error("shell script illegal user params : {}", userParams);
log.error("shell script illegal user params : {}", userParams);
alertResult.setMessage("shell script illegal user params : " + userParams);
return alertResult;
}
if (title.contains("'")) {
logger.error("shell script illegal title : {}", title);
log.error("shell script illegal title : {}", title);
alertResult.setMessage("shell script illegal title : " + title);
return alertResult;
}
if (content.contains("'")) {
logger.error("shell script illegal content : {}", content);
log.error("shell script illegal content : {}", content);
alertResult.setMessage("shell script illegal content : " + content);
return alertResult;
}
@@ -111,7 +110,7 @@ public final class ScriptSender {
return alertResult;
}
alertResult.setMessage("send script alert msg error,exitCode is " + exitCode);
logger.info("send script alert msg error,exitCode is {}", exitCode);
log.info("send script alert msg error,exitCode is {}", exitCode);
return alertResult;
}

12
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-script/src/main/java/org/apache/dolphinscheduler/plugin/alert/script/StreamGobbler.java

@@ -22,13 +22,11 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public final class StreamGobbler extends Thread {
private static final Logger logger = LoggerFactory.getLogger(StreamGobbler.class);
private final InputStream inputStream;
StreamGobbler(InputStream inputStream) {
@@ -48,16 +46,16 @@ public final class StreamGobbler extends Thread {
output.append(System.getProperty("line.separator"));
}
if (output.length() > 0) {
logger.info("out put msg is{}", output);
log.info("out put msg is{}", output);
}
} catch (IOException e) {
logger.error("I/O error occurs {}", e.getMessage());
log.error("I/O error occurs {}", e.getMessage());
} finally {
try {
inputBufferReader.close();
inputStreamReader.close();
} catch (IOException e) {
logger.error("I/O error occurs {}", e.getMessage());
log.error("I/O error occurs {}", e.getMessage());
}
}
}

8
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-slack/src/main/java/org/apache/dolphinscheduler/plugin/alert/slack/SlackSender.java

@@ -38,15 +38,13 @@ import java.util.Map.Entry;
import java.util.Objects;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import com.google.common.base.Preconditions;
@Slf4j
public final class SlackSender {
private static final Logger logger = LoggerFactory.getLogger(SlackSender.class);
private final String webHookUrl;
private final String botName;
@@ -86,7 +84,7 @@ public final class SlackSender {
HttpEntity entity = response.getEntity();
return EntityUtils.toString(entity, "UTF-8");
} catch (Exception e) {
logger.error("Send message to slack error.", e);
log.error("Send message to slack error.", e);
return "System Exception";
}
}

10
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-telegram/src/main/java/org/apache/dolphinscheduler/plugin/alert/telegram/TelegramSender.java

@@ -43,15 +43,13 @@ import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import com.fasterxml.jackson.annotation.JsonProperty;
@Slf4j
public final class TelegramSender {
private static final Logger logger = LoggerFactory.getLogger(TelegramSender.class);
private static final String BOT_TOKEN_REGEX = "{botToken}";
private final String chatId;
@ -104,7 +102,7 @@ public final class TelegramSender {
String resp = sendInvoke(alertData.getTitle(), alertData.getContent());
result = parseRespToResult(resp);
} catch (Exception e) {
logger.warn("send telegram alert msg exception : {}", e.getMessage());
log.warn("send telegram alert msg exception : {}", e.getMessage());
result = new AlertResult();
result.setStatus("false");
result.setMessage(String.format("send telegram alert fail. %s", e.getMessage()));
@ -159,7 +157,7 @@ public final class TelegramSender {
} finally {
response.close();
}
logger.info("Telegram send title :{},content : {}, resp: {}", title, content, resp);
log.info("Telegram send title :{},content : {}, resp: {}", title, content, resp);
return resp;
} finally {
httpClient.close();

5
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-webexteams/src/main/java/org/apache/dolphinscheduler/plugin/alert/webexteams/WebexTeamsSender.java

@ -34,14 +34,13 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.slf4j.Logger;
import lombok.extern.slf4j.Slf4j;
import com.google.common.base.Preconditions;
@Slf4j
public final class WebexTeamsSender {
private static final Logger log = org.slf4j.LoggerFactory.getLogger(WebexTeamsSender.class);
private final String botAccessToken;
private final String roomId;
private final String toPersonId;

17
dolphinscheduler-alert/dolphinscheduler-alert-plugins/dolphinscheduler-alert-wechat/src/main/java/org/apache/dolphinscheduler/plugin/alert/wechat/WeChatSender.java

@ -44,12 +44,11 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public final class WeChatSender {
private static final Logger logger = LoggerFactory.getLogger(WeChatSender.class);
private static final String MUST_NOT_NULL = " must not null";
private static final String ALERT_STATUS = "false";
private static final String AGENT_ID_REG_EXP = "{agentId}";
@ -93,7 +92,7 @@ public final class WeChatSender {
} finally {
response.close();
}
logger.info("Enterprise WeChat send [{}], param:{}, resp:{}",
log.info("Enterprise WeChat send [{}], param:{}, resp:{}",
url, data, resp);
return resp;
}
@ -110,7 +109,7 @@ public final class WeChatSender {
if (StringUtils.isNotEmpty(content)) {
List<LinkedHashMap> mapItemsList = JSONUtils.toList(content, LinkedHashMap.class);
if (null == mapItemsList || mapItemsList.isEmpty()) {
logger.error("itemsList is null");
log.error("itemsList is null");
throw new RuntimeException("itemsList is null");
}
@ -175,13 +174,13 @@ public final class WeChatSender {
if (null == result) {
alertResult.setMessage("we chat send fail");
logger.info("send we chat msg error,resp is null");
log.info("send we chat msg error,resp is null");
return alertResult;
}
WeChatSendMsgResponse sendMsgResponse = JSONUtils.parseObject(result, WeChatSendMsgResponse.class);
if (null == sendMsgResponse) {
alertResult.setMessage("we chat send fail");
logger.info("send we chat msg error,resp error");
log.info("send we chat msg error,resp error");
return alertResult;
}
if (sendMsgResponse.errcode == 0) {
@ -229,7 +228,7 @@ public final class WeChatSender {
try {
return checkWeChatSendMsgResult(post(enterpriseWeChatPushUrlReplace, msgJson));
} catch (Exception e) {
logger.info("send we chat alert msg exception : {}", e.getMessage());
log.info("send we chat alert msg exception : {}", e.getMessage());
alertResult = new AlertResult();
alertResult.setMessage("send we chat alert fail");
alertResult.setStatus(ALERT_STATUS);
@ -250,7 +249,7 @@ public final class WeChatSender {
try {
return get(weChatTokenUrlReplace);
} catch (IOException e) {
logger.info("we chat alert get token error{}", e.getMessage());
log.info("we chat alert get token error{}", e.getMessage());
}
return null;
}

11
dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertPluginManager.java

@ -37,17 +37,16 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public final class AlertPluginManager {
private static final Logger logger = LoggerFactory.getLogger(AlertPluginManager.class);
private final PluginDao pluginDao;
public AlertPluginManager(PluginDao pluginDao) {
@ -79,11 +78,11 @@ public final class AlertPluginManager {
String name = entry.getKey();
AlertChannelFactory factory = entry.getValue();
logger.info("Registering alert plugin: {} - {}", name, factory.getClass());
log.info("Registering alert plugin: {} - {}", name, factory.getClass());
final AlertChannel alertChannel = factory.create();
logger.info("Registered alert plugin: {} - {}", name, factory.getClass());
log.info("Registered alert plugin: {} - {}", name, factory.getClass());
final List<PluginParams> params = new ArrayList<>(factory.params());
params.add(0, warningTypeParams);

9
dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertRequestProcessor.java

@ -26,17 +26,16 @@ import org.apache.dolphinscheduler.remote.command.alert.AlertSendResponseCommand
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.JsonSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import io.netty.channel.Channel;
@Component
@Slf4j
public final class AlertRequestProcessor implements NettyRequestProcessor {
private static final Logger logger = LoggerFactory.getLogger(AlertRequestProcessor.class);
private final AlertSenderService alertSenderService;
public AlertRequestProcessor(AlertSenderService alertSenderService) {
@ -51,7 +50,7 @@ public final class AlertRequestProcessor implements NettyRequestProcessor {
AlertSendRequestCommand alertSendRequestCommand = JsonSerializer.deserialize(
command.getBody(), AlertSendRequestCommand.class);
logger.info("Received command : {}", alertSendRequestCommand);
log.info("Received command : {}", alertSendRequestCommand);
AlertSendResponseCommand alertSendResponseCommand = alertSenderService.syncHandler(
alertSendRequestCommand.getGroupId(),

29
dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertSenderService.java

@ -49,17 +49,16 @@ import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import com.google.common.collect.Lists;
@Service
@Slf4j
public final class AlertSenderService extends Thread {
private static final Logger logger = LoggerFactory.getLogger(AlertSenderService.class);
private final AlertDao alertDao;
private final AlertPluginManager alertPluginManager;
private final AlertConfig alertConfig;
@ -78,23 +77,23 @@ public final class AlertSenderService extends Thread {
@Override
public void run() {
logger.info("Alert sender thread started");
log.info("Alert sender thread started");
while (!ServerLifeCycleManager.isStopped()) {
try {
List<Alert> alerts = alertDao.listPendingAlerts();
if (CollectionUtils.isEmpty(alerts)) {
logger.debug("There is not waiting alerts");
log.debug("There is not waiting alerts");
continue;
}
AlertServerMetrics.registerPendingAlertGauge(alerts::size);
this.send(alerts);
} catch (Exception e) {
logger.error("Alert sender thread meet an exception", e);
log.error("Alert sender thread meet an exception", e);
} finally {
ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS * 5L);
}
}
logger.info("Alert sender thread stopped");
log.info("Alert sender thread stopped");
}
public void send(List<Alert> alerts) {
@ -104,7 +103,7 @@ public final class AlertSenderService extends Thread {
int alertGroupId = Optional.ofNullable(alert.getAlertGroupId()).orElse(0);
List<AlertPluginInstance> alertInstanceList = alertDao.listInstanceByAlertGroupId(alertGroupId);
if (CollectionUtils.isEmpty(alertInstanceList)) {
logger.error("send alert msg fail,no bind plugin instance.");
log.error("send alert msg fail,no bind plugin instance.");
List<AlertResult> alertResults = Lists.newArrayList(new AlertResult("false",
"no bind plugin instance"));
alertDao.updateAlert(AlertStatus.EXECUTION_FAILURE, JSONUtils.toJsonString(alertResults), alertId);
@ -184,7 +183,7 @@ public final class AlertSenderService extends Thread {
alertSendResponseResult.setSuccess(false);
alertSendResponseResult.setMessage(message);
sendResponseResults.add(alertSendResponseResult);
logger.error("Alert GroupId {} send error : not found alert instance", alertGroupId);
log.error("Alert GroupId {} send error : not found alert instance", alertGroupId);
return new AlertSendResponseCommand(false, sendResponseResults);
}
@ -216,7 +215,7 @@ public final class AlertSenderService extends Thread {
String message = String.format("Alert Plugin %s send error: the channel doesn't exist, pluginDefineId: %s",
pluginInstanceName,
pluginDefineId);
logger.error("Alert Plugin {} send error : not found plugin {}", pluginInstanceName, pluginDefineId);
log.error("Alert Plugin {} send error : not found plugin {}", pluginInstanceName, pluginDefineId);
return new AlertResult("false", message);
}
AlertChannel alertChannel = alertChannelOptional.get();
@ -232,7 +231,7 @@ public final class AlertSenderService extends Thread {
if (warningType == null) {
String message = String.format("Alert Plugin %s send error : plugin warnType is null", pluginInstanceName);
logger.error("Alert Plugin {} send error : plugin warnType is null", pluginInstanceName);
log.error("Alert Plugin {} send error : plugin warnType is null", pluginInstanceName);
return new AlertResult("false", message);
}
@ -258,7 +257,7 @@ public final class AlertSenderService extends Thread {
String message = String.format(
"Alert Plugin %s send ignore warning type not match: plugin warning type is %s, alert data warning type is %s",
pluginInstanceName, warningType.getCode(), alertData.getWarnType());
logger.info(
log.info(
"Alert Plugin {} send ignore warning type not match: plugin warning type is {}, alert data warning type is {}",
pluginInstanceName, warningType.getCode(), alertData.getWarnType());
return new AlertResult("false", message);
@ -292,11 +291,11 @@ public final class AlertSenderService extends Thread {
}
return alertResult;
} catch (InterruptedException e) {
logger.error("send alert error alert data id :{},", alertData.getId(), e);
log.error("send alert error alert data id :{},", alertData.getId(), e);
Thread.currentThread().interrupt();
return new AlertResult("false", e.getMessage());
} catch (Exception e) {
logger.error("send alert error alert data id :{},", alertData.getId(), e);
log.error("send alert error alert data id :{},", alertData.getId(), e);
return new AlertResult("false", e.getMessage());
}
}

21
dolphinscheduler-alert/dolphinscheduler-alert-server/src/main/java/org/apache/dolphinscheduler/alert/AlertServer.java

@ -29,8 +29,8 @@ import java.io.Closeable;
import javax.annotation.PreDestroy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.event.ApplicationReadyEvent;
@ -39,10 +39,9 @@ import org.springframework.context.event.EventListener;
@SpringBootApplication
@ComponentScan("org.apache.dolphinscheduler")
@Slf4j
public class AlertServer implements Closeable {
private static final Logger logger = LoggerFactory.getLogger(AlertServer.class);
private final PluginDao pluginDao;
private final AlertSenderService alertSenderService;
private final AlertRequestProcessor alertRequestProcessor;
@ -66,12 +65,12 @@ public class AlertServer implements Closeable {
@EventListener
public void run(ApplicationReadyEvent readyEvent) {
logger.info("Alert server is staring ...");
log.info("Alert server is staring ...");
checkTable();
startServer();
alertSenderService.start();
logger.info("Alert server is started ...");
log.info("Alert server is started ...");
}
@Override
@ -91,26 +90,26 @@ public class AlertServer implements Closeable {
// set stop signal is true
// execute only once
if (!ServerLifeCycleManager.toStopped()) {
logger.warn("AlterServer is already stopped");
log.warn("AlterServer is already stopped");
return;
}
logger.info("Alert server is stopping, cause: {}", cause);
log.info("Alert server is stopping, cause: {}", cause);
// thread sleep 3 seconds for thread quietly stop
ThreadUtils.sleep(Constants.SERVER_CLOSE_WAIT_TIME.toMillis());
// close
this.nettyRemotingServer.close();
logger.info("Alter server stopped, cause: {}", cause);
log.info("Alter server stopped, cause: {}", cause);
} catch (Exception e) {
logger.error("Alert server stop failed, cause: {}", cause, e);
log.error("Alert server stop failed, cause: {}", cause, e);
}
}
protected void checkTable() {
if (!pluginDao.checkPluginDefineTableExist()) {
logger.error("Plugin Define Table t_ds_plugin_define Not Exist . Please Create it First !");
log.error("Plugin Define Table t_ds_plugin_define Not Exist . Please Create it First !");
System.exit(1);
}
}

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/ApiApplicationServer.java

@ -28,8 +28,8 @@ import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@ -41,10 +41,9 @@ import org.springframework.context.event.EventListener;
@ServletComponentScan
@SpringBootApplication
@ComponentScan("org.apache.dolphinscheduler")
@Slf4j
public class ApiApplicationServer {
private final Logger logger = LoggerFactory.getLogger(ApiApplicationServer.class);
@Autowired
private TaskPluginManager taskPluginManager;
@ -57,7 +56,7 @@ public class ApiApplicationServer {
@EventListener
public void run(ApplicationReadyEvent readyEvent) {
logger.info("Received spring application context ready event will load taskPlugin and write to DB");
log.info("Received spring application context ready event will load taskPlugin and write to DB");
// install task plugin
taskPluginManager.loadPlugin();
for (Map.Entry<String, TaskChannelFactory> entry : taskPluginManager.getTaskChannelFactoryMap().entrySet()) {

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/AccessLogAspect.java

@ -34,23 +34,22 @@ import java.util.stream.IntStream;
import javax.servlet.http.HttpServletRequest;
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
@Aspect
@Component
@Slf4j
public class AccessLogAspect {
private static final Logger logger = LoggerFactory.getLogger(AccessLogAspect.class);
private static final String TRACE_ID = "traceId";
public static final String sensitiveDataRegEx = "(password=[\'\"]+)(\\S+)([\'\"]+)";
@ -90,7 +89,7 @@ public class AccessLogAspect {
String argsString = parseArgs(proceedingJoinPoint, annotation);
// handle sensitive data in the string
argsString = handleSensitiveData(argsString);
logger.info("REQUEST TRACE_ID:{}, LOGIN_USER:{}, URI:{}, METHOD:{}, HANDLER:{}, ARGS:{}",
log.info("REQUEST TRACE_ID:{}, LOGIN_USER:{}, URI:{}, METHOD:{}, HANDLER:{}, ARGS:{}",
traceId,
userName,
request.getRequestURI(),
@ -106,7 +105,7 @@ public class AccessLogAspect {
// log response
if (!annotation.ignoreResponse()) {
logger.info("RESPONSE TRACE_ID:{}, BODY:{}, REQUEST DURATION:{} milliseconds", traceId, ob,
log.info("RESPONSE TRACE_ID:{}, BODY:{}, REQUEST DURATION:{} milliseconds", traceId, ob,
(System.currentTimeMillis() - startTime));
}

7
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/aspect/CacheEvictAspect.java

@ -28,13 +28,13 @@ import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
@ -48,10 +48,9 @@ import org.springframework.stereotype.Component;
*/
@Aspect
@Component
@Slf4j
public class CacheEvictAspect {
private static final Logger logger = LoggerFactory.getLogger(CacheEvictAspect.class);
/**
* symbol of spring el
*/

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/audit/AuditPublishService.java

@ -25,12 +25,13 @@ import java.util.concurrent.LinkedBlockingQueue;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public class AuditPublishService {
private BlockingQueue<AuditMessage> auditMessageQueue = new LinkedBlockingQueue<>();
@ -41,8 +42,6 @@ public class AuditPublishService {
@Autowired
private AuditConfiguration auditConfiguration;
private static final Logger logger = LoggerFactory.getLogger(AuditPublishService.class);
/**
* create a daemon thread to process the message queue
*/
@ -63,7 +62,7 @@ public class AuditPublishService {
*/
public void publish(AuditMessage message) {
if (auditConfiguration.getEnabled() && !auditMessageQueue.offer(message)) {
logger.error("Publish audit message failed, message:{}", message);
log.error("Publish audit message failed, message:{}", message);
}
}
@ -79,11 +78,11 @@ public class AuditPublishService {
try {
subscriber.execute(message);
} catch (Exception e) {
logger.error("Consume audit message failed, message:{}", message, e);
log.error("Consume audit message failed, message:{}", message, e);
}
}
} catch (InterruptedException e) {
logger.error("Consume audit message failed, message:{}", message, e);
log.error("Consume audit message failed, message:{}", message, e);
Thread.currentThread().interrupt();
break;
}

19
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/DynamicTaskTypeConfiguration.java

@ -27,9 +27,8 @@ import java.util.List;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.PropertySource;
@ -41,9 +40,9 @@ import org.springframework.stereotype.Component;
@ConfigurationProperties(prefix = "dynamic-task")
@Getter
@Setter
@Slf4j
public class DynamicTaskTypeConfiguration {
private static final Logger logger = LoggerFactory.getLogger(DynamicTaskTypeConfiguration.class);
private static final List<String> defaultTaskCategories =
Arrays.asList(Constants.TYPE_UNIVERSAL, Constants.TYPE_DATA_INTEGRATION, Constants.TYPE_CLOUD,
Constants.TYPE_LOGIC, Constants.TYPE_DATA_QUALITY, Constants.TYPE_OTHER,
@ -83,12 +82,12 @@ public class DynamicTaskTypeConfiguration {
}
public void printDefaultTypes() {
logger.info("support default universal dynamic task types: {}", universal);
logger.info("support default cloud dynamic task types: {}", cloud);
logger.info("support default logic dynamic task types: {}", logic);
logger.info("support default dataIntegration dynamic task types: {}", dataIntegration);
logger.info("support default dataQuality dynamic task types: {}", dataQuality);
logger.info("support default machineLearning dynamic task types: {}", machineLearning);
logger.info("support default other dynamic task types: {}", other);
log.info("support default universal dynamic task types: {}", universal);
log.info("support default cloud dynamic task types: {}", cloud);
log.info("support default logic dynamic task types: {}", logic);
log.info("support default dataIntegration dynamic task types: {}", dataIntegration);
log.info("support default dataQuality dynamic task types: {}", dataQuality);
log.info("support default machineLearning dynamic task types: {}", machineLearning);
log.info("support default other dynamic task types: {}", other);
}
}

20
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/configuration/TaskTypeConfiguration.java

@ -28,9 +28,8 @@ import java.util.List;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.PropertySource;
@ -42,10 +41,9 @@ import org.springframework.stereotype.Component;
@ConfigurationProperties(prefix = "task")
@Getter
@Setter
@Slf4j
public class TaskTypeConfiguration {
private static final Logger logger = LoggerFactory.getLogger(TaskTypeConfiguration.class);
private List<String> universal;
private List<String> cloud;
private List<String> logic;
@ -74,12 +72,12 @@ public class TaskTypeConfiguration {
}
public void printDefaultTypes() {
logger.info("support default universal task types: {}", universal);
logger.info("support default cloud task types: {}", cloud);
logger.info("support default logic task types: {}", logic);
logger.info("support default dataIntegration task types: {}", dataIntegration);
logger.info("support default dataQuality task types: {}", dataQuality);
logger.info("support default machineLearning task types: {}", machineLearning);
logger.info("support default other task types: {}", other);
log.info("support default universal task types: {}", universal);
log.info("support default cloud task types: {}", cloud);
log.info("support default logic task types: {}", logic);
log.info("support default dataIntegration task types: {}", dataIntegration);
log.info("support default dataQuality task types: {}", dataQuality);
log.info("support default machineLearning task types: {}", machineLearning);
log.info("support default other task types: {}", other);
}
}

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertGroupController.java

@ -35,8 +35,8 @@ import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
@ -62,10 +62,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "ALERT_GROUP_TAG")
@RestController
@RequestMapping("/alert-groups")
@Slf4j
public class AlertGroupController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(AlertGroupController.class);
@Autowired
private AlertGroupService alertGroupService;
@ -238,7 +237,7 @@ public class AlertGroupController extends BaseController {
boolean exist = alertGroupService.existGroupName(groupName);
Result result = new Result();
if (exist) {
logger.error("group {} has exist, can't create again.", groupName);
log.error("group {} has exist, can't create again.", groupName);
result.setCode(Status.ALERT_GROUP_EXIST.getCode());
result.setMsg(Status.ALERT_GROUP_EXIST.getMsg());
} else {

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/AlertPluginInstanceController.java

@ -35,8 +35,8 @@ import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
@ -62,10 +62,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "ALERT_PLUGIN_INSTANCE_TAG")
@RestController
@RequestMapping("alert-plugin-instances")
@Slf4j
public class AlertPluginInstanceController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(AlertPluginInstanceController.class);
@Autowired
private AlertPluginInstanceService alertPluginInstanceService;
@ -200,7 +199,7 @@ public class AlertPluginInstanceController extends BaseController {
boolean exist = alertPluginInstanceService.checkExistPluginInstanceName(alertInstanceName);
if (exist) {
logger.error("alert plugin instance {} has exist, can't create again.", alertInstanceName);
log.error("alert plugin instance {} has exist, can't create again.", alertInstanceName);
return Result.error(Status.PLUGIN_INSTANCE_ALREADY_EXISTS);
} else {
return Result.success();

25
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java

@ -51,8 +51,8 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
@ -76,10 +76,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "EXECUTOR_TAG")
@RestController
@RequestMapping("projects/{projectCode}/executors")
@Slf4j
public class ExecutorController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class);
@Autowired
private ExecutorService execService;
@ -239,7 +238,7 @@ public class ExecutorController extends BaseController {
@RequestParam(value = "complementDependentMode", required = false) ComplementDependentMode complementDependentMode) {
if (timeout == null) {
logger.debug("Parameter timeout set to {} due to null.", Constants.MAX_TASK_TIMEOUT);
log.debug("Parameter timeout set to {} due to null.", Constants.MAX_TASK_TIMEOUT);
timeout = Constants.MAX_TASK_TIMEOUT;
}
@ -249,7 +248,7 @@ public class ExecutorController extends BaseController {
}
if (complementDependentMode == null) {
logger.debug("Parameter complementDependentMode set to {} due to null.", ComplementDependentMode.OFF_MODE);
log.debug("Parameter complementDependentMode set to {} due to null.", ComplementDependentMode.OFF_MODE);
complementDependentMode = ComplementDependentMode.OFF_MODE;
}
@ -268,11 +267,11 @@ public class ExecutorController extends BaseController {
complementDependentMode, null);
if (!Status.SUCCESS.equals(result.get(Constants.STATUS))) {
logger.error("Process definition start failed, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process definition start failed, projectCode:{}, processDefinitionCode:{}.", projectCode,
processDefinitionCode);
startFailedProcessDefinitionCodeList.add(String.valueOf(processDefinitionCode));
} else {
logger.info("Start process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.info("Start process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode,
processDefinitionCode);
}
}
@ -307,7 +306,7 @@ public class ExecutorController extends BaseController {
@Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
@RequestParam("processInstanceId") Integer processInstanceId,
@RequestParam("executeType") ExecuteType executeType) {
logger.info("Start to execute process instance, projectCode:{}, processInstanceId:{}.", projectCode,
log.info("Start to execute process instance, projectCode:{}, processInstanceId:{}.", projectCode,
processInstanceId);
Map<String, Object> result = execService.execute(loginUser, projectCode, processInstanceId, executeType);
return returnDataList(result);
@ -347,11 +346,11 @@ public class ExecutorController extends BaseController {
Map<String, Object> singleResult =
execService.execute(loginUser, projectCode, processInstanceId, executeType);
if (!Status.SUCCESS.equals(singleResult.get(Constants.STATUS))) {
logger.error("Start to execute process instance error, projectCode:{}, processInstanceId:{}.",
log.error("Start to execute process instance error, projectCode:{}, processInstanceId:{}.",
projectCode, processInstanceId);
executeFailedIdList.add((String) singleResult.get(Constants.MSG));
} else
logger.info("Start to execute process instance complete, projectCode:{}, processInstanceId:{}.",
log.info("Start to execute process instance complete, projectCode:{}, processInstanceId:{}.",
projectCode, processInstanceId);
} catch (Exception e) {
executeFailedIdList
@ -445,7 +444,7 @@ public class ExecutorController extends BaseController {
startParamMap = JSONUtils.toMap(startParams);
}
logger.info("Start to execute stream task instance, projectCode:{}, taskDefinitionCode:{}, taskVersion:{}.",
log.info("Start to execute stream task instance, projectCode:{}, taskDefinitionCode:{}, taskVersion:{}.",
projectCode, code, version);
Map<String, Object> result = execService.execStreamTaskInstance(loginUser, projectCode, code, version,
warningGroupId, workerGroup, environmentCode, startParamMap, dryRun);
@ -477,7 +476,7 @@ public class ExecutorController extends BaseController {
@RequestParam("processInstanceId") Integer processInstanceId,
@RequestParam("startNodeList") String startNodeList,
@RequestParam("taskDependType") TaskDependType taskDependType) {
logger.info("Start to execute task in process instance, projectCode:{}, processInstanceId:{}.",
log.info("Start to execute task in process instance, projectCode:{}, processInstanceId:{}.",
projectCode,
processInstanceId);
return execService.executeTask(loginUser, projectCode, processInstanceId, startNodeList, taskDependType);

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java

@ -53,8 +53,8 @@ import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
@ -82,10 +82,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "PROCESS_DEFINITION_TAG")
@RestController
@RequestMapping("projects/{projectCode}/process-definition")
@Slf4j
public class ProcessDefinitionController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionController.class);
@Autowired
private ProcessDefinitionService processDefinitionService;
@ -696,7 +695,7 @@ public class ProcessDefinitionController extends BaseController {
try {
processDefinitionService.batchExportProcessDefinitionByCodes(loginUser, projectCode, codes, response);
} catch (Exception e) {
logger.error(Status.BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(), e);
log.error(Status.BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(), e);
}
}

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessInstanceController.java

@ -39,8 +39,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
@ -66,10 +66,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "PROCESS_INSTANCE_TAG")
@RestController
@RequestMapping("/projects/{projectCode}/process-instances")
@Slf4j
public class ProcessInstanceController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class);
@Autowired
private ProcessInstanceService processInstanceService;
@ -400,7 +399,7 @@ public class ProcessInstanceController extends BaseController {
try {
processInstanceService.deleteProcessInstanceById(loginUser, processInstanceId);
} catch (Exception e) {
logger.error("Delete workflow instance: {} error", strProcessInstanceId, e);
log.error("Delete workflow instance: {} error", strProcessInstanceId, e);
deleteFailedIdList
.add(MessageFormat.format(Status.PROCESS_INSTANCE_ERROR.getMsg(), strProcessInstanceId));
}

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProjectController.java

@ -35,8 +35,8 @@ import org.apache.dolphinscheduler.common.constants.Constants;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.plugin.task.api.utils.ParameterUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.DeleteMapping;
@ -62,10 +62,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "PROJECT_TAG")
@RestController
@RequestMapping("projects")
@Slf4j
public class ProjectController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ProjectController.class);
@Autowired
private ProjectService projectService;
@ -166,7 +165,7 @@ public class ProjectController extends BaseController {
Result result = checkPageParams(pageNo, pageSize);
if (!result.checkResult()) {
logger.warn("Pagination parameters check failed, pageNo:{}, pageSize:{}", pageNo, pageSize);
log.warn("Pagination parameters check failed, pageNo:{}, pageSize:{}", pageNo, pageSize);
return result;
}
searchVal = ParameterUtils.handleEscapes(searchVal);

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java

@ -61,8 +61,8 @@ import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
@ -93,10 +93,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "RESOURCES_TAG")
@RestController
@RequestMapping("resources")
@Slf4j
public class ResourcesController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class);
@Autowired
private ResourcesService resourceService;
@Autowired
@ -414,7 +413,7 @@ public class ResourcesController extends BaseController {
@RequestParam(value = "content") String content,
@RequestParam(value = "currentDir") String currentDir) {
if (StringUtils.isEmpty(content)) {
logger.error("resource file contents are not allowed to be empty");
log.error("resource file contents are not allowed to be empty");
return error(RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg());
}
return resourceService.onlineCreateResource(loginUser, type, fileName, fileSuffix, description, content,
@ -442,7 +441,7 @@ public class ResourcesController extends BaseController {
@RequestParam(value = "tenantCode") String tenantCode,
@RequestParam(value = "content") String content) {
if (StringUtils.isEmpty(content)) {
logger.error("The resource file contents are not allowed to be empty");
log.error("The resource file contents are not allowed to be empty");
return error(RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg());
}
return resourceService.updateResourceContent(loginUser, fullName, tenantCode, content);

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UsersController.java

@ -46,8 +46,8 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
@ -71,10 +71,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "USERS_TAG")
@RestController
@RequestMapping("/users")
@Slf4j
public class UsersController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(UsersController.class);
@Autowired
private UsersService usersService;
@ -539,7 +538,7 @@ public class UsersController extends BaseController {
Map<String, Object> result = usersService.authorizedUser(loginUser, alertgroupId);
return returnDataList(result);
} catch (Exception e) {
logger.error(Status.AUTHORIZED_USER_ERROR.getMsg(), e);
log.error(Status.AUTHORIZED_USER_ERROR.getMsg(), e);
return error(Status.AUTHORIZED_USER_ERROR.getCode(), Status.AUTHORIZED_USER_ERROR.getMsg());
}
}

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/WorkFlowLineageController.java

@ -36,8 +36,8 @@ import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
@ -61,10 +61,9 @@ import io.swagger.v3.oas.annotations.tags.Tag;
@Tag(name = "WORK_FLOW_LINEAGE_TAG")
@RestController
@RequestMapping("projects/{projectCode}/lineages")
@Slf4j
public class WorkFlowLineageController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(WorkFlowLineageController.class);
@Autowired
private WorkFlowLineageService workFlowLineageService;
@ -80,7 +79,7 @@ public class WorkFlowLineageController extends BaseController {
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByName(projectCode, workFlowName);
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e);
log.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e);
return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
}
}
@ -96,7 +95,7 @@ public class WorkFlowLineageController extends BaseController {
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineageByCode(projectCode, workFlowCode);
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e);
log.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e);
return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
}
}
@ -111,7 +110,7 @@ public class WorkFlowLineageController extends BaseController {
Map<String, Object> result = workFlowLineageService.queryWorkFlowLineage(projectCode);
return returnDataList(result);
} catch (Exception e) {
logger.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e);
log.error(QUERY_WORKFLOW_LINEAGE_ERROR.getMsg(), e);
return error(QUERY_WORKFLOW_LINEAGE_ERROR.getCode(), QUERY_WORKFLOW_LINEAGE_ERROR.getMsg());
}
}

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandler.java

@ -20,8 +20,8 @@ package org.apache.dolphinscheduler.api.exceptions;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestControllerAdvice;
@ -32,13 +32,12 @@ import org.springframework.web.method.HandlerMethod;
*/
@RestControllerAdvice
@ResponseBody
@Slf4j
public class ApiExceptionHandler {
private static final Logger logger = LoggerFactory.getLogger(ApiExceptionHandler.class);
@ExceptionHandler(ServiceException.class)
public Result exceptionHandler(ServiceException e, HandlerMethod hm) {
logger.error("ServiceException: ", e);
log.error("ServiceException: ", e);
return new Result(e.getCode(), e.getMessage());
}
@ -46,11 +45,11 @@ public class ApiExceptionHandler {
public Result exceptionHandler(Exception e, HandlerMethod hm) {
ApiException ce = hm.getMethodAnnotation(ApiException.class);
if (ce == null) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
return Result.errorWithArgs(Status.INTERNAL_SERVER_ERROR_ARGS, e.getMessage());
}
Status st = ce.value();
logger.error(st.getMsg(), e);
log.error(st.getMsg(), e);
return Result.error(st);
}

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/LoginHandlerInterceptor.java

@ -33,8 +33,8 @@ import java.util.Date;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.servlet.HandlerInterceptor;
import org.springframework.web.servlet.ModelAndView;
@ -42,10 +42,9 @@ import org.springframework.web.servlet.ModelAndView;
/**
* login interceptor, must log in first
*/
@Slf4j
public class LoginHandlerInterceptor implements HandlerInterceptor {
private static final Logger logger = LoggerFactory.getLogger(LoginHandlerInterceptor.class);
@Autowired
private UserMapper userMapper;
@ -70,14 +69,14 @@ public class LoginHandlerInterceptor implements HandlerInterceptor {
// if user is null
if (user == null) {
response.setStatus(HttpStatus.SC_UNAUTHORIZED);
logger.info("user does not exist");
log.info("user does not exist");
return false;
}
} else {
user = userMapper.queryUserByToken(token, new Date());
if (user == null) {
response.setStatus(HttpStatus.SC_UNAUTHORIZED);
logger.info("user token has expired");
log.info("user token has expired");
return false;
}
}
@ -85,7 +84,7 @@ public class LoginHandlerInterceptor implements HandlerInterceptor {
// check user state
if (user.getState() == Flag.NO.ordinal()) {
response.setStatus(HttpStatus.SC_UNAUTHORIZED);
logger.info(Status.USER_DISABLED.getMsg());
log.info(Status.USER_DISABLED.getMsg());
return false;
}
request.setAttribute(Constants.SESSION_USER, user);

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/interceptor/RateLimitInterceptor.java

@ -29,8 +29,8 @@ import java.util.concurrent.TimeUnit;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.HttpStatus;
import org.springframework.web.servlet.HandlerInterceptor;
@ -44,10 +44,9 @@ import com.google.common.util.concurrent.RateLimiter;
* If the current coming tenant reaches his tenant-level request quota, his request will be reject fast.
* If the current system request number reaches the global request quota, all coming request will be reject fast.
*/
@Slf4j
public class RateLimitInterceptor implements HandlerInterceptor {
private static final Logger logger = LoggerFactory.getLogger(RateLimitInterceptor.class);
private TrafficConfiguration trafficConfiguration;
private RateLimiter globalRateLimiter;
@ -81,7 +80,7 @@ public class RateLimitInterceptor implements HandlerInterceptor {
RateLimiter tenantRateLimiter = tenantRateLimiterCache.get(token);
if (!tenantRateLimiter.tryAcquire()) {
response.setStatus(HttpStatus.TOO_MANY_REQUESTS.value());
logger.warn("Too many request, reach tenant rate limit, current tenant:{} qps is {}", token,
log.warn("Too many request, reach tenant rate limit, current tenant:{} qps is {}", token,
tenantRateLimiter.getRate());
return false;
}
@ -91,7 +90,7 @@ public class RateLimitInterceptor implements HandlerInterceptor {
if (trafficConfiguration.isGlobalSwitch()) {
if (!globalRateLimiter.tryAcquire()) {
response.setStatus(HttpStatus.TOO_MANY_REQUESTS.value());
logger.warn("Too many request, reach global rate limit, current qps is {}",
log.warn("Too many request, reach global rate limit, current qps is {}",
globalRateLimiter.getRate());
return false;
}

10
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/k8s/K8sManager.java

@ -25,8 +25,8 @@ import org.apache.dolphinscheduler.service.utils.ClusterConfUtils;
import java.util.Hashtable;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@ -38,9 +38,9 @@ import io.fabric8.kubernetes.client.KubernetesClient;
* use multiple environment feature
*/
@Component
@Slf4j
public class K8sManager {
private static final Logger logger = LoggerFactory.getLogger(K8sManager.class);
/**
* cache k8s client
*/
@ -112,7 +112,7 @@ public class K8sManager {
client = getClient(k8sConfig);
clientMap.put(clusterCode, client);
} catch (RemotingException e) {
logger.error("cluster code ={},fail to get k8s ApiClient: {}", clusterCode, e.getMessage());
log.error("cluster code ={},fail to get k8s ApiClient: {}", clusterCode, e.getMessage());
throw new RemotingException("fail to get k8s ApiClient:" + e.getMessage());
}
}
@ -123,7 +123,7 @@ public class K8sManager {
Config config = Config.fromKubeconfig(configYaml);
return new DefaultKubernetesClient(config);
} catch (Exception e) {
logger.error("Fail to get k8s ApiClient", e);
log.error("Fail to get k8s ApiClient", e);
throw new RemotingException("fail to get k8s ApiClient:" + e.getMessage());
}
}

27
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/python/PythonGateway.java

@ -78,16 +78,15 @@ import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public class PythonGateway {
private static final Logger logger = LoggerFactory.getLogger(PythonGateway.class);
private static final FailureStrategy DEFAULT_FAILURE_STRATEGY = FailureStrategy.CONTINUE;
private static final Priority DEFAULT_PRIORITY = Priority.MEDIUM;
private static final Long DEFAULT_ENVIRONMENT_CODE = -1L;
@ -304,7 +303,7 @@ public class PythonGateway {
} else if (verifyStatus != Status.SUCCESS) {
String msg =
"Verify workflow exists status is invalid, neither SUCCESS or WORKFLOW_NAME_EXIST.";
logger.error(msg);
log.error(msg);
throw new RuntimeException(msg);
}
@ -501,11 +500,11 @@ public class PythonGateway {
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(datasourceName);
if (dataSourceList == null || dataSourceList.isEmpty()) {
String msg = String.format("Can not find any datasource by name %s", datasourceName);
logger.error(msg);
log.error(msg);
throw new IllegalArgumentException(msg);
} else if (dataSourceList.size() > 1) {
String msg = String.format("Get more than one datasource by name %s", datasourceName);
logger.error(msg);
log.error(msg);
throw new IllegalArgumentException(msg);
} else {
DataSource dataSource = dataSourceList.get(0);
@ -542,7 +541,7 @@ public class PythonGateway {
result.put("code", processDefinition.getCode());
} else {
String msg = String.format("Can not find valid workflow by name %s", workflowName);
logger.error(msg);
log.error(msg);
throw new IllegalArgumentException(msg);
}
@ -563,7 +562,7 @@ public class PythonGateway {
Project project = projectMapper.queryByName(projectName);
if (project == null) {
String msg = String.format("Can not find valid project by name %s", projectName);
logger.error(msg);
log.error(msg);
throw new IllegalArgumentException(msg);
}
long projectCode = project.getCode();
@ -573,7 +572,7 @@ public class PythonGateway {
processDefinitionMapper.queryByDefineName(projectCode, workflowName);
if (processDefinition == null) {
String msg = String.format("Can not find valid workflow by name %s", workflowName);
logger.error(msg);
log.error(msg);
throw new IllegalArgumentException(msg);
}
result.put("processDefinitionCode", processDefinition.getCode());
@ -604,7 +603,7 @@ public class PythonGateway {
if (CollectionUtils.isEmpty(namedResources)) {
String msg =
String.format("Can not find valid resource by program type %s and name %s", programType, fullName);
logger.error(msg);
log.error(msg);
throw new IllegalArgumentException(msg);
}
@ -624,7 +623,7 @@ public class PythonGateway {
if (result.get("data") == null) {
String msg = String.format("Can not find valid environment by name %s", environmentName);
logger.error(msg);
log.error(msg);
throw new IllegalArgumentException(msg);
}
EnvironmentDto environmentDto = EnvironmentDto.class.cast(result.get("data"));
@ -682,10 +681,10 @@ public class PythonGateway {
}
GatewayServer.turnLoggingOn();
logger.info("PythonGatewayService started on: " + gatewayHost.toString());
log.info("PythonGatewayService started on: " + gatewayHost.toString());
serverBuilder.build().start();
} catch (UnknownHostException e) {
logger.error("exception occurred while constructing PythonGatewayService().", e);
log.error("exception occurred while constructing PythonGatewayService().", e);
}
}
}

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/SecurityConfig.java

@ -22,8 +22,8 @@ import org.apache.dolphinscheduler.api.security.impl.pwd.PasswordAuthenticator;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
@ -31,10 +31,9 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@Slf4j
public class SecurityConfig {
private static final Logger logger = LoggerFactory.getLogger(SecurityConfig.class);
@Value("${security.authentication.type:PASSWORD}")
private String type;
@ -48,7 +47,7 @@ public class SecurityConfig {
private void setAuthenticationType(String type) {
if (StringUtils.isBlank(type)) {
logger.info("security.authentication.type configuration is empty, the default value 'PASSWORD'");
log.info("security.authentication.type configuration is empty, the default value 'PASSWORD'");
this.authenticationType = AuthenticationType.PASSWORD;
return;
}

17
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/AbstractAuthenticator.java

@ -33,14 +33,13 @@ import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
@Slf4j
public abstract class AbstractAuthenticator implements Authenticator {
private static final Logger logger = LoggerFactory.getLogger(AbstractAuthenticator.class);
@Autowired
protected UsersService userService;
@ -65,7 +64,7 @@ public abstract class AbstractAuthenticator implements Authenticator {
Result<Map<String, String>> result = new Result<>();
User user = login(userId, password, extra);
if (user == null) {
logger.error("Username or password entered incorrectly.");
log.error("Username or password entered incorrectly.");
result.setCode(Status.USER_NAME_PASSWD_ERROR.getCode());
result.setMsg(Status.USER_NAME_PASSWD_ERROR.getMsg());
return result;
@ -73,7 +72,7 @@ public abstract class AbstractAuthenticator implements Authenticator {
// check user state
if (user.getState() == Flag.NO.ordinal()) {
logger.error("The current user is deactivated, userName:{}.", user.getUserName());
log.error("The current user is deactivated, userName:{}.", user.getUserName());
result.setCode(Status.USER_DISABLED.getCode());
result.setMsg(Status.USER_DISABLED.getMsg());
return result;
@ -82,13 +81,13 @@ public abstract class AbstractAuthenticator implements Authenticator {
// create session
String sessionId = sessionService.createSession(user, extra);
if (sessionId == null) {
logger.error("Failed to create session, userName:{}.", user.getUserName());
log.error("Failed to create session, userName:{}.", user.getUserName());
result.setCode(Status.LOGIN_SESSION_FAILED.getCode());
result.setMsg(Status.LOGIN_SESSION_FAILED.getMsg());
return result;
}
logger.info("Session is created and sessionId is :{}.", sessionId);
log.info("Session is created and sessionId is :{}.", sessionId);
Map<String, String> data = new HashMap<>();
data.put(Constants.SESSION_ID, sessionId);
@ -104,7 +103,7 @@ public abstract class AbstractAuthenticator implements Authenticator {
public User getAuthUser(HttpServletRequest request) {
Session session = sessionService.getSession(request);
if (session == null) {
logger.info("session info is null ");
log.info("session info is null ");
return null;
}
// get user object from session

15
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/security/impl/ldap/LdapService.java

@ -34,8 +34,8 @@ import javax.naming.directory.SearchResult;
import javax.naming.ldap.InitialLdapContext;
import javax.naming.ldap.LdapContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.ldap.filter.EqualsFilter;
@ -43,10 +43,9 @@ import org.springframework.stereotype.Component;
@Component
@Configuration
@Slf4j
public class LdapService {
private static final Logger logger = LoggerFactory.getLogger(LdapService.class);
@Value("${security.authentication.ldap.user.admin:#{null}}")
private String adminUserId;
@ -109,7 +108,7 @@ public class LdapService {
try {
new InitialDirContext(searchEnv);
} catch (Exception e) {
logger.warn("invalid ldap credentials or ldap search error", e);
log.warn("invalid ldap credentials or ldap search error", e);
return null;
}
Attribute attr = attrs.next();
@ -119,7 +118,7 @@ public class LdapService {
}
}
} catch (NamingException e) {
logger.error("ldap search error", e);
log.error("ldap search error", e);
return null;
} finally {
try {
@ -127,7 +126,7 @@ public class LdapService {
ctx.close();
}
} catch (NamingException e) {
logger.error("ldap context close error", e);
log.error("ldap context close error", e);
}
}
@ -150,7 +149,7 @@ public class LdapService {
public LdapUserNotExistActionType getLdapUserNotExistAction() {
if (StringUtils.isBlank(ldapUserNotExistAction)) {
logger.info(
log.info(
"security.authentication.ldap.user.not.exist.action configuration is empty, the default value 'CREATE'");
return LdapUserNotExistActionType.CREATE;
}

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AccessTokenServiceImpl.java

@ -41,8 +41,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -53,10 +53,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* access token service impl
*/
@Service
@Slf4j
public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTokenService {
private static final Logger logger = LoggerFactory.getLogger(AccessTokenServiceImpl.class);
@Autowired
private AccessTokenMapper accessTokenMapper;
@ -134,7 +133,7 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok
// 2. check if user is existed
if (userId <= 0) {
String errorMsg = "User id should not less than or equals to 0.";
logger.error(errorMsg);
log.error(errorMsg);
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, errorMsg);
return result;
}
@ -198,7 +197,7 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("Access token does not exist, accessTokenId:{}.", id);
log.error("Access token does not exist, accessTokenId:{}.", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}
@ -235,7 +234,7 @@ public class AccessTokenServiceImpl extends BaseServiceImpl implements AccessTok
// 2. check if token is existed
AccessToken accessToken = accessTokenMapper.selectById(id);
if (accessToken == null) {
logger.error("Access token does not exist, accessTokenId:{}.", id);
log.error("Access token does not exist, accessTokenId:{}.", id);
putMsg(result, Status.ACCESS_TOKEN_NOT_EXIST);
return result;
}

35
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java

@ -43,8 +43,8 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.stereotype.Service;
@ -57,10 +57,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* alert group service impl
*/
@Service
@Slf4j
public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroupService {
private Logger logger = LoggerFactory.getLogger(AlertGroupServiceImpl.class);
@Autowired
private AlertGroupMapper alertGroupMapper;
@ -78,7 +77,7 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
alertGroups = alertGroupMapper.queryAllGroupList();
} else {
Set<Integer> ids = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, loginUser.getId(), log);
if (ids.isEmpty()) {
result.put(Constants.DATA_LIST, Collections.emptyList());
putMsg(result, Status.SUCCESS);
@ -139,7 +138,7 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
alertGroupPage = alertGroupMapper.queryAlertGroupPage(page, searchVal);
} else {
Set<Integer> ids = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.ALERT_GROUP, loginUser.getId(), log);
if (ids.isEmpty()) {
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
@ -175,7 +174,7 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
return result;
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -196,14 +195,14 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
result.put(Constants.DATA_LIST, alertGroup);
putMsg(result, Status.SUCCESS);
permissionPostHandle(AuthorizationType.ALERT_GROUP, loginUser.getId(),
Collections.singletonList(alertGroup.getId()), logger);
logger.info("Create alert group complete, groupName:{}", alertGroup.getGroupName());
Collections.singletonList(alertGroup.getId()), log);
log.info("Create alert group complete, groupName:{}", alertGroup.getGroupName());
} else {
logger.error("Create alert group error, groupName:{}", alertGroup.getGroupName());
log.error("Create alert group error, groupName:{}", alertGroup.getGroupName());
putMsg(result, Status.CREATE_ALERT_GROUP_ERROR);
}
} catch (DuplicateKeyException ex) {
logger.error("Create alert group error, groupName:{}", alertGroup.getGroupName(), ex);
log.error("Create alert group error, groupName:{}", alertGroup.getGroupName(), ex);
putMsg(result, Status.ALERT_GROUP_EXIST);
}
@ -230,14 +229,14 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
return result;
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
AlertGroup alertGroup = alertGroupMapper.selectById(id);
if (alertGroup == null) {
logger.error("Alert group does not exist, id:{}.", id);
log.error("Alert group does not exist, id:{}.", id);
putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
return result;
@ -254,10 +253,10 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
alertGroup.setAlertInstanceIds(alertInstanceIds);
try {
alertGroupMapper.updateById(alertGroup);
logger.info("Update alert group complete, groupName:{}", alertGroup.getGroupName());
log.info("Update alert group complete, groupName:{}", alertGroup.getGroupName());
putMsg(result, Status.SUCCESS);
} catch (DuplicateKeyException ex) {
logger.error("Update alert group error, groupName:{}", alertGroup.getGroupName(), ex);
log.error("Update alert group error, groupName:{}", alertGroup.getGroupName(), ex);
putMsg(result, Status.ALERT_GROUP_EXIST);
}
return result;
@ -284,7 +283,7 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
// Not allow to delete the default alarm group ,because the module of service need to use it.
if (id == 1) {
logger.warn("Not allow to delete the default alarm group.");
log.warn("Not allow to delete the default alarm group.");
putMsg(result, Status.NOT_ALLOW_TO_DELETE_DEFAULT_ALARM_GROUP);
return result;
}
@ -292,13 +291,13 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
// check exist
AlertGroup alertGroup = alertGroupMapper.selectById(id);
if (alertGroup == null) {
logger.error("Alert group does not exist, id:{}.", id);
log.error("Alert group does not exist, id:{}.", id);
putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
return result;
}
alertGroupMapper.deleteById(id);
logger.info("Delete alert group complete, groupId:{}", id);
log.info("Delete alert group complete, groupId:{}", id);
putMsg(result, Status.SUCCESS);
return result;
}

23
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertPluginInstanceServiceImpl.java

@ -50,8 +50,8 @@ import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
@ -64,10 +64,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
*/
@Service
@Lazy
@Slf4j
public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements AlertPluginInstanceService {
private static final Logger logger = LoggerFactory.getLogger(AlertPluginInstanceServiceImpl.class);
@Autowired
private AlertPluginInstanceMapper alertPluginInstanceMapper;
@ -100,7 +99,7 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A
return result;
}
if (alertPluginInstanceMapper.existInstanceName(alertPluginInstance.getInstanceName()) == Boolean.TRUE) {
logger.error("Plugin instance with the same name already exists, name:{}.",
log.error("Plugin instance with the same name already exists, name:{}.",
alertPluginInstance.getInstanceName());
putMsg(result, Status.PLUGIN_INSTANCE_ALREADY_EXISTS);
return result;
@ -108,12 +107,12 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A
int i = alertPluginInstanceMapper.insert(alertPluginInstance);
if (i > 0) {
logger.info("Create alert plugin instance complete, name:{}", alertPluginInstance.getInstanceName());
log.info("Create alert plugin instance complete, name:{}", alertPluginInstance.getInstanceName());
result.put(Constants.DATA_LIST, alertPluginInstance);
putMsg(result, Status.SUCCESS);
return result;
}
logger.error("Create alert plugin instance error, name:{}", alertPluginInstance.getInstanceName());
log.error("Create alert plugin instance error, name:{}", alertPluginInstance.getInstanceName());
putMsg(result, Status.SAVE_ERROR);
return result;
}
@ -143,12 +142,12 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A
int i = alertPluginInstanceMapper.updateById(alertPluginInstance);
if (i > 0) {
logger.info("Update alert plugin instance complete, instanceId:{}, name:{}", alertPluginInstance.getId(),
log.info("Update alert plugin instance complete, instanceId:{}, name:{}", alertPluginInstance.getId(),
alertPluginInstance.getInstanceName());
putMsg(result, Status.SUCCESS);
return result;
}
logger.error("Update alert plugin instance error, instanceId:{}, name:{}", alertPluginInstance.getId(),
log.error("Update alert plugin instance error, instanceId:{}, name:{}", alertPluginInstance.getId(),
alertPluginInstance.getInstanceName());
putMsg(result, Status.SAVE_ERROR);
return result;
@ -167,7 +166,7 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A
// check if there is an associated alert group
boolean hasAssociatedAlertGroup = checkHasAssociatedAlertGroup(String.valueOf(id));
if (hasAssociatedAlertGroup) {
logger.warn("Delete alert plugin failed because alert group is using it, pluginId:{}.", id);
log.warn("Delete alert plugin failed because alert group is using it, pluginId:{}.", id);
putMsg(result, Status.DELETE_ALERT_PLUGIN_INSTANCE_ERROR_HAS_ALERT_GROUP_ASSOCIATED);
return result;
}
@ -178,10 +177,10 @@ public class AlertPluginInstanceServiceImpl extends BaseServiceImpl implements A
int i = alertPluginInstanceMapper.deleteById(id);
if (i > 0) {
logger.info("Delete alert plugin instance complete, instanceId:{}", id);
log.info("Delete alert plugin instance complete, instanceId:{}", id);
putMsg(result, Status.SUCCESS);
}
logger.error("Delete alert plugin instance error, instanceId:{}", id);
log.error("Delete alert plugin instance error, instanceId:{}", id);
return result;
}

14
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/BaseServiceImpl.java

@ -36,17 +36,17 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
/**
* base service impl
*/
@Slf4j
public class BaseServiceImpl implements BaseService {
private static final Logger logger = LoggerFactory.getLogger(BaseServiceImpl.class);
@Autowired
protected ResourcePermissionCheckService resourcePermissionCheckService;
@ -56,7 +56,7 @@ public class BaseServiceImpl implements BaseService {
try {
resourcePermissionCheckService.postHandle(authorizationType, userId, ids, logger);
} catch (Exception e) {
logger.error("Post handle error, userId:{}.", userId, e);
log.error("Post handle error, userId:{}.", userId, e);
throw new RuntimeException("Resource association user error", e);
}
}
@ -178,9 +178,9 @@ public class BaseServiceImpl implements BaseService {
@Override
public boolean canOperatorPermissions(User user, Object[] ids, AuthorizationType type, String permissionKey) {
boolean operationPermissionCheck =
resourcePermissionCheckService.operationPermissionCheck(type, user.getId(), permissionKey, logger);
resourcePermissionCheckService.operationPermissionCheck(type, user.getId(), permissionKey, log);
boolean resourcePermissionCheck = resourcePermissionCheckService.resourcePermissionCheck(type, ids,
user.getUserType().equals(UserType.ADMIN_USER) ? 0 : user.getId(), logger);
user.getUserType().equals(UserType.ADMIN_USER) ? 0 : user.getId(), log);
return operationPermissionCheck && resourcePermissionCheck;
}
@ -193,7 +193,7 @@ public class BaseServiceImpl implements BaseService {
if (!StringUtils.isEmpty(startDateStr)) {
start = DateUtils.stringToDate(startDateStr);
if (Objects.isNull(start)) {
logger.warn("Parameter startDateStr is invalid.");
log.warn("Parameter startDateStr is invalid.");
throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE);
}
}

47
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ClusterServiceImpl.java

@ -44,8 +44,8 @@ import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -59,10 +59,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* cluster definition service impl
*/
@Service
@Slf4j
public class ClusterServiceImpl extends BaseServiceImpl implements ClusterService {
private static final Logger logger = LoggerFactory.getLogger(ClusterServiceImpl.class);
@Autowired
private ClusterMapper clusterMapper;
@ -84,7 +83,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
public Map<String, Object> createCluster(User loginUser, String name, String config, String desc) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
logger.warn("Only admin can create cluster, current login user name:{}.", loginUser.getUserName());
log.warn("Only admin can create cluster, current login user name:{}.", loginUser.getUserName());
return result;
}
@ -95,7 +94,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
Cluster clusterExistByName = clusterMapper.queryByClusterName(name);
if (clusterExistByName != null) {
logger.warn("Cluster with the same name already exists, clusterName:{}.", clusterExistByName.getName());
log.warn("Cluster with the same name already exists, clusterName:{}.", clusterExistByName.getName());
putMsg(result, Status.CLUSTER_NAME_EXISTS, name);
return result;
}
@ -112,7 +111,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
code = CodeGenerateUtils.getInstance().genCode();
cluster.setCode(code);
} catch (CodeGenerateException e) {
logger.error("Generate cluster code error.", e);
log.error("Generate cluster code error.", e);
}
if (code == 0L) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating cluster code");
@ -120,11 +119,11 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
}
if (clusterMapper.insert(cluster) > 0) {
logger.info("Cluster create complete, clusterName:{}.", cluster.getName());
log.info("Cluster create complete, clusterName:{}.", cluster.getName());
result.put(Constants.DATA_LIST, cluster.getCode());
putMsg(result, Status.SUCCESS);
} else {
logger.error("Cluster create error, clusterName:{}.", cluster.getName());
log.error("Cluster create error, clusterName:{}.", cluster.getName());
putMsg(result, Status.CREATE_CLUSTER_ERROR);
}
return result;
@ -227,7 +226,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
Cluster cluster = clusterMapper.queryByClusterName(name);
if (cluster == null) {
logger.warn("Cluster does not exist, name:{}.", name);
log.warn("Cluster does not exist, name:{}.", name);
putMsg(result, Status.QUERY_CLUSTER_BY_NAME_ERROR, name);
} else {
@ -250,7 +249,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
public Map<String, Object> deleteClusterByCode(User loginUser, Long code) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
logger.warn("Only admin can delete cluster, current login user name:{}.", loginUser.getUserName());
log.warn("Only admin can delete cluster, current login user name:{}.", loginUser.getUserName());
return result;
}
@ -258,7 +257,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
.selectCount(new QueryWrapper<K8sNamespace>().lambda().eq(K8sNamespace::getClusterCode, code));
if (relatedNamespaceNumber > 0) {
logger.warn("Delete cluster failed because {} namespace(s) is(are) using it, clusterCode:{}.",
log.warn("Delete cluster failed because {} namespace(s) is(are) using it, clusterCode:{}.",
relatedNamespaceNumber, code);
putMsg(result, Status.DELETE_CLUSTER_RELATED_NAMESPACE_EXISTS);
return result;
@ -266,10 +265,10 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
int delete = clusterMapper.deleteByCode(code);
if (delete > 0) {
logger.info("Delete cluster complete, clusterCode:{}.", code);
log.info("Delete cluster complete, clusterCode:{}.", code);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Delete cluster error, clusterCode:{}.", code);
log.error("Delete cluster error, clusterCode:{}.", code);
putMsg(result, Status.DELETE_CLUSTER_ERROR);
}
return result;
@ -289,12 +288,12 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
public Map<String, Object> updateClusterByCode(User loginUser, Long code, String name, String config, String desc) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
logger.warn("Only admin can update cluster, current login user name:{}.", loginUser.getUserName());
log.warn("Only admin can update cluster, current login user name:{}.", loginUser.getUserName());
return result;
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -306,14 +305,14 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
Cluster clusterExistByName = clusterMapper.queryByClusterName(name);
if (clusterExistByName != null && !clusterExistByName.getCode().equals(code)) {
logger.warn("Cluster with the same name already exists, name:{}.", clusterExistByName.getName());
log.warn("Cluster with the same name already exists, name:{}.", clusterExistByName.getName());
putMsg(result, Status.CLUSTER_NAME_EXISTS, name);
return result;
}
Cluster clusterExist = clusterMapper.queryByClusterCode(code);
if (clusterExist == null) {
logger.error("Cluster does not exist, code:{}.", code);
log.error("Cluster does not exist, code:{}.", code);
putMsg(result, Status.CLUSTER_NOT_EXISTS, name);
return result;
}
@ -323,7 +322,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
try {
k8sManager.getAndUpdateK8sClient(code, true);
} catch (RemotingException e) {
logger.error("Update K8s error.", e);
log.error("Update K8s error.", e);
putMsg(result, Status.K8S_CLIENT_OPS_ERROR, name);
return result;
}
@ -335,7 +334,7 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
clusterExist.setDescription(desc);
clusterMapper.updateById(clusterExist);
// need not update relation
logger.info("Cluster update complete, clusterId:{}.", clusterExist.getId());
log.info("Cluster update complete, clusterId:{}.", clusterExist.getId());
putMsg(result, Status.SUCCESS);
return result;
}
@ -351,14 +350,14 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
Map<String, Object> result = new HashMap<>();
if (StringUtils.isEmpty(clusterName)) {
logger.warn("Parameter cluster name is empty.");
log.warn("Parameter cluster name is empty.");
putMsg(result, Status.CLUSTER_NAME_IS_NULL);
return result;
}
Cluster cluster = clusterMapper.queryByClusterName(clusterName);
if (cluster != null) {
logger.warn("Cluster with the same name already exists, name:{}.", cluster.getName());
log.warn("Cluster with the same name already exists, name:{}.", cluster.getName());
putMsg(result, Status.CLUSTER_NAME_EXISTS, clusterName);
return result;
}
@ -370,12 +369,12 @@ public class ClusterServiceImpl extends BaseServiceImpl implements ClusterServic
public Map<String, Object> checkParams(String name, String config) {
Map<String, Object> result = new HashMap<>();
if (StringUtils.isEmpty(name)) {
logger.warn("Parameter cluster name is empty.");
log.warn("Parameter cluster name is empty.");
putMsg(result, Status.CLUSTER_NAME_IS_NULL);
return result;
}
if (StringUtils.isEmpty(config)) {
logger.warn("Parameter cluster config is empty.");
log.warn("Parameter cluster config is empty.");
putMsg(result, Status.CLUSTER_CONFIG_IS_NULL);
return result;
}

21
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java

@ -63,8 +63,8 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -72,10 +72,9 @@ import org.springframework.stereotype.Service;
* data analysis service impl
*/
@Service
@Slf4j
public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnalysisService {
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceImpl.class);
@Autowired
private ProjectMapper projectMapper;
@ -176,7 +175,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal
start = DateUtils.stringToDate(startDate);
end = DateUtils.stringToDate(endDate);
if (Objects.isNull(start) || Objects.isNull(end)) {
logger.warn("Parameter startDate or endDate is invalid.");
log.warn("Parameter startDate or endDate is invalid.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE);
return result;
}
@ -293,7 +292,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal
private Pair<Set<Integer>, Map<String, Object>> getProjectIds(User loginUser, Map<String, Object> result) {
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
if (projectIds.isEmpty()) {
List<ExecuteStatusCount> taskInstanceStateCounts = new ArrayList<>();
result.put(Constants.DATA_LIST, new TaskCountDto(taskInstanceStateCounts));
@ -370,7 +369,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal
Map<String, Object> result = new HashMap<>();
int count = 0;
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
if (!projectIds.isEmpty()) {
List<Project> projects = projectMapper.selectBatchIds(projectIds);
List<Long> projectCodes = projects.stream().map(project -> project.getCode()).collect(Collectors.toList());
@ -392,7 +391,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal
StatisticsStateRequest statisticsStateRequest) {
Map<String, Object> result = new HashMap<>();
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
if (projectIds.isEmpty()) {
putMsg(result, Status.SUCCESS);
return result;
@ -416,7 +415,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal
workflowCode = processDefinitionMapper.queryByDefineName(projectCode, workflowName).getCode();
}
} catch (Exception e) {
logger.warn(e.getMessage());
log.warn(e.getMessage());
}
Date date = new Date();
@ -466,7 +465,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal
public Map<String, Object> countTaskStates(User loginUser, StatisticsStateRequest statisticsStateRequest) {
Map<String, Object> result = new HashMap<>();
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
if (projectIds.isEmpty()) {
putMsg(result, Status.SUCCESS);
return result;
@ -497,7 +496,7 @@ public class DataAnalysisServiceImpl extends BaseServiceImpl implements DataAnal
// taskCode = relationMapper.queryTaskCodeByTaskName(workflowCode, taskName);
}
} catch (Exception e) {
logger.warn(e.getMessage());
log.warn(e.getMessage());
}
Date date = new Date();

61
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java

@ -60,8 +60,8 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.stereotype.Service;
@ -75,10 +75,9 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
* data source service impl
*/
@Service
@Slf4j
public class DataSourceServiceImpl extends BaseServiceImpl implements DataSourceService {
private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceImpl.class);
@Autowired
private DataSourceMapper dataSourceMapper;
@ -110,12 +109,12 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
}
// check name can use or not
if (checkName(datasourceParam.getName())) {
logger.warn("Datasource with the same name already exists, name:{}.", datasourceParam.getName());
log.warn("Datasource with the same name already exists, name:{}.", datasourceParam.getName());
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
if (checkDescriptionLength(datasourceParam.getNote())) {
logger.warn("Parameter description is too long, description:{}.", datasourceParam.getNote());
log.warn("Parameter description is too long, description:{}.", datasourceParam.getNote());
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -139,11 +138,11 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
dataSourceMapper.insert(dataSource);
putMsg(result, Status.SUCCESS);
permissionPostHandle(AuthorizationType.DATASOURCE, loginUser.getId(),
Collections.singletonList(dataSource.getId()), logger);
logger.info("Datasource create complete, dbType:{}, datasourceName:{}.", dataSource.getType().getDescp(),
Collections.singletonList(dataSource.getId()), log);
log.info("Datasource create complete, dbType:{}, datasourceName:{}.", dataSource.getType().getDescp(),
dataSource.getName());
} catch (DuplicateKeyException ex) {
logger.error("Datasource create error.", ex);
log.error("Datasource create error.", ex);
putMsg(result, Status.DATASOURCE_EXIST);
}
@ -164,7 +163,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
// determine whether the data source exists
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
logger.error("Datasource does not exist, id:{}.", id);
log.error("Datasource does not exist, id:{}.", id);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -177,12 +176,12 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
// check name can use or not
if (!dataSourceParam.getName().trim().equals(dataSource.getName()) && checkName(dataSourceParam.getName())) {
logger.warn("Datasource with the same name already exists, name:{}.", dataSource.getName());
log.warn("Datasource with the same name already exists, name:{}.", dataSource.getName());
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
if (checkDescriptionLength(dataSourceParam.getNote())) {
logger.warn("Parameter description is too long, description:{}.", dataSourceParam.getNote());
log.warn("Parameter description is too long, description:{}.", dataSourceParam.getNote());
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -211,11 +210,11 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
dataSource.setBindTestId(dataSourceParam.getBindTestId());
try {
dataSourceMapper.updateById(dataSource);
logger.info("Update datasource complete, datasourceId:{}, datasourceName:{}.", dataSource.getId(),
log.info("Update datasource complete, datasourceId:{}, datasourceName:{}.", dataSource.getId(),
dataSource.getName());
putMsg(result, Status.SUCCESS);
} catch (DuplicateKeyException ex) {
logger.error("Update datasource error, datasourceId:{}, datasourceName:{}.", dataSource.getId(),
log.error("Update datasource error, datasourceId:{}, datasourceName:{}.", dataSource.getId(),
dataSource.getName());
putMsg(result, Status.DATASOURCE_EXIST);
}
@ -239,7 +238,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
Map<String, Object> result = new HashMap<>();
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
logger.error("Datasource does not exist, id:{}.", id);
log.error("Datasource does not exist, id:{}.", id);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -282,7 +281,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
} else {
Set<Integer> ids = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, loginUser.getId(), log);
if (ids.isEmpty()) {
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
@ -337,7 +336,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
datasourceList = dataSourceMapper.queryDataSourceByType(0, type, testFlag);
} else {
Set<Integer> ids = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.DATASOURCE, loginUser.getId(), log);
if (ids.isEmpty()) {
result.put(Constants.DATA_LIST, Collections.emptyList());
putMsg(result, Status.SUCCESS);
@ -363,7 +362,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
Result<Object> result = new Result<>();
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
if (dataSourceList != null && !dataSourceList.isEmpty()) {
logger.warn("Datasource with the same name already exists, dataSourceName:{}.",
log.warn("Datasource with the same name already exists, dataSourceName:{}.",
dataSourceList.get(0).getName());
putMsg(result, Status.DATASOURCE_EXIST);
} else {
@ -386,12 +385,12 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
Result<Object> result = new Result<>();
try (Connection connection = DataSourceClientProvider.getInstance().getConnection(type, connectionParam)) {
if (connection == null) {
logger.error("Connection test to {} datasource failed, connectionParam:{}.", type.getDescp(),
log.error("Connection test to {} datasource failed, connectionParam:{}.", type.getDescp(),
connectionParam);
putMsg(result, Status.CONNECTION_TEST_FAILURE);
return result;
}
logger.info("Connection test to {} datasource success, connectionParam:{}", type.getDescp(),
log.info("Connection test to {} datasource success, connectionParam:{}", type.getDescp(),
connectionParam);
putMsg(result, Status.SUCCESS);
return result;
@ -399,7 +398,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
String message = Optional.of(e).map(Throwable::getCause)
.map(Throwable::getMessage)
.orElse(e.getMessage());
logger.error("Datasource test connection error, dbType:{}, connectionParam:{}, message:{}.", type,
log.error("Datasource test connection error, dbType:{}, connectionParam:{}, message:{}.", type,
connectionParam, message);
return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(), message);
}
@ -416,7 +415,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
Result<Object> result = new Result<>();
logger.error("Datasource does not exist, datasourceId:{}.", id);
log.error("Datasource does not exist, datasourceId:{}.", id);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -439,7 +438,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
// query datasource by id
DataSource dataSource = dataSourceMapper.selectById(datasourceId);
if (dataSource == null) {
logger.warn("Datasource does not exist, datasourceId:{}.", datasourceId);
log.warn("Datasource does not exist, datasourceId:{}.", datasourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -451,10 +450,10 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
dataSourceMapper.deleteById(datasourceId);
datasourceUserMapper.deleteByDatasourceId(datasourceId);
clearBindTestId(datasourceId);
logger.info("Delete datasource complete, datasourceId:{}.", datasourceId);
log.info("Delete datasource complete, datasourceId:{}.", datasourceId);
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("Delete datasource complete, datasourceId:{}.", datasourceId, e);
log.error("Delete datasource complete, datasourceId:{}.", datasourceId, e);
throw new ServiceException(Status.DELETE_DATA_SOURCE_FAILURE);
}
return result;
@ -547,7 +546,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
try {
schema = metaData.getConnection().getSchema();
} catch (SQLException e) {
logger.error("Cant not get the schema, datasourceId:{}.", datasourceId, e);
log.error("Cant not get the schema, datasourceId:{}.", datasourceId, e);
}
tables = metaData.getTables(
@ -555,7 +554,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
getDbSchemaPattern(dataSource.getType(), schema, connectionParam),
"%", TABLE_TYPES);
if (null == tables) {
logger.error("Get datasource tables error, datasourceId:{}.", datasourceId);
log.error("Get datasource tables error, datasourceId:{}.", datasourceId);
putMsg(result, Status.GET_DATASOURCE_TABLES_ERROR);
return result;
}
@ -567,7 +566,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
}
} catch (Exception e) {
logger.error("Get datasource tables error, datasourceId:{}.", datasourceId, e);
log.error("Get datasource tables error, datasourceId:{}.", datasourceId, e);
putMsg(result, Status.GET_DATASOURCE_TABLES_ERROR);
return result;
} finally {
@ -622,7 +621,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
columnList.add(rs.getString(COLUMN_NAME));
}
} catch (Exception e) {
logger.error("Get datasource table columns error, datasourceId:{}.", dataSource.getId(), e);
log.error("Get datasource table columns error, datasourceId:{}.", dataSource.getId(), e);
} finally {
closeResult(rs);
releaseConnection(connection);
@ -684,7 +683,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
try {
connection.close();
} catch (Exception e) {
logger.error("Connection release error", e);
log.error("Connection release error", e);
}
}
}
@ -694,7 +693,7 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
try {
rs.close();
} catch (Exception e) {
logger.error("ResultSet close error", e);
log.error("ResultSet close error", e);
}
}
}

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java

@ -30,8 +30,8 @@ import org.apache.commons.lang3.StringUtils;
import java.util.Date;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -42,10 +42,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* DqExecuteResultServiceImpl
*/
@Service
@Slf4j
public class DqExecuteResultServiceImpl extends BaseServiceImpl implements DqExecuteResultService {
private final Logger logger = LoggerFactory.getLogger(DqExecuteResultServiceImpl.class);
@Autowired
private DqExecuteResultMapper dqExecuteResultMapper;
@ -76,7 +75,7 @@ public class DqExecuteResultServiceImpl extends BaseServiceImpl implements DqExe
end = DateUtils.stringToDate(endTime);
}
} catch (Exception e) {
logger.warn("Parameter startTime or endTime is invalid.");
log.warn("Parameter startTime or endTime is invalid.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startTime,endTime");
return result;
}

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java

@ -64,8 +64,8 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -80,10 +80,9 @@ import com.fasterxml.jackson.databind.ObjectMapper;
* DqRuleServiceImpl
*/
@Service
@Slf4j
public class DqRuleServiceImpl extends BaseServiceImpl implements DqRuleService {
private final Logger logger = LoggerFactory.getLogger(DqRuleServiceImpl.class);
@Autowired
private DqRuleMapper dqRuleMapper;
@ -240,7 +239,7 @@ public class DqRuleServiceImpl extends BaseServiceImpl implements DqRuleService
try {
result = mapper.writeValueAsString(params);
} catch (JsonProcessingException e) {
logger.error("Json parse error.", e);
log.error("Json parse error.", e);
}
return result;

51
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentServiceImpl.java

@ -55,8 +55,8 @@ import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -72,10 +72,9 @@ import com.fasterxml.jackson.core.type.TypeReference;
* task definition service impl
*/
@Service
@Slf4j
public class EnvironmentServiceImpl extends BaseServiceImpl implements EnvironmentService {
private static final Logger logger = LoggerFactory.getLogger(EnvironmentServiceImpl.class);
@Autowired
private EnvironmentMapper environmentMapper;
@ -104,7 +103,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
return result;
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -115,7 +114,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
Environment environment = environmentMapper.queryByEnvironmentName(name);
if (environment != null) {
logger.warn("Environment with the same name already exist, environmentName:{}.", environment.getName());
log.warn("Environment with the same name already exist, environmentName:{}.", environment.getName());
putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, name);
return result;
}
@ -132,7 +131,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
code = CodeGenerateUtils.getInstance().genCode();
env.setCode(code);
} catch (CodeGenerateException e) {
logger.error("Generate environment code error.", e);
log.error("Generate environment code error.", e);
}
if (code == 0L) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating environment code");
@ -153,7 +152,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
relation.setCreateTime(new Date());
relation.setUpdateTime(new Date());
relationMapper.insert(relation);
logger.info(
log.info(
"Environment-WorkerGroup relation create complete, environmentName:{}, workerGroup:{}.",
env.getName(), relation.getWorkerGroup());
}
@ -163,10 +162,10 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
result.put(Constants.DATA_LIST, env.getCode());
putMsg(result, Status.SUCCESS);
permissionPostHandle(AuthorizationType.ENVIRONMENT, loginUser.getId(),
Collections.singletonList(env.getId()), logger);
logger.info("Environment create complete, name:{}.", env.getName());
Collections.singletonList(env.getId()), log);
log.info("Environment create complete, name:{}.", env.getName());
} else {
logger.error("Environment create error, name:{}.", env.getName());
log.error("Environment create error, name:{}.", env.getName());
putMsg(result, Status.CREATE_ENVIRONMENT_ERROR);
}
return result;
@ -191,7 +190,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
environmentIPage = environmentMapper.queryEnvironmentListPaging(page, searchVal);
} else {
Set<Integer> ids = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.ENVIRONMENT, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.ENVIRONMENT, loginUser.getId(), log);
if (ids.isEmpty()) {
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
@ -235,7 +234,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
public Map<String, Object> queryAllEnvironmentList(User loginUser) {
Map<String, Object> result = new HashMap<>();
Set<Integer> ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.ENVIRONMENT,
loginUser.getId(), logger);
loginUser.getId(), log);
if (ids.isEmpty()) {
result.put(Constants.DATA_LIST, Collections.emptyList());
putMsg(result, Status.SUCCESS);
@ -335,7 +334,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
.selectCount(new QueryWrapper<TaskDefinition>().lambda().eq(TaskDefinition::getEnvironmentCode, code));
if (relatedTaskNumber > 0) {
logger.warn("Delete environment failed because {} tasks is using it, environmentCode:{}.",
log.warn("Delete environment failed because {} tasks is using it, environmentCode:{}.",
relatedTaskNumber, code);
putMsg(result, Status.DELETE_ENVIRONMENT_RELATED_TASK_EXISTS);
return result;
@ -346,10 +345,10 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
relationMapper.delete(new QueryWrapper<EnvironmentWorkerGroupRelation>()
.lambda()
.eq(EnvironmentWorkerGroupRelation::getEnvironmentCode, code));
logger.info("Environment and relations delete complete, environmentCode:{}.", code);
log.info("Environment and relations delete complete, environmentCode:{}.", code);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Environment delete error, environmentCode:{}.", code);
log.error("Environment delete error, environmentCode:{}.", code);
putMsg(result, Status.DELETE_ENVIRONMENT_ERROR);
}
return result;
@ -380,14 +379,14 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
return checkResult;
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
Environment environment = environmentMapper.queryByEnvironmentName(name);
if (environment != null && !environment.getCode().equals(code)) {
logger.warn("Environment with the same name already exist, name:{}.", environment.getName());
log.warn("Environment with the same name already exist, name:{}.", environment.getName());
putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, name);
return result;
}
@ -445,10 +444,10 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
relationMapper.insert(relation);
}
});
logger.info("Environment and relations update complete, environmentId:{}.", env.getId());
log.info("Environment and relations update complete, environmentId:{}.", env.getId());
putMsg(result, Status.SUCCESS);
} else {
logger.error("Environment update error, environmentId:{}.", env.getId());
log.error("Environment update error, environmentId:{}.", env.getId());
putMsg(result, Status.UPDATE_ENVIRONMENT_ERROR, name);
}
return result;
@ -465,14 +464,14 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
Map<String, Object> result = new HashMap<>();
if (StringUtils.isEmpty(environmentName)) {
logger.warn("parameter environment name is empty.");
log.warn("parameter environment name is empty.");
putMsg(result, Status.ENVIRONMENT_NAME_IS_NULL);
return result;
}
Environment environment = environmentMapper.queryByEnvironmentName(environmentName);
if (environment != null) {
logger.warn("Environment with the same name already exist, name:{}.", environment.getName());
log.warn("Environment with the same name already exist, name:{}.", environment.getName());
putMsg(result, Status.ENVIRONMENT_NAME_EXISTS, environmentName);
return result;
}
@ -493,7 +492,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
if (Objects.nonNull(taskDefinitionList) && taskDefinitionList.size() != 0) {
Set<String> collect =
taskDefinitionList.stream().map(TaskDefinition::getName).collect(Collectors.toSet());
logger.warn("Environment {} and worker group {} is being used by task {}, so can not update.",
log.warn("Environment {} and worker group {} is being used by task {}, so can not update.",
taskDefinitionList.get(0).getEnvironmentCode(), taskDefinitionList.get(0).getWorkerGroup(),
collect);
putMsg(result, Status.UPDATE_ENVIRONMENT_WORKER_GROUP_RELATION_ERROR, workerGroup, environmentName,
@ -508,12 +507,12 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
public Map<String, Object> checkParams(String name, String config, String workerGroups) {
Map<String, Object> result = new HashMap<>();
if (StringUtils.isEmpty(name)) {
logger.warn("parameter environment name is empty.");
log.warn("parameter environment name is empty.");
putMsg(result, Status.ENVIRONMENT_NAME_IS_NULL);
return result;
}
if (StringUtils.isEmpty(config)) {
logger.warn("parameter environment config is empty.");
log.warn("parameter environment config is empty.");
putMsg(result, Status.ENVIRONMENT_CONFIG_IS_NULL);
return result;
}
@ -521,7 +520,7 @@ public class EnvironmentServiceImpl extends BaseServiceImpl implements Environme
List<String> workerGroupList = JSONUtils.parseObject(workerGroups, new TypeReference<List<String>>() {
});
if (Objects.isNull(workerGroupList)) {
logger.warn("Parameter worker groups list is invalid.");
log.warn("Parameter worker groups list is invalid.");
putMsg(result, Status.ENVIRONMENT_WORKER_GROUPS_IS_INVALID);
return result;
}

7
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/EnvironmentWorkerGroupRelationServiceImpl.java

@ -27,8 +27,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -36,12 +36,11 @@ import org.springframework.stereotype.Service;
* task definition service impl
*/
@Service
@Slf4j
public class EnvironmentWorkerGroupRelationServiceImpl extends BaseServiceImpl
implements
EnvironmentWorkerGroupRelationService {
private static final Logger logger = LoggerFactory.getLogger(EnvironmentWorkerGroupRelationServiceImpl.class);
@Autowired
private EnvironmentWorkerGroupRelationMapper environmentWorkerGroupRelationMapper;

125
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java

@ -104,8 +104,8 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
@ -118,10 +118,9 @@ import com.google.common.collect.Lists;
* executor service impl
*/
@Service
@Slf4j
public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorService {
private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceImpl.class);
@Autowired
private ProjectMapper projectMapper;
@ -214,7 +213,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
}
// timeout is invalid
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) {
logger.warn("Parameter timeout is invalid, timeout:{}.", timeout);
log.warn("Parameter timeout is invalid, timeout:{}.", timeout);
putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR);
return result;
}
@ -230,7 +229,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
// check current version whether include startNodeList
checkStartNodeList(startNodeList, processDefinitionCode, processDefinition.getVersion());
if (!checkTenantSuitable(processDefinition)) {
logger.error(
log.error(
"There is not any valid tenant for the process definition, processDefinitionCode:{}, processDefinitionName:{}.",
processDefinition.getCode(), processDefinition.getName());
putMsg(result, Status.TENANT_NOT_SUITABLE);
@ -264,12 +263,12 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
if (create > 0) {
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
logger.info("Create command complete, processDefinitionCode:{}, commandCount:{}.",
log.info("Create command complete, processDefinitionCode:{}, commandCount:{}.",
processDefinition.getCode(), create);
result.put(Constants.DATA_LIST, triggerCode);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Start process instance failed because create command error, processDefinitionCode:{}.",
log.error("Start process instance failed because create command error, processDefinitionCode:{}.",
processDefinition.getCode());
putMsg(result, Status.START_PROCESS_INSTANCE_ERROR);
}
@ -288,7 +287,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
// no master
if (masterServers.isEmpty()) {
logger.error("Master does not exist.");
log.error("Master does not exist.");
putMsg(result, Status.MASTER_NOT_EXISTS);
return false;
}
@ -311,7 +310,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
if (cronMap.containsKey(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST)) {
String[] stringDates = cronMap.get(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST).split(COMMA);
if (stringDates.length > SCHEDULE_TIME_MAX_LENGTH) {
logger.warn("Parameter cornTime is bigger than {}.", SCHEDULE_TIME_MAX_LENGTH);
log.warn("Parameter cornTime is bigger than {}.", SCHEDULE_TIME_MAX_LENGTH);
return false;
}
}
@ -425,7 +424,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
return result;
}
if (!checkTenantSuitable(processDefinition)) {
logger.error(
log.error(
"There is not any valid tenant for the process definition, processDefinitionId:{}, processDefinitionCode:{}, ",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.TENANT_NOT_SUITABLE);
@ -461,7 +460,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
break;
case STOP:
if (processInstance.getState() == WorkflowExecutionStatus.READY_STOP) {
logger.warn("Process instance status is already {}, processInstanceName:{}.",
log.warn("Process instance status is already {}, processInstanceName:{}.",
WorkflowExecutionStatus.READY_STOP.getDesc(), processInstance.getName());
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(),
processInstance.getState());
@ -473,7 +472,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
break;
case PAUSE:
if (processInstance.getState() == WorkflowExecutionStatus.READY_PAUSE) {
logger.warn("Process instance status is already {}, processInstanceName:{}.",
log.warn("Process instance status is already {}, processInstanceName:{}.",
WorkflowExecutionStatus.READY_STOP.getDesc(), processInstance.getName());
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(),
processInstance.getState());
@ -483,7 +482,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
}
break;
default:
logger.warn("Unknown execute type for process instance, processInstanceId:{}.",
log.warn("Unknown execute type for process instance, processInstanceId:{}.",
processInstance.getId());
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type");
@ -537,7 +536,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
.orElseThrow(() -> new ServiceException(Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId));
if (!processInstance.getState().isFinished()) {
logger.error("Can not execute task for process instance which is not finished, processInstanceId:{}.",
log.error("Can not execute task for process instance which is not finished, processInstanceId:{}.",
processInstanceId);
putMsg(response, Status.WORKFLOW_INSTANCE_IS_NOT_FINISHED);
return response;
@ -551,7 +550,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
processInstance.getProcessDefinitionVersion());
if (!checkTenantSuitable(processDefinition)) {
logger.error(
log.error(
"There is not any valid tenant for the process definition, processDefinitionId:{}, processDefinitionCode:{}, ",
processDefinition.getId(), processDefinition.getName());
putMsg(response, Status.TENANT_NOT_SUITABLE);
@ -564,7 +563,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
try {
startNodeListLong = Long.parseLong(startNodeList);
} catch (NumberFormatException e) {
logger.error("startNodeList is not a number");
log.error("startNodeList is not a number");
putMsg(response, Status.REQUEST_PARAMS_NOT_VALID_ERROR, startNodeList);
return response;
}
@ -593,7 +592,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
command.setTaskDependType(taskDependType);
if (!commandService.verifyIsNeedCreateCommand(command)) {
logger.warn(
log.warn(
"Process instance is executing the command, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}.",
processDefinition.getCode(), processDefinition.getVersion(), processInstanceId);
putMsg(response, Status.PROCESS_INSTANCE_EXECUTING_COMMAND,
@ -601,16 +600,16 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
return response;
}
logger.info("Creating command, commandInfo:{}.", command);
log.info("Creating command, commandInfo:{}.", command);
int create = commandService.createCommand(command);
if (create > 0) {
logger.info("Create {} command complete, processDefinitionCode:{}, processDefinitionVersion:{}.",
log.info("Create {} command complete, processDefinitionCode:{}, processDefinitionVersion:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode(),
processDefinition.getVersion());
putMsg(response, Status.SUCCESS);
} else {
logger.error(
log.error(
"Execute process instance failed because create {} command error, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode(),
processDefinition.getVersion(),
@ -628,7 +627,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
// check process instance exist
ProcessInstance processInstance = processInstanceMapper.selectById(taskGroupQueue.getProcessId());
if (processInstance == null) {
logger.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.",
log.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.",
taskGroupQueue.getProjectCode(), taskGroupQueue.getProcessId());
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, taskGroupQueue.getProcessId());
return result;
@ -735,7 +734,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
// determine whether the process is normal
if (update > 0) {
logger.info("Process instance state is updated to {} in database, processInstanceName:{}.",
log.info("Process instance state is updated to {} in database, processInstanceName:{}.",
executionStatus.getDesc(), processInstance.getName());
// directly send the process instance state change event to target master, not guarantee the event send
// success
@ -745,7 +744,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
stateEventCallbackService.sendResult(host, workflowStateEventChangeCommand.convert2Command());
putMsg(result, Status.SUCCESS);
} else {
logger.error("Process instance state update error, processInstanceName:{}.", processInstance.getName());
log.error("Process instance state update error, processInstanceName:{}.", processInstance.getName());
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR);
}
return result;
@ -760,14 +759,14 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
private Map<String, Object> forceStart(ProcessInstance processInstance, TaskGroupQueue taskGroupQueue) {
Map<String, Object> result = new HashMap<>();
if (taskGroupQueue.getStatus() != TaskGroupQueueStatus.WAIT_QUEUE) {
logger.warn("Task group queue already starts, taskGroupQueueId:{}.", taskGroupQueue.getId());
log.warn("Task group queue already starts, taskGroupQueueId:{}.", taskGroupQueue.getId());
putMsg(result, Status.TASK_GROUP_QUEUE_ALREADY_START);
return result;
}
taskGroupQueue.setForceStart(Flag.YES.getCode());
processService.updateTaskGroupQueue(taskGroupQueue);
logger.info("Sending force start command to master.");
log.info("Sending force start command to master.");
processService.sendStartTask2Master(processInstance, taskGroupQueue.getTaskId(),
org.apache.dolphinscheduler.remote.command.CommandType.TASK_FORCE_STATE_EVENT_REQUEST);
putMsg(result, Status.SUCCESS);
@ -805,22 +804,22 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
command.setProcessInstanceId(instanceId);
command.setTestFlag(testFlag);
if (!commandService.verifyIsNeedCreateCommand(command)) {
logger.warn(
log.warn(
"Process instance is executing the command, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}.",
processDefinitionCode, processVersion, instanceId);
putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, String.valueOf(processDefinitionCode));
return result;
}
logger.info("Creating command, commandInfo:{}.", command);
log.info("Creating command, commandInfo:{}.", command);
int create = commandService.createCommand(command);
if (create > 0) {
logger.info("Create {} command complete, processDefinitionCode:{}, processDefinitionVersion:{}.",
log.info("Create {} command complete, processDefinitionCode:{}, processDefinitionVersion:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode(), processVersion);
putMsg(result, Status.SUCCESS);
} else {
logger.error(
log.error(
"Execute process instance failed because create {} command error, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode(), processVersion,
instanceId);
@ -843,7 +842,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
logger.error("Process definition is not be found, processDefinitionCode:{}.", processDefinitionCode);
log.error("Process definition is not be found, processDefinitionCode:{}.", processDefinitionCode);
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "processDefinitionCode");
return result;
}
@ -858,7 +857,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
* if there is no online process, exit directly
*/
if (processDefinitionTmp.getReleaseState() != ReleaseState.ONLINE) {
logger.warn("Subprocess definition {} of process definition {} is not {}.",
log.warn("Subprocess definition {} of process definition {} is not {}.",
processDefinitionTmp.getName(),
processDefinition.getName(), ReleaseState.ONLINE.getDescp());
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinitionTmp.getName());
@ -942,7 +941,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
// determine whether to complement
if (commandType == CommandType.COMPLEMENT_DATA) {
if (schedule == null || StringUtils.isEmpty(schedule)) {
logger.error("Create {} type command error because parameter schedule is invalid.",
log.error("Create {} type command error because parameter schedule is invalid.",
command.getCommandType().getDescp());
return 0;
}
@ -950,7 +949,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
return 0;
}
try {
logger.info("Start to create {} command, processDefinitionCode:{}.",
log.info("Start to create {} command, processDefinitionCode:{}.",
command.getCommandType().getDescp(), processDefineCode);
return createComplementCommandList(triggerCode, schedule, runMode, command, expectedParallelismNumber,
complementDependentMode);
@ -1000,18 +999,18 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
}
switch (runMode) {
case RUN_MODE_SERIAL: {
logger.info("RunMode of {} command is serial run, processDefinitionCode:{}.",
log.info("RunMode of {} command is serial run, processDefinitionCode:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
if (StringUtils.isNotEmpty(dateList)) {
cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST, dateList);
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
logger.info("Creating command, commandInfo:{}.", command);
log.info("Creating command, commandInfo:{}.", command);
createCount = commandService.createCommand(command);
if (createCount > 0) {
logger.info("Create {} command complete, processDefinitionCode:{}",
log.info("Create {} command complete, processDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
} else {
logger.error("Create {} command error, processDefinitionCode:{}",
log.error("Create {} command error, processDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
}
}
@ -1019,13 +1018,13 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_START_DATE, startDate);
cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_END_DATE, endDate);
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
logger.info("Creating command, commandInfo:{}.", command);
log.info("Creating command, commandInfo:{}.", command);
createCount = commandService.createCommand(command);
if (createCount > 0) {
logger.info("Create {} command complete, processDefinitionCode:{}",
log.info("Create {} command complete, processDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
} else {
logger.error("Create {} command error, processDefinitionCode:{}",
log.error("Create {} command error, processDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
}
// dependent process definition
@ -1033,11 +1032,11 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
command.getProcessDefinitionCode());
if (schedules.isEmpty() || complementDependentMode == ComplementDependentMode.OFF_MODE) {
logger.info(
log.info(
"Complement dependent mode is off mode or Scheduler is empty, so skip create complement dependent command, processDefinitionCode:{}.",
command.getProcessDefinitionCode());
} else {
logger.info(
log.info(
"Complement dependent mode is all dependent and Scheduler is not empty, need create complement dependent command, processDefinitionCode:{}.",
command.getProcessDefinitionCode());
dependentProcessDefinitionCreateCount += createComplementDependentCommand(schedules, command);
@ -1049,7 +1048,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
break;
}
case RUN_MODE_PARALLEL: {
logger.info("RunMode of {} command is parallel run, processDefinitionCode:{}.",
log.info("RunMode of {} command is parallel run, processDefinitionCode:{}.",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
if (startDate != null && endDate != null) {
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionCode(
@ -1064,7 +1063,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
if (expectedParallelismNumber != null && expectedParallelismNumber != 0) {
createCount = Math.min(createCount, expectedParallelismNumber);
}
logger.info("Complement command run in parallel mode, current expectedParallelismNumber:{}.",
log.info("Complement command run in parallel mode, current expectedParallelismNumber:{}.",
createCount);
// Distribute the number of tasks equally to each command.
@ -1090,22 +1089,22 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_END_DATE,
DateUtils.dateToString(listDate.get(endDateIndex)));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
logger.info("Creating command, commandInfo:{}.", command);
log.info("Creating command, commandInfo:{}.", command);
if (commandService.createCommand(command) > 0) {
logger.info("Create {} command complete, processDefinitionCode:{}",
log.info("Create {} command complete, processDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
triggerRelationService.saveTriggerToDb(ApiTriggerType.COMMAND, triggerCode,
command.getId());
} else {
logger.error("Create {} command error, processDefinitionCode:{}",
log.error("Create {} command error, processDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
}
if (schedules.isEmpty() || complementDependentMode == ComplementDependentMode.OFF_MODE) {
logger.info(
log.info(
"Complement dependent mode is off mode or Scheduler is empty, so skip create complement dependent command, processDefinitionCode:{}.",
command.getProcessDefinitionCode());
} else {
logger.info(
log.info(
"Complement dependent mode is all dependent and Scheduler is not empty, need create complement dependent command, processDefinitionCode:{}.",
command.getProcessDefinitionCode());
dependentProcessDefinitionCreateCount +=
@ -1121,17 +1120,17 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
if (expectedParallelismNumber != null && expectedParallelismNumber != 0) {
createCount = Math.min(createCount, expectedParallelismNumber);
}
logger.info("Complement command run in parallel mode, current expectedParallelismNumber:{}.",
log.info("Complement command run in parallel mode, current expectedParallelismNumber:{}.",
createCount);
for (List<String> stringDate : Lists.partition(listDate, createCount)) {
cmdParam.put(CMD_PARAM_COMPLEMENT_DATA_SCHEDULE_DATE_LIST, String.join(COMMA, stringDate));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
logger.info("Creating command, commandInfo:{}.", command);
log.info("Creating command, commandInfo:{}.", command);
if (commandService.createCommand(command) > 0) {
logger.info("Create {} command complete, processDefinitionCode:{}",
log.info("Create {} command complete, processDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
} else {
logger.error("Create {} command error, processDefinitionCode:{}",
log.error("Create {} command error, processDefinitionCode:{}",
command.getCommandType().getDescp(), command.getProcessDefinitionCode());
}
}
@ -1142,7 +1141,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
default:
break;
}
logger.info("Create complement command count:{}, Create dependent complement command count:{}", createCount,
log.info("Create complement command count:{}, Create dependent complement command count:{}", createCount,
dependentProcessDefinitionCreateCount);
return createCount;
}
@ -1157,7 +1156,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
try {
dependentCommand = (Command) BeanUtils.cloneBean(command);
} catch (Exception e) {
logger.error("Copy dependent command error.", e);
log.error("Copy dependent command error.", e);
return dependentProcessDefinitionCreateCount;
}
@ -1175,7 +1174,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
Map<String, String> cmdParam = JSONUtils.toMap(dependentCommand.getCommandParam());
cmdParam.put(CMD_PARAM_START_NODES, String.valueOf(dependentProcessDefinition.getTaskDefinitionCode()));
dependentCommand.setCommandParam(JSONUtils.toJsonString(cmdParam));
logger.info("Creating complement dependent command, commandInfo:{}.", command);
log.info("Creating complement dependent command, commandInfo:{}.", command);
dependentProcessDefinitionCreateCount += commandService.createCommand(dependentCommand);
}
@ -1255,13 +1254,13 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
return false;
}
if (start.isAfter(end)) {
logger.error(
log.error(
"Complement data parameter error, start time should be before end time, startDate:{}, endDate:{}.",
start, end);
return false;
}
} catch (Exception ex) {
logger.warn("Parse schedule time error, startDate:{}, endDate:{}.", startDate, endDate);
log.warn("Parse schedule time error, startDate:{}, endDate:{}.", startDate, endDate);
return false;
}
}
@ -1289,7 +1288,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
public WorkflowExecuteDto queryExecutingWorkflowByProcessInstanceId(Integer processInstanceId) {
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId).orElse(null);
if (processInstance == null) {
logger.error("Process instance does not exist, processInstanceId:{}.", processInstanceId);
log.error("Process instance does not exist, processInstanceId:{}.", processInstanceId);
return null;
}
Host host = new Host(processInstance.getHost());
@ -1298,7 +1297,7 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
org.apache.dolphinscheduler.remote.command.Command command =
stateEventCallbackService.sendSync(host, requestCommand.convert2Command());
if (command == null) {
logger.error("Query executing process instance from master error, processInstanceId:{}.",
log.error("Query executing process instance from master error, processInstanceId:{}.",
processInstanceId);
return null;
}
@ -1344,10 +1343,10 @@ public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorServ
org.apache.dolphinscheduler.remote.command.Command response =
stateEventCallbackService.sendSync(host, taskExecuteStartCommand.convert2Command());
if (response != null) {
logger.info("Send task execute start command complete, response is {}.", response);
log.info("Send task execute start command complete, response is {}.", response);
putMsg(result, Status.SUCCESS);
} else {
logger.error(
log.error(
"Start to execute stream task instance error, projectCode:{}, taskDefinitionCode:{}, taskVersion:{}.",
projectCode, taskDefinitionCode, taskDefinitionVersion);
putMsg(result, Status.START_TASK_INSTANCE_ERROR);

55
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/K8SNamespaceServiceImpl.java

@ -43,8 +43,8 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -55,10 +55,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* k8s namespace service impl
*/
@Service
@Slf4j
public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNamespaceService {
private static final Logger logger = LoggerFactory.getLogger(K8SNamespaceServiceImpl.class);
private static String resourceYaml = "apiVersion: v1\n"
+ "kind: ResourceQuota\n"
+ "metadata:\n"
@ -91,7 +90,7 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
public Result queryListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Result result = new Result();
if (!isAdmin(loginUser)) {
logger.warn("Only admin can query namespace list, current login user name:{}.", loginUser.getUserName());
log.warn("Only admin can query namespace list, current login user name:{}.", loginUser.getUserName());
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
@ -125,43 +124,43 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
Integer limitsMemory) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
logger.warn("Only admin can create K8s namespace, current login user name:{}.", loginUser.getUserName());
log.warn("Only admin can create K8s namespace, current login user name:{}.", loginUser.getUserName());
return result;
}
if (StringUtils.isEmpty(namespace)) {
logger.warn("Parameter namespace is empty.");
log.warn("Parameter namespace is empty.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.NAMESPACE);
return result;
}
if (clusterCode == null) {
logger.warn("Parameter clusterCode is null.");
log.warn("Parameter clusterCode is null.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.CLUSTER);
return result;
}
if (limitsCpu != null && limitsCpu < 0.0) {
logger.warn("Parameter limitsCpu is invalid.");
log.warn("Parameter limitsCpu is invalid.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.LIMITS_CPU);
return result;
}
if (limitsMemory != null && limitsMemory < 0) {
logger.warn("Parameter limitsMemory is invalid.");
log.warn("Parameter limitsMemory is invalid.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.LIMITS_MEMORY);
return result;
}
if (checkNamespaceExistInDb(namespace, clusterCode)) {
logger.warn("K8S namespace already exists.");
log.warn("K8S namespace already exists.");
putMsg(result, Status.K8S_NAMESPACE_EXIST, namespace, clusterCode);
return result;
}
Cluster cluster = clusterMapper.queryByClusterCode(clusterCode);
if (cluster == null) {
logger.error("Cluster does not exist, clusterCode:{}", clusterCode);
log.error("Cluster does not exist, clusterCode:{}", clusterCode);
putMsg(result, Status.CLUSTER_NOT_EXISTS, namespace, clusterCode);
return result;
}
@ -171,7 +170,7 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
code = CodeGenerateUtils.getInstance().genCode();
cluster.setCode(code);
} catch (CodeGenerateUtils.CodeGenerateException e) {
logger.error("Generate cluster code error.", e);
log.error("Generate cluster code error.", e);
}
if (code == 0L) {
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating cluster code");
@ -198,14 +197,14 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
String yamlStr = genDefaultResourceYaml(k8sNamespaceObj);
k8sClientService.upsertNamespaceAndResourceToK8s(k8sNamespaceObj, yamlStr);
} catch (Exception e) {
logger.error("Namespace create to k8s error", e);
log.error("Namespace create to k8s error", e);
putMsg(result, Status.K8S_CLIENT_OPS_ERROR, e.getMessage());
return result;
}
}
k8sNamespaceMapper.insert(k8sNamespaceObj);
logger.info("K8s namespace create complete, namespace:{}.", k8sNamespaceObj.getNamespace());
log.info("K8s namespace create complete, namespace:{}.", k8sNamespaceObj.getNamespace());
putMsg(result, Status.SUCCESS);
return result;
@ -225,25 +224,25 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
Integer limitsMemory) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
logger.warn("Only admin can update K8s namespace, current login user name:{}.", loginUser.getUserName());
log.warn("Only admin can update K8s namespace, current login user name:{}.", loginUser.getUserName());
return result;
}
if (limitsCpu != null && limitsCpu < 0.0) {
logger.warn("Parameter limitsCpu is invalid.");
log.warn("Parameter limitsCpu is invalid.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.LIMITS_CPU);
return result;
}
if (limitsMemory != null && limitsMemory < 0) {
logger.warn("Parameter limitsMemory is invalid.");
log.warn("Parameter limitsMemory is invalid.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.LIMITS_MEMORY);
return result;
}
K8sNamespace k8sNamespaceObj = k8sNamespaceMapper.selectById(id);
if (k8sNamespaceObj == null) {
logger.error("K8s namespace does not exist, namespaceId:{}.", id);
log.error("K8s namespace does not exist, namespaceId:{}.", id);
putMsg(result, Status.K8S_NAMESPACE_NOT_EXIST, id);
return result;
}
@ -258,14 +257,14 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
String yamlStr = genDefaultResourceYaml(k8sNamespaceObj);
k8sClientService.upsertNamespaceAndResourceToK8s(k8sNamespaceObj, yamlStr);
} catch (Exception e) {
logger.error("Namespace update to k8s error", e);
log.error("Namespace update to k8s error", e);
putMsg(result, Status.K8S_CLIENT_OPS_ERROR, e.getMessage());
return result;
}
}
// update to db
k8sNamespaceMapper.updateById(k8sNamespaceObj);
logger.info("K8s namespace update complete, namespace:{}.", k8sNamespaceObj.getNamespace());
log.info("K8s namespace update complete, namespace:{}.", k8sNamespaceObj.getNamespace());
putMsg(result, Status.SUCCESS);
return result;
}
@ -281,19 +280,19 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
public Result<Object> verifyNamespaceK8s(String namespace, Long clusterCode) {
Result<Object> result = new Result<>();
if (StringUtils.isEmpty(namespace)) {
logger.warn("Parameter namespace is empty.");
log.warn("Parameter namespace is empty.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.NAMESPACE);
return result;
}
if (clusterCode == null) {
logger.warn("Parameter clusterCode is null.");
log.warn("Parameter clusterCode is null.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.CLUSTER);
return result;
}
if (checkNamespaceExistInDb(namespace, clusterCode)) {
logger.warn("K8S namespace already exists.");
log.warn("K8S namespace already exists.");
putMsg(result, Status.K8S_NAMESPACE_EXIST, namespace, clusterCode);
return result;
}
@ -313,13 +312,13 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
public Map<String, Object> deleteNamespaceById(User loginUser, int id) {
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
logger.warn("Only admin can delete K8s namespace, current login user name:{}.", loginUser.getUserName());
log.warn("Only admin can delete K8s namespace, current login user name:{}.", loginUser.getUserName());
return result;
}
K8sNamespace k8sNamespaceObj = k8sNamespaceMapper.selectById(id);
if (k8sNamespaceObj == null) {
logger.error("K8s namespace does not exist, namespaceId:{}.", id);
log.error("K8s namespace does not exist, namespaceId:{}.", id);
putMsg(result, Status.K8S_NAMESPACE_NOT_EXIST, id);
return result;
}
@ -327,13 +326,13 @@ public class K8SNamespaceServiceImpl extends BaseServiceImpl implements K8sNames
try {
k8sClientService.deleteNamespaceToK8s(k8sNamespaceObj.getNamespace(), k8sNamespaceObj.getClusterCode());
} catch (RemotingException e) {
logger.error("Namespace delete in k8s error, namespaceId:{}.", id, e);
log.error("Namespace delete in k8s error, namespaceId:{}.", id, e);
putMsg(result, Status.K8S_CLIENT_OPS_ERROR, id);
return result;
}
}
k8sNamespaceMapper.deleteById(id);
logger.info("K8s namespace delete complete, namespace:{}.", k8sNamespaceObj.getNamespace());
log.info("K8s namespace delete complete, namespace:{}.", k8sNamespaceObj.getNamespace());
putMsg(result, Status.SUCCESS);
return result;
}

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/LoggerServiceImpl.java

@ -42,8 +42,8 @@ import org.apache.commons.lang3.StringUtils;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -53,10 +53,9 @@ import com.google.common.primitives.Bytes;
* logger service impl
*/
@Service
@Slf4j
public class LoggerServiceImpl extends BaseServiceImpl implements LoggerService {
private static final Logger logger = LoggerFactory.getLogger(LoggerServiceImpl.class);
private static final String LOG_HEAD_FORMAT = "[LOG-PATH]: %s, [HOST]: %s%s";
@Autowired
@ -90,11 +89,11 @@ public class LoggerServiceImpl extends BaseServiceImpl implements LoggerService
TaskInstance taskInstance = taskInstanceDao.findTaskInstanceById(taskInstId);
if (taskInstance == null) {
logger.error("Task instance does not exist, taskInstanceId:{}.", taskInstId);
log.error("Task instance does not exist, taskInstanceId:{}.", taskInstId);
return Result.error(Status.TASK_INSTANCE_NOT_FOUND);
}
if (StringUtils.isBlank(taskInstance.getHost())) {
logger.error("Host of task instance is null, taskInstanceId:{}.", taskInstId);
log.error("Host of task instance is null, taskInstanceId:{}.", taskInstId);
return Result.error(Status.TASK_INSTANCE_HOST_IS_NULL);
}
Project project = projectMapper.queryProjectByTaskInstanceId(taskInstId);
@ -200,7 +199,7 @@ public class LoggerServiceImpl extends BaseServiceImpl implements LoggerService
private String queryLog(TaskInstance taskInstance, int skipLineNum, int limit) {
Host host = Host.of(taskInstance.getHost());
logger.info("Query task instance log, taskInstanceId:{}, taskInstanceName:{}, host:{}, logPath:{}, port:{}",
log.info("Query task instance log, taskInstanceId:{}, taskInstanceName:{}, host:{}, logPath:{}, port:{}",
taskInstance.getId(), taskInstance.getName(), host.getIp(), taskInstance.getLogPath(), host.getPort());
StringBuilder log = new StringBuilder();

7
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/MonitorServiceImpl.java

@ -34,8 +34,8 @@ import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -45,10 +45,9 @@ import com.google.common.collect.Sets;
* monitor service impl
*/
@Service
@Slf4j
public class MonitorServiceImpl extends BaseServiceImpl implements MonitorService {
public static final Logger logger = LoggerFactory.getLogger(MonitorServiceImpl.class);
@Autowired
private MonitorDBDao monitorDBDao;

236
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java

@ -156,9 +156,8 @@ import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.http.MediaType;
@ -177,10 +176,9 @@ import com.google.common.collect.Lists;
* process definition service impl
*/
@Service
@Slf4j
public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements ProcessDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class);
private static final String RELEASESTATE = "releaseState";
@Autowired
@ -292,13 +290,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
return result;
}
if (checkDescriptionLength(description)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
throw new ServiceException(Status.DESCRIPTION_TOO_LONG_ERROR);
}
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
logger.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.",
log.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.",
definition.getName(), definition.getCode());
throw new ServiceException(Status.PROCESS_DEFINITION_NAME_EXIST, name);
}
@ -308,7 +306,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
logger.error("Tenant does not exist.");
log.error("Tenant does not exist.");
throw new ServiceException(Status.TENANT_NOT_EXIST);
}
tenantId = tenant.getId();
@ -406,29 +404,29 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(),
taskDefinitionLogs, Boolean.TRUE);
if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) {
logger.info("The task has not changed, so skip");
log.info("The task has not changed, so skip");
}
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
logger.error("Save task definition error.");
log.error("Save task definition error.");
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion == 0) {
logger.error("Save process definition error, processCode:{}.", processDefinition.getCode());
log.error("Save process definition error, processCode:{}.", processDefinition.getCode());
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR);
} else {
logger.info("Save process definition complete, processCode:{}, processVersion:{}.",
log.info("Save process definition complete, processCode:{}, processVersion:{}.",
processDefinition.getCode(), insertVersion);
}
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
processDefinition.getCode(),
insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE);
if (insertResult != Constants.EXIT_CODE_SUCCESS) {
logger.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.",
log.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
} else {
logger.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.",
log.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion);
}
@ -443,7 +441,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
try {
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
if (CollectionUtils.isEmpty(taskDefinitionLogs)) {
logger.error("Generate task definition list failed, the given taskDefinitionJson is invalided: {}",
log.error("Generate task definition list failed, the given taskDefinitionJson is invalided: {}",
taskDefinitionJson);
throw new ServiceException(Status.DATA_IS_NOT_VALID, taskDefinitionJson);
}
@ -453,7 +451,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
.taskParams(taskDefinitionLog.getTaskParams())
.dependence(taskDefinitionLog.getDependence())
.build())) {
logger.error(
log.error(
"Generate task definition list failed, the given task definition parameter is invalided, taskName: {}, taskDefinition: {}",
taskDefinitionLog.getName(), taskDefinitionLog);
throw new ServiceException(Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName());
@ -463,7 +461,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
} catch (ServiceException ex) {
throw ex;
} catch (Exception e) {
logger.error("Generate task definition list failed, meet an unknown exception", e);
log.error("Generate task definition list failed, meet an unknown exception", e);
throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR);
}
}
@ -474,7 +472,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
List<ProcessTaskRelationLog> taskRelationList =
JSONUtils.toList(taskRelationJson, ProcessTaskRelationLog.class);
if (CollectionUtils.isEmpty(taskRelationList)) {
logger.error("Generate task relation list failed the taskRelation list is empty, taskRelationJson: {}",
log.error("Generate task relation list failed the taskRelation list is empty, taskRelationJson: {}",
taskRelationJson);
throw new ServiceException(Status.DATA_IS_NOT_VALID);
}
@ -490,19 +488,19 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
Collection<Long> codes = CollectionUtils.subtract(postTaskCodes, taskNodeCodes);
if (CollectionUtils.isNotEmpty(codes)) {
String taskCodes = StringUtils.join(codes, Constants.COMMA);
logger.error("Task definitions do not exist, taskCodes:{}.", taskCodes);
log.error("Task definitions do not exist, taskCodes:{}.", taskCodes);
throw new ServiceException(Status.TASK_DEFINE_NOT_EXIST, taskCodes);
}
}
if (graphHasCycle(taskNodeList)) {
logger.error("Process DAG has cycle.");
log.error("Process DAG has cycle.");
throw new ServiceException(Status.PROCESS_NODE_HAS_CYCLE);
}
// check whether the task relation json is normal
for (ProcessTaskRelationLog processTaskRelationLog : taskRelationList) {
if (processTaskRelationLog.getPostTaskCode() == 0) {
logger.error("The post_task_code or post_task_version of processTaskRelationLog can not be zero, " +
log.error("The post_task_code or post_task_version of processTaskRelationLog can not be zero, " +
"processTaskRelationLogId:{}.", processTaskRelationLog.getId());
throw new ServiceException(Status.CHECK_PROCESS_TASK_RELATION_ERROR);
}
@ -511,7 +509,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
} catch (ServiceException ex) {
throw ex;
} catch (Exception e) {
logger.error("Check task relation list error, meet an unknown exception, given taskRelationJson: {}",
log.error("Check task relation list error, meet an unknown exception, given taskRelationJson: {}",
taskRelationJson, e);
throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR);
}
@ -686,7 +684,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processCode:{}.", code);
log.error("Process definition does not exist, processCode:{}.", code);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
} else {
Tenant tenant = tenantMapper.queryById(processDefinition.getTenantId());
@ -738,7 +736,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineName(projectCode, name);
if (processDefinition == null) {
logger.error("Process definition does not exist, projectCode:{}.", projectCode);
log.error("Process definition does not exist, projectCode:{}.", projectCode);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, name);
} else {
DagData dagData = processService.genDagData(processDefinition);
@ -790,7 +788,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
if (checkDescriptionLength(description)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -801,7 +799,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
logger.error("Tenant does not exist.");
log.error("Tenant does not exist.");
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
@ -811,13 +809,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
// check process definition exists
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processCode:{}.", code);
log.error("Process definition does not exist, processCode:{}.", code);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
// online can not permit edit
logger.warn("Process definition is not allowed to be modified due to {}, processDefinitionCode:{}.",
log.warn("Process definition is not allowed to be modified due to {}, processDefinitionCode:{}.",
ReleaseState.ONLINE.getDescp(), processDefinition.getCode());
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName());
return result;
@ -826,7 +824,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
logger.warn("Process definition with the same name already exists, processDefinitionCode:{}.",
log.warn("Process definition with the same name already exists, processDefinitionCode:{}.",
definition.getCode());
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
@ -865,7 +863,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
taskDepMsg.ifPresent(sb::append);
}
if (sb.length() != 0) {
logger.error("Task cannot be deleted because it is dependent");
log.error("Task cannot be deleted because it is dependent");
throw new ServiceException(sb.toString());
}
}
@ -881,10 +879,10 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
int saveTaskResult = processService.saveTaskDefine(loginUser, processDefinition.getProjectCode(),
taskDefinitionLogs, Boolean.TRUE);
if (saveTaskResult == Constants.EXIT_CODE_SUCCESS) {
logger.info("The task has not changed, so skip");
log.info("The task has not changed, so skip");
}
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
logger.error("Update task definitions error, projectCode:{}, processCode:{}.",
log.error("Update task definitions error, projectCode:{}, processCode:{}.",
processDefinition.getProjectCode(), processDefinition.getCode());
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
@ -911,17 +909,17 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
isChange = true;
}
if (isChange) {
logger.info("Process definition needs to be updated, projectCode:{}, processCode:{}, processVersion:{}.",
log.info("Process definition needs to be updated, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), processDefinition.getVersion());
processDefinition.setUpdateTime(new Date());
int insertVersion =
processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion <= 0) {
logger.error("Update process definition error, processCode:{}.", processDefinition.getCode());
log.error("Update process definition error, processCode:{}.", processDefinition.getCode());
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
} else {
logger.info("Update process definition complete, processCode:{}, processVersion:{}.",
log.info("Update process definition complete, processCode:{}, processVersion:{}.",
processDefinition.getCode(), insertVersion);
}
@ -929,20 +927,20 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
logger.info(
log.info(
"Update process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
logger.error("Update process task relations error, projectCode:{}, processCode:{}, processVersion:{}.",
log.error("Update process task relations error, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion);
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
saveOtherRelation(loginUser, processDefinition, result, otherParamsJson);
} else {
logger.info(
log.info(
"Process definition does not need to be updated because there is no change, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), processDefinition.getVersion());
putMsg(result, Status.SUCCESS);
@ -979,7 +977,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
putMsg(result, Status.SUCCESS);
return result;
}
logger.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.",
log.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.",
processDefinition.getName(), processDefinition.getCode());
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name.trim());
return result;
@ -990,7 +988,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
public Map<String, Object> batchDeleteProcessDefinitionByCodes(User loginUser, long projectCode, String codes) {
Map<String, Object> result = new HashMap<>();
if (StringUtils.isEmpty(codes)) {
logger.error("Parameter processDefinitionCodes is empty, projectCode is {}.", projectCode);
log.error("Parameter processDefinitionCodes is empty, projectCode is {}.", projectCode);
putMsg(result, Status.PROCESS_DEFINITION_CODES_IS_EMPTY);
return result;
}
@ -1005,7 +1003,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
definitionCodes.stream().filter(code -> !queryCodes.contains(code)).collect(Collectors.toSet());
if (CollectionUtils.isNotEmpty(diffCode)) {
logger.error("Process definition does not exist, processCodes:{}.",
log.error("Process definition does not exist, processCodes:{}.",
diffCode.stream().map(String::valueOf).collect(Collectors.joining(Constants.COMMA)));
throw new ServiceException(Status.BATCH_DELETE_PROCESS_DEFINE_BY_CODES_ERROR,
diffCode.stream().map(code -> code + "[process definition not exist]")
@ -1099,7 +1097,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
// we delete the workflow definition at last to avoid using transaction here.
// If delete error, we can call this interface again.
processDefinitionDao.deleteByWorkflowDefinitionCode(processDefinition.getCode());
logger.info("Success delete workflow definition workflowDefinitionCode: {}", code);
log.info("Success delete workflow definition workflowDefinitionCode: {}", code);
}
/**
@ -1131,7 +1129,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processDefinitionCode:{}.", code);
log.error("Process definition does not exist, processDefinitionCode:{}.", code);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
return result;
}
@ -1140,13 +1138,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
List<ProcessTaskRelation> relationList =
processService.findRelationByCode(code, processDefinition.getVersion());
if (CollectionUtils.isEmpty(relationList)) {
logger.warn("Process definition has no task relation, processDefinitionCode:{}.", code);
log.warn("Process definition has no task relation, processDefinitionCode:{}.", code);
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
processDefinition.setReleaseState(releaseState);
processDefinitionMapper.updateById(processDefinition);
logger.info("Set process definition online, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.info("Set process definition online, projectCode:{}, processDefinitionCode:{}.", projectCode,
code);
break;
case OFFLINE:
@ -1154,20 +1152,20 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
int updateProcess = processDefinitionMapper.updateById(processDefinition);
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(code);
if (updateProcess > 0) {
logger.info("Set process definition offline, projectCode:{}, processDefinitionCode:{}.",
log.info("Set process definition offline, projectCode:{}, processDefinitionCode:{}.",
projectCode, code);
if (schedule != null) {
// set status
schedule.setReleaseState(releaseState);
int updateSchedule = scheduleMapper.updateById(schedule);
if (updateSchedule == 0) {
logger.error(
log.error(
"Set schedule offline error, projectCode:{}, processDefinitionCode:{}, scheduleId:{}",
projectCode, code, schedule.getId());
putMsg(result, Status.OFFLINE_SCHEDULE_ERROR);
throw new ServiceException(Status.OFFLINE_SCHEDULE_ERROR);
} else {
logger.info("Set schedule offline, projectCode:{}, processDefinitionCode:{}, scheduleId:{}",
log.info("Set schedule offline, projectCode:{}, processDefinitionCode:{}, scheduleId:{}",
projectCode, code, schedule.getId());
}
schedulerService.deleteSchedule(project.getId(), schedule.getId());
@ -1190,7 +1188,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
public void batchExportProcessDefinitionByCodes(User loginUser, long projectCode, String codes,
HttpServletResponse response) {
if (StringUtils.isEmpty(codes)) {
logger.warn("Process definition codes to be exported is empty.");
log.warn("Process definition codes to be exported is empty.");
return;
}
Project project = projectMapper.queryByCode(projectCode);
@ -1204,7 +1202,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
.collect(Collectors.toSet());
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet);
if (CollectionUtils.isEmpty(processDefinitionList)) {
logger.error("Process definitions to be exported do not exist, processDefinitionCodes:{}.", defineCodeSet);
log.error("Process definitions to be exported do not exist, processDefinitionCodes:{}.", defineCodeSet);
return;
}
// check processDefinition exist in project
@ -1213,10 +1211,10 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
List<DagDataSchedule> dagDataSchedules =
processDefinitionListInProject.stream().map(this::exportProcessDagData).collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(dagDataSchedules)) {
logger.info("Start download process definition file, processDefinitionCodes:{}.", defineCodeSet);
log.info("Start download process definition file, processDefinitionCodes:{}.", defineCodeSet);
downloadProcessDefinitionFile(response, dagDataSchedules);
} else {
logger.error("There is no exported process dag data.");
log.error("There is no exported process dag data.");
}
}
@ -1234,20 +1232,20 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
buff.flush();
buff.close();
} catch (IOException e) {
logger.warn("Export process definition fail", e);
log.warn("Export process definition fail", e);
} finally {
if (null != buff) {
try {
buff.close();
} catch (Exception e) {
logger.warn("Buffer does not close", e);
log.warn("Buffer does not close", e);
}
}
if (null != out) {
try {
out.close();
} catch (Exception e) {
logger.warn("Output stream does not close", e);
log.warn("Output stream does not close", e);
}
}
}
@ -1290,7 +1288,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
// check file content
if (CollectionUtils.isEmpty(dagDataScheduleList)) {
logger.warn("Process definition file content is empty.");
log.warn("Process definition file content is empty.");
putMsg(result, Status.DATA_IS_NULL, "fileContent");
return result;
}
@ -1408,7 +1406,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
dataSource = queryDatasourceByNameAndUser(datasourceName, loginUser);
}
if (dataSource == null) {
logger.error("Datasource does not found, may be its name is illegal.");
log.error("Datasource does not found, may be its name is illegal.");
putMsg(result, Status.DATASOURCE_NAME_ILLEGAL);
return result;
}
@ -1433,7 +1431,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
}
} catch (Exception e) {
logger.error("Import process definition error.", e);
log.error("Import process definition error.", e);
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
return result;
}
@ -1538,7 +1536,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
try {
processDefinition.setCode(CodeGenerateUtils.getInstance().genCode());
} catch (CodeGenerateException e) {
logger.error(
log.error(
"Save process definition error because generate process definition code error, projectCode:{}.",
projectCode, e);
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
@ -1563,7 +1561,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
taskCodeMap.put(taskDefinitionLog.getCode(), code);
taskDefinitionLog.setCode(code);
} catch (CodeGenerateException e) {
logger.error("Generate task definition code error, projectCode:{}, processDefinitionCode:{}",
log.error("Generate task definition code error, projectCode:{}, processDefinitionCode:{}",
projectCode, processDefinition.getCode(), e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code");
return false;
@ -1573,7 +1571,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
int insert = taskDefinitionMapper.batchInsert(taskDefinitionLogList);
int logInsert = taskDefinitionLogMapper.batchInsert(taskDefinitionLogList);
if ((logInsert & insert) == 0) {
logger.error("Save task definition error, projectCode:{}, processDefinitionCode:{}", projectCode,
log.error("Save task definition error, projectCode:{}, processDefinitionCode:{}", projectCode,
processDefinition.getCode());
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
@ -1617,7 +1615,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
putMsg(createDagResult, Status.SUCCESS);
} else {
result.putAll(createDagResult);
logger.error("Import process definition error, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Import process definition error, projectCode:{}, processDefinitionCode:{}.", projectCode,
processDefinition.getCode());
throw new ServiceException(Status.IMPORT_PROCESS_DEFINE_ERROR);
}
@ -1631,7 +1629,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
schedule.setUpdateTime(now);
int scheduleInsert = scheduleMapper.insert(schedule);
if (0 == scheduleInsert) {
logger.error(
log.error(
"Import process definition error due to save schedule fail, projectCode:{}, processDefinitionCode:{}.",
projectCode, processDefinition.getCode());
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
@ -1639,7 +1637,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
}
logger.info("Import process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.info("Import process definition complete, projectCode:{}, processDefinitionCode:{}.", projectCode,
processDefinition.getCode());
return true;
}
@ -1649,17 +1647,17 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
*/
private boolean checkImportanceParams(DagDataSchedule dagDataSchedule, Map<String, Object> result) {
if (dagDataSchedule.getProcessDefinition() == null) {
logger.warn("Process definition is null.");
log.warn("Process definition is null.");
putMsg(result, Status.DATA_IS_NULL, "ProcessDefinition");
return false;
}
if (CollectionUtils.isEmpty(dagDataSchedule.getTaskDefinitionList())) {
logger.warn("Task definition list is null.");
log.warn("Task definition list is null.");
putMsg(result, Status.DATA_IS_NULL, "TaskDefinitionList");
return false;
}
if (CollectionUtils.isEmpty(dagDataSchedule.getProcessTaskRelationList())) {
logger.warn("Process task relation list is null.");
log.warn("Process task relation list is null.");
putMsg(result, Status.DATA_IS_NULL, "ProcessTaskRelationList");
return false;
}
@ -1694,7 +1692,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
Map<String, Object> result = new HashMap<>();
try {
if (processTaskRelationJson == null) {
logger.error("Process task relation data is null.");
log.error("Process task relation data is null.");
putMsg(result, Status.DATA_IS_NOT_VALID, processTaskRelationJson);
return result;
}
@ -1705,14 +1703,14 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
List<TaskNode> taskNodes = processService.transformTask(taskRelationList, taskDefinitionLogsList);
if (CollectionUtils.isEmpty(taskNodes)) {
logger.error("Task node data is empty.");
log.error("Task node data is empty.");
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
// check has cycle
if (graphHasCycle(taskNodes)) {
logger.error("Process DAG has cycle.");
log.error("Process DAG has cycle.");
putMsg(result, Status.PROCESS_NODE_HAS_CYCLE);
return result;
}
@ -1725,7 +1723,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
.dependence(taskNode.getDependence())
.switchResult(taskNode.getSwitchResult())
.build())) {
logger.error("Task node {} parameter invalid.", taskNode.getName());
log.error("Task node {} parameter invalid.", taskNode.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName());
return result;
}
@ -1737,7 +1735,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
} catch (Exception e) {
result.put(Constants.STATUS, Status.INTERNAL_SERVER_ERROR_ARGS);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, e.getMessage());
logger.error(Status.INTERNAL_SERVER_ERROR_ARGS.getMsg(), e);
log.error(Status.INTERNAL_SERVER_ERROR_ARGS.getMsg(), e);
}
return result;
}
@ -1760,7 +1758,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processDefinitionCode:{}.", code);
log.error("Process definition does not exist, processDefinitionCode:{}.", code);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
return result;
}
@ -1792,7 +1790,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
.collect(Collectors.toSet());
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryByCodes(defineCodeSet);
if (CollectionUtils.isEmpty(processDefinitionList)) {
logger.error("Process definitions do not exist, codes:{}.", defineCodeSet);
log.error("Process definitions do not exist, codes:{}.", defineCodeSet);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes);
return result;
}
@ -1806,7 +1804,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
if (CollectionUtils.isEmpty(processDefinitionListInProject)) {
Set<Long> codesInProject = processDefinitionListInProject.stream()
.map(ProcessDefinition::getCode).collect(Collectors.toSet());
logger.error("Process definitions do not exist in project, projectCode:{}, processDefinitionsCodes:{}.",
log.error("Process definitions do not exist in project, projectCode:{}, processDefinitionsCodes:{}.",
processDefinitionListInProject.get(0).getProjectCode(), codesInProject);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, codes);
return result;
@ -1924,7 +1922,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (null == processDefinition || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, code:{}.", code);
log.error("Process definition does not exist, code:{}.", code);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
return result;
}
@ -2113,7 +2111,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
return result;
}
if (projectCode == targetProjectCode) {
logger.warn("Project code is same as target project code, projectCode:{}.", projectCode);
log.warn("Project code is same as target project code, projectCode:{}.", projectCode);
return result;
}
@ -2135,7 +2133,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
if (StringUtils.isEmpty(processDefinitionCodes)) {
logger.error("Parameter processDefinitionCodes is empty, projectCode is {}.", projectCode);
log.error("Parameter processDefinitionCodes is empty, projectCode is {}.", projectCode);
putMsg(result, Status.PROCESS_DEFINITION_CODES_IS_EMPTY, processDefinitionCodes);
return result;
}
@ -2176,7 +2174,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
processDefinition.setProjectCode(targetProjectCode);
String otherParamsJson = doOtherOperateProcess(loginUser, processDefinition);
if (isCopy) {
logger.info("Copy process definition...");
log.info("Copy process definition...");
List<TaskDefinitionLog> taskDefinitionLogs =
taskDefinitionLogDao.getTaskDefineLogList(processTaskRelations);
Map<Long, Long> taskCodeMap = new HashMap<>();
@ -2186,7 +2184,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
taskCodeMap.put(taskDefinitionLog.getCode(), taskCode);
taskDefinitionLog.setCode(taskCode);
} catch (CodeGenerateException e) {
logger.error("Generate task definition code error, projectCode:{}.", targetProjectCode, e);
log.error("Generate task definition code error, projectCode:{}.", targetProjectCode, e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS);
}
@ -2207,7 +2205,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
try {
processDefinition.setCode(CodeGenerateUtils.getInstance().genCode());
} catch (CodeGenerateException e) {
logger.error("Generate process definition code error, projectCode:{}.", targetProjectCode, e);
log.error("Generate process definition code error, projectCode:{}.", targetProjectCode, e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
throw new ServiceException(Status.INTERNAL_SERVER_ERROR_ARGS);
}
@ -2238,7 +2236,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
scheduleObj.setUpdateTime(date);
int insertResult = scheduleMapper.insert(scheduleObj);
if (insertResult != 1) {
logger.error("Schedule create error, processDefinitionCode:{}.", processDefinition.getCode());
log.error("Schedule create error, processDefinitionCode:{}.", processDefinition.getCode());
putMsg(result, Status.CREATE_SCHEDULE_ERROR);
throw new ServiceException(Status.CREATE_SCHEDULE_ERROR);
}
@ -2247,18 +2245,18 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
result.putAll(createDagDefine(loginUser, taskRelationList, processDefinition, taskDefinitionLogs,
otherParamsJson));
} catch (Exception e) {
logger.error("Copy process definition error, processDefinitionCode from {} to {}.",
log.error("Copy process definition error, processDefinitionCode from {} to {}.",
oldProcessDefinitionCode, processDefinition.getCode(), e);
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.COPY_PROCESS_DEFINITION_ERROR);
}
} else {
logger.info("Move process definition...");
log.info("Move process definition...");
try {
result.putAll(updateDagDefine(loginUser, taskRelationList, processDefinition, null,
Lists.newArrayList(), otherParamsJson));
} catch (Exception e) {
logger.error("Move process definition error, processDefinitionCode:{}.",
log.error("Move process definition error, processDefinitionCode:{}.",
processDefinition.getCode(), e);
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.MOVE_PROCESS_DEFINITION_ERROR);
@ -2315,7 +2313,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (Objects.isNull(processDefinition) || projectCode != processDefinition.getProjectCode()) {
logger.error(
log.error(
"Switch process definition error because it does not exist, projectCode:{}, processDefinitionCode:{}.",
projectCode, code);
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR, code);
@ -2325,7 +2323,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinitionLog processDefinitionLog =
processDefinitionLogMapper.queryByDefinitionCodeAndVersion(code, version);
if (Objects.isNull(processDefinitionLog)) {
logger.error(
log.error(
"Switch process definition error because version does not exist, projectCode:{}, processDefinitionCode:{}, version:{}.",
projectCode, code, version);
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR,
@ -2334,13 +2332,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
int switchVersion = processService.switchVersion(processDefinition, processDefinitionLog);
if (switchVersion <= 0) {
logger.error(
log.error(
"Switch process definition version error, projectCode:{}, processDefinitionCode:{}, version:{}.",
projectCode, code, version);
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
throw new ServiceException(Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
}
logger.info("Switch process definition version complete, projectCode:{}, processDefinitionCode:{}, version:{}.",
log.info("Switch process definition version complete, projectCode:{}, processDefinitionCode:{}, version:{}.",
projectCode, code, version);
putMsg(result, Status.SUCCESS);
return result;
@ -2360,18 +2358,18 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
if (!failedProcessList.isEmpty()) {
String failedProcess = String.join(",", failedProcessList);
if (isCopy) {
logger.error(
log.error(
"Copy process definition error, srcProjectCode:{}, targetProjectCode:{}, failedProcessList:{}.",
srcProjectCode, targetProjectCode, failedProcess);
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectCode, targetProjectCode, failedProcess);
} else {
logger.error(
log.error(
"Move process definition error, srcProjectCode:{}, targetProjectCode:{}, failedProcessList:{}.",
srcProjectCode, targetProjectCode, failedProcess);
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectCode, targetProjectCode, failedProcess);
}
} else {
logger.info("Batch {} process definition complete, srcProjectCode:{}, targetProjectCode:{}.",
log.info("Batch {} process definition complete, srcProjectCode:{}, targetProjectCode:{}.",
isCopy ? "copy" : "move", srcProjectCode, targetProjectCode);
putMsg(result, Status.SUCCESS);
}
@ -2438,11 +2436,11 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, code:{}.", code);
log.error("Process definition does not exist, code:{}.", code);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
} else {
if (processDefinition.getVersion() == version) {
logger.warn(
log.warn(
"Process definition can not be deleted due to version is being used, projectCode:{}, processDefinitionCode:{}, version:{}.",
projectCode, code, version);
putMsg(result, Status.MAIN_TABLE_USING_VERSION);
@ -2451,14 +2449,14 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
int deleteLog = processDefinitionLogMapper.deleteByProcessDefinitionCodeAndVersion(code, version);
int deleteRelationLog = processTaskRelationLogMapper.deleteByCode(code, version);
if (deleteLog == 0 || deleteRelationLog == 0) {
logger.error(
log.error(
"Delete process definition version error, projectCode:{}, processDefinitionCode:{}, version:{}.",
projectCode, code, version);
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_PROCESS_DEFINE_BY_CODE_ERROR);
}
deleteOtherRelation(project, result, processDefinition);
logger.info(
log.info(
"Delete process definition version complete, projectCode:{}, processDefinitionCode:{}, version:{}.",
projectCode, code, version);
putMsg(result, Status.SUCCESS);
@ -2499,14 +2497,14 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
return result;
}
if (checkDescriptionLength(description)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
logger.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.",
log.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.",
definition.getName(), definition.getCode());
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
@ -2516,7 +2514,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
logger.error("Tenant does not exist.");
log.error("Tenant does not exist.");
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
@ -2526,7 +2524,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
try {
processDefinitionCode = CodeGenerateUtils.getInstance().genCode();
} catch (CodeGenerateException e) {
logger.error("Generate process definition code error, projectCode:{}.", projectCode, e);
log.error("Generate process definition code error, projectCode:{}.", projectCode, e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS);
return result;
}
@ -2536,7 +2534,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
processDefinition.setExecutionType(executionType);
result = createEmptyDagDefine(loginUser, processDefinition);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
logger.error("Create empty process definition error.");
log.error("Create empty process definition error.");
return result;
}
@ -2558,7 +2556,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
Map<String, Object> result = new HashMap<>();
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion == 0) {
logger.error("Save process definition error, processDefinitionCode:{}.", processDefinition.getCode());
log.error("Save process definition error, processDefinitionCode:{}.", processDefinition.getCode());
putMsg(result, Status.CREATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_DEFINITION_ERROR);
}
@ -2578,13 +2576,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
Date now = new Date();
scheduleObj.setProcessDefinitionCode(processDefinition.getCode());
if (DateUtils.differSec(scheduleObj.getStartTime(), scheduleObj.getEndTime()) == 0) {
logger.warn("The schedule start time must not be the same as the end, processDefinitionCode:{}.",
log.warn("The schedule start time must not be the same as the end, processDefinitionCode:{}.",
processDefinition.getCode());
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
if (!org.quartz.CronExpression.isValidExpression(scheduleObj.getCrontab())) {
logger.error("CronExpression verify failure, cron:{}.", scheduleObj.getCrontab());
log.error("CronExpression verify failure, cron:{}.", scheduleObj.getCrontab());
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleObj.getCrontab());
return result;
}
@ -2647,7 +2645,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
return result;
}
if (checkDescriptionLength(description)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -2655,7 +2653,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
logger.error("Tenant does not exist.");
log.error("Tenant does not exist.");
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
@ -2665,13 +2663,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
// check process definition exists
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, code:{}.", code);
log.error("Process definition does not exist, code:{}.", code);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
// online can not permit edit
logger.warn("Process definition is not allowed to be modified due to {}, processDefinitionCode:{}.",
log.warn("Process definition is not allowed to be modified due to {}, processDefinitionCode:{}.",
ReleaseState.ONLINE.getDescp(), processDefinition.getCode());
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName());
return result;
@ -2680,7 +2678,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
// check whether the new process define name exist
ProcessDefinition definition = processDefinitionMapper.verifyByDefineName(project.getCode(), name);
if (definition != null) {
logger.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.",
log.warn("Process definition with the same name {} already exists, processDefinitionCode:{}.",
definition.getName(), definition.getCode());
putMsg(result, Status.PROCESS_DEFINITION_NAME_EXIST, name);
return result;
@ -2695,7 +2693,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
result = updateDagDefine(loginUser, taskRelationList, processDefinition, processDefinitionDeepCopy,
Lists.newArrayList(), otherParamsJson);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
logger.error("Update process definition basic info error.");
log.error("Update process definition basic info error.");
return result;
}
@ -2768,7 +2766,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
}
int insertVersion = this.saveProcessDefine(loginUser, processDefinitionUpdate);
if (insertVersion == 0) {
logger.error("Update process definition error, projectCode:{}, processDefinitionName:{}.",
log.error("Update process definition error, projectCode:{}, processDefinitionName:{}.",
processDefinitionUpdate.getCode(),
processDefinitionUpdate.getName());
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
@ -2776,11 +2774,11 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
int insertRelationVersion = this.saveTaskRelation(loginUser, processDefinitionUpdate, insertVersion);
if (insertRelationVersion != Constants.EXIT_CODE_SUCCESS) {
logger.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.",
log.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
logger.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.",
log.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion);
processDefinitionUpdate.setVersion(insertVersion);
return processDefinitionUpdate;
@ -2859,7 +2857,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
boolean isSame = CollectionUtils.isEqualCollection(processTaskRelationSet,
taskRelationSet);
if (isSame) {
logger.info("process task relations is non-existent, projectCode:{}, processCode:{}.",
log.info("process task relations is non-existent, projectCode:{}, processCode:{}.",
processDefinition.getProjectCode(), processDefinition.getCode());
return Constants.EXIT_CODE_SUCCESS;
}
@ -2939,13 +2937,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (processDefinition == null) {
logger.error("Process definition does not exist, code:{}.", code);
log.error("Process definition does not exist, code:{}.", code);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(code));
return result;
}
Schedule scheduleObj = scheduleMapper.queryByProcessDefinitionCode(code);
if (scheduleObj == null) {
logger.error("Schedule cron does not exist, processDefinitionCode:{}.", code);
log.error("Schedule cron does not exist, processDefinitionCode:{}.", code);
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, "processDefinitionCode:" + code);
return result;
}
@ -2954,7 +2952,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
List<ProcessTaskRelation> relationList =
processService.findRelationByCode(code, processDefinition.getVersion());
if (CollectionUtils.isEmpty(relationList)) {
logger.warn("Process definition has no task relation, processDefinitionCode:{}.", code);
log.warn("Process definition has no task relation, processDefinitionCode:{}.", code);
putMsg(result, Status.PROCESS_DAG_IS_EMPTY);
return result;
}
@ -2966,13 +2964,13 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
processDefinition.setReleaseState(releaseState);
int updateProcess = processDefinitionMapper.updateById(processDefinition);
if (updateProcess > 0) {
logger.info("Set schedule offline, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.",
log.info("Set schedule offline, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.",
projectCode, code, scheduleObj.getId());
// set status
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
int updateSchedule = scheduleMapper.updateById(scheduleObj);
if (updateSchedule == 0) {
logger.error(
log.error(
"Set schedule offline error, projectCode:{}, processDefinitionCode:{}, scheduleId:{}",
projectCode, code, scheduleObj.getId());
putMsg(result, Status.OFFLINE_SCHEDULE_ERROR);
@ -3035,7 +3033,7 @@ public class ProcessDefinitionServiceImpl extends BaseServiceImpl implements Pro
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(code);
if (Objects.isNull(processDefinition) || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
code);
putMsg(result, PROCESS_DEFINE_NOT_EXIST, code);
return result;

73
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessInstanceServiceImpl.java

@ -102,8 +102,8 @@ import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
@ -117,10 +117,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* process instance service impl
*/
@Service
@Slf4j
public class ProcessInstanceServiceImpl extends BaseServiceImpl implements ProcessInstanceService {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceServiceImpl.class);
public static final String TASK_TYPE = "taskType";
public static final String LOCAL_PARAMS_LIST = "localParamsList";
@ -263,7 +262,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
processInstance.getProcessDefinitionVersion());
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, projectCode:{}.", projectCode);
log.error("Process definition does not exist, projectCode:{}.", projectCode);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
} else {
Tenant tenant = tenantMapper.queryById(processDefinition.getTenantId());
@ -468,7 +467,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ProcessDefinition processDefinition =
processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition != null && projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, projectCode:{}, processDefinitionId:{}.", projectCode,
log.error("Process definition does not exist, projectCode:{}, processDefinitionId:{}.", projectCode,
processId);
putMsg(result, PROCESS_INSTANCE_NOT_EXIST, processId);
return result;
@ -491,7 +490,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
private void addDependResultForTaskList(User loginUser, List<TaskInstance> taskInstanceList) throws IOException {
for (TaskInstance taskInstance : taskInstanceList) {
if (TASK_TYPE_DEPENDENT.equalsIgnoreCase(taskInstance.getTaskType())) {
logger.info("DEPENDENT type task instance need to set dependent result, taskCode:{}, taskInstanceId:{}",
log.info("DEPENDENT type task instance need to set dependent result, taskCode:{}, taskInstanceId:{}",
taskInstance.getTaskCode(), taskInstance.getId());
Result<ResponseTaskLog> logResult = loggerService.queryLog(loginUser,
taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT);
@ -505,14 +504,14 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
}
@Override
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
public Map<String, DependResult> parseLogForDependentResult(String content) throws IOException {
Map<String, DependResult> resultMap = new HashMap<>();
if (StringUtils.isEmpty(log)) {
logger.warn("Log content is empty.");
if (StringUtils.isEmpty(content)) {
log.warn("Log content is empty.");
return resultMap;
}
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(content.getBytes(
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
while ((line = br.readLine()) != null) {
@ -555,21 +554,21 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
TaskInstance taskInstance = taskInstanceDao.findTaskInstanceById(taskId);
if (taskInstance == null) {
logger.error("Task instance does not exist, projectCode:{}, taskInstanceId{}.", projectCode, taskId);
log.error("Task instance does not exist, projectCode:{}, taskInstanceId{}.", projectCode, taskId);
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskInstance.getTaskCode());
if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) {
logger.error("Task definition does not exist, projectCode:{}, taskDefinitionCode:{}.", projectCode,
log.error("Task definition does not exist, projectCode:{}, taskDefinitionCode:{}.", projectCode,
taskInstance.getTaskCode());
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
if (!taskInstance.isSubProcess()) {
logger.warn("Task instance is not {} type instance, projectCode:{}, taskInstanceId:{}.",
log.warn("Task instance is not {} type instance, projectCode:{}, taskInstanceId:{}.",
TASK_TYPE_SUB_PROCESS, projectCode, taskId);
putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName());
return result;
@ -578,7 +577,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ProcessInstance subWorkflowInstance = processService.findSubProcessInstance(
taskInstance.getProcessInstanceId(), taskInstance.getId());
if (subWorkflowInstance == null) {
logger.error("SubProcess instance does not exist, projectCode:{}, taskInstanceId:{}.", projectCode,
log.error("SubProcess instance does not exist, projectCode:{}, taskInstanceId:{}.", projectCode,
taskInstance.getId());
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId);
return result;
@ -628,14 +627,14 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ProcessDefinition processDefinition0 =
processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition0 != null && projectCode != processDefinition0.getProjectCode()) {
logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
processInstance.getProcessDefinitionCode());
putMsg(result, PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
// check process instance status
if (!processInstance.getState().isFinished()) {
logger.warn("Process Instance state is {} so can not update process instance, processInstanceId:{}.",
log.warn("Process Instance state is {} so can not update process instance, processInstanceId:{}.",
processInstance.getState().getDesc(), processInstanceId);
putMsg(result, PROCESS_INSTANCE_STATE_OPERATION_ERROR,
processInstance.getName(), processInstance.getState().toString(), "update");
@ -654,7 +653,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
setProcessInstance(processInstance, tenantCode, scheduleTime, globalParams, timeout, timezoneId);
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
if (taskDefinitionLogs.isEmpty()) {
logger.warn("Parameter taskDefinitionJson is empty");
log.warn("Parameter taskDefinitionJson is empty");
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson);
return result;
}
@ -664,14 +663,14 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
.taskParams(taskDefinitionLog.getTaskParams())
.dependence(taskDefinitionLog.getDependence())
.build())) {
logger.error("Task parameters are invalid, taskDefinitionName:{}.", taskDefinitionLog.getName());
log.error("Task parameters are invalid, taskDefinitionName:{}.", taskDefinitionLog.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName());
return result;
}
}
int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, syncDefine);
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
logger.error("Update task definition error, projectCode:{}, processInstanceId:{}", projectCode,
log.error("Update task definition error, projectCode:{}, processInstanceId:{}", projectCode,
processInstanceId);
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
@ -689,7 +688,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
logger.error("Tenant does not exist.");
log.error("Tenant does not exist.");
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
@ -700,23 +699,23 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
processDefinition.setUpdateTime(new Date());
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, syncDefine, Boolean.FALSE);
if (insertVersion == 0) {
logger.error("Update process definition error, projectCode:{}, processDefinitionName:{}.", projectCode,
log.error("Update process definition error, projectCode:{}, processDefinitionName:{}.", projectCode,
processDefinition.getName());
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
} else
logger.info("Update process definition complete, projectCode:{}, processDefinitionName:{}.", projectCode,
log.info("Update process definition complete, projectCode:{}, processDefinitionName:{}.", projectCode,
processDefinition.getName());
int insertResult = processService.saveTaskRelation(loginUser, processDefinition.getProjectCode(),
processDefinition.getCode(), insertVersion, taskRelationList, taskDefinitionLogs, syncDefine);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
logger.info(
log.info(
"Update task relations complete, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}.",
projectCode, processDefinition.getCode(), insertVersion);
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
logger.info(
log.info(
"Update task relations error, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}.",
projectCode, processDefinition.getCode(), insertVersion);
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
@ -725,13 +724,13 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
processInstance.setProcessDefinitionVersion(insertVersion);
int update = processInstanceDao.updateProcessInstance(processInstance);
if (update == 0) {
logger.error(
log.error(
"Update process instance version error, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}",
projectCode, processDefinition.getCode(), insertVersion);
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_INSTANCE_ERROR);
}
logger.info(
log.info(
"Update process instance complete, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}, processInstanceId:{}",
projectCode, processDefinition.getCode(), insertVersion, processInstanceId);
putMsg(result, Status.SUCCESS);
@ -780,7 +779,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId)
.orElseThrow(() -> new ServiceException(PROCESS_INSTANCE_NOT_EXIST, subId));
if (subInstance.getIsSubProcess() == Flag.NO) {
logger.warn(
log.warn(
"Process instance is not sub process instance type, processInstanceId:{}, processInstanceName:{}.",
subId, subInstance.getName());
putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName());
@ -789,7 +788,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId);
if (parentWorkflowInstance == null) {
logger.error("Parent process instance does not exist, projectCode:{}, subProcessInstanceId:{}.",
log.error("Parent process instance does not exist, projectCode:{}, subProcessInstanceId:{}.",
projectCode, subId);
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST);
return result;
@ -822,7 +821,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ApiFuncIdentificationConstant.INSTANCE_DELETE);
// check process instance status
if (!processInstance.getState().isFinished()) {
logger.warn("Process Instance state is {} so can not delete process instance, processInstanceId:{}.",
log.warn("Process Instance state is {} so can not delete process instance, processInstanceId:{}.",
processInstance.getState().getDesc(), processInstanceId);
throw new ServiceException(PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(),
processInstance.getState(), "delete");
@ -844,7 +843,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
logger.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", projectCode,
log.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", projectCode,
processInstanceId);
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
@ -853,7 +852,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ProcessDefinition processDefinition =
processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode());
if (processDefinition != null && projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
processInstance.getProcessDefinitionCode());
putMsg(result, PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
@ -938,7 +937,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
logger.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", projectCode,
log.error("Process instance does not exist, projectCode:{}, processInstanceId:{}.", projectCode,
processInstanceId);
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
@ -948,7 +947,7 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
processInstance.getProcessDefinitionCode(),
processInstance.getProcessDefinitionVersion());
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
processInstance.getProcessDefinitionCode());
putMsg(result, PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
@ -1056,17 +1055,17 @@ public class ProcessInstanceServiceImpl extends BaseServiceImpl implements Proce
if (CollectionUtils.isEmpty(processInstances)) {
break;
}
logger.info("Begin to delete workflow instance, workflow definition code: {}", workflowDefinitionCode);
log.info("Begin to delete workflow instance, workflow definition code: {}", workflowDefinitionCode);
for (ProcessInstance processInstance : processInstances) {
if (!processInstance.getState().isFinished()) {
logger.warn("Workflow instance is not finished cannot delete, process instance id:{}",
log.warn("Workflow instance is not finished cannot delete, process instance id:{}",
processInstance.getId());
throw new ServiceException(PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(),
processInstance.getState(), "delete");
}
deleteProcessInstanceById(processInstance.getId());
}
logger.info("Success delete workflow instance, workflow definition code: {}, size: {}",
log.info("Success delete workflow instance, workflow definition code: {}, size: {}",
workflowDefinitionCode, processInstances.size());
}
}

67
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessTaskRelationServiceImpl.java

@ -60,8 +60,8 @@ import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -74,10 +74,9 @@ import com.google.common.collect.Lists;
* process task relation service impl
*/
@Service
@Slf4j
public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements ProcessTaskRelationService {
private static final Logger logger = LoggerFactory.getLogger(ProcessTaskRelationServiceImpl.class);
@Autowired
private ProjectMapper projectMapper;
@ -127,12 +126,12 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
logger.error("Process definition does not exist, processCode:{}.", processDefinitionCode);
log.error("Process definition does not exist, processCode:{}.", processDefinitionCode);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode));
return result;
}
if (processDefinition.getProjectCode() != projectCode) {
logger.error("Process definition's project does not match project {}.", projectCode);
log.error("Process definition's project does not match project {}.", projectCode);
putMsg(result, Status.PROJECT_PROCESS_NOT_MATCH);
return result;
}
@ -278,12 +277,12 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
ProcessDefinition processDefinition) {
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion <= 0) {
logger.error("Update process definition error, projectCode:{}, processDefinitionCode:{}.",
log.error("Update process definition error, projectCode:{}, processDefinitionCode:{}.",
processDefinition.getProjectCode(), processDefinition.getCode());
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
} else
logger.info(
log.info(
"Update process definition complete, new version is {}, projectCode:{}, processDefinitionCode:{}.",
insertVersion, processDefinition.getProjectCode(), processDefinition.getCode());
processDefinition.setVersion(insertVersion);
@ -309,7 +308,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
return result;
}
if (taskCode == 0) {
logger.error(
log.error(
"Delete task process relation error due to parameter taskCode is 0, projectCode:{}, processDefinitionCode:{}.",
projectCode, processDefinitionCode);
putMsg(result, Status.DELETE_TASK_PROCESS_RELATION_ERROR);
@ -317,13 +316,13 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode);
log.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode));
return result;
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (null == taskDefinition) {
logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode));
return result;
}
@ -331,7 +330,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
if (CollectionUtils.isEmpty(processTaskRelationList)) {
logger.error("Process task relations are empty, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process task relations are empty, projectCode:{}, processDefinitionCode:{}.", projectCode,
processDefinitionCode);
putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList");
return result;
@ -347,7 +346,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
}
if (CollectionUtils.isNotEmpty(downstreamList)) {
String downstream = StringUtils.join(downstreamList, ",");
logger.warn(
log.warn(
"Relation can not be deleted because task has downstream tasks:[{}], projectCode:{}, processDefinitionCode:{}, taskDefinitionCode:{}.",
downstream, projectCode, processDefinitionCode, taskCode);
putMsg(result, Status.TASK_HAS_DOWNSTREAM, downstream);
@ -360,11 +359,11 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
|| TASK_TYPE_SUB_PROCESS.equals(taskDefinition.getTaskType())) {
int deleteTaskDefinition = taskDefinitionMapper.deleteByCode(taskCode);
if (0 == deleteTaskDefinition) {
logger.error("Delete task definition error, taskDefinitionCode:{}.", taskCode);
log.error("Delete task definition error, taskDefinitionCode:{}.", taskCode);
putMsg(result, Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
throw new ServiceException(Status.DELETE_TASK_DEFINE_BY_CODE_ERROR);
} else
logger.info("Delete {} type task definition complete, taskDefinitionCode:{}.",
log.info("Delete {} type task definition complete, taskDefinitionCode:{}.",
taskDefinition.getTaskType(), taskCode);
}
putMsg(result, Status.SUCCESS);
@ -500,11 +499,11 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
// batch sync to process task relation log
int saveTaskRelationResult = saveTaskRelation(loginUser, processDefinition, insertVersion);
if (saveTaskRelationResult != Constants.EXIT_CODE_SUCCESS) {
logger.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.",
log.error("Save process task relations error, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
}
logger.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.",
log.info("Save process task relations complete, projectCode:{}, processCode:{}, processVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), insertVersion);
processTaskRelations.get(0).setProcessDefinitionVersion(insertVersion);
return processTaskRelations;
@ -598,13 +597,13 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
processDefinition.getCode(),
processDefinition.getVersion(), relationLogs, Lists.newArrayList(), Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
logger.info(
log.info(
"Update task relations complete, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), processDefinition.getVersion());
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
} else {
logger.error(
log.error(
"Update task relations error, projectCode:{}, processDefinitionCode:{}, processDefinitionVersion:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), processDefinition.getVersion());
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
@ -632,13 +631,13 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
return result;
}
if (StringUtils.isEmpty(preTaskCodes)) {
logger.warn("Parameter preTaskCodes is empty.");
log.warn("Parameter preTaskCodes is empty.");
putMsg(result, Status.DATA_IS_NULL, "preTaskCodes");
return result;
}
List<ProcessTaskRelation> upstreamList = processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
if (CollectionUtils.isEmpty(upstreamList)) {
logger.error("Upstream tasks based on the task do not exist, theTaskDefinitionCode:{}.", taskCode);
log.error("Upstream tasks based on the task do not exist, theTaskDefinitionCode:{}.", taskCode);
putMsg(result, Status.DATA_IS_NULL, "taskCode");
return result;
}
@ -646,14 +645,14 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
List<Long> preTaskCodeList = Lists.newArrayList(preTaskCodes.split(Constants.COMMA)).stream()
.map(Long::parseLong).collect(Collectors.toList());
if (preTaskCodeList.contains(0L)) {
logger.warn("Parameter preTaskCodes contain 0.");
log.warn("Parameter preTaskCodes contain 0.");
putMsg(result, Status.DATA_IS_NULL, "preTaskCodes");
return result;
}
List<Long> currentUpstreamList =
upstreamList.stream().map(ProcessTaskRelation::getPreTaskCode).collect(Collectors.toList());
if (currentUpstreamList.contains(0L)) {
logger.error("Upstream taskCodes based on the task contain, theTaskDefinitionCode:{}.", taskCode);
log.error("Upstream taskCodes based on the task contain, theTaskDefinitionCode:{}.", taskCode);
putMsg(result, Status.DATA_IS_NOT_VALID, "currentUpstreamList");
return result;
}
@ -662,14 +661,14 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
preTaskCodeList.removeAll(currentUpstreamList);
if (!preTaskCodeList.isEmpty()) {
String invalidPreTaskCodes = StringUtils.join(preTaskCodeList, Constants.COMMA);
logger.error("Some upstream taskCodes are invalid, preTaskCodeList:{}.", invalidPreTaskCodes);
log.error("Some upstream taskCodes are invalid, preTaskCodeList:{}.", invalidPreTaskCodes);
putMsg(result, Status.DATA_IS_NOT_VALID, invalidPreTaskCodes);
return result;
}
ProcessDefinition processDefinition =
processDefinitionMapper.queryByCode(upstreamList.get(0).getProcessDefinitionCode());
if (processDefinition == null) {
logger.error("Process definition does not exist, processDefinitionCode:{}.",
log.error("Process definition does not exist, processDefinitionCode:{}.",
upstreamList.get(0).getProcessDefinitionCode());
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,
String.valueOf(upstreamList.get(0).getProcessDefinitionCode()));
@ -719,28 +718,28 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
return result;
}
if (StringUtils.isEmpty(postTaskCodes)) {
logger.warn("Parameter postTaskCodes is empty.");
log.warn("Parameter postTaskCodes is empty.");
putMsg(result, Status.DATA_IS_NULL, "postTaskCodes");
return result;
}
List<ProcessTaskRelation> downstreamList =
processTaskRelationMapper.queryDownstreamByCode(projectCode, taskCode);
if (CollectionUtils.isEmpty(downstreamList)) {
logger.error("Downstream tasks based on the task do not exist, theTaskDefinitionCode:{}.", taskCode);
log.error("Downstream tasks based on the task do not exist, theTaskDefinitionCode:{}.", taskCode);
putMsg(result, Status.DATA_IS_NULL, "taskCode");
return result;
}
List<Long> postTaskCodeList = Lists.newArrayList(postTaskCodes.split(Constants.COMMA)).stream()
.map(Long::parseLong).collect(Collectors.toList());
if (postTaskCodeList.contains(0L)) {
logger.warn("Parameter postTaskCodes contains 0.");
log.warn("Parameter postTaskCodes contains 0.");
putMsg(result, Status.DATA_IS_NULL, "postTaskCodes");
return result;
}
ProcessDefinition processDefinition =
processDefinitionMapper.queryByCode(downstreamList.get(0).getProcessDefinitionCode());
if (processDefinition == null) {
logger.error("Process definition does not exist, processDefinitionCode:{}.",
log.error("Process definition does not exist, processDefinitionCode:{}.",
downstreamList.get(0).getProcessDefinitionCode());
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,
String.valueOf(downstreamList.get(0).getProcessDefinitionCode()));
@ -853,7 +852,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
logger.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process definition does not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
processDefinitionCode);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode));
return result;
@ -862,7 +861,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
processTaskRelationMapper.queryByProcessCode(projectCode, processDefinitionCode);
List<ProcessTaskRelation> processTaskRelationList = Lists.newArrayList(processTaskRelations);
if (CollectionUtils.isEmpty(processTaskRelationList)) {
logger.error("Process task relations are empty, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process task relations are empty, projectCode:{}, processDefinitionCode:{}.", projectCode,
processDefinitionCode);
putMsg(result, Status.DATA_IS_NULL, "processTaskRelationList");
return result;
@ -886,13 +885,13 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
if (processTaskRelation.getPreTaskCode() == preTaskCode) {
int delete = processTaskRelationMapper.deleteById(processTaskRelation.getId());
if (delete == 0) {
logger.error(
log.error(
"Delete task relation edge error, processTaskRelationId:{}, preTaskCode:{}, postTaskCode:{}",
processTaskRelation.getId(), preTaskCode, postTaskCode);
putMsg(result, Status.DELETE_EDGE_ERROR);
throw new ServiceException(Status.DELETE_EDGE_ERROR);
} else
logger.info(
log.info(
"Delete task relation edge complete, processTaskRelationId:{}, preTaskCode:{}, postTaskCode:{}",
processTaskRelation.getId(), preTaskCode, postTaskCode);
processTaskRelationList.remove(processTaskRelation);
@ -904,7 +903,7 @@ public class ProcessTaskRelationServiceImpl extends BaseServiceImpl implements P
processTaskRelation.setPreTaskVersion(0);
processTaskRelation.setPreTaskCode(0L);
processTaskRelationList.add(processTaskRelation);
logger.info(
log.info(
"Delete task relation through set invalid value for it: preTaskCode from {} to 0, processTaskRelationId:{}.",
preTaskCode, processTaskRelation.getId());
}

60
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProjectServiceImpl.java

@ -57,9 +57,8 @@ import java.util.Set;
import javax.annotation.Nullable;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
@ -72,10 +71,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* project service impl
**/
@Service
@Slf4j
public class ProjectServiceImpl extends BaseServiceImpl implements ProjectService {
private static final Logger logger = LoggerFactory.getLogger(ProjectServiceImpl.class);
@Lazy
@Autowired
private TaskGroupService taskGroupService;
@ -116,7 +114,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
Project project = projectMapper.queryByName(name);
if (project != null) {
logger.warn("Project {} already exists.", project.getName());
log.warn("Project {} already exists.", project.getName());
putMsg(result, Status.PROJECT_ALREADY_EXISTS, name);
return result;
}
@ -135,19 +133,19 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
.updateTime(now)
.build();
} catch (CodeGenerateException e) {
logger.error("Generate process definition code error.", e);
log.error("Generate process definition code error.", e);
putMsg(result, Status.CREATE_PROJECT_ERROR);
return result;
}
if (projectMapper.insert(project) > 0) {
logger.info("Project is created and id is :{}", project.getId());
log.info("Project is created and id is :{}", project.getId());
result.setData(project);
putMsg(result, Status.SUCCESS);
permissionPostHandle(AuthorizationType.PROJECTS, loginUser.getId(),
Collections.singletonList(project.getId()), logger);
Collections.singletonList(project.getId()), log);
} else {
logger.error("Project create error, projectName:{}.", project.getName());
log.error("Project create error, projectName:{}.", project.getName());
putMsg(result, Status.CREATE_PROJECT_ERROR);
}
return result;
@ -161,7 +159,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
*/
public static void checkDesc(Result result, String desc) {
if (!StringUtils.isEmpty(desc) && desc.codePointCount(0, desc.length()) > 255) {
logger.warn("Parameter description check failed.");
log.warn("Parameter description check failed.");
result.setCode(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode());
result.setMsg(MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "desc length"));
} else {
@ -218,12 +216,12 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
String permission) {
Map<String, Object> result = new HashMap<>();
if (project == null) {
logger.error("Project does not exist, projectCode:{}.", projectCode);
log.error("Project does not exist, projectCode:{}.", projectCode);
putMsg(result, Status.PROJECT_NOT_EXIST);
} else if (!canOperatorPermissions(loginUser, new Object[]{project.getId()}, AuthorizationType.PROJECTS,
permission)) {
// check read permission
logger.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.",
log.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.",
permission, loginUser.getUserName(), projectCode);
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectCode);
} else {
@ -248,11 +246,11 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
public boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result, String permission) {
boolean checkResult = false;
if (project == null) {
logger.error("Project does not exist.");
log.error("Project does not exist.");
putMsg(result, Status.PROJECT_NOT_FOUND, "");
} else if (!canOperatorPermissions(loginUser, new Object[]{project.getId()}, AuthorizationType.PROJECTS,
permission)) {
logger.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.",
log.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.",
permission, loginUser.getUserName(), project.getCode());
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getCode());
} else {
@ -265,7 +263,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
public boolean hasProjectAndWritePerm(User loginUser, Project project, Result result) {
boolean checkResult = false;
if (project == null) {
logger.error("Project does not exist.");
log.error("Project does not exist.");
putMsg(result, Status.PROJECT_NOT_FOUND, "");
} else {
// case 1: user is admin
@ -292,7 +290,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
public boolean hasProjectAndWritePerm(User loginUser, Project project, Map<String, Object> result) {
boolean checkResult = false;
if (project == null) {
logger.error("Project does not exist.");
log.error("Project does not exist.");
putMsg(result, Status.PROJECT_NOT_FOUND, "");
} else {
// case 1: user is admin
@ -319,11 +317,11 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
public boolean hasProjectAndPerm(User loginUser, Project project, Result result, String permission) {
boolean checkResult = false;
if (project == null) {
logger.error("Project does not exist.");
log.error("Project does not exist.");
putMsg(result, Status.PROJECT_NOT_FOUND, "");
} else if (!canOperatorPermissions(loginUser, new Object[]{project.getId()}, AuthorizationType.PROJECTS,
permission)) {
logger.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.",
log.error("User does not have {} permission to operate project, userName:{}, projectCode:{}.",
permission, loginUser.getUserName(), project.getCode());
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName());
} else {
@ -347,7 +345,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
PageInfo<Project> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<Project> page = new Page<>(pageNo, pageSize);
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
if (projectIds.isEmpty()) {
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
@ -386,9 +384,9 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
PageInfo<Project> pageInfo = new PageInfo<>(pageNo, pageSize);
Page<Project> page = new Page<>(pageNo, pageSize);
Set<Integer> allProjectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
Set<Integer> userProjectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, userId, logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, userId, log);
if (allProjectIds.isEmpty()) {
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
@ -448,7 +446,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
processDefinitionMapper.queryAllDefinitionList(project.getCode());
if (!processDefinitionList.isEmpty()) {
logger.warn("Please delete the process definitions in project first! project code:{}.", projectCode);
log.warn("Please delete the process definitions in project first! project code:{}.", projectCode);
putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL);
return result;
}
@ -457,11 +455,11 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
int delete = projectMapper.deleteById(project.getId());
if (delete > 0) {
logger.info("Project is deleted and id is :{}.", project.getId());
log.info("Project is deleted and id is :{}.", project.getId());
result.setData(Boolean.TRUE);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Project delete error, project code:{}, project name:{}.", projectCode, project.getName());
log.error("Project delete error, project code:{}, project name:{}.", projectCode, project.getName());
putMsg(result, Status.DELETE_PROJECT_ERROR);
}
return result;
@ -515,7 +513,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
}
User user = userMapper.queryByUserNameAccurately(userName);
if (user == null) {
logger.error("User does not exist.");
log.error("User does not exist.");
putMsg(result, Status.USER_NOT_EXIST, userName);
return result;
}
@ -525,11 +523,11 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
project.setUserId(user.getId());
int update = projectMapper.updateById(project);
if (update > 0) {
logger.info("Project is updated and id is :{}", project.getId());
log.info("Project is updated and id is :{}", project.getId());
result.setData(project);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Project update error, projectCode:{}, projectName:{}.", project.getCode(), project.getName());
log.error("Project update error, projectCode:{}, projectName:{}.", project.getCode(), project.getName());
putMsg(result, Status.UPDATE_PROJECT_ERROR);
}
return result;
@ -546,7 +544,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
Result result = new Result();
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
List<Project> projectList = projectMapper.listAuthorizedProjects(
loginUser.getUserType().equals(UserType.ADMIN_USER) ? 0 : loginUser.getId(),
new ArrayList<>(projectIds));
@ -589,7 +587,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
Result result = new Result();
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
if (projectIds.isEmpty()) {
result.setData(Collections.emptyList());
putMsg(result, Status.SUCCESS);
@ -702,7 +700,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
Result result = new Result();
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
if (projectIds.isEmpty()) {
result.setData(Collections.emptyList());
putMsg(result, Status.SUCCESS);
@ -784,7 +782,7 @@ public class ProjectServiceImpl extends BaseServiceImpl implements ProjectServic
public void checkProjectAndAuth(Result result, User loginUser, Project project, long projectCode,
String permission) {
if (project == null) {
logger.error("Project does not exist, project code:{}.", projectCode);
log.error("Project does not exist, project code:{}.", projectCode);
putMsg(result, Status.PROJECT_NOT_EXIST);
} else if (!canOperatorPermissions(loginUser, new Object[]{project.getId()}, AuthorizationType.PROJECTS,
permission)) {

21
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/QueueServiceImpl.java

@ -42,8 +42,8 @@ import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -55,10 +55,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* queue service impl
*/
@Service
@Slf4j
public class QueueServiceImpl extends BaseServiceImpl implements QueueService {
private static final Logger logger = LoggerFactory.getLogger(QueueServiceImpl.class);
@Autowired
private QueueMapper queueMapper;
@ -120,7 +119,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService {
public Result queryList(User loginUser) {
Result result = new Result();
Set<Integer> ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.QUEUE,
loginUser.getId(), logger);
loginUser.getId(), log);
if (loginUser.getUserType().equals(UserType.GENERAL_USER)) {
ids = ids.isEmpty() ? new HashSet<>() : ids;
ids.add(Constants.DEFAULT_QUEUE_ID);
@ -145,7 +144,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService {
Result result = new Result();
PageInfo<Queue> pageInfo = new PageInfo<>(pageNo, pageSize);
Set<Integer> ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.QUEUE,
loginUser.getId(), logger);
loginUser.getId(), log);
if (ids.isEmpty()) {
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
@ -183,10 +182,10 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService {
queueMapper.insert(queueObj);
result.setData(queueObj);
logger.info("Queue create complete, queueName:{}.", queueObj.getQueueName());
log.info("Queue create complete, queueName:{}.", queueObj.getQueueName());
putMsg(result, Status.SUCCESS);
permissionPostHandle(AuthorizationType.QUEUE, loginUser.getId(), Collections.singletonList(queueObj.getId()),
logger);
log);
return result;
}
@ -215,7 +214,7 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService {
// update user related old queue
Integer relatedUserNums =
userMapper.updateUserQueue(existsQueue.getQueueName(), updateQueue.getQueueName());
logger.info("Old queue have related {} users, exec update user success.", relatedUserNums);
log.info("Old queue have related {} users, exec update user success.", relatedUserNums);
}
queueMapper.updateById(updateQueue);
@ -290,13 +289,13 @@ public class QueueServiceImpl extends BaseServiceImpl implements QueueService {
/**
 * Return the existing {@link Queue} matching the given queue value and name, or create,
 * validate and persist a new one when no match is found.
 *
 * @param queue     the underlying queue value (e.g. the YARN queue)
 * @param queueName the display name of the queue
 * @return the existing queue if one matches, otherwise the newly inserted queue
 */
public Queue createQueueIfNotExists(String queue, String queueName) {
    Queue existsQueue = queueMapper.queryQueueName(queue, queueName);
    if (!Objects.isNull(existsQueue)) {
        // Idempotent path: an identical queue is already registered, reuse it.
        log.info("Queue exists, so return it, queueName:{}.", queueName);
        return existsQueue;
    }
    Queue queueObj = new Queue(queueName, queue);
    // createQueueValid throws if the new queue fails validation, aborting the insert.
    createQueueValid(queueObj);
    queueMapper.insert(queueObj);
    log.info("Queue create complete, queueName:{}.", queueObj.getQueueName());
    return queueObj;
}

217
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ResourcesServiceImpl.java

@ -88,8 +88,8 @@ import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -104,10 +104,9 @@ import com.google.common.io.Files;
* resources service impl
*/
@Service
@Slf4j
public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesService {
private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceImpl.class);
@Autowired
private ResourceMapper resourcesMapper;
@ -170,21 +169,21 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
return result;
}
if (FileUtils.directoryTraversal(name)) {
logger.warn("Parameter name is invalid, name:{}.", RegexUtils.escapeNRT(name));
log.warn("Parameter name is invalid, name:{}.", RegexUtils.escapeNRT(name));
putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED);
return result;
}
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -192,13 +191,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, "")) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
if (checkDescriptionLength(description)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -209,12 +208,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
if (checkResourceExists(fullName)) {
logger.error("resource directory {} has exist, can't recreate", fullName);
log.error("resource directory {} has exist, can't recreate", fullName);
putMsg(result, Status.RESOURCE_EXIST);
return result;
}
} catch (Exception e) {
logger.warn("Resource exists, can not create again, fullName:{}.", fullName, e);
log.warn("Resource exists, can not create again, fullName:{}.", fullName, e);
throw new ServiceException("resource already exists, can't recreate");
}
@ -256,14 +255,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -271,7 +270,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, "")) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
@ -288,7 +287,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
if (checkResourceExists(currDirNFileName)) {
logger.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name));
log.error("resource {} has exist, can't recreate", RegexUtils.escapeNRT(name));
putMsg(result, Status.RESOURCE_EXIST);
return result;
}
@ -296,7 +295,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
throw new ServiceException("resource already exists, can't recreate");
}
if (currDirNFileName.length() > Constants.RESOURCE_FULL_NAME_MAX_LENGTH) {
logger.error(
log.error(
"Resource file's name is longer than max full name length, fullName:{}, " +
"fullNameSize:{}, maxFullNameSize:{}",
RegexUtils.escapeNRT(name), currDirNFileName.length(), Constants.RESOURCE_FULL_NAME_MAX_LENGTH);
@ -306,13 +305,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
// fail upload
if (!upload(loginUser, currDirNFileName, file, type)) {
logger.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name),
log.error("upload resource: {} file: {} failed.", RegexUtils.escapeNRT(name),
RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.STORE_OPERATE_CREATE_ERROR);
throw new ServiceException(
String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename()));
} else
logger.info("Upload resource file complete, resourceName:{}, fileName:{}.",
log.info("Upload resource file complete, resourceName:{}, fileName:{}.",
RegexUtils.escapeNRT(name), RegexUtils.escapeNRT(file.getOriginalFilename()));
return result;
}
@ -339,7 +338,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
parentResource.setSize(0L);
}
resourcesMapper.updateById(parentResource);
logger.info("Resource size update complete, resourceFullName:{}, newSize:{}.",
log.info("Resource size update complete, resourceFullName:{}, newSize:{}.",
parentResource.getFullName(), parentResource.getSize());
}
}
@ -358,7 +357,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
existResource = storageOperate.exists(fullName);
} catch (IOException e) {
logger.error("error occurred when checking resource: " + fullName, e);
log.error("error occurred when checking resource: " + fullName, e);
}
return Boolean.TRUE.equals(existResource);
}
@ -394,14 +393,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -409,7 +408,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
@ -420,13 +419,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
resource = storageOperate.getFileStatus(resourceFullName, defaultPath, resTenantCode, type);
} catch (Exception e) {
logger.error("Get file status fail, resource path: {}", resourceFullName, e);
log.error("Get file status fail, resource path: {}", resourceFullName, e);
putMsg(result, Status.RESOURCE_NOT_EXIST);
throw new ServiceException((String.format("Get file status fail, resource path: %s", resourceFullName)));
}
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("Storage does not start up, resource upload startup state: {}.",
log.error("Storage does not start up, resource upload startup state: {}.",
PropertyUtils.getResUploadStartupState());
putMsg(result, Status.STORAGE_NOT_STARTUP);
return result;
@ -435,13 +434,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
// TODO: deal with OSS
if (resource.isDirectory() && storageOperate.returnStorageType().equals(ResUploadType.S3)
&& !resource.getFileName().equals(name)) {
logger.warn("Directory in S3 storage can not be renamed.");
log.warn("Directory in S3 storage can not be renamed.");
putMsg(result, Status.S3_CANNOT_RENAME);
return result;
}
if (file == null && name.equals(resource.getAlias()) && desc.equals(resource.getDescription())) {
logger.info("Resource does not need to be updated due to no change, resource full name:{}.",
log.info("Resource does not need to be updated due to no change, resource full name:{}.",
resourceFullName);
putMsg(result, Status.SUCCESS);
return result;
@ -460,7 +459,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (!originResourceName.equals(name)) {
try {
if (checkResourceExists(fullName)) {
logger.error("resource {} already exists, can't recreate", fullName);
log.error("resource {} already exists, can't recreate", fullName);
putMsg(result, Status.RESOURCE_EXIST);
return result;
}
@ -505,8 +504,8 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
for (ResourcesTask existResource : existResourcesList) {
int taskId = existResource.getTaskId();
if (processService.isTaskOnline(taskDefinitionMapper.selectById(taskId).getCode())) {
logger.error("can't be updated,because it is used of process definition that's online");
logger.error("resource task relation id:{} is used of task code {}", existResource.getId(),
log.error("can't be updated,because it is used of process definition that's online");
log.error("resource task relation id:{} is used of task code {}", existResource.getId(),
taskDefinitionMapper.selectById(taskId).getCode());
putMsg(result, Status.RESOURCE_IS_USED);
return result;
@ -576,7 +575,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (file != null) {
// fail upload
if (!upload(loginUser, fullName, file, type)) {
logger.error("Storage operation error, resourceName:{}, originFileName:{}.",
log.error("Storage operation error, resourceName:{}, originFileName:{}.",
name, RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.HDFS_OPERATION_ERROR);
throw new ServiceException(
@ -586,7 +585,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
storageOperate.delete(originFullName, false);
} catch (IOException e) {
logger.error("Resource delete error, resourceFullName:{}.", originFullName, e);
log.error("Resource delete error, resourceFullName:{}.", originFullName, e);
throw new ServiceException(String.format("delete resource: %s failed.", originFullName));
}
}
@ -597,10 +596,10 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
// get the path of dest file in hdfs
String destHdfsFileName = fullName;
try {
logger.info("start copy {} -> {}", originFullName, destHdfsFileName);
log.info("start copy {} -> {}", originFullName, destHdfsFileName);
storageOperate.copy(originFullName, destHdfsFileName, true, true);
} catch (Exception e) {
logger.error(MessageFormat.format(" copy {0} -> {1} fail", originFullName, destHdfsFileName), e);
log.error(MessageFormat.format(" copy {0} -> {1} fail", originFullName, destHdfsFileName), e);
putMsg(result, Status.HDFS_COPY_FAIL);
throw new ServiceException(MessageFormat.format(
Status.HDFS_COPY_FAIL.getMsg(), originFullName, destHdfsFileName));
@ -614,13 +613,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
putMsg(result, Status.SUCCESS);
if (FileUtils.directoryTraversal(name)) {
logger.warn("Parameter file alias name verify failed, fileAliasName:{}.", RegexUtils.escapeNRT(name));
log.warn("Parameter file alias name verify failed, fileAliasName:{}.", RegexUtils.escapeNRT(name));
putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED);
return result;
}
if (file != null && FileUtils.directoryTraversal(Objects.requireNonNull(file.getOriginalFilename()))) {
logger.warn("File original name verify failed, fileOriginalName:{}.",
log.warn("File original name verify failed, fileOriginalName:{}.",
RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED);
return result;
@ -629,7 +628,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (file != null) {
// file is empty
if (file.isEmpty()) {
logger.warn("Parameter file is empty, fileOriginalName:{}.",
log.warn("Parameter file is empty, fileOriginalName:{}.",
RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.RESOURCE_FILE_IS_EMPTY);
return result;
@ -642,7 +641,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
// determine file suffix
if (!fileSuffix.equalsIgnoreCase(nameSuffix)) {
// rename file suffix and original suffix must be consistent
logger.warn("Rename file suffix and original suffix must be consistent, fileOriginalName:{}.",
log.warn("Rename file suffix and original suffix must be consistent, fileOriginalName:{}.",
RegexUtils.escapeNRT(file.getOriginalFilename()));
putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE);
return result;
@ -650,12 +649,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
// If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar
if (Constants.UDF.equals(type.name()) && !JAR.equalsIgnoreCase(fileSuffix)) {
logger.warn(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg());
log.warn(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg());
putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR);
return result;
}
if (file.getSize() > Constants.MAX_FILE_SIZE) {
logger.warn(
log.warn(
"Resource file size is larger than max file size, fileOriginalName:{}, fileSize:{}, maxFileSize:{}.",
RegexUtils.escapeNRT(file.getOriginalFilename()), file.getSize(), Constants.MAX_FILE_SIZE);
putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT);
@ -686,20 +685,20 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
Result<PageInfo<StorageEntity>> result = new Result<>();
PageInfo<StorageEntity> pageInfo = new PageInfo<>(pageNo, pageSize);
if (storageOperate == null) {
logger.warn("The resource storage is not opened.");
log.warn("The resource storage is not opened.");
return Result.success(pageInfo);
}
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -707,7 +706,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
@ -734,7 +733,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
visitedTenantEntityCode.add(tenantEntityCode);
} catch (Exception e) {
logger.error(e.getMessage() + " Resource path: {}", defaultPath, e);
log.error(e.getMessage() + " Resource path: {}", defaultPath, e);
putMsg(result, Status.RESOURCE_NOT_EXIST);
throw new ServiceException(String.format(e.getMessage() +
" make sure resource path: %s exists in %s", defaultPath, resourceStorageType));
@ -754,7 +753,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
resourcesList = storageOperate.listFilesStatus(fullName, defaultPath, tenantCode, type);
}
} catch (Exception e) {
logger.error(e.getMessage() + " Resource path: {}", fullName, e);
log.error(e.getMessage() + " Resource path: {}", fullName, e);
putMsg(result, Status.RESOURCE_NOT_EXIST);
throw new ServiceException(String.format(e.getMessage() +
" make sure resource path: %s exists in %s", defaultPath, resourceStorageType));
@ -796,12 +795,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
}
if (!storageOperate.mkdir(tenantCode, fullName)) {
logger.error("create resource directory {} failed", fullName);
log.error("create resource directory {} failed", fullName);
putMsg(result, Status.STORE_OPERATE_CREATE_ERROR);
// throw new ServiceException(String.format("create resource directory: %s failed.", fullName));
}
} catch (Exception e) {
logger.error("create resource directory {} failed", fullName);
log.error("create resource directory {} failed", fullName);
putMsg(result, Status.STORE_OPERATE_CREATE_ERROR);
throw new ServiceException(String.format("create resource directory: %s failed.", fullName));
}
@ -841,7 +840,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
storageOperate.upload(tenantCode, localFilename, fullName, true, true);
} catch (Exception e) {
FileUtils.deleteFile(localFilename);
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
return false;
}
return true;
@ -866,14 +865,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return null;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return null;
}
@ -935,7 +934,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
Result<Object> result = new Result<>();
Set<Integer> resourceIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(checkResourceType(type), loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(checkResourceType(type), loginUser.getId(), log);
if (resourceIds.isEmpty()) {
result.setData(Collections.emptyList());
putMsg(result, Status.SUCCESS);
@ -1009,14 +1008,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -1024,7 +1023,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
@ -1034,13 +1033,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
resource = storageOperate.getFileStatus(fullName, defaultPath, resTenantCode, null);
} catch (Exception e) {
logger.error(e.getMessage() + " Resource path: {}", fullName, e);
log.error(e.getMessage() + " Resource path: {}", fullName, e);
putMsg(result, Status.RESOURCE_NOT_EXIST);
throw new ServiceException(String.format(e.getMessage() + " Resource path: %s", fullName));
}
if (resource == null) {
logger.error("Resource does not exist, resource full name:{}.", fullName);
log.error("Resource does not exist, resource full name:{}.", fullName);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -1063,7 +1062,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (resource.getType() == (ResourceType.UDF)) {
List<UdfFunc> udfFuncs = udfFunctionMapper.listUdfByResourceFullName(allChildrenFullNameArray);
if (CollectionUtils.isNotEmpty(udfFuncs)) {
logger.warn("Resource can not be deleted because it is bound by UDF functions, udfFuncIds:{}",
log.warn("Resource can not be deleted because it is bound by UDF functions, udfFuncIds:{}",
udfFuncs);
putMsg(result, Status.UDF_RESOURCE_IS_BOUND, udfFuncs.get(0).getFuncName());
return result;
@ -1075,8 +1074,8 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
for (ResourcesTask resourcesTask : resourcesNeedToDeleteSet) {
int taskId = resourcesTask.getTaskId();
if (processService.isTaskOnline(taskDefinitionMapper.selectById(taskId).getCode())) {
logger.error("can't be deleted,because it is used of process definition that's online");
logger.error("resource task relation id:{} is used of task code {}", resourcesTask.getId(),
log.error("can't be deleted,because it is used of process definition that's online");
log.error("resource task relation id:{} is used of task code {}", resourcesTask.getId(),
taskDefinitionMapper.selectById(taskId).getCode());
putMsg(result, Status.RESOURCE_IS_USED);
return result;
@ -1227,7 +1226,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
Result<Object> result = new Result<>();
putMsg(result, Status.SUCCESS);
if (checkResourceExists(fullName)) {
logger.error("Resource with same name exists so can not create again, resourceType:{}, resourceName:{}.",
log.error("Resource with same name exists so can not create again, resourceType:{}, resourceName:{}.",
type, RegexUtils.escapeNRT(fullName));
putMsg(result, Status.RESOURCE_EXIST);
}
@ -1255,14 +1254,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -1270,7 +1269,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
@ -1284,7 +1283,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
file = storageOperate.getFileStatus(defaultPath + fileName, defaultPath, resTenantCode, type);
} catch (Exception e) {
logger.error(e.getMessage() + " Resource path: {}", defaultPath + fileName, e);
log.error(e.getMessage() + " Resource path: {}", defaultPath + fileName, e);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -1308,14 +1307,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -1323,7 +1322,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
@ -1337,7 +1336,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
file = storageOperate.getFileStatus(fullName, defaultPath, resTenantCode, type);
} catch (Exception e) {
logger.error(e.getMessage() + " Resource path: {}", fullName, e);
log.error(e.getMessage() + " Resource path: {}", fullName, e);
putMsg(result, Status.RESOURCE_NOT_EXIST);
throw new ServiceException(String.format(e.getMessage() + " Resource path: %s", fullName));
}
@ -1366,14 +1365,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -1381,7 +1380,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
@ -1392,7 +1391,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (StringUtils.isNotEmpty(resourceViewSuffixes)) {
List<String> strList = Arrays.asList(resourceViewSuffixes.split(","));
if (!strList.contains(nameSuffix)) {
logger.error("Resource suffix does not support view,resourceFullName:{}, suffix:{}.", fullName,
log.error("Resource suffix does not support view,resourceFullName:{}, suffix:{}.", fullName,
nameSuffix);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
@ -1404,13 +1403,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (storageOperate.exists(fullName)) {
content = storageOperate.vimFile(tenantCode, fullName, skipLineNum, limit);
} else {
logger.error("read file {} not exist in storage", fullName);
log.error("read file {} not exist in storage", fullName);
putMsg(result, Status.RESOURCE_FILE_NOT_EXIST, fullName);
return result;
}
} catch (Exception e) {
logger.error("Resource {} read failed", fullName, e);
log.error("Resource {} read failed", fullName, e);
putMsg(result, Status.HDFS_OPERATION_ERROR);
return result;
}
@ -1449,14 +1448,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -1464,13 +1463,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, "")) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
if (FileUtils.directoryTraversal(fileName)) {
logger.warn("File name verify failed, fileName:{}.", RegexUtils.escapeNRT(fileName));
log.warn("File name verify failed, fileName:{}.", RegexUtils.escapeNRT(fileName));
putMsg(result, Status.VERIFY_PARAMETER_NAME_FAILED);
return result;
}
@ -1481,7 +1480,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (StringUtils.isNotEmpty(resourceViewSuffixes)) {
List<String> strList = Arrays.asList(resourceViewSuffixes.split(","));
if (!strList.contains(nameSuffix)) {
logger.warn("Resource suffix does not support view, suffix:{}.", nameSuffix);
log.warn("Resource suffix does not support view, suffix:{}.", nameSuffix);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
}
@ -1534,7 +1533,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
AuthorizationType authorizationType =
resourceType.equals(ResourceType.FILE) ? AuthorizationType.RESOURCE_FILE_ID
: AuthorizationType.UDF_FILE;
permissionPostHandle(authorizationType, loginUser.getId(), Collections.singletonList(resourceId), logger);
permissionPostHandle(authorizationType, loginUser.getId(), Collections.singletonList(resourceId), log);
}
private Result<Object> checkResourceUploadStartupState() {
@ -1542,7 +1541,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
putMsg(result, Status.SUCCESS);
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("Storage does not start up, resource upload startup state: {}.",
log.error("Storage does not start up, resource upload startup state: {}.",
PropertyUtils.getResUploadStartupState());
putMsg(result, Status.STORAGE_NOT_STARTUP);
return result;
@ -1564,12 +1563,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (pid != -1) {
Resource parentResource = resourcesMapper.selectById(pid);
if (parentResource == null) {
logger.error("Parent resource does not exist, parentResourceId:{}.", pid);
log.error("Parent resource does not exist, parentResourceId:{}.", pid);
putMsg(result, Status.PARENT_RESOURCE_NOT_EXIST);
return result;
}
if (!canOperator(loginUser, parentResource.getUserId())) {
logger.warn("User does not have operation privilege, loginUserName:{}.", loginUser.getUserName());
log.warn("User does not have operation privilege, loginUserName:{}.", loginUser.getUserName());
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
@ -1597,14 +1596,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -1612,7 +1611,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String tenantCode = tenant.getTenantCode();
if (!isUserTenantValid(isAdmin(loginUser), tenantCode, resTenantCode)) {
logger.error("current user does not have permission");
log.error("current user does not have permission");
putMsg(result, Status.NO_CURRENT_OPERATING_PERMISSION);
return result;
}
@ -1621,13 +1620,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
try {
resource = storageOperate.getFileStatus(fullName, "", resTenantCode, ResourceType.FILE);
} catch (Exception e) {
logger.error("error occurred when fetching resource information , resource full name {}", fullName);
log.error("error occurred when fetching resource information , resource full name {}", fullName);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (resource == null) {
logger.error("Resource does not exist, resource full name:{}.", fullName);
log.error("Resource does not exist, resource full name:{}.", fullName);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -1638,7 +1637,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (StringUtils.isNotEmpty(resourceViewSuffixes)) {
List<String> strList = Arrays.asList(resourceViewSuffixes.split(","));
if (!strList.contains(nameSuffix)) {
logger.warn("Resource suffix does not support view, resource full name:{}, suffix:{}.",
log.warn("Resource suffix does not support view, resource full name:{}, suffix:{}.",
fullName, nameSuffix);
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW);
return result;
@ -1650,7 +1649,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (!result.getCode().equals(Status.SUCCESS.getCode())) {
throw new ServiceException(result.getMsg());
} else
logger.info("Update resource content complete, resource full name:{}.", fullName);
log.info("Update resource content complete, resource full name:{}.", fullName);
return result;
}
@ -1668,7 +1667,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (!FileUtils.writeContent2File(content, localFilename)) {
// write file fail
logger.error("Write file error, fileName:{}, content:{}.", localFilename,
log.error("Write file error, fileName:{}, content:{}.", localFilename,
RegexUtils.escapeNRT(content));
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
@ -1676,12 +1675,12 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
// get resource file path
String resourcePath = storageOperate.getResDir(tenantCode);
logger.info("resource path is {}, resource dir is {}", fullName, resourcePath);
log.info("resource path is {}, resource dir is {}", fullName, resourcePath);
if (!storageOperate.exists(resourcePath)) {
// create if tenant dir not exists
storageOperate.createTenantDirIfNotExists(tenantCode);
logger.info("Create tenant dir because path {} does not exist, tenantCode:{}.", resourcePath,
log.info("Create tenant dir because path {} does not exist, tenantCode:{}.", resourcePath,
tenantCode);
}
if (storageOperate.exists(fullName)) {
@ -1690,13 +1689,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
storageOperate.upload(tenantCode, localFilename, fullName, true, true);
} catch (Exception e) {
logger.error("Upload content to storage error, tenantCode:{}, destFileName:{}.", tenantCode, localFilename,
log.error("Upload content to storage error, tenantCode:{}, destFileName:{}.", tenantCode, localFilename,
e);
result.setCode(Status.HDFS_OPERATION_ERROR.getCode());
result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, fullName));
return result;
}
logger.info("Upload content to storage complete, tenantCode:{}, destFileName:{}.", tenantCode, localFilename);
log.info("Upload content to storage complete, tenantCode:{}, destFileName:{}.", tenantCode, localFilename);
putMsg(result, Status.SUCCESS);
return result;
}
@ -1711,20 +1710,20 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String fullName) throws IOException {
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.warn("Storage does not start up, resource upload startup state: {}.",
log.warn("Storage does not start up, resource upload startup state: {}.",
PropertyUtils.getResUploadStartupState());
throw new ServiceException("hdfs not startup");
}
if (fullName.endsWith("/")) {
logger.error("resource id {} is directory,can't download it", fullName);
log.error("resource id {} is directory,can't download it", fullName);
throw new ServiceException("can't download directory");
}
int userId = loginUser.getId();
User user = userMapper.selectById(userId);
if (user == null) {
logger.error("User does not exits, userId:{}.", userId);
log.error("User does not exits, userId:{}.", userId);
throw new ServiceException(String.format("Resource owner id %d does not exist", userId));
}
@ -1733,7 +1732,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
if (user.getTenantId() != 0) {
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("Tenant id {} not exists", user.getTenantId());
log.error("Tenant id {} not exists", user.getTenantId());
throw new ServiceException(
String.format("The tenant id %d of resource owner not exist", user.getTenantId()));
}
@ -1743,13 +1742,13 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
String[] aliasArr = fullName.split("/");
String alias = aliasArr[aliasArr.length - 1];
String localFileName = FileUtils.getDownloadFilename(alias);
logger.info("Resource path is {}, download local filename is {}", alias, localFileName);
log.info("Resource path is {}, download local filename is {}", alias, localFileName);
try {
storageOperate.download(tenantCode, fullName, localFileName, false, true);
return org.apache.dolphinscheduler.api.utils.FileUtils.file2Resource(localFileName);
} catch (IOException e) {
logger.error("Download resource error, the path is {}, and local filename is {}, the error message is {}",
log.error("Download resource error, the path is {}, and local filename is {}, the error message is {}",
fullName, localFileName, e.getMessage());
throw new ServiceException("Download the resource file failed ,it may be related to your storage");
}
@ -1811,14 +1810,14 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
User user = userMapper.selectById(loginUser.getId());
if (user == null) {
logger.error("user {} not exists", loginUser.getId());
log.error("user {} not exists", loginUser.getId());
putMsg(result, Status.USER_NOT_EXIST, loginUser.getId());
return result;
}
Tenant tenant = tenantMapper.queryById(user.getTenantId());
if (tenant == null) {
logger.error("tenant not exists");
log.error("tenant not exists");
putMsg(result, Status.CURRENT_LOGIN_USER_TENANT_NOT_EXIST);
return result;
}
@ -1835,7 +1834,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
storageEntities = new ArrayList<>(
storageOperate.listFilesStatus(baseFolder, baseFolder, tenantCode, ResourceType.FILE));
} catch (Exception e) {
logger.error("delete data transfer data error", e);
log.error("delete data transfer data error", e);
putMsg(result, Status.DELETE_RESOURCE_ERROR);
return result;
}
@ -1851,7 +1850,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
storageOperate.delete(storageEntity.getFullName(), true);
successList.add(storageEntity.getFullName());
} catch (Exception ex) {
logger.error("delete data transfer data {} error, please delete it manually", date, ex);
log.error("delete data transfer data {} error, please delete it manually", date, ex);
failList.add(storageEntity.getFullName());
}
}
@ -1981,10 +1980,10 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
.collect(Collectors.toList());
Visitor visitor = new ResourceTreeVisitor(transformedResourceList);
String visit = JSONUtils.toJsonString(visitor.visit(""), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS);
logger.info(visit);
log.info(visit);
String jsonTreeStr =
JSONUtils.toJsonString(visitor.visit("").getChildren(), SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS);
logger.info(jsonTreeStr);
log.info(jsonTreeStr);
result.put(Constants.DATA_LIST, visitor.visit("").getChildren());
putMsg(result, Status.SUCCESS);
return result;
@ -2046,7 +2045,7 @@ public class ResourcesServiceImpl extends BaseServiceImpl implements ResourcesSe
*/
private List<Resource> queryAuthoredResourceList(User loginUser, ResourceType type) {
Set<Integer> resourceIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(checkResourceType(type), loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(checkResourceType(type), loginUser.getId(), log);
if (resourceIds.isEmpty()) {
return Collections.emptyList();
}

64
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java

@ -73,10 +73,9 @@ import java.util.TimeZone;
import java.util.stream.Collectors;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -86,10 +85,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.cronutils.model.Cron;
@Service
@Slf4j
public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService {
private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceImpl.class);
@Autowired
private ProjectService projectService;
@ -172,12 +170,12 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end or time can not be null.");
log.warn("The start time must not be the same as the end or time can not be null.");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
if (scheduleParam.getStartTime().getTime() > scheduleParam.getEndTime().getTime()) {
logger.warn("The start time must smaller than end time");
log.warn("The start time must smaller than end time");
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR);
return result;
}
@ -185,7 +183,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
scheduleObj.setStartTime(scheduleParam.getStartTime());
scheduleObj.setEndTime(scheduleParam.getEndTime());
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
logger.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab());
log.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab());
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab());
return result;
}
@ -213,7 +211,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
// return scheduler object with ID
result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId()));
putMsg(result, Status.SUCCESS);
logger.info("Schedule create complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.",
log.info("Schedule create complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.",
projectCode, processDefineCode, scheduleObj.getId());
result.put("scheduleId", scheduleObj.getId());
return result;
@ -334,14 +332,14 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
Schedule schedule = scheduleMapper.selectById(id);
if (schedule == null) {
logger.error("Schedule does not exist, scheduleId:{}.", id);
log.error("Schedule does not exist, scheduleId:{}.", id);
putMsg(result, Status.SCHEDULE_NOT_EXISTS, id);
return result;
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(schedule.getProcessDefinitionCode());
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processDefinitionCode:{}.",
log.error("Process definition does not exist, processDefinitionCode:{}.",
schedule.getProcessDefinitionCode());
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(schedule.getProcessDefinitionCode()));
return result;
@ -438,19 +436,19 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
Schedule scheduleObj = scheduleMapper.selectById(id);
if (scheduleObj == null) {
logger.error("Schedule does not exist, scheduleId:{}.", id);
log.error("Schedule does not exist, scheduleId:{}.", id);
throw new ServiceException(Status.SCHEDULE_CRON_NOT_EXISTS, id);
}
// check schedule release state
if (scheduleObj.getReleaseState() == scheduleStatus) {
logger.warn("Schedule state does not need to change due to schedule state is already {}, scheduleId:{}.",
log.warn("Schedule state does not need to change due to schedule state is already {}, scheduleId:{}.",
scheduleObj.getReleaseState().getDescp(), scheduleObj.getId());
throw new ServiceException(Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus);
}
ProcessDefinition processDefinition =
processDefinitionMapper.queryByCode(scheduleObj.getProcessDefinitionCode());
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processDefinitionCode:{}.",
log.error("Process definition does not exist, processDefinitionCode:{}.",
scheduleObj.getProcessDefinitionCode());
throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST,
String.valueOf(scheduleObj.getProcessDefinitionCode()));
@ -458,14 +456,14 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
List<ProcessTaskRelation> processTaskRelations =
processTaskRelationMapper.queryByProcessCode(projectCode, scheduleObj.getProcessDefinitionCode());
if (processTaskRelations.isEmpty()) {
logger.error("Process task relations do not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
log.error("Process task relations do not exist, projectCode:{}, processDefinitionCode:{}.", projectCode,
processDefinition.getCode());
throw new ServiceException(Status.PROCESS_DAG_IS_EMPTY);
}
if (scheduleStatus == ReleaseState.ONLINE) {
// check process definition release state
if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
logger.warn("Only process definition state is {} can change schedule state, processDefinitionCode:{}.",
log.warn("Only process definition state is {} can change schedule state, processDefinitionCode:{}.",
ReleaseState.ONLINE.getDescp(), processDefinition.getCode());
throw new ServiceException(Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName());
}
@ -473,7 +471,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
List<Long> subProcessDefineCodes = new ArrayList<>();
processService.recurseFindSubProcess(processDefinition.getCode(), subProcessDefineCodes);
if (!subProcessDefineCodes.isEmpty()) {
logger.info(
log.info(
"Need to check sub process definition state before change schedule state, subProcessDefineCodes:{}.",
org.apache.commons.lang.StringUtils.join(subProcessDefineCodes, ","));
List<ProcessDefinition> subProcessDefinitionList =
@ -484,7 +482,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
* if there is no online process, exit directly
*/
if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) {
logger.warn(
log.warn(
"Only sub process definition state is {} can change schedule state, subProcessDefinitionCode:{}.",
ReleaseState.ONLINE.getDescp(), subProcessDefinition.getCode());
throw new ServiceException(Status.PROCESS_DEFINE_NOT_RELEASE,
@ -499,7 +497,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
List<Server> masterServers = monitorService.getServerListFromRegistry(true);
if (masterServers.isEmpty()) {
logger.error("Master does not exist.");
log.error("Master does not exist.");
throw new ServiceException(Status.MASTER_NOT_EXISTS);
}
@ -511,12 +509,12 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
try {
switch (scheduleStatus) {
case ONLINE:
logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}",
log.info("Call master client set schedule online, project id: {}, flow id: {},host: {}",
project.getId(), processDefinition.getId(), masterServers);
setSchedule(project.getId(), scheduleObj);
break;
case OFFLINE:
logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}",
log.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}",
project.getId(), processDefinition.getId(), masterServers);
deleteSchedule(project.getId(), id);
break;
@ -524,7 +522,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
throw new ServiceException(Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString());
}
} catch (Exception e) {
logger.error("Set schedule state to {} error, projectCode:{}, scheduleId:{}.", scheduleStatus.getDescp(),
log.error("Set schedule state to {} error, projectCode:{}, scheduleId:{}.", scheduleStatus.getDescp(),
projectCode, scheduleObj.getId());
Status status = scheduleStatus == ReleaseState.ONLINE ? Status.PUBLISH_SCHEDULE_ONLINE_ERROR
: Status.OFFLINE_SCHEDULE_ERROR;
@ -558,7 +556,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefineCode);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefineCode);
log.error("Process definition does not exist, processDefinitionCode:{}.", processDefineCode);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefineCode));
return result;
}
@ -645,7 +643,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
}
public void setSchedule(int projectId, Schedule schedule) {
logger.info("Set schedule state {}, project id: {}, scheduleId: {}", schedule.getReleaseState().getDescp(),
log.info("Set schedule state {}, project id: {}, scheduleId: {}", schedule.getReleaseState().getDescp(),
projectId, schedule.getId());
schedulerApi.insertOrUpdateScheduleTask(projectId, schedule);
}
@ -659,7 +657,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
*/
@Override
public void deleteSchedule(int projectId, int scheduleId) {
logger.info("Delete schedule of project, projectId:{}, scheduleId:{}", projectId, scheduleId);
log.info("Delete schedule of project, projectId:{}, scheduleId:{}", projectId, scheduleId);
schedulerApi.deleteScheduleTask(projectId, scheduleId);
}
@ -731,7 +729,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
try {
cron = CronUtils.parse2Cron(scheduleParam.getCrontab());
} catch (CronParseException e) {
logger.error("Parse cron to cron expression error, crontab:{}.", scheduleParam.getCrontab(), e);
log.error("Parse cron to cron expression error, crontab:{}.", scheduleParam.getCrontab(), e);
putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
return result;
}
@ -778,7 +776,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
// check schedule exists
Schedule schedule = scheduleMapper.queryByProcessDefinitionCode(processDefinitionCode);
if (schedule == null) {
logger.error("Schedule of process definition does not exist, processDefinitionCode:{}.",
log.error("Schedule of process definition does not exist, processDefinitionCode:{}.",
processDefinitionCode);
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, processDefinitionCode);
return result;
@ -786,7 +784,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode);
log.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode));
return result;
}
@ -802,7 +800,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
long environmentCode) {
if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE,
Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) {
logger.warn("Schedule can not be updated due to schedule is {}, scheduleId:{}.",
log.warn("Schedule can not be updated due to schedule is {}, scheduleId:{}.",
ReleaseState.ONLINE.getDescp(), schedule.getId());
return;
}
@ -813,17 +811,17 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
if (!StringUtils.isEmpty(scheduleExpression)) {
ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class);
if (scheduleParam == null) {
logger.warn("Parameter scheduleExpression is invalid, so parse cron error.");
log.warn("Parameter scheduleExpression is invalid, so parse cron error.");
putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
return;
}
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
logger.warn("The start time must not be the same as the end or time can not be null.");
log.warn("The start time must not be the same as the end or time can not be null.");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return;
}
if (scheduleParam.getStartTime().getTime() > scheduleParam.getEndTime().getTime()) {
logger.warn("The start time must smaller than end time");
log.warn("The start time must smaller than end time");
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR);
return;
}
@ -831,7 +829,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
schedule.setStartTime(scheduleParam.getStartTime());
schedule.setEndTime(scheduleParam.getEndTime());
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
logger.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab());
log.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab());
putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
return;
}
@ -859,7 +857,7 @@ public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerSe
processDefinitionMapper.updateById(processDefinition);
logger.info("Schedule update complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.",
log.info("Schedule update complete, projectCode:{}, processDefinitionCode:{}, scheduleId:{}.",
processDefinition.getProjectCode(), processDefinition.getCode(), schedule.getId());
putMsg(result, Status.SUCCESS);
}

11
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SessionServiceImpl.java

@ -34,8 +34,8 @@ import java.util.UUID;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -45,10 +45,9 @@ import org.springframework.web.util.WebUtils;
* session service implement
*/
@Service
@Slf4j
public class SessionServiceImpl extends BaseServiceImpl implements SessionService {
private static final Logger logger = LoggerFactory.getLogger(SessionService.class);
@Autowired
private SessionMapper sessionMapper;
@ -75,7 +74,7 @@ public class SessionServiceImpl extends BaseServiceImpl implements SessionServic
}
String ip = BaseController.getClientIpAddress(request);
logger.debug("Get session: {}, ip: {}.", sessionId, ip);
log.debug("Get session: {}, ip: {}.", sessionId, ip);
return sessionMapper.selectById(sessionId);
}
@ -156,7 +155,7 @@ public class SessionServiceImpl extends BaseServiceImpl implements SessionServic
// delete session
sessionMapper.deleteById(session.getId());
} catch (Exception e) {
logger.warn("userId : {} , ip : {} , find more one session", loginUser.getId(), ip, e);
log.warn("userId : {} , ip : {} , find more one session", loginUser.getId(), ip, e);
}
}

109
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskDefinitionServiceImpl.java

@ -86,8 +86,8 @@ import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -100,10 +100,9 @@ import com.google.common.collect.Lists;
* task definition service impl
*/
@Service
@Slf4j
public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(TaskDefinitionServiceImpl.class);
private static final String RELEASESTATE = "releaseState";
@Autowired
@ -165,7 +164,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
List<TaskDefinitionLog> taskDefinitionLogs = JSONUtils.toList(taskDefinitionJson, TaskDefinitionLog.class);
if (CollectionUtils.isEmpty(taskDefinitionLogs)) {
logger.warn("Parameter taskDefinitionJson is invalid.");
log.warn("Parameter taskDefinitionJson is invalid.");
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJson);
return result;
}
@ -175,14 +174,14 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
.taskParams(taskDefinitionLog.getTaskParams())
.dependence(taskDefinitionLog.getDependence())
.build())) {
logger.warn("Task definition {} parameters are invalid.", taskDefinitionLog.getName());
log.warn("Task definition {} parameters are invalid.", taskDefinitionLog.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionLog.getName());
return result;
}
}
int saveTaskResult = processService.saveTaskDefine(loginUser, projectCode, taskDefinitionLogs, Boolean.TRUE);
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
logger.error("Create task definition error, projectCode:{}.", projectCode);
log.error("Create task definition error, projectCode:{}.", projectCode);
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
}
@ -309,19 +308,19 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
}
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null || projectCode != processDefinition.getProjectCode()) {
logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode);
log.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode);
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, String.valueOf(processDefinitionCode));
return result;
}
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
logger.warn("Task definition can not be created due to process definition is {}, processDefinitionCode:{}.",
log.warn("Task definition can not be created due to process definition is {}, processDefinitionCode:{}.",
ReleaseState.ONLINE.getDescp(), processDefinition.getCode());
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, String.valueOf(processDefinitionCode));
return result;
}
TaskDefinitionLog taskDefinition = JSONUtils.parseObject(taskDefinitionJsonObj, TaskDefinitionLog.class);
if (taskDefinition == null) {
logger.warn("Parameter taskDefinitionJsonObj is invalid json.");
log.warn("Parameter taskDefinitionJsonObj is invalid json.");
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJsonObj);
return result;
}
@ -330,7 +329,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
.taskParams(taskDefinition.getTaskParams())
.dependence(taskDefinition.getDependence())
.build())) {
logger.error("Task definition {} parameters are invalid", taskDefinition.getName());
log.error("Task definition {} parameters are invalid", taskDefinition.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinition.getName());
return result;
}
@ -355,7 +354,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
.collect(Collectors.toSet());
if (CollectionUtils.isNotEmpty(diffCode)) {
String taskCodes = StringUtils.join(diffCode, Constants.COMMA);
logger.error("Some task definitions with parameter upstreamCodes do not exist, taskDefinitionCodes:{}.",
log.error("Some task definitions with parameter upstreamCodes do not exist, taskDefinitionCodes:{}.",
taskCodes);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskCodes);
return result;
@ -384,25 +383,25 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
processDefinition.getVersion(),
processTaskRelationLogList, Lists.newArrayList(), Boolean.TRUE);
if (insertResult != Constants.EXIT_CODE_SUCCESS) {
logger.error(
log.error(
"Save new version process task relations error, processDefinitionCode:{}, processDefinitionVersion:{}.",
processDefinition.getCode(), processDefinition.getVersion());
putMsg(result, Status.CREATE_PROCESS_TASK_RELATION_ERROR);
throw new ServiceException(Status.CREATE_PROCESS_TASK_RELATION_ERROR);
} else
logger.info(
log.info(
"Save new version process task relations complete, processDefinitionCode:{}, processDefinitionVersion:{}.",
processDefinition.getCode(), processDefinition.getVersion());
int saveTaskResult =
processService.saveTaskDefine(loginUser, projectCode, Lists.newArrayList(taskDefinition), Boolean.TRUE);
if (saveTaskResult == Constants.DEFINITION_FAILURE) {
logger.error("Save task definition error, projectCode:{}, taskDefinitionCode:{}.", projectCode,
log.error("Save task definition error, projectCode:{}, taskDefinitionCode:{}.", projectCode,
taskDefinition.getCode());
putMsg(result, Status.CREATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.CREATE_TASK_DEFINITION_ERROR);
} else
logger.info("Save task definition complete, projectCode:{}, taskDefinitionCode:{}.", projectCode,
log.info("Save task definition complete, projectCode:{}, taskDefinitionCode:{}.", projectCode,
taskDefinition.getCode());
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, taskDefinition);
@ -430,7 +429,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
TaskDefinition taskDefinition = taskDefinitionMapper.queryByName(project.getCode(), processCode, taskName);
if (taskDefinition == null) {
logger.error("Task definition does not exist, taskName:{}.", taskName);
log.error("Task definition does not exist, taskName:{}.", taskName);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, taskName);
} else {
result.put(Constants.DATA_LIST, taskDefinition);
@ -498,7 +497,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
List<ProcessTaskRelation> taskRelationList =
processTaskRelationMapper.queryUpstreamByCode(taskDefinition.getProjectCode(), taskCode);
if (CollectionUtils.isNotEmpty(taskRelationList)) {
logger.debug(
log.debug(
"Task definition has upstream tasks, start handle them after delete task, taskDefinitionCode:{}.",
taskCode);
long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode();
@ -515,16 +514,16 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
List<TaskDefinitionLog> taskDefinitionLogs) {
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
logger.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode);
log.error("Process definition does not exist, processDefinitionCode:{}.", processDefinitionCode);
throw new ServiceException(Status.PROCESS_DEFINE_NOT_EXIST);
}
int insertVersion = processService.saveProcessDefine(loginUser, processDefinition, Boolean.TRUE, Boolean.TRUE);
if (insertVersion <= 0) {
logger.error("Update process definition error, projectCode:{}, processDefinitionCode:{}.",
log.error("Update process definition error, projectCode:{}, processDefinitionCode:{}.",
processDefinition.getProjectCode(), processDefinitionCode);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
} else
logger.info(
log.info(
"Save new version process definition complete, projectCode:{}, processDefinitionCode:{}, newVersion:{}.",
processDefinition.getProjectCode(), processDefinitionCode, insertVersion);
List<ProcessTaskRelationLog> relationLogs =
@ -533,11 +532,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
processDefinition.getCode(),
insertVersion, relationLogs, taskDefinitionLogs, Boolean.TRUE);
if (insertResult == Constants.EXIT_CODE_SUCCESS) {
logger.info(
log.info(
"Save new version task relations complete, projectCode:{}, processDefinitionCode:{}, newVersion:{}.",
processDefinition.getProjectCode(), processDefinitionCode, insertVersion);
} else {
logger.error("Update task relations error, projectCode:{}, processDefinitionCode:{}.",
log.error("Update task relations error, projectCode:{}, processDefinitionCode:{}.",
processDefinition.getProjectCode(), processDefinitionCode);
throw new ServiceException(Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
@ -564,7 +563,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
List<ProcessTaskRelation> taskRelationList =
processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
if (CollectionUtils.isNotEmpty(taskRelationList)) {
logger.info(
log.info(
"Task definition has upstream tasks, start handle them after update task, taskDefinitionCode:{}.",
taskCode);
long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode();
@ -573,7 +572,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
updateDag(loginUser, processDefinitionCode, processTaskRelations,
Lists.newArrayList(taskDefinitionToUpdate));
}
logger.info("Update task definition complete, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskCode);
log.info("Update task definition complete, projectCode:{}, taskDefinitionCode:{}.", projectCode, taskCode);
result.put(Constants.DATA_LIST, taskCode);
putMsg(result, Status.SUCCESS);
return result;
@ -639,7 +638,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
processTaskRelationMapper.queryUpstreamByCode(taskDefinitionUpdate.getProjectCode(), taskCode);
if (CollectionUtils.isNotEmpty(taskRelationList)) {
logger.info(
log.info(
"Task definition has upstream tasks, start handle them after update task, taskDefinitionCode:{}.",
taskCode);
long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode();
@ -713,14 +712,14 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null) {
logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode));
return null;
}
if (processService.isTaskOnline(taskCode) && taskDefinition.getFlag() == Flag.YES) {
// if stream, can update task definition without online check
if (taskDefinition.getTaskExecuteType() != TaskExecuteType.STREAM) {
logger.warn("Only {} type task can be updated without online check, taskDefinitionCode:{}.",
log.warn("Only {} type task can be updated without online check, taskDefinitionCode:{}.",
TaskExecuteType.STREAM, taskCode);
putMsg(result, Status.NOT_SUPPORT_UPDATE_TASK_DEFINITION);
return null;
@ -732,12 +731,12 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
taskDefinition.setTimeoutNotifyStrategy(null);
}
if (taskDefinition.equals(taskDefinitionToUpdate)) {
logger.warn("Task definition does not need update because no change, taskDefinitionCode:{}.", taskCode);
log.warn("Task definition does not need update because no change, taskDefinitionCode:{}.", taskCode);
putMsg(result, Status.TASK_DEFINITION_NOT_MODIFY_ERROR, String.valueOf(taskCode));
return null;
}
if (taskDefinitionToUpdate == null) {
logger.warn("Parameter taskDefinitionJson is invalid.");
log.warn("Parameter taskDefinitionJson is invalid.");
putMsg(result, Status.DATA_IS_NOT_VALID, taskDefinitionJsonObj);
return null;
}
@ -746,14 +745,14 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
.taskParams(taskDefinitionToUpdate.getTaskParams())
.dependence(taskDefinitionToUpdate.getDependence())
.build())) {
logger.warn("Task definition parameters are invalid, taskDefinitionName:{}.",
log.warn("Task definition parameters are invalid, taskDefinitionName:{}.",
taskDefinitionToUpdate.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskDefinitionToUpdate.getName());
return null;
}
Integer version = taskDefinitionLogMapper.queryMaxVersionForDefinition(taskCode);
if (version == null || version == 0) {
logger.error("Max version task definitionLog can not be found in database, taskDefinitionCode:{}.",
log.error("Max version task definitionLog can not be found in database, taskDefinitionCode:{}.",
taskCode);
putMsg(result, Status.DATA_IS_NOT_VALID, taskCode);
return null;
@ -774,12 +773,12 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
taskDefinitionToUpdate.setId(null);
int insert = taskDefinitionLogMapper.insert(taskDefinitionToUpdate);
if ((update & insert) != 1) {
logger.error("Update task definition or definitionLog error, projectCode:{}, taskDefinitionCode:{}.",
log.error("Update task definition or definitionLog error, projectCode:{}, taskDefinitionCode:{}.",
projectCode, taskCode);
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
} else
logger.info(
log.info(
"Update task definition and definitionLog complete, projectCode:{}, taskDefinitionCode:{}, newTaskVersion:{}.",
projectCode, taskCode, taskDefinitionToUpdate.getVersion());
// update process task relation
@ -794,7 +793,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
}
int count = processTaskRelationMapper.updateProcessTaskRelationTaskVersion(processTaskRelation);
if (count != 1) {
logger.error("batch update process task relation error, projectCode:{}, taskDefinitionCode:{}.",
log.error("batch update process task relation error, projectCode:{}, taskDefinitionCode:{}.",
projectCode, taskCode);
putMsg(result, Status.PROCESS_TASK_RELATION_BATCH_UPDATE_ERROR);
throw new ServiceException(Status.PROCESS_TASK_RELATION_BATCH_UPDATE_ERROR);
@ -842,7 +841,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
upstreamTaskCodes.removeAll(queryUpStreamTaskCodeMap.keySet());
if (CollectionUtils.isNotEmpty(upstreamTaskCodes)) {
String notExistTaskCodes = StringUtils.join(upstreamTaskCodes, Constants.COMMA);
logger.error("Some task definitions in parameter upstreamTaskCodes do not exist, notExistTaskCodes:{}.",
log.error("Some task definitions in parameter upstreamTaskCodes do not exist, notExistTaskCodes:{}.",
notExistTaskCodes);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, notExistTaskCodes);
return result;
@ -883,7 +882,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
processTaskRelationList.add(processTaskRelationList.get(0));
}
}
logger.info(
log.info(
"Update task with upstream tasks complete, projectCode:{}, taskDefinitionCode:{}, upstreamTaskCodes:{}.",
projectCode, taskCode, upstreamTaskCodes);
result.put(Constants.DATA_LIST, taskCode);
@ -996,7 +995,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
return result;
}
if (processService.isTaskOnline(taskCode)) {
logger.warn(
log.warn(
"Task definition version can not be switched due to process definition is {}, taskDefinitionCode:{}.",
ReleaseState.ONLINE.getDescp(), taskCode);
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE);
@ -1004,7 +1003,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
}
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) {
logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode));
return result;
}
@ -1018,7 +1017,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
List<ProcessTaskRelation> taskRelationList =
processTaskRelationMapper.queryUpstreamByCode(projectCode, taskCode);
if (CollectionUtils.isNotEmpty(taskRelationList)) {
logger.info(
log.info(
"Task definition has upstream tasks, start handle them after switch task, taskDefinitionCode:{}.",
taskCode);
long processDefinitionCode = taskRelationList.get(0).getProcessDefinitionCode();
@ -1027,13 +1026,13 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
updateDag(loginUser, processDefinitionCode, processTaskRelations,
Lists.newArrayList(taskDefinitionUpdate));
} else {
logger.info(
log.info(
"Task definition version switch complete, switch task version to {}, taskDefinitionCode:{}.",
version, taskCode);
putMsg(result, Status.SUCCESS);
}
} else {
logger.error("Task definition version switch error, taskDefinitionCode:{}.", taskCode);
log.error("Task definition version switch error, taskDefinitionCode:{}.", taskCode);
putMsg(result, Status.SWITCH_TASK_DEFINITION_VERSION_ERROR);
}
return result;
@ -1081,11 +1080,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null) {
logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode));
} else {
if (taskDefinition.getVersion() == version) {
logger.warn(
log.warn(
"Task definition can not be deleted due to version is being used, projectCode:{}, taskDefinitionCode:{}, version:{}.",
projectCode, taskCode, version);
putMsg(result, Status.MAIN_TABLE_USING_VERSION);
@ -1093,12 +1092,12 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
}
int delete = taskDefinitionLogMapper.deleteByCodeAndVersion(taskCode, version);
if (delete > 0) {
logger.info(
log.info(
"Task definition version delete complete, projectCode:{}, taskDefinitionCode:{}, version:{}.",
projectCode, taskCode, version);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Task definition version delete error, projectCode:{}, taskDefinitionCode:{}, version:{}.",
log.error("Task definition version delete error, projectCode:{}, taskDefinitionCode:{}, version:{}.",
projectCode, taskCode, version);
putMsg(result, Status.DELETE_TASK_DEFINITION_VERSION_ERROR);
}
@ -1118,7 +1117,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(taskCode);
if (taskDefinition == null || projectCode != taskDefinition.getProjectCode()) {
logger.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
log.error("Task definition does not exist, taskDefinitionCode:{}.", taskCode);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(taskCode));
} else {
List<ProcessTaskRelation> taskRelationList = processTaskRelationMapper
@ -1217,7 +1216,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
public Map<String, Object> genTaskCodeList(Integer genNum) {
Map<String, Object> result = new HashMap<>();
if (genNum == null || genNum < 1 || genNum > 100) {
logger.warn("Parameter genNum must be great than 1 and less than 100.");
log.warn("Parameter genNum must be great than 1 and less than 100.");
putMsg(result, Status.DATA_IS_NOT_VALID, genNum);
return result;
}
@ -1227,7 +1226,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
taskCodes.add(CodeGenerateUtils.getInstance().genCode());
}
} catch (CodeGenerateException e) {
logger.error("Generate task definition code error.", e);
log.error("Generate task definition code error.", e);
putMsg(result, Status.INTERNAL_SERVER_ERROR_ARGS, "Error generating task definition code");
}
putMsg(result, Status.SUCCESS);
@ -1268,7 +1267,7 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
TaskDefinitionLog taskDefinitionLog =
taskDefinitionLogMapper.queryByDefinitionCodeAndVersion(code, taskDefinition.getVersion());
if (taskDefinitionLog == null) {
logger.error("Task definition does not exist, taskDefinitionCode:{}.", code);
log.error("Task definition does not exist, taskDefinitionCode:{}.", code);
putMsg(result, Status.TASK_DEFINE_NOT_EXIST, String.valueOf(code));
return result;
}
@ -1283,11 +1282,11 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
Integer[] resourceIdArray =
Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new);
PermissionCheck<Integer> permissionCheck = new PermissionCheck(AuthorizationType.RESOURCE_FILE_ID,
processService, resourceIdArray, loginUser.getId(), logger);
processService, resourceIdArray, loginUser.getId(), log);
try {
permissionCheck.checkPermission();
} catch (Exception e) {
logger.error("Resources permission check error, resourceIds:{}.", resourceIds, e);
log.error("Resources permission check error, resourceIds:{}.", resourceIds, e);
putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION);
return result;
}
@ -1296,18 +1295,18 @@ public class TaskDefinitionServiceImpl extends BaseServiceImpl implements TaskDe
taskDefinitionLog.setFlag(Flag.YES);
break;
default:
logger.warn("Parameter releaseState is invalid.");
log.warn("Parameter releaseState is invalid.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
int update = taskDefinitionMapper.updateById(taskDefinition);
int updateLog = taskDefinitionLogMapper.updateById(taskDefinitionLog);
if ((update == 0 && updateLog == 1) || (update == 1 && updateLog == 0)) {
logger.error("Update taskDefinition state or taskDefinitionLog state error, taskDefinitionCode:{}.", code);
log.error("Update taskDefinition state or taskDefinitionLog state error, taskDefinitionCode:{}.", code);
putMsg(result, Status.UPDATE_TASK_DEFINITION_ERROR);
throw new ServiceException(Status.UPDATE_TASK_DEFINITION_ERROR);
}
logger.error("Update taskDefinition state or taskDefinitionLog state to complete, taskDefinitionCode:{}.",
log.error("Update taskDefinition state or taskDefinitionLog state to complete, taskDefinitionCode:{}.",
code);
putMsg(result, Status.SUCCESS);
return result;

9
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupQueueServiceImpl.java

@ -35,8 +35,8 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -47,6 +47,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* task group queue service
*/
@Service
@Slf4j
public class TaskGroupQueueServiceImpl extends BaseServiceImpl implements TaskGroupQueueService {
@Autowired
@ -55,8 +56,6 @@ public class TaskGroupQueueServiceImpl extends BaseServiceImpl implements TaskGr
@Autowired
private ProjectMapper projectMapper;
private static final Logger logger = LoggerFactory.getLogger(TaskGroupQueueServiceImpl.class);
/**
* query tasks in task group queue by group id
*
@ -73,7 +72,7 @@ public class TaskGroupQueueServiceImpl extends BaseServiceImpl implements TaskGr
Page<TaskGroupQueue> page = new Page<>(pageNo, pageSize);
PageInfo<TaskGroupQueue> pageInfo = new PageInfo<>(pageNo, pageSize);
Set<Integer> projectIds = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.PROJECTS, loginUser.getId(), log);
if (projectIds.isEmpty()) {
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);

53
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskGroupServiceImpl.java

@ -42,8 +42,8 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -56,6 +56,7 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* task Group Service
*/
@Service
@Slf4j
public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupService {
@Autowired
@ -67,8 +68,6 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
@Autowired
private ExecutorService executorService;
private static final Logger logger = LoggerFactory.getLogger(TaskGroupServiceImpl.class);
/**
* create a Task group
*
@ -90,23 +89,23 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
return result;
}
if (checkDescriptionLength(description)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
if (name == null) {
logger.warn("Parameter name can ot be null.");
log.warn("Parameter name can ot be null.");
putMsg(result, Status.NAME_NULL);
return result;
}
if (groupSize <= 0) {
logger.warn("Parameter task group size is must bigger than 1.");
log.warn("Parameter task group size is must bigger than 1.");
putMsg(result, Status.TASK_GROUP_SIZE_ERROR);
return result;
}
TaskGroup taskGroup1 = taskGroupMapper.queryByName(loginUser.getId(), name);
if (taskGroup1 != null) {
logger.warn("Task group with the same name already exists, taskGroupName:{}.", taskGroup1.getName());
log.warn("Task group with the same name already exists, taskGroupName:{}.", taskGroup1.getName());
putMsg(result, Status.TASK_GROUP_NAME_EXSIT);
return result;
}
@ -124,11 +123,11 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
if (taskGroupMapper.insert(taskGroup) > 0) {
permissionPostHandle(AuthorizationType.TASK_GROUP, loginUser.getId(),
Collections.singletonList(taskGroup.getId()), logger);
logger.info("Create task group complete, taskGroupName:{}.", taskGroup.getName());
Collections.singletonList(taskGroup.getId()), log);
log.info("Create task group complete, taskGroupName:{}.", taskGroup.getName());
putMsg(result, Status.SUCCESS);
} else {
logger.error("Create task group error, taskGroupName:{}.", taskGroup.getName());
log.error("Create task group error, taskGroupName:{}.", taskGroup.getName());
putMsg(result, Status.CREATE_TASK_GROUP_ERROR);
return result;
}
@ -155,17 +154,17 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
return result;
}
if (checkDescriptionLength(description)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
if (name == null) {
logger.warn("Parameter name can ot be null.");
log.warn("Parameter name can ot be null.");
putMsg(result, Status.NAME_NULL);
return result;
}
if (groupSize <= 0) {
logger.warn("Parameter task group size is must bigger than 1.");
log.warn("Parameter task group size is must bigger than 1.");
putMsg(result, Status.TASK_GROUP_SIZE_ERROR);
return result;
}
@ -175,13 +174,13 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
.ne(TaskGroup::getId, id));
if (exists > 0) {
logger.error("Task group with the same name already exists.");
log.error("Task group with the same name already exists.");
putMsg(result, Status.TASK_GROUP_NAME_EXSIT);
return result;
}
TaskGroup taskGroup = taskGroupMapper.selectById(id);
if (taskGroup.getStatus() != Flag.YES.getCode()) {
logger.warn("Task group has been closed, taskGroupId:{}.", id);
log.warn("Task group has been closed, taskGroupId:{}.", id);
putMsg(result, Status.TASK_GROUP_STATUS_ERROR);
return result;
}
@ -193,10 +192,10 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
}
int i = taskGroupMapper.updateById(taskGroup);
if (i > 0) {
logger.info("Update task group complete, taskGroupId:{}.", id);
log.info("Update task group complete, taskGroupId:{}.", id);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Update task group error, taskGroupId:{}.", id);
log.error("Update task group error, taskGroupId:{}.", id);
putMsg(result, Status.UPDATE_TASK_GROUP_ERROR);
}
return result;
@ -256,7 +255,7 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
Page<TaskGroup> page = new Page<>(pageNo, pageSize);
PageInfo<TaskGroup> emptyPageInfo = new PageInfo<>(pageNo, pageSize);
Set<Integer> ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.TASK_GROUP,
loginUser.getId(), logger);
loginUser.getId(), log);
if (ids.isEmpty()) {
result.put(Constants.DATA_LIST, emptyPageInfo);
putMsg(result, Status.SUCCESS);
@ -314,7 +313,7 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
Page<TaskGroup> page = new Page<>(pageNo, pageSize);
PageInfo<TaskGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
Set<Integer> ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.TASK_GROUP,
userId, logger);
userId, log);
if (ids.isEmpty()) {
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
@ -345,16 +344,16 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
}
TaskGroup taskGroup = taskGroupMapper.selectById(id);
if (taskGroup.getStatus() == Flag.NO.getCode()) {
logger.info("Task group has been closed, taskGroupId:{}.", id);
log.info("Task group has been closed, taskGroupId:{}.", id);
putMsg(result, Status.TASK_GROUP_STATUS_CLOSED);
return result;
}
taskGroup.setStatus(Flag.NO.getCode());
int update = taskGroupMapper.updateById(taskGroup);
if (update > 0)
logger.info("Task group close complete, taskGroupId:{}.", id);
log.info("Task group close complete, taskGroupId:{}.", id);
else
logger.error("Task group close error, taskGroupId:{}.", id);
log.error("Task group close error, taskGroupId:{}.", id);
putMsg(result, Status.SUCCESS);
return result;
}
@ -378,7 +377,7 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
}
TaskGroup taskGroup = taskGroupMapper.selectById(id);
if (taskGroup.getStatus() == Flag.YES.getCode()) {
logger.info("Task group has been started, taskGroupId:{}.", id);
log.info("Task group has been started, taskGroupId:{}.", id);
putMsg(result, Status.TASK_GROUP_STATUS_OPENED);
return result;
}
@ -386,9 +385,9 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
taskGroup.setUpdateTime(new Date(System.currentTimeMillis()));
int update = taskGroupMapper.updateById(taskGroup);
if (update > 0)
logger.info("Task group start complete, taskGroupId:{}.", id);
log.info("Task group start complete, taskGroupId:{}.", id);
else
logger.error("Task group start error, taskGroupId:{}.", id);
log.error("Task group start error, taskGroupId:{}.", id);
putMsg(result, Status.SUCCESS);
return result;
}
@ -423,7 +422,7 @@ public class TaskGroupServiceImpl extends BaseServiceImpl implements TaskGroupSe
return result;
}
taskGroupQueueService.modifyPriority(queueId, priority);
logger.info("Modify task group queue priority complete, queueId:{}, priority:{}.", queueId, priority);
log.info("Modify task group queue priority complete, queueId:{}, priority:{}.", queueId, priority);
putMsg(result, Status.SUCCESS);
return result;
}

25
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TaskInstanceServiceImpl.java

@ -61,8 +61,8 @@ import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -74,10 +74,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* task instance service impl
*/
@Service
@Slf4j
public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInstanceService {
private static final Logger logger = LoggerFactory.getLogger(TaskInstanceServiceImpl.class);
@Autowired
ProjectMapper projectMapper;
@ -232,7 +231,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
// check whether the task instance can be found
TaskInstance task = taskInstanceMapper.selectById(taskInstanceId);
if (task == null) {
logger.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
log.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
taskInstanceId);
putMsg(result, Status.TASK_INSTANCE_NOT_FOUND);
return result;
@ -240,7 +239,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
TaskDefinition taskDefinition = taskDefinitionMapper.queryByCode(task.getTaskCode());
if (taskDefinition != null && projectCode != taskDefinition.getProjectCode()) {
logger.error("Task definition can not be found, projectCode:{}, taskDefinitionCode:{}.", projectCode,
log.error("Task definition can not be found, projectCode:{}, taskDefinitionCode:{}.", projectCode,
task.getTaskCode());
putMsg(result, Status.TASK_INSTANCE_NOT_FOUND, taskInstanceId);
return result;
@ -248,7 +247,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
// check whether the task instance state type is failure or cancel
if (!task.getState().isFailure() && !task.getState().isKill()) {
logger.warn("{} type task instance can not perform force success, projectCode:{}, taskInstanceId:{}.",
log.warn("{} type task instance can not perform force success, projectCode:{}, taskInstanceId:{}.",
task.getState().getDesc(), projectCode, taskInstanceId);
putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString());
return result;
@ -259,11 +258,11 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
int changedNum = taskInstanceMapper.updateById(task);
if (changedNum > 0) {
processService.forceProcessInstanceSuccessByTaskInstanceId(taskInstanceId);
logger.info("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}", projectCode,
log.info("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}", projectCode,
taskInstanceId);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}",
log.error("Task instance performs force success complete, projectCode:{}, taskInstanceId:{}",
projectCode, taskInstanceId);
putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR);
}
@ -286,7 +285,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstanceId);
if (taskInstance == null) {
logger.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
log.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
taskInstanceId);
putMsg(result, Status.TASK_INSTANCE_NOT_FOUND);
return result;
@ -317,7 +316,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstanceId);
if (taskInstance == null) {
logger.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
log.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
taskInstanceId);
putMsg(result, Status.TASK_INSTANCE_NOT_FOUND);
return result;
@ -338,7 +337,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
projectService.checkProjectAndAuthThrowException(loginUser, project, FORCED_SUCCESS);
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstanceId);
if (taskInstance == null) {
logger.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
log.error("Task instance can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
taskInstanceId);
}
return taskInstance;
@ -354,7 +353,7 @@ public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInst
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstanceId);
if (taskInstance == null) {
logger.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
log.error("Task definition can not be found, projectCode:{}, taskInstanceId:{}.", projectCode,
taskInstanceId);
putMsg(result, Status.TASK_INSTANCE_NOT_FOUND);
return new TaskInstanceRemoveCacheResponse(result);

35
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/TenantServiceImpl.java

@ -54,8 +54,8 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -67,10 +67,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* tenant service impl
*/
@Service
@Slf4j
public class TenantServiceImpl extends BaseServiceImpl implements TenantService {
private static final Logger logger = LoggerFactory.getLogger(TenantServiceImpl.class);
@Autowired
private TenantMapper tenantMapper;
@ -115,7 +114,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
private void updateTenantValid(Tenant existsTenant, Tenant updateTenant) throws ServiceException {
// Check the exists tenant
if (Objects.isNull(existsTenant)) {
logger.error("Tenant does not exist.");
log.error("Tenant does not exist.");
throw new ServiceException(Status.TENANT_NOT_EXIST);
}
// Check the update tenant parameters
@ -153,7 +152,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
throw new ServiceException(Status.USER_NO_OPERATION_PERM);
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -166,7 +165,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
storageOperate.createTenantDirIfNotExists(tenantCode);
}
permissionPostHandle(AuthorizationType.TENANT, loginUser.getId(), Collections.singletonList(tenant.getId()),
logger);
log);
result.put(Constants.DATA_LIST, tenant);
putMsg(result, Status.SUCCESS);
return result;
@ -187,7 +186,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
Result<Object> result = new Result<>();
PageInfo<Tenant> pageInfo = new PageInfo<>(pageNo, pageSize);
Set<Integer> ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.TENANT,
loginUser.getId(), logger);
loginUser.getId(), log);
if (ids.isEmpty()) {
result.setData(pageInfo);
putMsg(result, Status.SUCCESS);
@ -224,7 +223,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
throw new ServiceException(Status.USER_NO_OPERATION_PERM);
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -240,10 +239,10 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
}
int update = tenantMapper.updateById(updateTenant);
if (update > 0) {
logger.info("Tenant is updated and id is {}.", updateTenant.getId());
log.info("Tenant is updated and id is {}.", updateTenant.getId());
putMsg(result, Status.SUCCESS);
} else {
logger.error("Tenant update error, id:{}.", updateTenant.getId());
log.error("Tenant update error, id:{}.", updateTenant.getId());
putMsg(result, Status.UPDATE_TENANT_ERROR);
}
return result;
@ -268,13 +267,13 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
Tenant tenant = tenantMapper.queryById(id);
if (Objects.isNull(tenant)) {
logger.error("Tenant does not exist, userId:{}.", id);
log.error("Tenant does not exist, userId:{}.", id);
throw new ServiceException(Status.TENANT_NOT_EXIST);
}
List<ProcessInstance> processInstances = getProcessInstancesByTenant(tenant);
if (CollectionUtils.isNotEmpty(processInstances)) {
logger.warn("Delete tenant failed, because there are {} executing process instances using it.",
log.warn("Delete tenant failed, because there are {} executing process instances using it.",
processInstances.size());
throw new ServiceException(Status.DELETE_TENANT_BY_ID_FAIL, processInstances.size());
}
@ -282,14 +281,14 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
List<ProcessDefinition> processDefinitions =
processDefinitionMapper.queryDefinitionListByTenant(tenant.getId());
if (CollectionUtils.isNotEmpty(processDefinitions)) {
logger.warn("Delete tenant failed, because there are {} process definitions using it.",
log.warn("Delete tenant failed, because there are {} process definitions using it.",
processDefinitions.size());
throw new ServiceException(Status.DELETE_TENANT_BY_ID_FAIL_DEFINES, processDefinitions.size());
}
List<User> userList = userMapper.queryUserListByTenant(tenant.getId());
if (CollectionUtils.isNotEmpty(userList)) {
logger.warn("Delete tenant failed, because there are {} users using it.", userList.size());
log.warn("Delete tenant failed, because there are {} users using it.", userList.size());
throw new ServiceException(Status.DELETE_TENANT_BY_ID_FAIL_USERS, userList.size());
}
@ -301,10 +300,10 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
int delete = tenantMapper.deleteById(id);
if (delete > 0) {
processInstanceMapper.updateProcessInstanceByTenantId(id, -1);
logger.info("Tenant is deleted and id is {}.", id);
log.info("Tenant is deleted and id is {}.", id);
putMsg(result, Status.SUCCESS);
} else {
logger.error("Tenant delete failed, tenantId:{}.", id);
log.error("Tenant delete failed, tenantId:{}.", id);
putMsg(result, Status.DELETE_TENANT_BY_ID_ERROR);
}
@ -327,7 +326,7 @@ public class TenantServiceImpl extends BaseServiceImpl implements TenantService
Map<String, Object> result = new HashMap<>();
Set<Integer> ids = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.TENANT,
loginUser.getId(), logger);
loginUser.getId(), log);
if (ids.isEmpty()) {
result.put(Constants.DATA_LIST, Collections.emptyList());
putMsg(result, Status.SUCCESS);

45
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UdfFuncServiceImpl.java

@ -41,8 +41,8 @@ import java.util.Date;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -54,10 +54,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* udf func service impl
*/
@Service
@Slf4j
public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncService {
private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceImpl.class);
@Autowired
private ResourceMapper resourceMapper;
@ -101,13 +100,13 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
return result;
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("Storage does not start up, resource upload startup state: {}.",
log.error("Storage does not start up, resource upload startup state: {}.",
PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
@ -115,7 +114,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
// verify udf func name exist
if (checkUdfFuncNameExists(funcName)) {
logger.warn("Udf function with the same name already exists.");
log.warn("Udf function with the same name already exists.");
putMsg(result, Status.UDF_FUNCTION_EXISTS);
return result;
}
@ -124,11 +123,11 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
try {
existResource = storageOperate.exists(fullName);
} catch (IOException e) {
logger.error("Check resource error: {}", fullName, e);
log.error("Check resource error: {}", fullName, e);
}
if (!existResource) {
logger.error("resource full name {} is not exist", fullName);
log.error("resource full name {} is not exist", fullName);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -155,9 +154,9 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
udf.setUpdateTime(now);
udfFuncMapper.insert(udf);
logger.info("UDF function create complete, udfFuncName:{}.", udf.getFuncName());
log.info("UDF function create complete, udfFuncName:{}.", udf.getFuncName());
putMsg(result, Status.SUCCESS);
permissionPostHandle(AuthorizationType.UDF, loginUser.getId(), Collections.singletonList(udf.getId()), logger);
permissionPostHandle(AuthorizationType.UDF, loginUser.getId(), Collections.singletonList(udf.getId()), log);
return result;
}
@ -188,7 +187,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
}
UdfFunc udfFunc = udfFuncMapper.selectById(id);
if (udfFunc == null) {
logger.error("Resource does not exist, udf func id:{}.", id);
log.error("Resource does not exist, udf func id:{}.", id);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -229,7 +228,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
return result;
}
if (checkDescriptionLength(desc)) {
logger.warn("Parameter description is too long.");
log.warn("Parameter description is too long.");
putMsg(result, Status.DESCRIPTION_TOO_LONG_ERROR);
return result;
}
@ -237,7 +236,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId);
if (udf == null) {
logger.error("UDF function does not exist, udfFuncId:{}.", udfFuncId);
log.error("UDF function does not exist, udfFuncId:{}.", udfFuncId);
result.setCode(Status.UDF_FUNCTION_NOT_EXIST.getCode());
result.setMsg(Status.UDF_FUNCTION_NOT_EXIST.getMsg());
return result;
@ -245,7 +244,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
// if resource upload startup
if (!PropertyUtils.getResUploadStartupState()) {
logger.error("Storage does not start up, resource upload startup state: {}.",
log.error("Storage does not start up, resource upload startup state: {}.",
PropertyUtils.getResUploadStartupState());
putMsg(result, Status.HDFS_NOT_STARTUP);
return result;
@ -254,7 +253,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
// verify udfFuncName is exist
if (!funcName.equals(udf.getFuncName())) {
if (checkUdfFuncNameExists(funcName)) {
logger.warn("Udf function exists, can not create again, udfFuncName:{}.", funcName);
log.warn("Udf function exists, can not create again, udfFuncName:{}.", funcName);
result.setCode(Status.UDF_FUNCTION_EXISTS.getCode());
result.setMsg(Status.UDF_FUNCTION_EXISTS.getMsg());
return result;
@ -265,14 +264,14 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
try {
doesResExist = storageOperate.exists(fullName);
} catch (Exception e) {
logger.error("udf resource checking error", fullName);
log.error("udf resource checking error", fullName);
result.setCode(Status.RESOURCE_NOT_EXIST.getCode());
result.setMsg(Status.RESOURCE_NOT_EXIST.getMsg());
return result;
}
if (!doesResExist) {
logger.error("resource full name {} is not exist", fullName);
log.error("resource full name {} is not exist", fullName);
result.setCode(Status.RESOURCE_NOT_EXIST.getCode());
result.setMsg(Status.RESOURCE_NOT_EXIST.getMsg());
return result;
@ -294,7 +293,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
udf.setUpdateTime(now);
udfFuncMapper.updateById(udf);
logger.info("UDF function update complete, udfFuncId:{}, udfFuncName:{}.", udfFuncId, funcName);
log.info("UDF function update complete, udfFuncId:{}, udfFuncName:{}.", udfFuncId, funcName);
putMsg(result, Status.SUCCESS);
return result;
}
@ -337,7 +336,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
*/
private IPage<UdfFunc> getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) {
Set<Integer> udfFuncIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF,
loginUser.getId(), logger);
loginUser.getId(), log);
Page<UdfFunc> page = new Page<>(pageNo, pageSize);
if (udfFuncIds.isEmpty()) {
return page;
@ -363,7 +362,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
return result;
}
Set<Integer> udfFuncIds = resourcePermissionCheckService.userOwnedResourceIdsAcquisition(AuthorizationType.UDF,
loginUser.getId(), logger);
loginUser.getId(), log);
if (udfFuncIds.isEmpty()) {
result.setData(Collections.emptyList());
putMsg(result, Status.SUCCESS);
@ -395,7 +394,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
}
udfFuncMapper.deleteById(id);
udfUserMapper.deleteByUdfFuncId(id);
logger.info("UDF function delete complete, udfFuncId:{}.", id);
log.info("UDF function delete complete, udfFuncId:{}.", id);
putMsg(result, Status.SUCCESS);
return result;
}
@ -417,7 +416,7 @@ public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncServic
}
if (checkUdfFuncNameExists(name)) {
logger.warn("Udf function with the same already exists.");
log.warn("Udf function with the same already exists.");
putMsg(result, Status.UDF_FUNCTION_EXISTS);
} else {
putMsg(result, Status.SUCCESS);

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java

@ -30,8 +30,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@ -39,10 +39,9 @@ import org.springframework.stereotype.Service;
* ui plugin service impl
*/
@Service
@Slf4j
public class UiPluginServiceImpl extends BaseServiceImpl implements UiPluginService {
private static final Logger logger = LoggerFactory.getLogger(UiPluginServiceImpl.class);
@Autowired
PluginDefineMapper pluginDefineMapper;
@ -50,14 +49,14 @@ public class UiPluginServiceImpl extends BaseServiceImpl implements UiPluginServ
public Map<String, Object> queryUiPluginsByType(PluginType pluginType) {
Map<String, Object> result = new HashMap<>();
if (!pluginType.getHasUi()) {
logger.warn("Plugin does not have UI.");
log.warn("Plugin does not have UI.");
putMsg(result, Status.PLUGIN_NOT_A_UI_COMPONENT);
return result;
}
List<PluginDefine> pluginDefines = pluginDefineMapper.queryByPluginType(pluginType.getDesc());
if (CollectionUtils.isEmpty(pluginDefines)) {
logger.warn("Query plugins result is null, check status of plugins.");
log.warn("Query plugins result is null, check status of plugins.");
putMsg(result, Status.QUERY_PLUGINS_RESULT_IS_NULL);
return result;
}
@ -72,7 +71,7 @@ public class UiPluginServiceImpl extends BaseServiceImpl implements UiPluginServ
Map<String, Object> result = new HashMap<>();
PluginDefine pluginDefine = pluginDefineMapper.queryDetailById(id);
if (null == pluginDefine) {
logger.warn("Query plugins result is empty, pluginId:{}.", id);
log.warn("Query plugins result is empty, pluginId:{}.", id);
putMsg(result, Status.QUERY_PLUGIN_DETAIL_RESULT_IS_NULL);
return result;
}

105
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UsersServiceImpl.java

@ -74,8 +74,8 @@ import java.util.Set;
import java.util.TimeZone;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -87,10 +87,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
* users service impl
*/
@Service
@Slf4j
public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
private static final Logger logger = LoggerFactory.getLogger(UsersServiceImpl.class);
@Autowired
private AccessTokenMapper accessTokenMapper;
@ -173,7 +172,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
if (!checkTenantExists(tenantId)) {
logger.warn("Tenant does not exist, tenantId:{}.", tenantId);
log.warn("Tenant does not exist, tenantId:{}.", tenantId);
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
@ -186,7 +185,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
storageOperate.createTenantDirIfNotExists(tenant.getTenantCode());
}
logger.info("User is created and id is {}.", user.getId());
log.info("User is created and id is {}.", user.getId());
result.put(Constants.DATA_LIST, user);
putMsg(result, Status.SUCCESS);
return result;
@ -340,7 +339,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
return result;
}
if (!isAdmin(loginUser)) {
logger.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(),
log.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(),
loginUser.getUserName());
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
@ -392,27 +391,27 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
return result;
}
if (check(result, !canOperator(loginUser, userId), Status.USER_NO_OPERATION_PERM)) {
logger.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(),
log.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(),
loginUser.getUserName());
return result;
}
User user = userMapper.selectById(userId);
if (user == null) {
logger.error("User does not exist, userId:{}.", userId);
log.error("User does not exist, userId:{}.", userId);
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
if (StringUtils.isNotEmpty(userName)) {
if (!CheckUtils.checkUserName(userName)) {
logger.warn("Parameter userName check failed.");
log.warn("Parameter userName check failed.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, userName);
return result;
}
User tempUser = userMapper.queryByUserNameAccurately(userName);
if (tempUser != null && tempUser.getId() != userId) {
logger.warn("User name already exists, userName:{}.", tempUser.getUserName());
log.warn("User name already exists, userName:{}.", tempUser.getUserName());
putMsg(result, Status.USER_NAME_EXIST);
return result;
}
@ -421,7 +420,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
if (StringUtils.isNotEmpty(userPassword)) {
if (!CheckUtils.checkPasswordLength(userPassword)) {
logger.warn("Parameter userPassword check failed.");
log.warn("Parameter userPassword check failed.");
putMsg(result, Status.USER_PASSWORD_LENGTH_ERROR);
return result;
}
@ -430,7 +429,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
if (StringUtils.isNotEmpty(email)) {
if (!CheckUtils.checkEmail(email)) {
logger.warn("Parameter email check failed.");
log.warn("Parameter email check failed.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, email);
return result;
}
@ -438,13 +437,13 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
if (StringUtils.isNotEmpty(phone) && !CheckUtils.checkPhone(phone)) {
logger.warn("Parameter phone check failed.");
log.warn("Parameter phone check failed.");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone);
return result;
}
if (state == 0 && user.getState() != state && Objects.equals(loginUser.getId(), user.getId())) {
logger.warn("Not allow to disable your own account, userId:{}, userName:{}.", user.getId(),
log.warn("Not allow to disable your own account, userId:{}, userName:{}.", user.getId(),
user.getUserName());
putMsg(result, Status.NOT_ALLOW_TO_DISABLE_OWN_ACCOUNT);
return result;
@ -452,7 +451,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
if (StringUtils.isNotEmpty(timeZone)) {
if (!CheckUtils.checkTimeZone(timeZone)) {
logger.warn("Parameter time zone is illegal.");
log.warn("Parameter time zone is illegal.");
putMsg(result, Status.TIME_ZONE_ILLEGAL, timeZone);
return result;
}
@ -468,10 +467,10 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
// updateProcessInstance user
int update = userMapper.updateById(user);
if (update > 0) {
logger.info("User is updated and id is :{}.", userId);
log.info("User is updated and id is :{}.", userId);
putMsg(result, Status.SUCCESS);
} else {
logger.error("User update error, userId:{}.", userId);
log.error("User update error, userId:{}.", userId);
putMsg(result, Status.UPDATE_USER_ERROR);
}
@ -496,7 +495,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
// only admin can operate
if (!isAdmin(loginUser)) {
logger.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(),
log.warn("User does not have permission for this feature, userId:{}, userName:{}.", loginUser.getId(),
loginUser.getUserName());
putMsg(result, Status.USER_NO_OPERATION_PERM, id);
return result;
@ -504,7 +503,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
// check exist
User tempUser = userMapper.selectById(id);
if (tempUser == null) {
logger.error("User does not exist, userId:{}.", id);
log.error("User does not exist, userId:{}.", id);
putMsg(result, Status.USER_NOT_EXIST, id);
return result;
}
@ -513,7 +512,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
if (CollectionUtils.isNotEmpty(projects)) {
String projectNames = projects.stream().map(Project::getName).collect(Collectors.joining(","));
putMsg(result, Status.TRANSFORM_PROJECT_OWNERSHIP, projectNames);
logger.warn("Please transfer the project ownership before deleting the user, userId:{}, projects:{}.", id,
log.warn("Please transfer the project ownership before deleting the user, userId:{}, projects:{}.", id,
projectNames);
return result;
}
@ -523,11 +522,11 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
accessTokenMapper.deleteAccessTokenByUserId(id);
if (userMapper.deleteById(id) > 0) {
logger.info("User is deleted and id is :{}.", id);
log.info("User is deleted and id is :{}.", id);
putMsg(result, Status.SUCCESS);
return result;
} else {
logger.error("User delete error, userId:{}.", id);
log.error("User delete error, userId:{}.", id);
putMsg(result, Status.DELETE_USER_BY_ID_ERROR);
return result;
}
@ -645,13 +644,13 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
// check exist
User tempUser = userMapper.selectById(userId);
if (tempUser == null) {
logger.error("User does not exist, userId:{}.", userId);
log.error("User does not exist, userId:{}.", userId);
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) {
logger.warn("Parameter projectIds is empty.");
log.warn("Parameter projectIds is empty.");
return result;
}
Arrays.stream(projectIds.split(",")).distinct().forEach(projectId -> {
@ -694,7 +693,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
// 1. check if user is existed
User tempUser = this.userMapper.selectById(userId);
if (tempUser == null) {
logger.error("User does not exist, userId:{}.", userId);
log.error("User does not exist, userId:{}.", userId);
this.putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
@ -702,14 +701,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
// 2. check if project is existed
Project project = this.projectMapper.queryByCode(projectCode);
if (project == null) {
logger.error("Project does not exist, projectCode:{}.", projectCode);
log.error("Project does not exist, projectCode:{}.", projectCode);
this.putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
return result;
}
// 3. only project owner can operate
if (!this.canOperator(loginUser, project.getUserId())) {
logger.warn("User does not have permission for project, userId:{}, userName:{}, projectCode:{}.",
log.warn("User does not have permission for project, userId:{}, userName:{}, projectCode:{}.",
loginUser.getId(), loginUser.getUserName(), projectCode);
this.putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
@ -727,7 +726,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
projectUser.setUpdateTime(today);
this.projectUserMapper.insert(projectUser);
}
logger.info("User is granted permission for projects, userId:{}, projectCode:{}.", userId, projectCode);
log.info("User is granted permission for projects, userId:{}, projectCode:{}.", userId, projectCode);
this.putMsg(result, Status.SUCCESS);
return result;
}
@ -751,14 +750,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
// 1. only admin can operate
if (this.check(result, !this.isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
logger.warn("Only admin can revoke the project permission.");
log.warn("Only admin can revoke the project permission.");
return result;
}
// 2. check if user is existed
User user = this.userMapper.selectById(userId);
if (user == null) {
logger.error("User does not exist, userId:{}.", userId);
log.error("User does not exist, userId:{}.", userId);
this.putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
@ -766,14 +765,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
// 3. check if project is existed
Project project = this.projectMapper.queryByCode(projectCode);
if (project == null) {
logger.error("Project does not exist, projectCode:{}.", projectCode);
log.error("Project does not exist, projectCode:{}.", projectCode);
this.putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
return result;
}
// 4. delete th relationship between project and user
this.projectUserMapper.deleteProjectRelation(project.getId(), user.getId());
logger.info("User is revoked permission for projects, userId:{}, projectCode:{}.", userId, projectCode);
log.info("User is revoked permission for projects, userId:{}, projectCode:{}.", userId, projectCode);
this.putMsg(result, Status.SUCCESS);
return result;
}
@ -797,7 +796,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
User user = userMapper.selectById(userId);
if (user == null) {
logger.error("User does not exist, userId:{}.", userId);
log.error("User does not exist, userId:{}.", userId);
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
@ -837,7 +836,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
resourceIdSet.retainAll(oldAuthorizedResIds);
if (CollectionUtils.isNotEmpty(resourceIdSet)) {
for (Integer resId : resourceIdSet) {
logger.error("Resource id:{} is used by process definition {}", resId,
log.error("Resource id:{} is used by process definition {}", resId,
resourceProcessMap.get(resId));
}
putMsg(result, Status.RESOURCE_IS_USED);
@ -849,14 +848,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
resourceUserMapper.deleteResourceUser(userId, 0);
if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) {
logger.warn("Parameter resourceIds is empty.");
log.warn("Parameter resourceIds is empty.");
return result;
}
for (int resourceIdValue : needAuthorizeResIds) {
Resource resource = resourceMapper.selectById(resourceIdValue);
if (resource == null) {
logger.error("Resource does not exist, resourceId:{}.", resourceIdValue);
log.error("Resource does not exist, resourceId:{}.", resourceIdValue);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
@ -877,7 +876,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
logger.info("User is granted permission for resources, userId:{}, resourceIds:{}.", user.getId(),
log.info("User is granted permission for resources, userId:{}, resourceIds:{}.", user.getId(),
needAuthorizeResIds);
putMsg(result, Status.SUCCESS);
@ -904,7 +903,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
User user = userMapper.selectById(userId);
if (user == null) {
logger.error("User does not exist, userId:{}.", userId);
log.error("User does not exist, userId:{}.", userId);
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
@ -912,7 +911,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
udfUserMapper.deleteByUserId(userId);
if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) {
logger.warn("Parameter udfIds is empty.");
log.warn("Parameter udfIds is empty.");
return result;
}
@ -929,7 +928,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
udfUserMapper.insert(udfUser);
}
logger.info("User is granted permission for UDF, userName:{}.", user.getUserName());
log.info("User is granted permission for UDF, userName:{}.", user.getUserName());
putMsg(result, Status.SUCCESS);
@ -955,14 +954,14 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
// only admin can operate
if (this.check(result, !this.isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
logger.warn("Only admin can grant namespaces.");
log.warn("Only admin can grant namespaces.");
return result;
}
// check exist
User tempUser = userMapper.selectById(userId);
if (tempUser == null) {
logger.error("User does not exist, userId:{}.", userId);
log.error("User does not exist, userId:{}.", userId);
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
@ -982,7 +981,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
}
logger.info("User is granted permission for namespace, userId:{}.", tempUser.getId());
log.info("User is granted permission for namespace, userId:{}.", tempUser.getId());
putMsg(result, Status.SUCCESS);
@ -1098,7 +1097,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
// only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
logger.warn("Only admin can query all general users.");
log.warn("Only admin can query all general users.");
return result;
}
@ -1167,7 +1166,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
// only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
logger.warn("Only admin can deauthorize user.");
log.warn("Only admin can deauthorize user.");
return result;
}
@ -1208,7 +1207,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
// only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
logger.warn("Only admin can authorize user.");
log.warn("Only admin can authorize user.");
return result;
}
List<User> userList = userMapper.queryUserListByAlertGroupId(alertGroupId);
@ -1233,16 +1232,16 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
String msg = null;
if (!CheckUtils.checkUserName(userName)) {
logger.warn("Parameter userName check failed.");
log.warn("Parameter userName check failed.");
msg = userName;
} else if (!CheckUtils.checkPassword(password)) {
logger.warn("Parameter password check failed.");
log.warn("Parameter password check failed.");
msg = password;
} else if (!CheckUtils.checkEmail(email)) {
logger.warn("Parameter email check failed.");
log.warn("Parameter email check failed.");
msg = email;
} else if (!CheckUtils.checkPhone(phone)) {
logger.warn("Parameter phone check failed.");
log.warn("Parameter phone check failed.");
msg = phone;
}
@ -1268,7 +1267,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
// verify whether exist
if (!storageOperate.exists(
String.format(Constants.FORMAT_S_S, srcBasePath, component.getFullName()))) {
logger.error("Resource file: {} does not exist, copy error.", component.getFullName());
log.error("Resource file: {} does not exist, copy error.", component.getFullName());
throw new ServiceException(Status.RESOURCE_NOT_EXIST);
}
@ -1293,7 +1292,7 @@ public class UsersServiceImpl extends BaseServiceImpl implements UsersService {
}
} catch (IOException e) {
logger.error("copy the resources failed,the error message is {}", e.getMessage());
log.error("copy the resources failed,the error message is {}", e.getMessage());
}
}

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkFlowLineageServiceImpl.java

@ -52,8 +52,8 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@ -61,11 +61,10 @@ import org.springframework.util.CollectionUtils;
/**
* work flow lineage service impl
*/
@Slf4j
@Service
public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkFlowLineageService {
private static final Logger logger = LoggerFactory.getLogger(WorkFlowLineageServiceImpl.class);
@Autowired
private WorkFlowLineageMapper workFlowLineageMapper;
@ -83,7 +82,7 @@ public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkF
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByCode(projectCode);
if (project == null) {
logger.error("Project does not exist, projectCode:{}.", projectCode);
log.error("Project does not exist, projectCode:{}.", projectCode);
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
return result;
}
@ -99,7 +98,7 @@ public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkF
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByCode(projectCode);
if (project == null) {
logger.error("Project does not exist, projectCode:{}.", projectCode);
log.error("Project does not exist, projectCode:{}.", projectCode);
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
return result;
}
@ -172,7 +171,7 @@ public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkF
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByCode(projectCode);
if (project == null) {
logger.error("Project does not exist, projectCode:{}.", projectCode);
log.error("Project does not exist, projectCode:{}.", projectCode);
putMsg(result, Status.PROJECT_NOT_FOUND, projectCode);
return result;
}

29
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/WorkerGroupServiceImpl.java

@ -55,8 +55,8 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@ -67,10 +67,9 @@ import com.facebook.presto.jdbc.internal.guava.base.Strings;
* worker group service impl
*/
@Service
@Slf4j
public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGroupService {
private static final Logger logger = LoggerFactory.getLogger(WorkerGroupServiceImpl.class);
@Autowired
private WorkerGroupMapper workerGroupMapper;
@ -108,7 +107,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
return result;
}
if (StringUtils.isEmpty(name)) {
logger.warn("Parameter name can ot be null.");
log.warn("Parameter name can ot be null.");
putMsg(result, Status.NAME_NULL);
return result;
}
@ -127,18 +126,18 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
workerGroup.setDescription(description);
if (checkWorkerGroupNameExists(workerGroup)) {
logger.warn("Worker group with the same name already exists, name:{}.", workerGroup.getName());
log.warn("Worker group with the same name already exists, name:{}.", workerGroup.getName());
putMsg(result, Status.NAME_EXIST, workerGroup.getName());
return result;
}
String invalidAddr = checkWorkerGroupAddrList(workerGroup);
if (invalidAddr != null) {
logger.warn("Worker group address is invalid, invalidAddr:{}.", invalidAddr);
log.warn("Worker group address is invalid, invalidAddr:{}.", invalidAddr);
putMsg(result, Status.WORKER_ADDRESS_INVALID, invalidAddr);
return result;
}
handleDefaultWorkGroup(workerGroupMapper, workerGroup, loginUser, otherParamsJson);
logger.info("Worker group save complete, workerGroupName:{}.", workerGroup.getName());
log.info("Worker group save complete, workerGroupName:{}.", workerGroup.getName());
putMsg(result, Status.SUCCESS);
return result;
}
@ -150,7 +149,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
} else {
workerGroupMapper.insert(workerGroup);
permissionPostHandle(AuthorizationType.WORKER_GROUP, loginUser.getId(),
Collections.singletonList(workerGroup.getId()), logger);
Collections.singletonList(workerGroup.getId()), log);
}
}
@ -219,7 +218,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
workerGroups = getWorkerGroups(null);
} else {
Set<Integer> ids = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.WORKER_GROUP, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.WORKER_GROUP, loginUser.getId(), log);
workerGroups = getWorkerGroups(ids.isEmpty() ? Collections.emptyList() : new ArrayList<>(ids));
}
List<WorkerGroup> resultDataList = new ArrayList<>();
@ -269,7 +268,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
workerGroups = getWorkerGroups(null);
} else {
Set<Integer> ids = resourcePermissionCheckService
.userOwnedResourceIdsAcquisition(AuthorizationType.WORKER_GROUP, loginUser.getId(), logger);
.userOwnedResourceIdsAcquisition(AuthorizationType.WORKER_GROUP, loginUser.getId(), log);
workerGroups = getWorkerGroups(ids.isEmpty() ? Collections.emptyList() : new ArrayList<>(ids));
}
List<String> availableWorkerGroupList = workerGroups.stream()
@ -326,7 +325,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
}
WorkerGroup workerGroup = workerGroupMapper.selectById(id);
if (workerGroup == null) {
logger.error("Worker group does not exist, workerGroupId:{}.", id);
log.error("Worker group does not exist, workerGroupId:{}.", id);
putMsg(result, Status.DELETE_WORKER_GROUP_NOT_EXIST);
return result;
}
@ -336,7 +335,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
if (CollectionUtils.isNotEmpty(processInstances)) {
List<Integer> processInstanceIds =
processInstances.stream().map(ProcessInstance::getId).collect(Collectors.toList());
logger.warn(
log.warn(
"Delete worker group failed because there are {} processInstances are using it, processInstanceIds:{}.",
processInstances.size(), processInstanceIds);
putMsg(result, Status.DELETE_WORKER_GROUP_BY_ID_FAIL, processInstances.size());
@ -351,7 +350,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
}
workerGroupMapper.deleteById(id);
processInstanceMapper.updateProcessInstanceByWorkerGroupName(workerGroup.getName(), "");
logger.info("Delete worker group complete, workerGroupName:{}.", workerGroup.getName());
log.info("Delete worker group complete, workerGroupName:{}.", workerGroup.getName());
putMsg(result, Status.SUCCESS);
return result;
}
@ -387,7 +386,7 @@ public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGro
if (processInstance != null) {
return processInstance.getWorkerGroup();
}
logger.info("task : {} will use default worker group", taskInstance.getId());
log.info("task : {} will use default worker group", taskInstance.getId());
return Constants.DEFAULT_WORKER_GROUP;
}

13
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/utils/FileUtils.java

@ -26,8 +26,8 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.Resource;
import org.springframework.core.io.UrlResource;
import org.springframework.web.multipart.MultipartFile;
@ -35,10 +35,9 @@ import org.springframework.web.multipart.MultipartFile;
/**
* file utils
*/
@Slf4j
public class FileUtils {
private static final Logger logger = LoggerFactory.getLogger(FileUtils.class);
/**
* copy source InputStream to target file
* @param file
@ -48,7 +47,7 @@ public class FileUtils {
try {
org.apache.commons.io.FileUtils.copyInputStreamToFile(file.getInputStream(), new File(destFilename));
} catch (IOException e) {
logger.error("failed to copy file , {} is empty file", file.getOriginalFilename(), e);
log.error("failed to copy file , {} is empty file", file.getOriginalFilename(), e);
}
}
@ -66,7 +65,7 @@ public class FileUtils {
if (resource.exists() || resource.isReadable()) {
return resource;
} else {
logger.error("File can not be read, fileName:{}", filename);
log.error("File can not be read, fileName:{}", filename);
}
return null;
}
@ -80,7 +79,7 @@ public class FileUtils {
try (InputStream inputStream = file.getInputStream()) {
return IOUtils.toString(inputStream, StandardCharsets.UTF_8);
} catch (IOException e) {
logger.error("file convert to string failed: {}", file.getName());
log.error("file convert to string failed: {}", file.getName());
}
return "";

12
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/graph/DAG.java

@ -32,8 +32,7 @@ import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* analysis of DAG
@ -41,10 +40,9 @@ import org.slf4j.LoggerFactory;
* NodeInfonode description information
* EdgeInfo: edge description information
*/
@Slf4j
public class DAG<Node, NodeInfo, EdgeInfo> {
private static final Logger logger = LoggerFactory.getLogger(DAG.class);
private final ReadWriteLock lock = new ReentrantReadWriteLock();
/**
@ -123,7 +121,7 @@ public class DAG<Node, NodeInfo, EdgeInfo> {
try {
// Whether an edge can be successfully added(fromNode -> toNode)
if (!isLegalAddEdge(fromNode, toNode, createNode)) {
logger.error("serious error: add edge({} -> {}) is invalid, cause cycle!", fromNode, toNode);
log.error("serious error: add edge({} -> {}) is invalid, cause cycle!", fromNode, toNode);
return false;
}
@ -381,13 +379,13 @@ public class DAG<Node, NodeInfo, EdgeInfo> {
*/
private boolean isLegalAddEdge(Node fromNode, Node toNode, boolean createNode) {
if (fromNode.equals(toNode)) {
logger.error("edge fromNode({}) can't equals toNode({})", fromNode, toNode);
log.error("edge fromNode({}) can't equals toNode({})", fromNode, toNode);
return false;
}
if (!createNode) {
if (!containsNode(fromNode) || !containsNode(toNode)) {
logger.error("edge fromNode({}) or toNode({}) is not in vertices map", fromNode, toNode);
log.error("edge fromNode({}) or toNode({}) is not in vertices map", fromNode, toNode);
return false;
}
}

22
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/shell/AbstractShell.java

@ -28,8 +28,7 @@ import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* A base class for running a Unix command.
@ -38,10 +37,9 @@ import org.slf4j.LoggerFactory;
* <code>df</code>. It also offers facilities to gate commands by
* time-intervals.
*/
@Slf4j
public abstract class AbstractShell {
private static final Logger logger = LoggerFactory.getLogger(AbstractShell.class);
/**
* Time after which the executing script would be timedout
*/
@ -168,7 +166,7 @@ public abstract class AbstractShell {
line = errReader.readLine();
}
} catch (IOException ioe) {
logger.warn("Error reading the error stream", ioe);
log.warn("Error reading the error stream", ioe);
}
}
};
@ -179,7 +177,7 @@ public abstract class AbstractShell {
try {
parseExecResult(inReader);
} catch (IOException ioe) {
logger.warn("Error reading the in stream", ioe);
log.warn("Error reading the in stream", ioe);
}
super.run();
}
@ -188,7 +186,7 @@ public abstract class AbstractShell {
errThread.start();
inThread.start();
} catch (IllegalStateException ise) {
logger.warn("Illegal while starting the error and in thread", ise);
log.warn("Illegal while starting the error and in thread", ise);
}
try {
// parse the output
@ -198,7 +196,7 @@ public abstract class AbstractShell {
errThread.join();
inThread.join();
} catch (InterruptedException ie) {
logger.warn("Interrupted while reading the error and in stream", ie);
log.warn("Interrupted while reading the error and in stream", ie);
}
completed.compareAndSet(false, true);
// the timeout thread handling
@ -216,7 +214,7 @@ public abstract class AbstractShell {
try {
inReader.close();
} catch (IOException ioe) {
logger.warn("Error while closing the input stream", ioe);
log.warn("Error while closing the input stream", ioe);
}
if (!completed.get()) {
errThread.interrupt();
@ -224,7 +222,7 @@ public abstract class AbstractShell {
try {
errReader.close();
} catch (IOException ioe) {
logger.warn("Error while closing the error stream", ioe);
log.warn("Error while closing the error stream", ioe);
}
ProcessContainer.removeProcess(process);
process.destroy();
@ -347,11 +345,11 @@ public abstract class AbstractShell {
try {
entry.getValue().destroy();
} catch (Exception e) {
logger.error("Destroy All Processes error", e);
log.error("Destroy All Processes error", e);
}
}
logger.info("close " + set.size() + " executing process tasks");
log.info("close " + set.size() + " executing process tasks");
}
}
}

9
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/thread/ThreadUtils.java

@ -23,17 +23,14 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import lombok.experimental.UtilityClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
@UtilityClass
@Slf4j
public class ThreadUtils {
private static final Logger logger = LoggerFactory.getLogger(ThreadUtils.class);
/**
* Wrapper over newDaemonFixedThreadExecutor.
*
@ -62,7 +59,7 @@ public class ThreadUtils {
Thread.sleep(millis);
} catch (final InterruptedException interruptedException) {
Thread.currentThread().interrupt();
logger.error("Current thread sleep error", interruptedException);
log.error("Current thread sleep error", interruptedException);
}
}
}

8
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ConnectionUtils.java

@ -20,13 +20,11 @@ package org.apache.dolphinscheduler.common.utils;
import java.util.Arrays;
import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ConnectionUtils {
public static final Logger logger = LoggerFactory.getLogger(ConnectionUtils.class);
private ConnectionUtils() {
throw new UnsupportedOperationException("Construct ConnectionUtils");
}
@ -46,7 +44,7 @@ public class ConnectionUtils {
try {
resource.close();
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
}
});
}

11
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java

@ -35,9 +35,9 @@ import java.util.TimeZone;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public final class DateUtils {
static final long C0 = 1L;
@ -48,7 +48,6 @@ public final class DateUtils {
static final long C5 = C4 * 60L;
static final long C6 = C5 * 24L;
private static final Logger logger = LoggerFactory.getLogger(DateUtils.class);
private static final DateTimeFormatter YYYY_MM_DD_HH_MM_SS =
DateTimeFormatter.ofPattern(DateConstants.YYYY_MM_DD_HH_MM_SS);
@ -218,7 +217,7 @@ public final class DateUtils {
}
return localDateTime2Date(ldt, ZoneId.of(timezone));
} catch (Exception e) {
logger.error("error while parse date:" + date, e);
log.error("error while parse date:" + date, e);
}
return null;
}
@ -357,7 +356,7 @@ public final class DateUtils {
end = new Date();
}
if (start.after(end)) {
logger.warn("start Time {} is later than end Time {}", start, end);
log.warn("start Time {} is later than end Time {}", start, end);
return null;
}
return format2Duration(differMs(start, end));
@ -721,7 +720,7 @@ public final class DateUtils {
LocalDateTime ldt = LocalDateTime.parse(date, DateTimeFormatter.ofPattern(format));
return localDateTime2Date(ldt);
} catch (Exception e) {
logger.error("error while parse date:" + date, e);
log.error("error while parse date:" + date, e);
}
return null;
}

14
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/FileUtils.java

@ -37,16 +37,14 @@ import java.nio.file.NoSuchFileException;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* file utils
*/
@Slf4j
public class FileUtils {
public static final Logger logger = LoggerFactory.getLogger(FileUtils.class);
public static final String DATA_BASEDIR = PropertyUtils.getString(DATA_BASEDIR_PATH, "/tmp/dolphinscheduler");
public static final String APPINFO_PATH = "appInfo.log";
@ -160,7 +158,7 @@ public class FileUtils {
// create work dir
org.apache.commons.io.FileUtils.forceMkdir(execLocalPathFile);
String mkdirLog = "create dir success " + execLocalPath;
logger.info(mkdirLog);
log.info(mkdirLog);
}
/**
@ -175,13 +173,13 @@ public class FileUtils {
try {
File distFile = new File(filePath);
if (!distFile.getParentFile().exists() && !distFile.getParentFile().mkdirs()) {
logger.error("mkdir parent failed");
log.error("mkdir parent failed");
return false;
}
fos = new FileOutputStream(filePath);
IOUtils.write(content, fos, StandardCharsets.UTF_8);
} catch (IOException e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
return false;
} finally {
IOUtils.closeQuietly(fos);
@ -241,7 +239,7 @@ public class FileUtils {
}
return output.toString(UTF_8);
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}

20
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java

@ -48,16 +48,14 @@ import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* http utils
*/
@Slf4j
public class HttpUtils {
public static final Logger logger = LoggerFactory.getLogger(HttpUtils.class);
private HttpUtils() {
throw new UnsupportedOperationException("Construct HttpUtils");
}
@ -106,9 +104,9 @@ public class HttpUtils {
ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
ctx.init(null, new TrustManager[]{xtm}, null);
} catch (NoSuchAlgorithmException e) {
logger.error("SSLContext init with NoSuchAlgorithmException", e);
log.error("SSLContext init with NoSuchAlgorithmException", e);
} catch (KeyManagementException e) {
logger.error("SSLContext init with KeyManagementException", e);
log.error("SSLContext init with KeyManagementException", e);
}
socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE);
/** set timeout、request time、socket timeout */
@ -149,7 +147,7 @@ public class HttpUtils {
*/
public static String getResponseContentString(HttpGet httpget, CloseableHttpClient httpClient) {
if (Objects.isNull(httpget) || Objects.isNull(httpClient)) {
logger.error("HttpGet or HttpClient parameter is null");
log.error("HttpGet or HttpClient parameter is null");
return null;
}
String responseContent = null;
@ -162,13 +160,13 @@ public class HttpUtils {
if (entity != null) {
responseContent = EntityUtils.toString(entity, Constants.UTF_8);
} else {
logger.warn("http entity is null");
log.warn("http entity is null");
}
} else {
logger.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode());
log.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode());
}
} catch (IOException ioe) {
logger.error(ioe.getMessage(), ioe);
log.error(ioe.getMessage(), ioe);
} finally {
try {
if (response != null) {
@ -176,7 +174,7 @@ public class HttpUtils {
response.close();
}
} catch (IOException e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
}
if (!httpget.isAborted()) {
httpget.releaseConnection();

22
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java

@ -39,8 +39,7 @@ import java.util.TimeZone;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
@ -65,12 +64,11 @@ import com.google.common.base.Strings;
/**
* json utils
*/
@Slf4j
public class JSONUtils {
private static final Logger logger = LoggerFactory.getLogger(JSONUtils.class);
static {
logger.info("init timezone: {}", TimeZone.getDefault());
log.info("init timezone: {}", TimeZone.getDefault());
}
private static final ObjectMapper objectMapper = JsonMapper.builder()
@ -117,7 +115,7 @@ public class JSONUtils {
ObjectWriter writer = objectMapper.writer(feature);
return writer.writeValueAsString(object);
} catch (Exception e) {
logger.error("object to json exception!", e);
log.error("object to json exception!", e);
}
return null;
@ -145,7 +143,7 @@ public class JSONUtils {
try {
return objectMapper.readValue(json, clazz);
} catch (Exception e) {
logger.error("Parse object exception, jsonStr: {}, class: {}", json, clazz, e);
log.error("Parse object exception, jsonStr: {}, class: {}", json, clazz, e);
}
return null;
}
@ -183,7 +181,7 @@ public class JSONUtils {
CollectionType listType = objectMapper.getTypeFactory().constructCollectionType(ArrayList.class, clazz);
return objectMapper.readValue(json, listType);
} catch (Exception e) {
logger.error("parse list exception!", e);
log.error("parse list exception!", e);
}
return Collections.emptyList();
@ -205,7 +203,7 @@ public class JSONUtils {
objectMapper.readTree(json);
return true;
} catch (IOException e) {
logger.error("check json object valid exception!", e);
log.error("check json object valid exception!", e);
}
return false;
@ -261,7 +259,7 @@ public class JSONUtils {
return objectMapper.readValue(json, new TypeReference<Map<K, V>>() {
});
} catch (Exception e) {
logger.error("json to map exception!", e);
log.error("json to map exception!", e);
}
return Collections.emptyMap();
@ -302,7 +300,7 @@ public class JSONUtils {
try {
return objectMapper.readValue(json, type);
} catch (Exception e) {
logger.error("json to map exception!", e);
log.error("json to map exception!", e);
}
return null;
@ -345,7 +343,7 @@ public class JSONUtils {
try {
json = toJsonString(obj);
} catch (Exception e) {
logger.error("json serialize exception.", e);
log.error("json serialize exception.", e);
}
return json.getBytes(UTF_8);

10
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java

@ -40,16 +40,14 @@ import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* kerberos http client
*/
@Slf4j
public class KerberosHttpClient {
public static final Logger logger = LoggerFactory.getLogger(KerberosHttpClient.class);
private String principal;
private String keyTabLocation;
@ -92,7 +90,7 @@ public class KerberosHttpClient {
}
public String get(final String url, final String userId) {
logger.info("Calling KerberosHttpClient {} {} {}", this.principal, this.keyTabLocation, url);
log.info("Calling KerberosHttpClient {} {} {}", this.principal, this.keyTabLocation, url);
Configuration config = new Configuration() {
@SuppressWarnings("serial")
@ -128,7 +126,7 @@ public class KerberosHttpClient {
return HttpUtils.getResponseContentString(httpget, httpClient);
});
} catch (LoginException le) {
logger.error("Kerberos authentication failed ", le);
log.error("Kerberos authentication failed ", le);
}
return null;
}

15
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java

@ -36,18 +36,17 @@ import java.util.List;
import java.util.Objects;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* NetUtils
*/
@Slf4j
public class NetUtils {
private static final String NETWORK_PRIORITY_DEFAULT = "default";
private static final String NETWORK_PRIORITY_INNER = "inner";
private static final String NETWORK_PRIORITY_OUTER = "outer";
private static final Logger logger = LoggerFactory.getLogger(NetUtils.class);
private static InetAddress LOCAL_ADDRESS = null;
private static volatile String HOST_ADDRESS;
@ -134,7 +133,7 @@ public class NetUtils {
return LOCAL_ADDRESS;
}
} catch (IOException e) {
logger.warn("test address id reachable io exception", e);
log.warn("test address id reachable io exception", e);
}
}
}
@ -142,7 +141,7 @@ public class NetUtils {
localAddress = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
logger.warn("InetAddress get LocalHost exception", e);
log.warn("InetAddress get LocalHost exception", e);
}
Optional<InetAddress> addressOp = toValidAddress(localAddress);
if (addressOp.isPresent()) {
@ -171,7 +170,7 @@ public class NetUtils {
try {
return InetAddress.getByName(addr.substring(0, i) + '%' + address.getScopeId());
} catch (UnknownHostException e) {
logger.debug("Unknown IPV6 address: ", e);
log.debug("Unknown IPV6 address: ", e);
}
}
return address;
@ -210,7 +209,7 @@ public class NetUtils {
try {
validNetworkInterfaces = getValidNetworkInterfaces();
} catch (SocketException e) {
logger.warn("ValidNetworkInterfaces exception", e);
log.warn("ValidNetworkInterfaces exception", e);
}
NetworkInterface result = null;
@ -279,7 +278,7 @@ public class NetUtils {
} else if (NETWORK_PRIORITY_OUTER.equalsIgnoreCase(networkPriority)) {
return findOuterAddress(validNetworkInterfaces);
} else {
logger.error("There is no matching network card acquisition policy!");
log.error("There is no matching network card acquisition policy!");
return null;
}
}

38
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java

@ -45,16 +45,14 @@ import java.util.List;
import java.util.StringTokenizer;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* os utils
*/
@Slf4j
public class OSUtils {
private static final Logger logger = LoggerFactory.getLogger(OSUtils.class);
private static final SystemInfo SI = new SystemInfo();
public static final String TWO_DECIMAL = "0.00";
@ -138,7 +136,7 @@ public class OSUtils {
OperatingSystemMXBean osBean = ManagementFactory.getPlatformMXBean(OperatingSystemMXBean.class);
loadAverage = osBean.getSystemLoadAverage();
} catch (Exception e) {
logger.error("get operation system load average exception, try another method ", e);
log.error("get operation system load average exception, try another method ", e);
loadAverage = hal.getProcessor().getSystemLoadAverage(1)[0];
if (Double.isNaN(loadAverage)) {
return NEGATIVE_ONE;
@ -185,7 +183,7 @@ public class OSUtils {
return getUserListFromLinux();
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
}
return Collections.emptyList();
@ -283,7 +281,7 @@ public class OSUtils {
} catch (Exception e) {
// because ShellExecutor method throws exception to the linux return status is not 0
// not exist user return status is 1
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
}
return false;
}
@ -297,7 +295,7 @@ public class OSUtils {
// if not exists this user, then create
if (!getUserList().contains(userName)) {
boolean isSuccess = createUser(userName);
logger.info("create user {} {}", userName, isSuccess ? "success" : "fail");
log.info("create user {} {}", userName, isSuccess ? "success" : "fail");
}
}
@ -312,7 +310,7 @@ public class OSUtils {
String userGroup = getGroup();
if (StringUtils.isEmpty(userGroup)) {
String errorLog = String.format("%s group does not exist for this operating system.", userGroup);
logger.error(errorLog);
log.error(errorLog);
return false;
}
if (SystemUtils.IS_OS_MAC) {
@ -324,7 +322,7 @@ public class OSUtils {
}
return true;
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
}
return false;
@ -338,9 +336,9 @@ public class OSUtils {
* @throws IOException in case of an I/O error
*/
private static void createLinuxUser(String userName, String userGroup) throws IOException {
logger.info("create linux os user: {}", userName);
log.info("create linux os user: {}", userName);
String cmd = String.format("sudo useradd -g %s %s", userGroup, userName);
logger.info("execute cmd: {}", cmd);
log.info("execute cmd: {}", cmd);
exeCmd(cmd);
}
@ -352,14 +350,14 @@ public class OSUtils {
* @throws IOException in case of an I/O error
*/
private static void createMacUser(String userName, String userGroup) throws IOException {
logger.info("create mac os user: {}", userName);
log.info("create mac os user: {}", userName);
String createUserCmd = String.format("sudo sysadminctl -addUser %s -password %s", userName, userName);
logger.info("create user command: {}", createUserCmd);
log.info("create user command: {}", createUserCmd);
exeCmd(createUserCmd);
String appendGroupCmd = String.format("sudo dseditgroup -o edit -a %s -t user %s", userName, userGroup);
logger.info("append user to group: {}", appendGroupCmd);
log.info("append user to group: {}", appendGroupCmd);
exeCmd(appendGroupCmd);
}
@ -371,14 +369,14 @@ public class OSUtils {
* @throws IOException in case of an I/O error
*/
private static void createWindowsUser(String userName, String userGroup) throws IOException {
logger.info("create windows os user: {}", userName);
log.info("create windows os user: {}", userName);
String userCreateCmd = String.format("net user \"%s\" /add", userName);
logger.info("execute create user command: {}", userCreateCmd);
log.info("execute create user command: {}", userCreateCmd);
exeCmd(userCreateCmd);
String appendGroupCmd = String.format("net localgroup \"%s\" \"%s\" /add", userGroup, userName);
logger.info("execute append user to group: {}", appendGroupCmd);
log.info("execute append user to group: {}", appendGroupCmd);
exeCmd(appendGroupCmd);
}
@ -478,12 +476,12 @@ public class OSUtils {
// system available physical memory
double availablePhysicalMemorySize = availablePhysicalMemorySize();
if (loadAverage > maxCpuLoadAvg) {
logger.warn("Current cpu load average {} is too high, max.cpuLoad.avg={}", loadAverage, maxCpuLoadAvg);
log.warn("Current cpu load average {} is too high, max.cpuLoad.avg={}", loadAverage, maxCpuLoadAvg);
return true;
}
if (availablePhysicalMemorySize < reservedMemory) {
logger.warn(
log.warn(
"Current available memory {}G is too low, reserved.memory={}G", maxCpuLoadAvg, reservedMemory);
return true;
}

20
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java

@ -31,15 +31,13 @@ import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import com.google.common.base.Strings;
@Slf4j
public class PropertyUtils {
private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class);
private static final Properties properties = new Properties();
private PropertyUtils() {
@ -56,11 +54,11 @@ public class PropertyUtils {
Properties subProperties = new Properties();
subProperties.load(fis);
subProperties.forEach((k, v) -> {
logger.debug("Get property {} -> {}", k, v);
log.debug("Get property {} -> {}", k, v);
});
properties.putAll(subProperties);
} catch (IOException e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
System.exit(1);
}
}
@ -68,7 +66,7 @@ public class PropertyUtils {
// Override from system properties
System.getProperties().forEach((k, v) -> {
final String key = String.valueOf(k);
logger.info("Overriding property from system property: {}", key);
log.info("Overriding property from system property: {}", key);
PropertyUtils.setValue(key, String.valueOf(v));
});
}
@ -140,7 +138,7 @@ public class PropertyUtils {
try {
return Integer.parseInt(value);
} catch (NumberFormatException e) {
logger.info(e.getMessage(), e);
log.info(e.getMessage(), e);
}
return defaultValue;
}
@ -183,7 +181,7 @@ public class PropertyUtils {
try {
return Long.parseLong(value);
} catch (NumberFormatException e) {
logger.info(e.getMessage(), e);
log.info(e.getMessage(), e);
}
return defaultValue;
}
@ -210,7 +208,7 @@ public class PropertyUtils {
try {
return Double.parseDouble(value);
} catch (NumberFormatException e) {
logger.info(e.getMessage(), e);
log.info(e.getMessage(), e);
}
return defaultValue;
}
@ -247,7 +245,7 @@ public class PropertyUtils {
try {
return Enum.valueOf(type, value);
} catch (IllegalArgumentException e) {
logger.info(e.getMessage(), e);
log.info(e.getMessage(), e);
}
return defaultValue;
}

24
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ScriptRunner.java

@ -27,16 +27,14 @@ import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
/**
* Tool to run database scripts
*/
@Slf4j
public class ScriptRunner {
public static final Logger logger = LoggerFactory.getLogger(ScriptRunner.class);
private static final String DEFAULT_DELIMITER = ";";
private final Connection connection;
@ -103,7 +101,7 @@ public class ScriptRunner {
}
String trimmedLine = line.trim();
if (trimmedLine.startsWith("--")) {
logger.info("\n{}", trimmedLine);
log.info("\n{}", trimmedLine);
} else if (trimmedLine.length() < 1 || trimmedLine.startsWith("//")) {
// Do nothing
} else if (trimmedLine.startsWith("delimiter")) {
@ -113,7 +111,7 @@ public class ScriptRunner {
} else if (!fullLineDelimiter && trimmedLine.endsWith(getDelimiter())
|| fullLineDelimiter && trimmedLine.equals(getDelimiter())) {
command.add(line.substring(0, line.lastIndexOf(getDelimiter())));
logger.info("\n{}", String.join("\n", command));
log.info("\n{}", String.join("\n", command));
try (Statement statement = conn.createStatement()) {
statement.execute(String.join(" ", command));
@ -123,20 +121,20 @@ public class ScriptRunner {
int cols = md.getColumnCount();
for (int i = 1; i < cols; i++) {
String name = md.getColumnLabel(i);
logger.info("{} \t", name);
log.info("{} \t", name);
}
logger.info("");
log.info("");
while (rs.next()) {
for (int i = 1; i < cols; i++) {
String value = rs.getString(i);
logger.info("{} \t", value);
log.info("{} \t", value);
}
logger.info("");
log.info("");
}
}
}
} catch (SQLException e) {
logger.error("SQLException", e);
log.error("SQLException", e);
throw e;
}
@ -148,11 +146,11 @@ public class ScriptRunner {
}
} catch (SQLException e) {
logger.error("Error executing: {}", command);
log.error("Error executing: {}", command);
throw e;
} catch (IOException e) {
e.fillInStackTrace();
logger.error("Error executing: {}", command);
log.error("Error executing: {}", command);
throw e;
}
}

14
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java

@ -49,8 +49,8 @@ import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@ -60,13 +60,9 @@ import com.google.common.base.Strings;
import com.google.common.collect.Lists;
@Component
@Slf4j
public class AlertDao {
/**
* logger of AlertDao
*/
private static final Logger logger = LoggerFactory.getLogger(AlertDao.class);
private static final int QUERY_ALERT_THRESHOLD = 100;
@Value("${alert.alarm-suppression.crash:60}")
@ -92,14 +88,14 @@ public class AlertDao {
*/
public int addAlert(Alert alert) {
if (null == alert.getAlertGroupId() || NumberUtils.INTEGER_ZERO.equals(alert.getAlertGroupId())) {
logger.warn("the value of alertGroupId is null or 0 ");
log.warn("the value of alertGroupId is null or 0 ");
return 0;
}
String sign = generateSign(alert);
alert.setSign(sign);
int count = alertMapper.insert(alert);
logger.info("add alert to db , alert: {}", alert);
log.info("add alert to db , alert: {}", alert);
return count;
}

9
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/MonitorDBDao.java

@ -30,16 +30,15 @@ import java.util.List;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
@Slf4j
public class MonitorDBDao {
private static final Logger logger = LoggerFactory.getLogger(MonitorDBDao.class);
public static final String VARIABLE_NAME = "variable_name";
@Autowired
@ -56,7 +55,7 @@ public class MonitorDBDao {
return new H2Performance().getMonitorRecord(conn);
}
} catch (Exception e) {
logger.error("SQLException: {}", e.getMessage(), e);
log.error("SQLException: {}", e.getMessage(), e);
}
return null;
}

9
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionDaoImpl.java

@ -34,8 +34,8 @@ import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
@ -45,10 +45,9 @@ import com.google.common.collect.Lists;
* Task Definition DAO Implementation
*/
@Repository
@Slf4j
public class TaskDefinitionDaoImpl implements TaskDefinitionDao {
private final Logger logger = LoggerFactory.getLogger(TaskDefinitionDaoImpl.class);
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@ -65,7 +64,7 @@ public class TaskDefinitionDaoImpl implements TaskDefinitionDao {
public List<TaskDefinition> getTaskDefinitionListByDefinition(long processDefinitionCode) {
ProcessDefinition processDefinition = processDefinitionMapper.queryByCode(processDefinitionCode);
if (processDefinition == null) {
logger.error("Cannot find process definition, code: {}", processDefinitionCode);
log.error("Cannot find process definition, code: {}", processDefinitionCode);
return Lists.newArrayList();
}

4
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskDefinitionLogDaoImpl.java

@ -33,8 +33,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
@ -46,8 +44,6 @@ import com.google.common.collect.Lists;
@Repository
public class TaskDefinitionLogDaoImpl implements TaskDefinitionLogDao {
private final Logger logger = LoggerFactory.getLogger(TaskDefinitionLogDaoImpl.class);
@Autowired
private TaskDefinitionDao taskDefinitionDao;

11
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/repository/impl/TaskInstanceDaoImpl.java

@ -35,8 +35,8 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
@ -44,10 +44,9 @@ import org.springframework.stereotype.Repository;
* Task Instance DAO implementation
*/
@Repository
@Slf4j
public class TaskInstanceDaoImpl implements TaskInstanceDao {
private final Logger logger = LoggerFactory.getLogger(TaskInstanceDaoImpl.class);
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@ -82,7 +81,7 @@ public class TaskInstanceDaoImpl implements TaskInstanceDao {
public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance) {
WorkflowExecutionStatus processInstanceState = processInstance.getState();
if (processInstanceState.isFinished() || processInstanceState == WorkflowExecutionStatus.READY_STOP) {
logger.warn("processInstance: {} state was: {}, skip submit this task, taskCode: {}",
log.warn("processInstance: {} state was: {}, skip submit this task, taskCode: {}",
processInstance.getId(),
processInstanceState,
taskInstance.getTaskCode());
@ -180,7 +179,7 @@ public class TaskInstanceDaoImpl implements TaskInstanceDao {
taskInstanceMapper.clearCacheByCacheKey(cacheKey);
return true;
} catch (Exception e) {
logger.error("clear cache by cacheKey failed", e);
log.error("clear cache by cacheKey failed", e);
return false;
}
}

12
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/JsonSplitDao.java

@ -26,13 +26,11 @@ import java.sql.Date;
import java.sql.PreparedStatement;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class JsonSplitDao {
public static final Logger logger = LoggerFactory.getLogger(JsonSplitDao.class);
/**
* executeJsonSplitProcessDefinition
*
@ -89,7 +87,7 @@ public class JsonSplitDao {
processUpdate.close();
insertLog.close();
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@ -155,7 +153,7 @@ public class JsonSplitDao {
insert.close();
insertLog.close();
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}
@ -243,7 +241,7 @@ public class JsonSplitDao {
insert.close();
insertLog.close();
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}

14
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProcessDefinitionDao.java

@ -31,13 +31,11 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ProcessDefinitionDao {
public static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionDao.class);
/**
* queryAllProcessDefinition
*
@ -60,7 +58,7 @@ public class ProcessDefinitionDao {
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}
@ -84,7 +82,7 @@ public class ProcessDefinitionDao {
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}
}
@ -116,7 +114,7 @@ public class ProcessDefinitionDao {
processDefinitions.add(processDefinition);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}
return processDefinitions;
@ -151,7 +149,7 @@ public class ProcessDefinitionDao {
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}
}

10
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ProjectDao.java

@ -25,13 +25,11 @@ import java.sql.ResultSet;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ProjectDao {
public static final Logger logger = LoggerFactory.getLogger(ProjectDao.class);
/**
* queryAllProject
*
@ -53,7 +51,7 @@ public class ProjectDao {
projectMap.put(id, code);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}
return projectMap;
@ -76,7 +74,7 @@ public class ProjectDao {
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}
}

10
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/ScheduleDao.java

@ -24,13 +24,11 @@ import java.time.Clock;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class ScheduleDao {
public static final Logger logger = LoggerFactory.getLogger(ScheduleDao.class);
/**
* queryAllSchedule
*
@ -49,7 +47,7 @@ public class ScheduleDao {
scheduleMap.put(id, processDefinitionCode);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}
return scheduleMap;
@ -84,7 +82,7 @@ public class ScheduleDao {
}
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}
}

11
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/SchemaUtils.java

@ -28,8 +28,8 @@ import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.ClassPathResource;
import com.google.common.base.Strings;
@ -37,10 +37,9 @@ import com.google.common.base.Strings;
/**
* Metadata related common classes
*/
@Slf4j
public class SchemaUtils {
private static final Logger logger = LoggerFactory.getLogger(SchemaUtils.class);
private SchemaUtils() {
throw new UnsupportedOperationException("Construct SchemaUtils");
}
@ -67,7 +66,7 @@ public class SchemaUtils {
return -1;
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException(e);
}
}).collect(Collectors.toList());
@ -113,7 +112,7 @@ public class SchemaUtils {
softVersion = FileUtils.readFile2Str(inputStream);
softVersion = Strings.nullToEmpty(softVersion).replaceAll("\\s+|\r|\n", "");
} catch (FileNotFoundException e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException(
"Failed to get the product version description file. The file could not be found", e);
}

8
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/WorkerGroupDao.java

@ -23,13 +23,11 @@ import java.sql.ResultSet;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class WorkerGroupDao {
public static final Logger logger = LoggerFactory.getLogger(WorkerGroupDao.class);
/**
* query all old worker group
* @param conn jdbc connection
@ -50,7 +48,7 @@ public class WorkerGroupDao {
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
log.error(e.getMessage(), e);
throw new RuntimeException("sql: " + sql, e);
}

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save