ligang
6 years ago
93 changed files with 15144 additions and 0 deletions
@@ -0,0 +1,203 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>cn.analysys</groupId>
    <artifactId>escheduler</artifactId>
    <version>1.0.0</version>
  </parent>
  <artifactId>escheduler-api</artifactId>
  <packaging>jar</packaging>

  <dependencies>

    <dependency>
      <groupId>cn.analysys</groupId>
      <artifactId>escheduler-dao</artifactId>
    </dependency>
    <dependency>
      <groupId>cn.analysys</groupId>
      <artifactId>escheduler-common</artifactId>
      <exclusions>
        <exclusion>
          <groupId>io.netty</groupId>
          <artifactId>netty</artifactId>
        </exclusion>
        <exclusion>
          <groupId>io.netty</groupId>
          <artifactId>netty-all</artifactId>
        </exclusion>
        <exclusion>
          <groupId>com.google</groupId>
          <artifactId>netty</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.fusesource.leveldbjni</groupId>
          <artifactId>leveldbjni-all</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <!-- spring boot -->
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-parent</artifactId>
      <version>${spring.boot.version}</version>
      <type>pom</type>
      <scope>import</scope>
    </dependency>

    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-web</artifactId>
      <exclusions>
        <exclusion>
          <groupId>org.springframework.boot</groupId>
          <artifactId>spring-boot-starter-tomcat</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.springframework.boot</groupId>
          <artifactId>spring-boot-starter</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <!-- use jetty -->
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-jetty</artifactId>
      <exclusions>
        <exclusion>
          <groupId>org.eclipse.jetty.websocket</groupId>
          <artifactId>javax-websocket-server-impl</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.eclipse.jetty.websocket</groupId>
          <artifactId>websocket-server</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-test</artifactId>
      <scope>test</scope>
      <exclusions>
        <exclusion>
          <groupId>org.ow2.asm</groupId>
          <artifactId>asm</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.springframework.boot</groupId>
          <artifactId>spring-boot</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.springframework.boot</groupId>
          <artifactId>spring-boot-autoconfigure</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-aop</artifactId>
      <exclusions>
        <exclusion>
          <groupId>org.springframework.boot</groupId>
          <artifactId>spring-boot-starter</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.springframework</groupId>
      <artifactId>spring-context</artifactId>
    </dependency>

    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpcore</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpclient</artifactId>
    </dependency>

    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-annotations</artifactId>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
    </dependency>
    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-core</artifactId>
    </dependency>

    <dependency>
      <groupId>com.alibaba</groupId>
      <artifactId>fastjson</artifactId>
    </dependency>

    <dependency>
      <groupId>commons-collections</groupId>
      <artifactId>commons-collections</artifactId>
    </dependency>

    <dependency>
      <groupId>org.quartz-scheduler</groupId>
      <artifactId>quartz</artifactId>
    </dependency>

    <dependency>
      <groupId>org.quartz-scheduler</groupId>
      <artifactId>quartz-jobs</artifactId>
    </dependency>

    <dependency>
      <groupId>cn.analysys</groupId>
      <artifactId>escheduler-rpc</artifactId>
    </dependency>

    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.12</version>
      <scope>test</scope>
    </dependency>

  </dependencies>
  <build>
    <plugins>
      <plugin>
        <artifactId>maven-assembly-plugin</artifactId>
        <version>2.6</version>
        <configuration>
          <descriptors>
            <descriptor>src/main/assembly/package.xml</descriptor>
          </descriptors>
          <appendAssemblyId>false</appendAssemblyId>
        </configuration>
        <executions>
          <execution>
            <id>make-assembly</id>
            <phase>package</phase>
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <configuration>
          <source>${java.version}</source>
          <target>${java.version}</target>
          <encoding>${project.build.sourceEncoding}</encoding>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
@@ -0,0 +1,13 @@

Maven packaging:

mvn -U clean package assembly:assembly -Dmaven.test.skip=true

Note: if the following error occurs when starting Spring Boot

Unable to start embedded container; nested exception is java.lang.NoSuchMethodError: javax.servlet.ServletContext.addServlet(Ljava/lang/String;Ljavax/servlet/Servlet;)Ljavax/servlet/ServletRegistration$Dynamic;

Solution:

File -> Project Structure -> escheduler-server -> Dependencies: remove servlet-api 2.5
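An alternative to the IDE-level fix is to keep the Servlet 2.5 jar off the classpath in the POM itself, mirroring the javax.servlet:servlet-api exclude that the assembly descriptor below applies at packaging time. A minimal sketch, assuming the old servlet-api arrives transitively through one of the internal modules (the actual carrier dependency may differ in your build, so adjust the enclosing <dependency> accordingly):

    <dependency>
        <groupId>cn.analysys</groupId>
        <artifactId>escheduler-common</artifactId>
        <exclusions>
            <!-- hypothetical exclusion: drop the Servlet 2.5 API that clashes with the embedded Jetty container -->
            <exclusion>
                <groupId>javax.servlet</groupId>
                <artifactId>servlet-api</artifactId>
            </exclusion>
        </exclusions>
    </dependency>

With the exclusion in place, mvn dependency:tree -Dincludes=javax.servlet can be used to confirm that only the Servlet 3.x API remains on the runtime classpath.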
@@ -0,0 +1,74 @@
<assembly
    xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
  <id>cluster</id>
  <formats>
    <format>dir</format>
  </formats>
  <includeBaseDirectory>false</includeBaseDirectory>
  <fileSets>
    <fileSet>
      <directory>src/main/resources</directory>
      <includes>
        <include>**/*.properties</include>
        <include>**/*.xml</include>
        <include>**/*.json</include>
      </includes>
      <outputDirectory>conf</outputDirectory>
    </fileSet>
    <fileSet>
      <directory>${project.parent.basedir}/escheduler-common/src/main/resources</directory>
      <includes>
        <include>**/*.properties</include>
        <include>**/*.xml</include>
        <include>**/*.json</include>
      </includes>
      <outputDirectory>conf</outputDirectory>
    </fileSet>
    <fileSet>
      <directory>${project.parent.basedir}/escheduler-common/src/main/resources/bin</directory>
      <includes>
        <include>*.*</include>
      </includes>
      <directoryMode>755</directoryMode>
      <outputDirectory>bin</outputDirectory>
    </fileSet>
    <fileSet>
      <directory>${project.parent.basedir}/escheduler-dao/src/main/resources</directory>
      <includes>
        <include>**/*.properties</include>
        <include>**/*.xml</include>
        <include>**/*.json</include>
      </includes>
      <outputDirectory>conf</outputDirectory>
    </fileSet>
    <fileSet>
      <directory>${project.parent.basedir}/escheduler-api/src/main/resources</directory>
      <includes>
        <include>**/*.properties</include>
        <include>**/*.xml</include>
        <include>**/*.json</include>
      </includes>
      <outputDirectory>conf</outputDirectory>
    </fileSet>
    <fileSet>
      <directory>target/</directory>
      <includes>
        <include>escheduler-api-${project.version}.jar</include>
      </includes>
      <outputDirectory>lib</outputDirectory>
    </fileSet>
  </fileSets>
  <dependencySets>
    <dependencySet>
      <outputDirectory>lib</outputDirectory>
      <useProjectArtifact>true</useProjectArtifact>
      <excludes>
        <exclude>javax.servlet:servlet-api</exclude>
        <exclude>org.eclipse.jetty.aggregate:jetty-all</exclude>
        <exclude>org.slf4j:slf4j-log4j12</exclude>
      </excludes>
    </dependencySet>
  </dependencySets>
</assembly>
@@ -0,0 +1,31 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.context.annotation.ComponentScan;

@SpringBootApplication
@ServletComponentScan
@ComponentScan("cn.escheduler")
public class ApiApplicationServer {
    public static void main(String[] args) {
        SpringApplication.run(ApiApplicationServer.class, args);
    }
}
@@ -0,0 +1,62 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.configuration;

import cn.escheduler.api.interceptor.LoginHandlerInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.ContentNegotiationConfigurer;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;

/**
 * application configuration
 */
@Configuration
public class AppConfiguration extends WebMvcConfigurerAdapter {

    public static final String LOGIN_INTERCEPTOR_PATH_PATTERN = "/**/*";
    public static final String LOGIN_PATH_PATTERN = "/login";
    public static final String PATH_PATTERN = "/**";

    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(loginInterceptor())
                .addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN)
                .excludePathPatterns(LOGIN_PATH_PATTERN);
    }

    @Bean
    public LoginHandlerInterceptor loginInterceptor() {
        return new LoginHandlerInterceptor();
    }

    @Override
    public void addCorsMappings(CorsRegistry registry) {
        registry.addMapping(PATH_PATTERN).allowedOrigins("*").allowedMethods("*");
    }

    /**
     * turn off suffix-based content negotiation
     *
     * @param configurer
     */
    @Override
    public void configureContentNegotiation(final ContentNegotiationConfigurer configurer) {
        configurer.favorPathExtension(false);
    }
}
@@ -0,0 +1,209 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.service.AlertGroupService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.AlertType;
import cn.escheduler.dao.model.User;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;

import java.util.HashMap;
import java.util.Map;

import static cn.escheduler.api.enums.Status.*;

@RestController
@RequestMapping("alert-group")
public class AlertGroupController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(AlertGroupController.class);

    @Autowired
    private AlertGroupService alertGroupService;

    /**
     * create alert group
     *
     * @param loginUser
     * @param groupName
     * @param groupType
     * @param desc
     * @return
     */
    @PostMapping(value = "/create")
    @ResponseStatus(HttpStatus.CREATED)
    public Result createAlertgroup(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam(value = "groupName") String groupName,
                                   @RequestParam(value = "groupType") AlertType groupType,
                                   @RequestParam(value = "desc", required = false) String desc) {
        logger.info("login user {}, create alert group, groupName: {}, groupType: {}, desc: {}",
                loginUser.getUserName(), groupName, groupType, desc);
        try {
            Map<String, Object> result = alertGroupService.createAlertgroup(loginUser, groupName, groupType, desc);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(CREATE_ALERT_GROUP_ERROR.getMsg(), e);
            return error(CREATE_ALERT_GROUP_ERROR.getCode(), CREATE_ALERT_GROUP_ERROR.getMsg());
        }
    }

    /**
     * alert group list
     *
     * @param loginUser
     * @return
     */
    @GetMapping(value = "/list")
    @ResponseStatus(HttpStatus.OK)
    public Result list(@RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
        logger.info("login user {}, query all alert groups", loginUser.getUserName());
        try {
            HashMap<String, Object> result = alertGroupService.queryAlertgroup();
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(QUERY_ALL_ALERTGROUP_ERROR.getMsg(), e);
            return error(QUERY_ALL_ALERTGROUP_ERROR.getCode(), QUERY_ALL_ALERTGROUP_ERROR.getMsg());
        }
    }

    /**
     * paging query of the alert group list
     *
     * @param loginUser
     * @param pageNo
     * @param searchVal
     * @param pageSize
     * @return
     */
    @GetMapping(value = "/list-paging")
    @ResponseStatus(HttpStatus.OK)
    public Result listPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                             @RequestParam("pageNo") Integer pageNo,
                             @RequestParam(value = "searchVal", required = false) String searchVal,
                             @RequestParam("pageSize") Integer pageSize) {
        logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}",
                loginUser.getUserName(), pageNo, searchVal, pageSize);
        try {
            Map<String, Object> result = checkPageParams(pageNo, pageSize);
            if (result.get(Constants.STATUS) != SUCCESS) {
                return returnDataListPaging(result);
            }

            result = alertGroupService.listPaging(loginUser, searchVal, pageNo, pageSize);
            return returnDataListPaging(result);
        } catch (Exception e) {
            logger.error(LIST_PAGING_ALERT_GROUP_ERROR.getMsg(), e);
            return error(LIST_PAGING_ALERT_GROUP_ERROR.getCode(), LIST_PAGING_ALERT_GROUP_ERROR.getMsg());
        }
    }

    /**
     * update alert group
     *
     * @param loginUser
     * @param id
     * @param groupName
     * @param groupType
     * @param desc
     * @return
     */
    @PostMapping(value = "/update")
    @ResponseStatus(HttpStatus.OK)
    public Result updateAlertgroup(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam(value = "id") int id,
                                   @RequestParam(value = "groupName") String groupName,
                                   @RequestParam(value = "groupType") AlertType groupType,
                                   @RequestParam(value = "desc", required = false) String desc) {
        logger.info("login user {}, update alert group, groupName: {}, groupType: {}, desc: {}",
                loginUser.getUserName(), groupName, groupType, desc);
        try {
            Map<String, Object> result = alertGroupService.updateAlertgroup(loginUser, id, groupName, groupType, desc);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(UPDATE_ALERT_GROUP_ERROR.getMsg(), e);
            return error(UPDATE_ALERT_GROUP_ERROR.getCode(), UPDATE_ALERT_GROUP_ERROR.getMsg());
        }
    }

    /**
     * delete alert group by id
     *
     * @param loginUser
     * @param id
     * @return
     */
    @PostMapping(value = "/delete")
    @ResponseStatus(HttpStatus.OK)
    public Result delAlertgroupById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                    @RequestParam(value = "id") int id) {
        logger.info("login user {}, delete alert group, id: {}", loginUser.getUserName(), id);
        try {
            Map<String, Object> result = alertGroupService.delAlertgroupById(loginUser, id);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(DELETE_ALERT_GROUP_ERROR.getMsg(), e);
            return error(DELETE_ALERT_GROUP_ERROR.getCode(), DELETE_ALERT_GROUP_ERROR.getMsg());
        }
    }

    /**
     * check whether the alert group name already exists
     *
     * @param loginUser
     * @param groupName
     * @return
     */
    @GetMapping(value = "/verify-group-name")
    @ResponseStatus(HttpStatus.OK)
    public Result verifyGroupName(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                  @RequestParam(value = "groupName") String groupName) {
        logger.info("login user {}, verify group name: {}", loginUser.getUserName(), groupName);

        return alertGroupService.verifyGroupName(loginUser, groupName);
    }

    /**
     * grant users to an alert group
     *
     * @param loginUser
     * @param userIds
     * @return
     */
    @PostMapping(value = "/grant-user")
    @ResponseStatus(HttpStatus.OK)
    public Result grantUser(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                            @RequestParam(value = "alertgroupId") int alertgroupId,
                            @RequestParam(value = "userIds") String userIds) {
        logger.info("login user {}, grant user, alertGroupId: {}, userIds: {}",
                loginUser.getUserName(), alertgroupId, userIds);
        try {
            Map<String, Object> result = alertGroupService.grantUser(loginUser, alertgroupId, userIds);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(ALERT_GROUP_GRANT_USER_ERROR.getMsg(), e);
            return error(ALERT_GROUP_GRANT_USER_ERROR.getCode(), ALERT_GROUP_GRANT_USER_ERROR.getMsg());
        }
    }
}
@@ -0,0 +1,272 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.api.utils.Result;
import cn.escheduler.dao.model.Resource;
import org.apache.commons.lang3.StringUtils;

import javax.servlet.http.HttpServletRequest;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;

import static cn.escheduler.common.Constants.*;

/**
 * base controller
 */
public class BaseController {

    /**
     * check paging parameters
     *
     * @param pageNo
     * @param pageSize
     * @return
     */
    public Map<String, Object> checkPageParams(int pageNo, int pageSize) {
        Map<String, Object> result = new HashMap<>(2);
        Status resultEnum = Status.SUCCESS;
        String msg = Status.SUCCESS.getMsg();
        if (pageNo <= 0) {
            resultEnum = Status.REQUEST_PARAMS_NOT_VALID_ERROR;
            msg = MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), Constants.PAGE_NUMBER);
        } else if (pageSize <= 0) {
            resultEnum = Status.REQUEST_PARAMS_NOT_VALID_ERROR;
            msg = MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), Constants.PAGE_SIZE);
        }
        result.put(Constants.STATUS, resultEnum);
        result.put(Constants.MSG, msg);
        return result;
    }

    /**
     * get the client ip address from the http request
     *
     * @param request
     * @return client ip address
     */
    public static String getClientIpAddress(HttpServletRequest request) {
        String clientIp = request.getHeader(HTTP_X_FORWARDED_FOR);

        if (StringUtils.isNotEmpty(clientIp) && !StringUtils.equalsIgnoreCase(HTTP_HEADER_UNKNOWN, clientIp)) {
            int index = clientIp.indexOf(COMMA);
            if (index != -1) {
                return clientIp.substring(0, index);
            } else {
                return clientIp;
            }
        }

        clientIp = request.getHeader(HTTP_X_REAL_IP);
        if (StringUtils.isNotEmpty(clientIp) && !StringUtils.equalsIgnoreCase(HTTP_HEADER_UNKNOWN, clientIp)) {
            return clientIp;
        }

        return request.getRemoteAddr();
    }

    /**
     * return data list
     *
     * @param result
     * @return
     */
    public Result returnDataList(Map<String, Object> result) {
        Status status = (Status) result.get(Constants.STATUS);
        if (status == Status.SUCCESS) {
            String msg = Status.SUCCESS.getMsg();
            Object datalist = result.get(Constants.DATA_LIST);
            return success(msg, datalist);
        } else {
            Integer code = status.getCode();
            String msg = (String) result.get(Constants.MSG);
            return error(code, msg);
        }
    }

    /**
     * return data list with paging
     *
     * @param result
     * @return
     */
    public Result returnDataListPaging(Map<String, Object> result) {
        Status status = (Status) result.get(Constants.STATUS);
        if (status == Status.SUCCESS) {
            result.put(Constants.MSG, Status.SUCCESS.getMsg());
            PageInfo<Resource> pageInfo = (PageInfo<Resource>) result.get(Constants.DATA_LIST);
            return success(pageInfo.getLists(), pageInfo.getCurrentPage(), pageInfo.getTotalCount(),
                    pageInfo.getTotalPage());
        } else {
            Integer code = status.getCode();
            String msg = (String) result.get(Constants.MSG);
            return error(code, msg);
        }
    }

    /**
     * success
     *
     * @return
     */
    public Result success() {
        Result result = new Result();
        result.setCode(Status.SUCCESS.getCode());
        result.setMsg(Status.SUCCESS.getMsg());

        return result;
    }

    /**
     * success, no data to return
     *
     * @param msg
     * @return
     */
    public Result success(String msg) {
        Result result = new Result();
        result.setCode(Status.SUCCESS.getCode());
        result.setMsg(msg);

        return result;
    }

    /**
     * return data without paging
     *
     * @param msg
     * @param list
     * @return
     */
    public Result success(String msg, Object list) {
        return getResult(msg, list);
    }

    /**
     * return data without paging
     *
     * @param list
     * @return
     */
    public Result success(Object list) {
        return getResult(Status.SUCCESS.getMsg(), list);
    }

    /**
     * return key/value data in Map format, e.g. "/user/add" returns the created user's name
     *
     * @param msg
     * @param object
     * @return
     */
    public Result success(String msg, Map<String, Object> object) {
        return getResult(msg, object);
    }

    /**
     * return data with paging
     *
     * @param totalList
     * @param currentPage
     * @param total
     * @param totalPage
     * @return
     */
    public Result success(Object totalList, Integer currentPage,
                          Integer total, Integer totalPage) {
        Result result = new Result();
        result.setCode(Status.SUCCESS.getCode());
        result.setMsg(Status.SUCCESS.getMsg());

        Map<String, Object> map = new HashMap<>(4);
        map.put(Constants.TOTAL_LIST, totalList);
        map.put(Constants.CURRENT_PAGE, currentPage);
        map.put(Constants.TOTAL_PAGE, totalPage);
        map.put(Constants.TOTAL, total);
        result.setData(map);
        return result;
    }

    /**
     * error handle
     *
     * @param code
     * @param msg
     * @return
     */
    public Result error(Integer code, String msg) {
        Result result = new Result();
        result.setCode(code);
        result.setMsg(msg);
        return result;
    }

    /**
     * put message into the result map
     *
     * @param result
     * @param status
     * @param statusParams
     */
    protected void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
        result.put(Constants.STATUS, status);
        if (statusParams != null && statusParams.length > 0) {
            result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.put(Constants.MSG, status.getMsg());
        }
    }

    /**
     * put message into the result object
     *
     * @param result
     * @param status
     */
    protected void putMsg(Result result, Status status, Object... statusParams) {
        result.setCode(status.getCode());

        if (statusParams != null && statusParams.length > 0) {
            result.setMsg(MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.setMsg(status.getMsg());
        }
    }

    /**
     * build a success result
     *
     * @param msg
     * @param list
     * @return
     */
    private Result getResult(String msg, Object list) {
        Result result = new Result();
        result.setCode(Status.SUCCESS.getCode());
        result.setMsg(msg);

        result.setData(list);
        return result;
    }
}
@@ -0,0 +1,122 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.service.DataAnalysisService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
import cn.escheduler.dao.model.User;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;

import java.util.Map;

import static cn.escheduler.api.enums.Status.*;

/**
 * data analysis controller
 */
@RestController
@RequestMapping("projects/analysis")
public class DataAnalysisController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(DataAnalysisController.class);

    @Autowired
    DataAnalysisService dataAnalysisService;

    /**
     * statistics on task instance states
     *
     * @param loginUser
     * @param projectId
     * @return
     */
    @GetMapping(value = "/task-state-count")
    @ResponseStatus(HttpStatus.OK)
    public Result countTaskState(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                 @RequestParam(value = "startDate", required = false) String startDate,
                                 @RequestParam(value = "endDate", required = false) String endDate,
                                 @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
        try {
            logger.info("count task state, user: {}, start date: {}, end date: {}, project id: {}",
                    loginUser.getUserName(), startDate, endDate, projectId);
            Map<String, Object> result = dataAnalysisService.countTaskStateByProject(loginUser, projectId, startDate, endDate);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(TASK_INSTANCE_STATE_COUNT_ERROR.getMsg(), e);
            return error(TASK_INSTANCE_STATE_COUNT_ERROR.getCode(), TASK_INSTANCE_STATE_COUNT_ERROR.getMsg());
        }
    }

    /**
     * statistics on process instance states
     *
     * @param loginUser
     * @param projectId
     * @return
     */
    @GetMapping(value = "/process-state-count")
    @ResponseStatus(HttpStatus.OK)
    public Result countProcessInstanceState(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                            @RequestParam(value = "startDate", required = false) String startDate,
                                            @RequestParam(value = "endDate", required = false) String endDate,
                                            @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
        try {
            logger.info("count process instance state, user: {}, start date: {}, end date: {}, project id: {}",
                    loginUser.getUserName(), startDate, endDate, projectId);
            Map<String, Object> result = dataAnalysisService.countProcessInstanceStateByProject(loginUser, projectId, startDate, endDate);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg(), e);
            return error(COUNT_PROCESS_INSTANCE_STATE_ERROR.getCode(), COUNT_PROCESS_INSTANCE_STATE_ERROR.getMsg());
        }
    }

    /**
     * count the process definitions owned by a given user
     *
     * @param loginUser
     * @param projectId
     * @return
     */
    @GetMapping(value = "/define-user-count")
    @ResponseStatus(HttpStatus.OK)
    public Result countDefinitionByUser(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                        @RequestParam(value = "projectId", required = false, defaultValue = "0") int projectId) {
        try {
            logger.info("count process definition, user: {}, project id: {}",
                    loginUser.getUserName(), projectId);
            Map<String, Object> result = dataAnalysisService.countDefinitionByUser(loginUser, projectId);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg(), e);
            return error(COUNT_PROCESS_DEFINITION_USER_ERROR.getCode(), COUNT_PROCESS_DEFINITION_USER_ERROR.getMsg());
        }
    }

}
@@ -0,0 +1,352 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.service.DataSourceService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.DbType;
import cn.escheduler.dao.model.User;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;

import java.util.Map;

import static cn.escheduler.api.enums.Status.*;

/**
 * data source controller
 */
@RestController
@RequestMapping("datasources")
public class DataSourceController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(DataSourceController.class);

    @Autowired
    private DataSourceService dataSourceService;

    /**
     * create data source
     *
     * @param loginUser
     * @param name
     * @param note
     * @param type
     * @param other
     * @return
     */
    @PostMapping(value = "/create")
    @ResponseStatus(HttpStatus.CREATED)
    public Result createDataSource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam("name") String name,
                                   @RequestParam(value = "note", required = false) String note,
                                   @RequestParam(value = "type") DbType type,
                                   @RequestParam(value = "host") String host,
                                   @RequestParam(value = "port") String port,
                                   @RequestParam(value = "database") String database,
                                   @RequestParam(value = "userName") String userName,
                                   @RequestParam(value = "password") String password,
                                   @RequestParam(value = "other") String other) {
        logger.info("login user {}, create datasource name: {}, note: {}, type: {}, other: {}",
                loginUser.getUserName(), name, note, type, other);
        try {
            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, userName, password, other);
            Map<String, Object> result = dataSourceService.createDataSource(loginUser, name, note, type, parameter);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(CREATE_DATASOURCE_ERROR.getMsg(), e);
            return error(Status.CREATE_DATASOURCE_ERROR.getCode(), Status.CREATE_DATASOURCE_ERROR.getMsg());
        }
    }

    /**
     * update data source
     *
     * @param loginUser
     * @param name
     * @param note
     * @param type
     * @param other
     * @return
     */
    @PostMapping(value = "/update")
    @ResponseStatus(HttpStatus.OK)
    public Result updateDataSource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam("id") int id,
                                   @RequestParam("name") String name,
                                   @RequestParam(value = "note", required = false) String note,
                                   @RequestParam(value = "type") DbType type,
                                   @RequestParam(value = "host") String host,
                                   @RequestParam(value = "port") String port,
                                   @RequestParam(value = "database") String database,
                                   @RequestParam(value = "userName") String userName,
                                   @RequestParam(value = "password") String password,
                                   @RequestParam(value = "other") String other) {
        logger.info("login user {}, update datasource name: {}, note: {}, type: {}, other: {}",
                loginUser.getUserName(), name, note, type, other);
        try {
            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, userName, password, other);
            Map<String, Object> dataSource = dataSourceService.updateDataSource(id, loginUser, name, note, type, parameter);
            return returnDataList(dataSource);
        } catch (Exception e) {
            logger.error(UPDATE_DATASOURCE_ERROR.getMsg(), e);
            return error(UPDATE_DATASOURCE_ERROR.getCode(), UPDATE_DATASOURCE_ERROR.getMsg());
        }
    }

    /**
     * query data source by id
     *
     * @param loginUser
     * @param id
     * @return
     */
    @PostMapping(value = "/update-ui")
    @ResponseStatus(HttpStatus.OK)
    public Result queryDataSource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                  @RequestParam("id") int id) {
        logger.info("login user {}, query datasource: {}", loginUser.getUserName(), id);
        try {
            Map<String, Object> result = dataSourceService.queryDataSource(id);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(QUERY_DATASOURCE_ERROR.getMsg(), e);
            return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg());
        }
    }

    /**
     * query datasource list by type
     *
     * @param loginUser
     * @return
     */
    @GetMapping(value = "/list")
    @ResponseStatus(HttpStatus.OK)
    public Result queryDataSourceList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                      @RequestParam("type") DbType type) {
        try {
            Map<String, Object> result = dataSourceService.queryDataSourceList(loginUser, type.ordinal());
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(QUERY_DATASOURCE_ERROR.getMsg(), e);
            return error(Status.QUERY_DATASOURCE_ERROR.getCode(), Status.QUERY_DATASOURCE_ERROR.getMsg());
        }
    }

    /**
     * query datasource with paging
     *
     * @param loginUser
     * @param searchVal
     * @param pageNo
     * @param pageSize
     * @return
     */
    @GetMapping(value = "/list-paging")
    @ResponseStatus(HttpStatus.OK)
    public Result queryDataSourceListPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                            @RequestParam(value = "searchVal", required = false) String searchVal,
                                            @RequestParam("pageNo") Integer pageNo,
                                            @RequestParam("pageSize") Integer pageSize) {
        try {
            Map<String, Object> result = checkPageParams(pageNo, pageSize);
            if (result.get(Constants.STATUS) != Status.SUCCESS) {
                return returnDataListPaging(result);
            }
            result = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize);
            return returnDataListPaging(result);
        } catch (Exception e) {
            logger.error(QUERY_DATASOURCE_ERROR.getMsg(), e);
            return error(QUERY_DATASOURCE_ERROR.getCode(), QUERY_DATASOURCE_ERROR.getMsg());
        }
    }

    /**
     * connect datasource
     *
     * @param loginUser
     * @param name
     * @param note
     * @param type
     * @param other
     * @return
     */
    @PostMapping(value = "/connect")
    @ResponseStatus(HttpStatus.OK)
    public Result connectDataSource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                    @RequestParam("name") String name,
                                    @RequestParam(value = "note", required = false) String note,
                                    @RequestParam(value = "type") DbType type,
                                    @RequestParam(value = "host") String host,
                                    @RequestParam(value = "port") String port,
                                    @RequestParam(value = "database") String database,
                                    @RequestParam(value = "userName") String userName,
                                    @RequestParam(value = "password") String password,
                                    @RequestParam(value = "other") String other) {
        logger.info("login user {}, connect datasource: {}, note: {}, type: {}, other: {}",
                loginUser.getUserName(), name, note, type, other);
        try {
            String parameter = dataSourceService.buildParameter(name, note, type, host, port, database, userName, password, other);
            Boolean isConnection = dataSourceService.checkConnection(type, parameter);
            Result result = new Result();

            if (isConnection) {
                putMsg(result, SUCCESS);
            } else {
                putMsg(result, CONNECT_DATASOURCE_FAILURE);
            }
            return result;
        } catch (Exception e) {
            logger.error(CONNECT_DATASOURCE_FAILURE.getMsg(), e);
            return error(CONNECT_DATASOURCE_FAILURE.getCode(), CONNECT_DATASOURCE_FAILURE.getMsg());
        }
    }

    /**
     * connection test by datasource id
     *
     * @param loginUser
     * @return
     */
    @GetMapping(value = "/connect-by-id")
    @ResponseStatus(HttpStatus.OK)
    public Result connectionTest(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                 @RequestParam("id") int id) {
        logger.info("connection test, login user: {}, id: {}", loginUser.getUserName(), id);

        try {
            Boolean isConnection = dataSourceService.connectionTest(loginUser, id);
            Result result = new Result();

            if (isConnection) {
                putMsg(result, SUCCESS);
            } else {
                putMsg(result, CONNECTION_TEST_FAILURE);
            }
            return result;
        } catch (Exception e) {
            logger.error(CONNECTION_TEST_FAILURE.getMsg(), e);
            return error(CONNECTION_TEST_FAILURE.getCode(), CONNECTION_TEST_FAILURE.getMsg());
        }
    }

    /**
     * delete datasource by id
     *
     * @param loginUser
     * @param id datasource id
     * @return
     */
    @GetMapping(value = "/delete")
    @ResponseStatus(HttpStatus.OK)
    public Result delete(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                         @RequestParam("id") int id) {
        try {
            logger.info("delete datasource, login user: {}, id: {}", loginUser.getUserName(), id);
            return dataSourceService.delete(loginUser, id);
        } catch (Exception e) {
            logger.error(DELETE_DATA_SOURCE_FAILURE.getMsg(), e);
            return error(DELETE_DATA_SOURCE_FAILURE.getCode(), DELETE_DATA_SOURCE_FAILURE.getMsg());
        }
    }

    /**
     * verify datasource name
     *
     * @param loginUser
     * @param name
     * @return
     */
    @GetMapping(value = "/verify-name")
    @ResponseStatus(HttpStatus.OK)
    public Result verifyDataSourceName(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                       @RequestParam(value = "name") String name) {
        logger.info("login user {}, verify datasource name: {}", loginUser.getUserName(), name);

        try {
            return dataSourceService.verifyDataSourceName(loginUser, name);
        } catch (Exception e) {
            logger.error(VERFIY_DATASOURCE_NAME_FAILURE.getMsg(), e);
            return error(VERFIY_DATASOURCE_NAME_FAILURE.getCode(), VERFIY_DATASOURCE_NAME_FAILURE.getMsg());
        }
    }

    /**
     * unauthorized datasource
     *
     * @param loginUser
     * @param userId
     * @return
     */
    @GetMapping(value = "/unauth-datasource")
    @ResponseStatus(HttpStatus.OK)
    public Result unauthDatasource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam("userId") Integer userId) {
        try {
            logger.info("unauthorized datasource, login user: {}, unauthorized userId: {}",
                    loginUser.getUserName(), userId);
            Map<String, Object> result = dataSourceService.unauthDatasource(loginUser, userId);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(UNAUTHORIZED_DATASOURCE.getMsg(), e);
            return error(UNAUTHORIZED_DATASOURCE.getCode(), UNAUTHORIZED_DATASOURCE.getMsg());
        }
    }

    /**
     * authorized datasource
     *
     * @param loginUser
     * @param userId
     * @return
     */
    @GetMapping(value = "/authed-datasource")
    @ResponseStatus(HttpStatus.OK)
    public Result authedDatasource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                   @RequestParam("userId") Integer userId) {
        try {
            logger.info("authorized datasource, login user: {}, authorized userId: {}",
                    loginUser.getUserName(), userId);
            Map<String, Object> result = dataSourceService.authedDatasource(loginUser, userId);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(AUTHORIZED_DATA_SOURCE.getMsg(), e);
            return error(AUTHORIZED_DATA_SOURCE.getCode(), AUTHORIZED_DATA_SOURCE.getMsg());
        }
    }
}
@@ -0,0 +1,162 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.ExecuteType;
import cn.escheduler.api.enums.Status;
import cn.escheduler.api.service.ExecutorService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.*;
import cn.escheduler.dao.model.User;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;

import java.util.Map;

import static cn.escheduler.api.enums.Status.*;

/**
 * execute task controller
 */
@RestController
@RequestMapping("projects/{projectName}/executors")
public class ExecutorController extends BaseController {

    private static final Logger logger = LoggerFactory.getLogger(ExecutorController.class);

    @Autowired
    private ExecutorService execService;

    /**
     * execute process instance
     */
    @PostMapping(value = "start-process-instance")
    @ResponseStatus(HttpStatus.OK)
    public Result startProcessInstance(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                       @PathVariable String projectName,
                                       @RequestParam(value = "processDefinitionId") int processDefinitionId,
                                       @RequestParam(value = "scheduleTime", required = false) String scheduleTime,
                                       @RequestParam(value = "failureStrategy", required = true) FailureStrategy failureStrategy,
                                       @RequestParam(value = "startNodeList", required = false) String startNodeList,
                                       @RequestParam(value = "taskDependType", required = false) TaskDependType taskDependType,
                                       @RequestParam(value = "execType", required = false) CommandType execType,
                                       @RequestParam(value = "warningType", required = true) WarningType warningType,
                                       @RequestParam(value = "warningGroupId", required = false) int warningGroupId,
                                       @RequestParam(value = "receivers", required = false) String receivers,
                                       @RequestParam(value = "receiversCc", required = false) String receiversCc,
                                       @RequestParam(value = "runMode", required = false) RunMode runMode,
                                       @RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
                                       @RequestParam(value = "timeout", required = false) Integer timeout) {
        try {
            logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, "
                            + "failure policy: {}, node name: {}, node dep: {}, notify type: {}, "
                            + "notify group id: {}, receivers: {}, receiversCc: {}, run mode: {}, process instance priority: {}, timeout: {}",
                    loginUser.getUserName(), projectName, processDefinitionId, scheduleTime, failureStrategy,
                    startNodeList, taskDependType, warningType, warningGroupId, receivers, receiversCc,
                    runMode, processInstancePriority, timeout);

            if (timeout == null) {
                timeout = cn.escheduler.common.Constants.MAX_TASK_TIMEOUT;
            }

            Map<String, Object> result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy,
                    startNodeList, taskDependType, warningType,
                    warningGroupId, receivers, receiversCc, runMode, processInstancePriority, timeout);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(START_PROCESS_INSTANCE_ERROR.getMsg(), e);
            return error(Status.START_PROCESS_INSTANCE_ERROR.getCode(), Status.START_PROCESS_INSTANCE_ERROR.getMsg());
        }
    }

    /**
     * do an action on a process instance: pause, stop, repeat run, recover from pause, recover from stop
     *
     * @param loginUser
     * @param projectName
     * @param processInstanceId
     * @return
     */
    @PostMapping(value = "/execute")
    @ResponseStatus(HttpStatus.OK)
    public Result execute(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                          @PathVariable String projectName,
                          @RequestParam("processInstanceId") Integer processInstanceId,
                          @RequestParam("executeType") ExecuteType executeType) {
        try {
            logger.info("execute command, login user: {}, project: {}, process instance id: {}, execute type: {}",
                    loginUser.getUserName(), projectName, processInstanceId, executeType.toString());
            Map<String, Object> result = execService.execute(loginUser, projectName, processInstanceId, executeType);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(EXECUTE_PROCESS_INSTANCE_ERROR.getMsg(), e);
            return error(EXECUTE_PROCESS_INSTANCE_ERROR.getCode(), EXECUTE_PROCESS_INSTANCE_ERROR.getMsg());
        }
    }

    /**
     * check whether the process definition and all of its sub-process definitions are online
     *
     * @param loginUser
     * @param processDefinitionId
     * @return
     */
    @PostMapping(value = "/start-check")
    @ResponseStatus(HttpStatus.OK)
    public Result startCheckProcessDefinition(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                              @RequestParam(value = "processDefinitionId") int processDefinitionId) {
        logger.info("login user {}, check process definition {}", loginUser.getUserName(), processDefinitionId);
        try {
            Map<String, Object> result = execService.startCheckByProcessDefinedId(processDefinitionId);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(CHECK_PROCESS_DEFINITION_ERROR.getMsg(), e);
            return error(CHECK_PROCESS_DEFINITION_ERROR.getCode(), CHECK_PROCESS_DEFINITION_ERROR.getMsg());
        }
    }

    /**
     * query recipients and cc recipients by process definition id
     *
     * @param loginUser
     * @param processDefinitionId
     * @return
     */
    @GetMapping(value = "/get-receiver-cc")
    @ResponseStatus(HttpStatus.OK)
    public Result getReceiverCc(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                @RequestParam(value = "processDefinitionId") int processDefinitionId) {
        logger.info("login user {}, get process definition receiver and cc", loginUser.getUserName());
        try {
            Map<String, Object> result = execService.getReceiverCc(processDefinitionId);
            return returnDataList(result);
        } catch (Exception e) {
            logger.error(QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg(), e);
            return error(QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getCode(), QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR.getMsg());
        }
    }

}
@ -0,0 +1,92 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.service.LoggerService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpHeaders; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.http.ResponseEntity; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import static cn.escheduler.api.enums.Status.DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR; |
||||
import static cn.escheduler.api.enums.Status.QUERY_TASK_INSTANCE_LOG_ERROR; |
||||
|
||||
|
||||
/** |
||||
* log controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("/log") |
||||
public class LoggerController extends BaseController { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(LoggerController.class); |
||||
|
||||
|
||||
@Autowired |
||||
private LoggerService loggerService; |
||||
|
||||
/** |
||||
* query task log |
||||
*/ |
||||
@GetMapping(value = "/detail") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryLog(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "taskInstId") int taskInstanceId, |
||||
@RequestParam(value = "skipLineNum") int skipNum, |
||||
@RequestParam(value = "limit") int limit) { |
||||
try { |
||||
|
||||
logger.info("login user {}, view task instance {} log, skipLineNum {}, limit {}", loginUser.getUserName(), taskInstanceId, skipNum, limit); |
||||
return loggerService.queryLog(taskInstanceId, skipNum, limit); |
||||
} catch (Exception e) { |
||||
logger.error(QUERY_TASK_INSTANCE_LOG_ERROR.getMsg(), e); |
||||
return error(QUERY_TASK_INSTANCE_LOG_ERROR.getCode(), QUERY_TASK_INSTANCE_LOG_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* download log file |
||||
* |
||||
* @param loginUser |
||||
* @param taskInstanceId |
||||
*/ |
||||
@GetMapping(value = "/download-log") |
||||
@ResponseBody |
||||
public ResponseEntity downloadTaskLog(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "taskInstId") int taskInstanceId) { |
||||
try { |
||||
byte[] logBytes = loggerService.getLogBytes(taskInstanceId); |
||||
return ResponseEntity |
||||
.ok() |
||||
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + System.currentTimeMillis() + ".queryLog" + "\"") |
||||
.body(logBytes); |
||||
} catch (Exception e) { |
||||
logger.error(DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg(), e); |
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
} |
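For orientation, a minimal client-side sketch of the two log endpoints above, written with java.net.http.HttpClient (Java 11+). The base URL, the task instance id, and the session cookie value are assumptions not defined in this diff; the cookie name is assumed to be "sessionId" (the value behind Constants.SESSION_ID used by the LoginController added later in this commit).

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class LoggerClientSketch {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:12345/escheduler";     // assumed deployment address
        String sessionId = "<session id returned by /login>";  // assumed; cookie name assumed to match Constants.SESSION_ID

        HttpClient client = HttpClient.newHttpClient();

        // GET /log/detail pages through a task instance log (skipLineNum + limit).
        HttpRequest detail = HttpRequest.newBuilder()
                .uri(URI.create(base + "/log/detail?taskInstId=1&skipLineNum=0&limit=100"))
                .header("Cookie", "sessionId=" + sessionId)
                .GET()
                .build();
        System.out.println(client.send(detail, HttpResponse.BodyHandlers.ofString()).body());

        // GET /log/download-log streams the whole log file; save the bytes locally.
        HttpRequest download = HttpRequest.newBuilder()
                .uri(URI.create(base + "/log/download-log?taskInstId=1"))
                .header("Cookie", "sessionId=" + sessionId)
                .GET()
                .build();
        client.send(download, HttpResponse.BodyHandlers.ofFile(Path.of("task-1.log")));
    }
}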
@ -0,0 +1,136 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.service.SessionService; |
||||
import cn.escheduler.api.service.UsersService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.httpclient.HttpStatus; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import javax.servlet.http.Cookie; |
||||
import javax.servlet.http.HttpServletRequest; |
||||
import javax.servlet.http.HttpServletResponse; |
||||
|
||||
import static cn.escheduler.api.enums.Status.*; |
||||
|
||||
/** |
||||
* user login controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("") |
||||
public class LoginController extends BaseController { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(LoginController.class); |
||||
|
||||
@Autowired |
||||
private SessionService sessionService; |
||||
|
||||
@Autowired |
||||
private UsersService userService; |
||||
|
||||
/** |
||||
* login |
||||
* |
||||
* @param userName |
||||
* @param userPassword |
||||
* @param request |
||||
* @param response |
||||
* @return |
||||
*/ |
||||
@RequestMapping(value = "/login") |
||||
public Result login(@RequestParam(value = "userName") String userName, |
||||
@RequestParam(value = "userPassword") String userPassword, |
||||
HttpServletRequest request, |
||||
HttpServletResponse response) { |
||||
|
||||
|
||||
try { |
||||
logger.info("login user name: {} ", userName); |
||||
|
||||
// user name check |
||||
if (StringUtils.isEmpty(userName)) { |
||||
return error(Status.USER_NAME_NULL.getCode(), |
||||
Status.USER_NAME_NULL.getMsg()); |
||||
} |
||||
|
||||
|
||||
// user ip check |
||||
String ip = getClientIpAddress(request); |
||||
if (StringUtils.isEmpty(ip)) { |
||||
return error(IP_IS_EMPTY.getCode(), IP_IS_EMPTY.getMsg()); |
||||
} |
||||
|
||||
// verify username and password |
||||
User user = userService.queryUser(userName, userPassword); |
||||
|
||||
if (user == null) { |
||||
return error(Status.USER_NAME_PASSWD_ERROR.getCode(), Status.USER_NAME_PASSWD_ERROR.getMsg()); |
||||
} |
||||
|
||||
// create session |
||||
String sessionId = sessionService.createSession(user, ip); |
||||
|
||||
if (sessionId == null) { |
||||
return error(Status.LOGIN_SESSION_FAILED.getCode(), Status.LOGIN_SESSION_FAILED.getMsg()); |
||||
} |
||||
|
||||
response.setStatus(HttpStatus.SC_OK); |
||||
response.addCookie(new Cookie(Constants.SESSION_ID, sessionId)); |
||||
|
||||
logger.info("sessionId = {}", sessionId); |
||||
return success(LOGIN_SUCCESS.getMsg(), sessionId); |
||||
} catch (Exception e) { |
||||
logger.error(USER_LOGIN_FAILURE.getMsg(),e); |
||||
return error(USER_LOGIN_FAILURE.getCode(), USER_LOGIN_FAILURE.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* sign out |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/signOut") |
||||
public Result signOut(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
HttpServletRequest request) { |
||||
|
||||
try { |
||||
logger.info("login user:{} sign out", loginUser.getUserName()); |
||||
String ip = getClientIpAddress(request); |
||||
sessionService.signOut(ip, loginUser); |
||||
// clear session |
||||
request.removeAttribute(Constants.SESSION_USER); |
||||
return success(); |
||||
} catch (Exception e) { |
||||
logger.error(SIGN_OUT_ERROR.getMsg(),e); |
||||
return error(SIGN_OUT_ERROR.getCode(), SIGN_OUT_ERROR.getMsg()); |
||||
} |
||||
} |
||||
} |
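A minimal sketch of logging in against the controller above and reading back the session cookie it sets. The base URL and the credentials are placeholders, not values taken from this commit, and the cookie name is assumed to be "sessionId".

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class LoginClientSketch {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:12345/escheduler";  // assumed deployment address
        String form = "userName=" + URLEncoder.encode("admin", StandardCharsets.UTF_8)
                + "&userPassword=" + URLEncoder.encode("<password>", StandardCharsets.UTF_8);

        // /login is mapped without a method restriction, so a form-encoded POST works.
        HttpRequest login = HttpRequest.newBuilder()
                .uri(URI.create(base + "/login"))
                .header("Content-Type", "application/x-www-form-urlencoded")
                .POST(HttpRequest.BodyPublishers.ofString(form))
                .build();

        HttpResponse<String> resp = HttpClient.newHttpClient()
                .send(login, HttpResponse.BodyHandlers.ofString());

        // On success the session id is returned in the JSON body and set as a cookie.
        System.out.println("status = " + resp.statusCode());
        resp.headers().allValues("Set-Cookie").forEach(System.out::println);
    }
}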
@ -0,0 +1,326 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.service.ProcessDefinitionService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.*; |
||||
|
||||
|
||||
/** |
||||
* process definition controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("projects/{projectName}/process") |
||||
public class ProcessDefinitionController extends BaseController{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionController.class); |
||||
|
||||
@Autowired |
||||
private ProcessDefinitionService processDefinitionService; |
||||
|
||||
/** |
||||
* create process definition |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param name |
||||
* @param json process definition json |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/save") |
||||
@ResponseStatus(HttpStatus.CREATED) |
||||
public Result createProcessDefinition(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam(value = "name", required = true) String name, |
||||
@RequestParam(value = "processDefinitionJson", required = true) String json, |
||||
@RequestParam(value = "locations", required = false) String locations, |
||||
@RequestParam(value = "connects", required = false) String connects, |
||||
@RequestParam(value = "desc", required = false) String desc) { |
||||
|
||||
try { |
||||
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, " + |
"process_definition_json: {}, desc: {}, locations: {}, connects: {}", |
loginUser.getUserName(), projectName, name, json, desc, locations, connects); |
||||
Map<String, Object> result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json, |
||||
desc, locations, connects ); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(CREATE_PROCESS_DEFINITION.getMsg(),e); |
||||
return error(CREATE_PROCESS_DEFINITION.getCode(), CREATE_PROCESS_DEFINITION.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* verify process definition name unique |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param name |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/verify-name") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result verifyProccessDefinitionName(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam(value = "name", required = true) String name){ |
||||
try { |
||||
logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}", |
||||
loginUser.getUserName(), projectName, name); |
||||
Map<String, Object> result = processDefinitionService.verifyProccessDefinitionName(loginUser, projectName, name); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg(),e); |
||||
return error(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getCode(), Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* update process definition |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param name |
||||
* @param id |
||||
* @param processDefinitionJson |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/update") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result updateProccessDefinition(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam(value = "name", required = true) String name, |
||||
@RequestParam(value = "id", required = true) int id, |
||||
@RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson, |
||||
@RequestParam(value = "locations", required = false) String locations, |
||||
@RequestParam(value = "connects", required = false) String connects, |
||||
@RequestParam(value = "desc", required = false) String desc) { |
||||
|
||||
try { |
||||
logger.info("login user {}, update process definition, project name: {}, process definition name: {}, " + |
"process_definition_json: {}, desc: {}, locations: {}, connects: {}", |
loginUser.getUserName(), projectName, name, processDefinitionJson, desc, locations, connects); |
||||
Map<String, Object> result = processDefinitionService.updateProccessDefinition(loginUser, projectName, id, name, |
||||
processDefinitionJson, desc, locations, connects); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(UPDATE_PROCESS_DEFINITION_ERROR.getMsg(),e); |
||||
return error(UPDATE_PROCESS_DEFINITION_ERROR.getCode(), Status.UPDATE_PROCESS_DEFINITION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* release process definition |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processId |
||||
* @param releaseState |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/release") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result releaseProccessDefinition(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam(value = "processId", required = true) int processId, |
||||
@RequestParam(value = "releaseState", required = true) int releaseState) { |
||||
|
||||
try { |
||||
logger.info("login user {}, release process definition, project name: {}, release state: {}", |
||||
loginUser.getUserName(), projectName, releaseState); |
||||
Map<String, Object> result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(RELEASE_PROCESS_DEFINITION_ERROR.getMsg(),e); |
||||
return error(RELEASE_PROCESS_DEFINITION_ERROR.getCode(), Status.RELEASE_PROCESS_DEFINITION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query detail of process definition |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/select-by-id") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryProccessDefinitionById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("processId") Integer processId |
||||
){ |
||||
try{ |
||||
logger.info("query detail of process definition, login user:{}, project name:{}, process definition id:{}", |
||||
loginUser.getUserName(), projectName, processId); |
||||
Map<String, Object> result = processDefinitionService.queryProccessDefinitionById(loginUser, projectName, processId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg(),e); |
||||
return error(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getCode(), Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query process definition list |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryProccessDefinitionList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName |
||||
){ |
||||
try{ |
||||
logger.info("query process definition list, login user:{}, project name:{}", |
||||
loginUser.getUserName(), projectName); |
||||
Map<String, Object> result = processDefinitionService.queryProccessDefinitionList(loginUser, projectName); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_PROCCESS_DEFINITION_LIST.getMsg(),e); |
||||
return error(QUERY_PROCCESS_DEFINITION_LIST.getCode(), QUERY_PROCCESS_DEFINITION_LIST.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query process definition list paging |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryProcessDefinitionListPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam(value = "userId", required = false, defaultValue = "0") Integer userId, |
||||
@RequestParam("pageSize") Integer pageSize){ |
||||
try{ |
||||
logger.info("query process definition list paging, login user:{}, project name:{}", loginUser.getUserName(), projectName); |
||||
Map<String, Object> result = checkPageParams(pageNo, pageSize); |
||||
if(result.get(Constants.STATUS) != Status.SUCCESS){ |
||||
return returnDataListPaging(result); |
||||
} |
||||
result = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectName, searchVal, pageNo, pageSize, userId); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg(),e); |
||||
return error(QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getCode(), QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* encapsulate the tree view structure |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/view-tree") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result viewTree(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("processId") Integer id, |
||||
@RequestParam("limit") Integer limit){ |
||||
try{ |
||||
Map<String, Object> result = processDefinitionService.viewTree(id, limit); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg(),e); |
||||
return error(ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getCode(),ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* |
||||
* get task node list by process definition id |
||||
* |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processDefinitionId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="gen-task-list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result getNodeListByDefinitionId( |
||||
@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("processDefinitionId") Integer processDefinitionId){ |
||||
try { |
||||
logger.info("query task node name list by definitionId, login user:{}, project name:{}, id : {}", |
||||
loginUser.getUserName(), projectName, processDefinitionId); |
||||
Map<String, Object> result = processDefinitionService.getTaskNodeListByDefinitionId(processDefinitionId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e); |
||||
return error(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* |
||||
* get task node list by process definition id list |
||||
* |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processDefinitionIdList |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="get-task-list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result getNodeListByDefinitionIdList( |
||||
@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("processDefinitionIdList") String processDefinitionIdList){ |
||||
|
||||
try { |
||||
logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}", |
||||
loginUser.getUserName(), projectName, processDefinitionIdList); |
||||
Map<String, Object> result = processDefinitionService.getTaskNodeListByDefinitionIdList(processDefinitionIdList); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg(), e); |
||||
return error(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getCode(), GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
} |
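The endpoints above take form-encoded request parameters rather than a JSON body. Below is a hedged sketch of calling /save under that assumption; the project name, session id (cookie name assumed as in the earlier sketches), and especially the processDefinitionJson payload, whose structure is defined elsewhere in the code base, are placeholders.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class ProcessDefinitionSaveSketch {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:12345/escheduler";  // assumed deployment address
        String sessionId = "<session id from /login>";      // assumed authentication cookie
        String projectName = "demo-project";                // assumed existing project

        Map<String, String> form = new LinkedHashMap<>();
        form.put("name", "demo-process");
        form.put("processDefinitionJson", "<json exported from the UI>"); // structure not shown in this diff
        form.put("desc", "created from a client sketch");

        // Encode the parameters as application/x-www-form-urlencoded.
        String body = form.entrySet().stream()
                .map(e -> e.getKey() + "=" + URLEncoder.encode(e.getValue(), StandardCharsets.UTF_8))
                .collect(Collectors.joining("&"));

        HttpRequest save = HttpRequest.newBuilder()
                .uri(URI.create(base + "/projects/" + projectName + "/process/save"))
                .header("Cookie", "sessionId=" + sessionId)
                .header("Content-Type", "application/x-www-form-urlencoded")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();

        System.out.println(HttpClient.newHttpClient()
                .send(save, HttpResponse.BodyHandlers.ofString()).body());
    }
}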
@ -0,0 +1,283 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.service.ProcessInstanceService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.ExecutionStatus; |
||||
import cn.escheduler.common.enums.Flag; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.*; |
||||
|
||||
/** |
||||
* process instance controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("projects/{projectName}/instance") |
||||
public class ProcessInstanceController extends BaseController{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceController.class); |
||||
|
||||
|
||||
@Autowired |
||||
ProcessInstanceService processInstanceService; |
||||
|
||||
/** |
||||
* query process instance list paging |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryProcessInstanceList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam(value = "processDefinitionId", required = false, defaultValue = "0") Integer processDefinitionId, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam(value = "stateType", required = false) ExecutionStatus stateType, |
||||
@RequestParam(value = "host", required = false) String host, |
||||
@RequestParam(value = "startDate", required = false) String startTime, |
||||
@RequestParam(value = "endDate", required = false) String endTime, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam("pageSize") Integer pageSize){ |
||||
try{ |
||||
logger.info("query all process instance list, login user:{},project name:{}, define id:{}," + |
||||
"search value:{},state type:{},host:{},start time:{}, end time:{},page number:{}, page size:{}", |
||||
loginUser.getUserName(), projectName, processDefinitionId, searchVal, stateType,host, |
||||
startTime, endTime, pageNo, pageSize); |
||||
Map<String, Object> result = processInstanceService.queryProcessInstanceList( |
||||
loginUser, projectName, processDefinitionId, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query task list by process instance id |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param workflowId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/task-list-by-process-id") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryTaskListByProcessId(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("processInstanceId") Integer workflowId |
||||
) { |
||||
try{ |
||||
logger.info("query task instance list by process instance id, login user:{}, project name:{}, process instance id:{}", |
||||
loginUser.getUserName(), projectName, workflowId); |
||||
Map<String, Object> result = processInstanceService.queryTaskListByProcessId(loginUser, projectName, workflowId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg(),e); |
||||
return error(QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getCode(), QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* update process instance |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processInstanceJson |
||||
* @param processInstanceId |
||||
* @param scheduleTime |
||||
* @param syncDefine |
||||
* @param flag |
||||
* @return |
||||
*/ |
||||
@PostMapping(value="/update") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result updateProcessInstance(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam( value = "processInstanceJson", required = false) String processInstanceJson, |
||||
@RequestParam( value = "processInstanceId") Integer processInstanceId, |
||||
@RequestParam( value = "scheduleTime", required = false) String scheduleTime, |
||||
@RequestParam( value = "syncDefine", required = true) Boolean syncDefine, |
||||
@RequestParam(value = "locations", required = false) String locations, |
||||
@RequestParam(value = "connects", required = false) String connects, |
||||
@RequestParam( value = "flag", required = false) Flag flag |
||||
){ |
||||
try{ |
||||
logger.info("update process instance, login user:{}, project name:{}, process instance json:{}," + |
||||
"process instance id:{}, schedule time:{}, sync define:{}, flag:{}, locations:{}, connects:{}", |
||||
loginUser.getUserName(), projectName, processInstanceJson, processInstanceId, scheduleTime, |
||||
syncDefine, flag, locations, connects); |
||||
Map<String, Object> result = processInstanceService.updateProcessInstance(loginUser, projectName, |
||||
processInstanceId, processInstanceJson, scheduleTime, syncDefine, flag, locations, connects); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(UPDATE_PROCESS_INSTANCE_ERROR.getMsg(),e); |
||||
return error(Status.UPDATE_PROCESS_INSTANCE_ERROR.getCode(), Status.UPDATE_PROCESS_INSTANCE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query process instance by id |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processInstanceId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/select-by-id") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryProcessInstanceById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("processInstanceId") Integer processInstanceId |
||||
){ |
||||
try{ |
||||
logger.info("query process instance detail by id, login user:{},project name:{}, process instance id:{}", |
||||
loginUser.getUserName(), projectName, processInstanceId); |
||||
Map<String, Object> result = processInstanceService.queryProcessInstanceById(loginUser, projectName, processInstanceId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_BY_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* delete process instance by id and, at the same time, |
* delete its task instances and their mapping relation data |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processInstanceId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/delete") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result deleteProcessInstanceById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("processInstanceId") Integer processInstanceId |
||||
){ |
||||
try{ |
||||
logger.info("delete process instance by id, login user:{}, project name:{}, process instance id:{}", |
||||
loginUser.getUserName(), projectName, processInstanceId); |
||||
Map<String, Object> result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); |
||||
return error(Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getCode(), Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query sub process instance detail info by task id |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param taskId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/select-sub-process") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result querySubProcessInstanceByTaskId(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("taskId") Integer taskId){ |
||||
try{ |
||||
Map<String, Object> result = processInstanceService.querySubProcessInstanceByTaskId(loginUser, projectName, taskId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getCode(), Status.QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query parent process instance detail info by sub process instance id |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param subId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/select-parent-process") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryParentInstanceBySubId(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("subId") Integer subId){ |
||||
try{ |
||||
Map<String, Object> result = processInstanceService.queryParentInstanceBySubId(loginUser, projectName, subId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getCode(), Status.QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query process instance global variables and local variables |
||||
* |
||||
* @param loginUser |
||||
* @param processInstanceId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/view-variables") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result viewVariables(@RequestAttribute(value = Constants.SESSION_USER) User loginUser |
||||
, @RequestParam("processInstanceId") Integer processInstanceId){ |
||||
try{ |
||||
Map<String, Object> result = processInstanceService.viewVariables(processInstanceId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getCode(), Status.QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* encapsulate the gantt chart structure |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processInstanceId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/view-gantt") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result viewTree(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam("processInstanceId") Integer processInstanceId){ |
||||
try{ |
||||
Map<String, Object> result = processInstanceService.viewGantt(processInstanceId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg(),e); |
||||
return error(Status.ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getCode(),ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
} |
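A sketch of querying the paged instance list above. Only pageNo and pageSize are required; the optional filters are appended to the query string when needed. The date format and the ExecutionStatus constants accepted by stateType are defined outside this diff, so the filter values below are placeholders, as are the base URL, project name, and session cookie.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class ProcessInstanceListSketch {
    public static void main(String[] args) throws Exception {
        String base = "http://localhost:12345/escheduler";  // assumed deployment address
        String sessionId = "<session id from /login>";      // assumed authentication cookie
        String projectName = "demo-project";                // assumed existing project

        // Required paging parameters plus two optional filters (values are placeholders).
        String query = "pageNo=1&pageSize=10"
                + "&searchVal=" + URLEncoder.encode("etl", StandardCharsets.UTF_8)
                + "&startDate=" + URLEncoder.encode("2019-01-01 00:00:00", StandardCharsets.UTF_8);

        HttpRequest list = HttpRequest.newBuilder()
                .uri(URI.create(base + "/projects/" + projectName + "/instance/list-paging?" + query))
                .header("Cookie", "sessionId=" + sessionId)
                .GET()
                .build();

        System.out.println(HttpClient.newHttpClient()
                .send(list, HttpResponse.BodyHandlers.ofString()).body());
    }
}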
@ -0,0 +1,212 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.service.ProjectService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.*; |
||||
|
||||
/** |
||||
* project controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("projects") |
||||
public class ProjectController extends BaseController { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProjectController.class); |
||||
|
||||
@Autowired |
||||
private ProjectService projectService; |
||||
|
||||
/** |
||||
* create project |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param desc |
||||
* @return returns an error if the project already exists |
||||
*/ |
||||
@PostMapping(value = "/create") |
||||
@ResponseStatus(HttpStatus.CREATED) |
||||
public Result createProject(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("projectName") String projectName, |
||||
@RequestParam(value = "desc", required = false) String desc) { |
||||
|
||||
try { |
||||
logger.info("login user {}, create project name: {}, desc: {}", loginUser.getUserName(), projectName, desc); |
||||
Map<String, Object> result = projectService.createProject(loginUser, projectName, desc); |
||||
return returnDataList(result); |
||||
} catch (Exception e) { |
||||
logger.error(CREATE_PROJECT_ERROR.getMsg(), e); |
||||
return error(CREATE_PROJECT_ERROR.getCode(), CREATE_PROJECT_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* update project |
||||
* |
||||
* @param loginUser |
||||
* @param projectId |
||||
* @param projectName |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/update") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result updateProject(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("projectId") Integer projectId, |
||||
@RequestParam("projectName") String projectName, |
||||
@RequestParam(value = "desc", required = false) String desc) { |
||||
try { |
||||
logger.info("login user {}, update project name: {}, desc: {}", loginUser.getUserName(), projectName, desc); |
||||
Map<String, Object> result = projectService.update(loginUser, projectId, projectName, desc); |
||||
return returnDataList(result); |
||||
} catch (Exception e) { |
||||
logger.error(UPDATE_PROJECT_ERROR.getMsg(), e); |
||||
return error(UPDATE_PROJECT_ERROR.getCode(), UPDATE_PROJECT_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query project details by id |
||||
* |
||||
* @param loginUser |
||||
* @param projectId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/query-by-id") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryProjectById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("projectId") Integer projectId) { |
||||
logger.info("login user {}, query project by id: {}", loginUser.getUserName(), projectId); |
||||
|
||||
try { |
||||
Map<String, Object> result = projectService.queryById(projectId); |
||||
return returnDataList(result); |
||||
} catch (Exception e) { |
||||
logger.error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg(), e); |
||||
return error(QUERY_PROJECT_DETAILS_BY_ID_ERROR.getCode(), QUERY_PROJECT_DETAILS_BY_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query project list paging |
||||
* |
||||
* @param loginUser |
||||
* @param searchVal |
||||
* @param pageSize |
||||
* @param pageNo |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryProjectListPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam("pageSize") Integer pageSize, |
||||
@RequestParam("pageNo") Integer pageNo |
||||
) { |
||||
|
||||
try { |
||||
logger.info("login user {}, query project list paging", loginUser.getUserName()); |
||||
Map<String, Object> result = projectService.queryProjectListPaging(loginUser, pageSize, pageNo, searchVal); |
||||
return returnDataListPaging(result); |
||||
} catch (Exception e) { |
||||
logger.error(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg(), e); |
||||
return error(Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getCode(), Status.LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* delete project by id |
||||
* |
||||
* @param loginUser |
||||
* @param projectId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/delete") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result deleteProject(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("projectId") Integer projectId |
||||
) { |
||||
|
||||
try { |
||||
logger.info("login user {}, delete project: {}.", loginUser.getUserName(), projectId); |
||||
Map<String, Object> result = projectService.deleteProject(loginUser, projectId); |
||||
return returnDataList(result); |
||||
} catch (Exception e) { |
||||
logger.error(DELETE_PROJECT_ERROR.getMsg(), e); |
||||
return error(DELETE_PROJECT_ERROR.getCode(), DELETE_PROJECT_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query unauthorized project |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/unauth-project") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryUnauthorizedProject(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("userId") Integer userId) { |
||||
try { |
||||
logger.info("login user {}, query unauthorized project by user id: {}.", loginUser.getUserName(), userId); |
||||
Map<String, Object> result = projectService.queryUnauthorizedProject(loginUser, userId); |
||||
return returnDataList(result); |
||||
} catch (Exception e) { |
||||
logger.error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg(), e); |
||||
return error(QUERY_UNAUTHORIZED_PROJECT_ERROR.getCode(), QUERY_UNAUTHORIZED_PROJECT_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query authorized project |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/authed-project") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryAuthorizedProject(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("userId") Integer userId) { |
||||
try { |
||||
logger.info("login user {}, query authorized project by user id: {}.", loginUser.getUserName(), userId); |
||||
Map<String, Object> result = projectService.queryAuthorizedProject(loginUser, userId); |
||||
return returnDataList(result); |
||||
} catch (Exception e) { |
||||
logger.error(QUERY_AUTHORIZED_PROJECT.getMsg(), e); |
||||
return error(QUERY_AUTHORIZED_PROJECT.getCode(), QUERY_AUTHORIZED_PROJECT.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
} |
@ -0,0 +1,67 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.service.QueueService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.QUERY_QUEUE_LIST_ERROR; |
||||
|
||||
|
||||
/** |
||||
* queue controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("/queue") |
||||
public class QueueController extends BaseController{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(QueueController.class); |
||||
|
||||
@Autowired |
||||
private QueueService queueService; |
||||
|
||||
|
||||
/** |
||||
* query queue list |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser){ |
||||
try{ |
||||
logger.info("login user {}, query queue list", loginUser.getUserName()); |
||||
Map<String, Object> result = queueService.queryList(loginUser); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_QUEUE_LIST_ERROR.getMsg(),e); |
||||
return error(QUERY_QUEUE_LIST_ERROR.getCode(), QUERY_QUEUE_LIST_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
} |
@ -0,0 +1,587 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0 |
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.service.ResourcesService; |
||||
import cn.escheduler.api.service.UdfFuncService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.ResourceType; |
||||
import cn.escheduler.common.enums.UdfType; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.lang.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.core.io.Resource; |
||||
import org.springframework.http.HttpHeaders; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.http.ResponseEntity; |
||||
import org.springframework.web.bind.annotation.*; |
||||
import org.springframework.web.multipart.MultipartFile; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.*; |
||||
|
||||
/** |
||||
* resources controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("resources") |
||||
public class ResourcesController extends BaseController{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class); |
||||
|
||||
|
||||
@Autowired |
||||
private ResourcesService resourceService; |
||||
@Autowired |
||||
private UdfFuncService udfFuncService; |
||||
|
||||
/** |
||||
* create resource |
||||
* |
||||
* @param loginUser |
||||
* @param alias |
||||
* @param desc |
||||
* @param file |
||||
*/ |
||||
@PostMapping(value = "/create") |
||||
public Result createResource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "type") ResourceType type, |
||||
@RequestParam(value ="name")String alias, |
||||
@RequestParam(value = "desc", required = false) String desc, |
||||
@RequestParam("file") MultipartFile file) { |
||||
try { |
||||
logger.info("login user {}, create resource, type: {}, resource alias: {}, desc: {}, file: {},{}", |
||||
loginUser.getUserName(),type, alias, desc, file.getName(), file.getOriginalFilename()); |
||||
return resourceService.createResource(loginUser,alias, desc,type ,file); |
||||
} catch (Exception e) { |
||||
logger.error(CREATE_RESOURCE_ERROR.getMsg(),e); |
||||
return error(CREATE_RESOURCE_ERROR.getCode(), CREATE_RESOURCE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* update resource |
||||
* |
||||
* @param loginUser |
||||
* @param alias |
||||
* @param desc |
||||
*/ |
||||
@PostMapping(value = "/update") |
||||
public Result updateResource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="id") int resourceId, |
||||
@RequestParam(value = "type") ResourceType type, |
||||
@RequestParam(value ="name")String alias, |
||||
@RequestParam(value = "desc", required = false) String desc) { |
||||
try { |
||||
logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}", |
||||
loginUser.getUserName(),type, alias, desc); |
||||
return resourceService.updateResource(loginUser,resourceId,alias, desc,type); |
||||
} catch (Exception e) { |
||||
logger.error(UPDATE_RESOURCE_ERROR.getMsg(),e); |
||||
return error(Status.UPDATE_RESOURCE_ERROR.getCode(), Status.UPDATE_RESOURCE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query resources list |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result querytResourceList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="type")ResourceType type |
||||
){ |
||||
try{ |
||||
logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type.toString()); |
||||
Map<String, Object> result = resourceService.queryResourceList(loginUser, type); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_RESOURCES_LIST_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_RESOURCES_LIST_ERROR.getCode(), Status.QUERY_RESOURCES_LIST_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query resources list paging |
||||
* |
||||
* @param loginUser |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result querytResourceListPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="type")ResourceType type, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam("pageSize") Integer pageSize |
||||
){ |
||||
try{ |
||||
logger.info("query resource list, login user:{}, resource type:{}, search value:{}", |
||||
loginUser.getUserName(), type.toString(), searchVal); |
||||
Map<String, Object> result = checkPageParams(pageNo, pageSize); |
||||
if(result.get(Constants.STATUS) != Status.SUCCESS){ |
||||
return returnDataListPaging(result); |
||||
} |
||||
|
||||
result = resourceService.queryResourceListPaging(loginUser,type,searchVal,pageNo, pageSize); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_RESOURCES_LIST_PAGING.getMsg(),e); |
||||
return error(Status.QUERY_RESOURCES_LIST_PAGING.getCode(), Status.QUERY_RESOURCES_LIST_PAGING.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* delete resource |
||||
* |
||||
* @param loginUser |
||||
* @param resourceId |
||||
*/ |
||||
@GetMapping(value = "/delete") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result deleteResource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="id") int resourceId |
||||
) { |
||||
try{ |
||||
logger.info("login user {}, delete resource id: {}", |
||||
loginUser.getUserName(),resourceId); |
||||
return resourceService.delete(loginUser,resourceId); |
||||
}catch (Exception e){ |
||||
logger.error(DELETE_RESOURCE_ERROR.getMsg(),e); |
||||
return error(Status.DELETE_RESOURCE_ERROR.getCode(), Status.DELETE_RESOURCE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* verify resource by alias and type |
||||
* |
||||
* @param loginUser |
||||
* @param alias |
||||
* @param type |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/verify-name") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result verifyResourceName(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="name") String alias, |
||||
@RequestParam(value ="type")ResourceType type |
||||
) { |
||||
try { |
||||
logger.info("login user {}, verify resource alias: {}, resource type: {}", |
loginUser.getUserName(), alias, type); |
||||
|
||||
return resourceService.verifyResourceName(alias, type); |
||||
} catch (Exception e) { |
||||
logger.error(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg(), e); |
||||
return error(Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getCode(), Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* view resource file online |
||||
* |
||||
* @param loginUser |
||||
* @param resourceId |
||||
*/ |
||||
@GetMapping(value = "/view") |
||||
public Result viewResource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "id") int resourceId, |
||||
@RequestParam(value = "skipLineNum") int skipLineNum, |
||||
@RequestParam(value = "limit") int limit |
||||
) { |
||||
try{ |
||||
logger.info("login user {}, view resource : {}, skipLineNum {} , limit {}", |
||||
loginUser.getUserName(),resourceId,skipLineNum,limit); |
||||
|
||||
return resourceService.readResource(resourceId,skipLineNum,limit); |
||||
}catch (Exception e){ |
||||
logger.error(VIEW_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); |
||||
return error(Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* create resource file online |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @param fileName |
||||
* @param fileSuffix |
||||
* @param desc |
||||
* @param content |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/online-create") |
||||
public Result onlineCreateResource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "type") ResourceType type, |
||||
@RequestParam(value ="fileName")String fileName, |
||||
@RequestParam(value ="suffix")String fileSuffix, |
||||
@RequestParam(value = "desc", required = false) String desc, |
||||
@RequestParam(value = "content") String content |
||||
) { |
||||
try{ |
||||
logger.info("login user {}, online create resource! type : {}, fileName : {}, suffix : {}, desc : {}, content : {}", |
loginUser.getUserName(), type, fileName, fileSuffix, desc, content); |
||||
if(StringUtils.isEmpty(content)){ |
||||
logger.error("resource file contents are not allowed to be empty"); |
||||
return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); |
||||
} |
||||
return resourceService.onlineCreateResource(loginUser,type,fileName,fileSuffix,desc,content); |
||||
}catch (Exception e){ |
||||
logger.error(CREATE_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); |
||||
return error(Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* edit resource file online |
||||
* |
||||
* @param loginUser |
||||
* @param resourceId |
||||
*/ |
||||
@PostMapping(value = "/update-content") |
||||
public Result updateResourceContent(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "id") int resourceId, |
||||
@RequestParam(value = "content") String content |
||||
) { |
||||
try{ |
||||
logger.info("login user {}, updateProcessInstance resource : {}", |
||||
loginUser.getUserName(),resourceId); |
||||
if(StringUtils.isEmpty(content)){ |
||||
logger.error("The resource file contents are not allowed to be empty"); |
||||
return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); |
||||
} |
||||
return resourceService.updateResourceContent(resourceId,content); |
||||
}catch (Exception e){ |
||||
logger.error(EDIT_RESOURCE_FILE_ON_LINE_ERROR.getMsg(),e); |
||||
return error(Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR.getCode(), Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* download resource file |
||||
* |
||||
* @param loginUser |
||||
* @param resourceId |
||||
*/ |
||||
@GetMapping(value = "/download") |
||||
@ResponseBody |
||||
public ResponseEntity downloadResource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "id") int resourceId) { |
||||
try{ |
||||
logger.info("login user {}, download resource : {}", |
||||
loginUser.getUserName(), resourceId); |
||||
Resource file = resourceService.downloadResource(resourceId); |
||||
if (file == null) { |
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.RESOURCE_NOT_EXIST.getMsg()); |
||||
} |
||||
return ResponseEntity |
||||
.ok() |
||||
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + file.getFilename() + "\"") |
||||
.body(file); |
||||
}catch (Exception e){ |
||||
logger.error(DOWNLOAD_RESOURCE_FILE_ERROR.getMsg(),e); |
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.DOWNLOAD_RESOURCE_FILE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* create udf function |
||||
* @param loginUser |
||||
* @param type |
||||
* @param funcName |
||||
* @param argTypes |
||||
* @param database |
||||
* @param desc |
||||
* @param resourceId |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/udf-func/create") |
||||
@ResponseStatus(HttpStatus.CREATED) |
||||
public Result createUdfFunc(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "type") UdfType type, |
||||
@RequestParam(value ="funcName")String funcName, |
||||
@RequestParam(value ="className")String className, |
||||
@RequestParam(value ="argTypes", required = false)String argTypes, |
||||
@RequestParam(value ="database", required = false)String database, |
||||
@RequestParam(value = "desc", required = false) String desc, |
||||
@RequestParam(value = "resourceId") int resourceId) { |
||||
logger.info("login user {}, create udf function, type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", |
||||
loginUser.getUserName(),type, funcName, argTypes,database,desc, resourceId); |
||||
Result result = new Result(); |
||||
|
||||
try { |
||||
return udfFuncService.createUdfFunction(loginUser,funcName,className,argTypes,database,desc,type,resourceId); |
||||
} catch (Exception e) { |
||||
logger.error(CREATE_UDF_FUNCTION_ERROR.getMsg(),e); |
||||
return error(Status.CREATE_UDF_FUNCTION_ERROR.getCode(), Status.CREATE_UDF_FUNCTION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* view udf function |
||||
* |
||||
* @param loginUser |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/udf-func/update-ui") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result updateUIUdfFunction(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("id") int id) |
||||
{ |
||||
logger.info("login user {}, query udf function, id: {}", |
loginUser.getUserName(), id); |
||||
try { |
||||
Map<String, Object> map = udfFuncService.queryUdfFuncDetail(id); |
||||
return returnDataList(map); |
||||
} catch (Exception e) { |
||||
logger.error(VIEW_UDF_FUNCTION_ERROR.getMsg(),e); |
||||
return error(Status.VIEW_UDF_FUNCTION_ERROR.getCode(), Status.VIEW_UDF_FUNCTION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* update udf function |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @param funcName |
||||
* @param argTypes |
||||
* @param database |
||||
* @param desc |
||||
* @param resourceId |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/udf-func/update") |
||||
public Result updateUdfFunc(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "id") int udfFuncId, |
||||
@RequestParam(value = "type") UdfType type, |
||||
@RequestParam(value ="funcName")String funcName, |
||||
@RequestParam(value ="className")String className, |
||||
@RequestParam(value ="argTypes", required = false)String argTypes, |
||||
@RequestParam(value ="database", required = false)String database, |
||||
@RequestParam(value = "desc", required = false) String desc, |
||||
@RequestParam(value = "resourceId") int resourceId) { |
||||
try { |
||||
logger.info("login user {}, updateProcessInstance udf function id: {},type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}", |
||||
loginUser.getUserName(),udfFuncId,type, funcName, argTypes,database,desc, resourceId); |
||||
Map<String, Object> result = udfFuncService.updateUdfFunc(udfFuncId,funcName,className,argTypes,database,desc,type,resourceId); |
||||
return returnDataList(result); |
||||
} catch (Exception e) { |
||||
logger.error(UPDATE_UDF_FUNCTION_ERROR.getMsg(),e); |
||||
return error(Status.UPDATE_UDF_FUNCTION_ERROR.getCode(), Status.UPDATE_UDF_FUNCTION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query udf function list paging |
||||
* |
||||
* @param loginUser |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/udf-func/list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryUdfFuncList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam("pageSize") Integer pageSize |
||||
){ |
||||
try{ |
||||
logger.info("query udf functions list, login user:{},search value:{}", |
||||
loginUser.getUserName(), searchVal); |
||||
Map<String, Object> result = checkPageParams(pageNo, pageSize); |
||||
if(result.get(Constants.STATUS) != Status.SUCCESS){ |
||||
return returnDataListPaging(result); |
||||
} |
||||
|
||||
result = udfFuncService.queryUdfFuncListPaging(loginUser,searchVal,pageNo, pageSize); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getCode(), Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query udf function list by type |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/udf-func/list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryResourceList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("type") UdfType type){ |
||||
try{ |
||||
logger.info("query datasource list, user:{}, type:{}", loginUser.getUserName(), type.toString()); |
||||
Map<String, Object> result = udfFuncService.queryResourceList(loginUser,type.ordinal()); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_DATASOURCE_BY_TYPE_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_DATASOURCE_BY_TYPE_ERROR.getCode(),QUERY_DATASOURCE_BY_TYPE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* verify udf function name can use or not |
||||
* |
||||
* @param loginUser |
||||
* @param name |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/udf-func/verify-name") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result verifyUdfFuncName(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="name") String name |
||||
) { |
||||
logger.info("login user {}, verfiy udf function name: {}", |
||||
loginUser.getUserName(),name); |
||||
|
||||
try{ |
||||
|
||||
return udfFuncService.verifyUdfFuncByName(name); |
||||
}catch (Exception e){ |
||||
logger.error(VERIFY_UDF_FUNCTION_NAME_ERROR.getMsg(),e); |
||||
return error(Status.VERIFY_UDF_FUNCTION_NAME_ERROR.getCode(), Status.VERIFY_UDF_FUNCTION_NAME_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* delete udf function |
||||
* |
||||
* @param loginUser |
||||
* @param udfFuncId |
||||
*/ |
||||
@GetMapping(value = "/udf-func/delete") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result deleteUdfFunc(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="id") int udfFuncId |
||||
) { |
||||
try{ |
||||
|
||||
logger.info("login user {}, delete udf function id: {}", loginUser.getUserName(),udfFuncId); |
||||
return udfFuncService.delete(udfFuncId); |
||||
}catch (Exception e){ |
||||
logger.error(DELETE_UDF_FUNCTION_ERROR.getMsg(),e); |
||||
return error(Status.DELETE_UDF_FUNCTION_ERROR.getCode(), Status.DELETE_UDF_FUNCTION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* authorized file resource list |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/authed-file") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result authorizedFile(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("userId") Integer userId) { |
||||
try{ |
||||
logger.info("authorized file resource, user: {}, user id:{}", loginUser.getUserName(), userId); |
||||
Map<String, Object> result = resourceService.authorizedFile(loginUser, userId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(AUTHORIZED_FILE_RESOURCE_ERROR.getMsg(),e); |
||||
return error(Status.AUTHORIZED_FILE_RESOURCE_ERROR.getCode(), Status.AUTHORIZED_FILE_RESOURCE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* unauthorized file resource list |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/unauth-file") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result unauthorizedFile(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("userId") Integer userId) { |
||||
try{ |
||||
logger.info("resource unauthorized file, user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); |
||||
Map<String, Object> result = resourceService.unauthorizedFile(loginUser, userId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(UNAUTHORIZED_FILE_RESOURCE_ERROR.getMsg(),e); |
||||
return error(Status.UNAUTHORIZED_FILE_RESOURCE_ERROR.getCode(), Status.UNAUTHORIZED_FILE_RESOURCE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* unauthorized udf function |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/unauth-udf-func") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result unauthUDFFunc(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("userId") Integer userId) { |
||||
try{ |
||||
logger.info("unauthorized udf function, login user:{}, unauthorized user id:{}", loginUser.getUserName(), userId); |
||||
|
||||
Map<String, Object> result = resourceService.unauthorizedUDFFunction(loginUser, userId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(UNAUTHORIZED_UDF_FUNCTION_ERROR.getMsg(),e); |
||||
return error(Status.UNAUTHORIZED_UDF_FUNCTION_ERROR.getCode(), Status.UNAUTHORIZED_UDF_FUNCTION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* authorized udf function |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/authed-udf-func") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result authorizedUDFFunction(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("userId") Integer userId) { |
||||
try{ |
||||
logger.info("auth udf function, login user:{}, auth user id:{}", loginUser.getUserName(), userId); |
||||
Map<String, Object> result = resourceService.authorizedUDFFunction(loginUser, userId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(AUTHORIZED_UDF_FUNCTION_ERROR.getMsg(),e); |
||||
return error(Status.AUTHORIZED_UDF_FUNCTION_ERROR.getCode(), Status.AUTHORIZED_UDF_FUNCTION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
} |
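
A minimal client-side sketch (not part of this commit) of how the online-create endpoint above could be invoked with Spring's RestTemplate. The host and port, the "/escheduler/resources" base path, the sessionId cookie used by the login interceptor, and the "FILE" ResourceType name are assumptions; adjust them to the actual deployment.

// Hedged usage sketch for POST /resources/online-create (assumed base path).
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class OnlineCreateResourceExample {

    public static void main(String[] args) {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        // assumption: the login interceptor resolves the session user from this cookie
        headers.add(HttpHeaders.COOKIE, "sessionId=<your-session-id>");

        MultiValueMap<String, String> form = new LinkedMultiValueMap<>();
        form.add("type", "FILE");                      // assumed ResourceType enum name
        form.add("fileName", "demo");
        form.add("suffix", "sh");
        form.add("desc", "created online");
        form.add("content", "echo 'hello escheduler'");

        String response = new RestTemplate().postForObject(
                "http://localhost:12345/escheduler/resources/online-create",  // assumed host/port and base path
                new HttpEntity<>(form, headers),
                String.class);
        System.out.println(response);
    }
}

The same form-encoded pattern applies to the other POST endpoints in this controller, e.g. update-content and udf-func/create.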
@ -0,0 +1,230 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.service.SchedulerService; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.FailureStrategy; |
||||
import cn.escheduler.common.enums.Priority; |
||||
import cn.escheduler.common.enums.ReleaseState; |
||||
import cn.escheduler.common.enums.WarningType; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.*; |
||||
import static cn.escheduler.api.utils.Constants.SESSION_USER; |
||||
|
||||
/** |
||||
* schedule controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("/projects/{projectName}/schedule") |
||||
public class SchedulerController extends BaseController{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(SchedulerController.class); |
||||
public static final String DEFAULT_WARNING_TYPE = "NONE"; |
||||
public static final String DEFAULT_NOTIFY_GROUP_ID = "1"; |
||||
public static final String DEFAULT_MAX_TRY_TIMES = "0"; |
||||
public static final String DEFAULT_FAILURE_POLICY = "CONTINUE"; |
||||
|
||||
|
||||
@Autowired |
||||
private SchedulerService schedulerService; |
||||
|
||||
|
||||
/** |
||||
* create schedule |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processDefinitionId |
||||
* @param schedule |
||||
* @param warningType |
||||
* @param warningGroupId |
||||
* @param failureStrategy |
||||
* @return |
||||
*/ |
||||
@PostMapping("/create") |
||||
@ResponseStatus(HttpStatus.CREATED) |
||||
public Result createSchedule(@RequestAttribute(value = SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam(value = "processDefinitionId") Integer processDefinitionId, |
||||
@RequestParam(value = "schedule") String schedule, |
||||
@RequestParam(value = "warningType", required = false,defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, |
||||
@RequestParam(value = "warningGroupId", required = false,defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId, |
||||
@RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy, |
||||
@RequestParam(value = "receivers", required = false) String receivers, |
||||
@RequestParam(value = "receiversCc", required = false) String receiversCc, |
||||
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { |
||||
logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," + |
||||
"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}", |
||||
loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId, failureStrategy,receivers,receiversCc,processInstancePriority); |
||||
try { |
||||
Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule, |
||||
warningType, warningGroupId, failureStrategy, receivers,receiversCc,processInstancePriority); |
||||
|
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(CREATE_SCHEDULE_ERROR.getMsg(),e); |
||||
return error(CREATE_SCHEDULE_ERROR.getCode(), CREATE_SCHEDULE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* update schedule |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param id |
||||
* @param schedule |
||||
* @param warningType |
||||
* @param warningGroupId |
||||
* @param failureStrategy |
||||
* @return |
||||
*/ |
||||
@PostMapping("/update") |
||||
public Result updateSchedule(@RequestAttribute(value = SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam(value = "id") Integer id, |
||||
@RequestParam(value = "schedule") String schedule, |
||||
@RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType, |
||||
@RequestParam(value = "warningGroupId", required = false) int warningGroupId, |
||||
@RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy, |
||||
@RequestParam(value = "receivers", required = false) String receivers, |
||||
@RequestParam(value = "receiversCc", required = false) String receiversCc, |
||||
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) { |
||||
logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " + |
||||
"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}", |
||||
loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy,receivers,receiversCc,processInstancePriority); |
||||
|
||||
try { |
||||
Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectName, id, schedule, |
||||
warningType, warningGroupId, failureStrategy, receivers,receiversCc,null,processInstancePriority); |
||||
return returnDataList(result); |
||||
|
||||
}catch (Exception e){ |
||||
logger.error(UPDATE_SCHEDULE_ERROR.getMsg(),e); |
||||
return error(Status.UPDATE_SCHEDULE_ERROR.getCode(), Status.UPDATE_SCHEDULE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* publish schedule (set schedule state to online) |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param id |
||||
* @return |
||||
* @throws Exception |
||||
*/ |
||||
@PostMapping("/online") |
||||
public Result online(@RequestAttribute(value = SESSION_USER) User loginUser, |
||||
@PathVariable("projectName") String projectName, |
||||
@RequestParam("id") Integer id) { |
||||
logger.info("login user {}, schedule setScheduleState, project name: {}, id: {}", |
||||
loginUser.getUserName(), projectName, id); |
||||
try { |
||||
Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE); |
||||
return returnDataList(result); |
||||
|
||||
}catch (Exception e){ |
||||
logger.error(PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg(),e); |
||||
return error(Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getCode(), Status.PUBLISH_SCHEDULE_ONLINE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* offline schedule |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
@PostMapping("/offline") |
||||
public Result offline(@RequestAttribute(value = SESSION_USER) User loginUser, |
||||
@PathVariable("projectName") String projectName, |
||||
@RequestParam("id") Integer id) { |
||||
logger.info("login user {}, schedule offline, project name: {}, process definition id: {}", |
||||
loginUser.getUserName(), projectName, id); |
||||
|
||||
try { |
||||
Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE); |
||||
return returnDataList(result); |
||||
|
||||
}catch (Exception e){ |
||||
logger.error(OFFLINE_SCHEDULE_ERROR.getMsg(),e); |
||||
return error(Status.OFFLINE_SCHEDULE_ERROR.getCode(), Status.OFFLINE_SCHEDULE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query schedule list paging |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processDefinitionId |
||||
* @return |
||||
*/ |
||||
@GetMapping("/list-paging") |
||||
public Result querySchedule(@RequestAttribute(value = SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam Integer processDefinitionId, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam("pageSize") Integer pageSize) { |
||||
logger.info("login user {}, query schedule, project name: {}, process definition id: {}", |
||||
loginUser.getUserName(), projectName, processDefinitionId); |
||||
try { |
||||
Map<String, Object> result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
|
||||
} |
||||
|
||||
/** |
||||
* query schedule list |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @return |
||||
*/ |
||||
@PostMapping("/list") |
||||
public Result queryScheduleList(@RequestAttribute(value = SESSION_USER) User loginUser, |
||||
@PathVariable String projectName) { |
||||
try{ |
||||
logger.info("login user {}, query schedule list, project name: {}", |
||||
loginUser.getUserName(), projectName); |
||||
Map<String, Object> result = schedulerService.queryScheduleList(loginUser, projectName); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_SCHEDULE_LIST_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_SCHEDULE_LIST_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_ERROR.getMsg()); |
||||
} |
||||
} |
||||
} |
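
For reference, a hedged sketch (not part of this commit) of the "schedule" request parameter consumed by the create and update endpoints above. Its JSON shape mirrors the ScheduleParam DTO (startTime, endTime, crontab) added later in this change set, which presumably backs this parameter; the "yyyy-MM-dd HH:mm:ss" date format and the seven-field Quartz crontab syntax are assumptions.

// Hedged sketch: composing the "schedule" form parameter for
// POST /projects/{projectName}/schedule/create.
public class ScheduleParamExample {

    public static void main(String[] args) {
        String schedule = "{"
                + "\"startTime\":\"2019-01-01 00:00:00\","   // assumed date format
                + "\"endTime\":\"2019-12-31 00:00:00\","
                + "\"crontab\":\"0 0 1 * * ? *\""            // every day at 01:00 (Quartz cron)
                + "}";

        // This string would be sent as the "schedule" parameter together with
        // processDefinitionId, warningType, failureStrategy, and the other form fields.
        System.out.println(schedule);
    }
}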
@ -0,0 +1,85 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.service.ServerService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.LIST_MASTERS_ERROR; |
||||
import static cn.escheduler.api.enums.Status.LIST_WORKERS_ERROR; |
||||
|
||||
/** |
||||
* server controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("process") |
||||
public class ServerController extends BaseController{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ServerController.class); |
||||
|
||||
@Autowired |
||||
private ServerService serverService; |
||||
|
||||
/** |
||||
* master list |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/master/list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result listMaster(@RequestAttribute(value = Constants.SESSION_USER) User loginUser) { |
||||
logger.info("login user: {}, query all master", loginUser.getUserName()); |
||||
try{ |
||||
logger.info("list master, user:{}", loginUser.getUserName()); |
||||
Map<String, Object> result = serverService.queryMaster(loginUser); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(LIST_MASTERS_ERROR.getMsg(),e); |
||||
return error(LIST_MASTERS_ERROR.getCode(), |
||||
LIST_MASTERS_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* worker list |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/worker/list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result listWorker(@RequestAttribute(value = Constants.SESSION_USER) User loginUser) { |
||||
logger.info("login user: {}, query all workers", loginUser.getUserName()); |
||||
try{ |
||||
Map<String, Object> result = serverService.queryWorker(loginUser); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(LIST_WORKERS_ERROR.getMsg(),e); |
||||
return error(LIST_WORKERS_ERROR.getCode(), |
||||
LIST_WORKERS_ERROR.getMsg()); |
||||
} |
||||
} |
||||
} |
@ -0,0 +1,81 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.service.TaskInstanceService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.ExecutionStatus; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.QUERY_TASK_LIST_PAGING_ERROR; |
||||
|
||||
/** |
||||
* task instance controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("/projects/{projectName}/task-instance") |
||||
public class TaskInstanceController extends BaseController{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TaskInstanceController.class); |
||||
|
||||
@Autowired |
||||
TaskInstanceService taskInstanceService; |
||||
|
||||
|
||||
/** |
||||
* query task list paging |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping("/list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryTaskListPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@PathVariable String projectName, |
||||
@RequestParam(value = "processInstanceId", required = false, defaultValue = "0") Integer processInstanceId, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam(value = "taskName", required = false) String taskName, |
||||
@RequestParam(value = "stateType", required = false) ExecutionStatus stateType, |
||||
@RequestParam(value = "host", required = false) String host, |
||||
@RequestParam(value = "startDate", required = false) String startTime, |
||||
@RequestParam(value = "endDate", required = false) String endTime, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam("pageSize") Integer pageSize){ |
||||
|
||||
try{ |
||||
logger.info("query task instance list, project name:{},process instance:{}, search value:{},task name:{}, state type:{}, host:{}, start:{}, end:{}", |
||||
projectName, processInstanceId, searchVal, taskName, stateType, host, startTime, endTime); |
||||
Map<String, Object> result = taskInstanceService.queryTaskListPaging( |
||||
loginUser, projectName, processInstanceId, taskName, startTime, endTime, searchVal, stateType, host, pageNo, pageSize); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_TASK_LIST_PAGING_ERROR.getMsg(),e); |
||||
return error(QUERY_TASK_LIST_PAGING_ERROR.getCode(), QUERY_TASK_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
|
||||
} |
||||
|
||||
} |
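
A hedged sketch (not part of this commit) of how the list-paging query above could be assembled. Only pageNo and pageSize are required; the host/port, base path, "yyyy-MM-dd HH:mm:ss" date format, and the "SUCCESS" ExecutionStatus name are assumptions.

// Hedged sketch: building the query string for GET /projects/{projectName}/task-instance/list-paging.
import org.springframework.web.util.UriComponentsBuilder;

public class TaskInstanceQueryExample {

    public static void main(String[] args) {
        String url = UriComponentsBuilder
                .fromHttpUrl("http://localhost:12345/escheduler/projects/demo-project/task-instance/list-paging")
                .queryParam("pageNo", 1)                          // required
                .queryParam("pageSize", 10)                       // required
                .queryParam("searchVal", "etl")                   // optional fuzzy filter
                .queryParam("stateType", "SUCCESS")               // optional ExecutionStatus name (assumed constant)
                .queryParam("startDate", "2019-01-01 00:00:00")   // optional, assumed date format
                .toUriString();
        System.out.println(url);
        // Issue the request with any HTTP client that carries the login session,
        // e.g. RestTemplate plus the session cookie used elsewhere in this API.
    }
}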
@ -0,0 +1,80 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.service.TaskRecordService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.QUERY_TASK_RECORD_LIST_PAGING_ERROR; |
||||
|
||||
/** |
||||
* task record controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("/projects/task-record") |
||||
public class TaskRecordController extends BaseController{ |
||||
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TaskRecordController.class); |
||||
|
||||
|
||||
@Autowired |
||||
TaskRecordService taskRecordService; |
||||
|
||||
/** |
||||
* query task record list paging |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping("/list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryTaskRecordListPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "taskName", required = false) String taskName, |
||||
@RequestParam(value = "state", required = false) String state, |
||||
@RequestParam(value = "sourceTable", required = false) String sourceTable, |
||||
@RequestParam(value = "destTable", required = false) String destTable, |
||||
@RequestParam(value = "taskDate", required = false) String taskDate, |
||||
@RequestParam(value = "startDate", required = false) String startTime, |
||||
@RequestParam(value = "endDate", required = false) String endTime, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam("pageSize") Integer pageSize |
||||
){ |
||||
|
||||
try{ |
||||
logger.info("query task record list, task name:{}, state :{}, taskDate: {}, start:{}, end:{}", |
||||
taskName, state, taskDate, startTime, endTime); |
||||
Map<String, Object> result = taskRecordService.queryTaskRecordListPaging(taskName, startTime, taskDate, sourceTable, destTable, endTime,state, pageNo, pageSize); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg(),e); |
||||
return error(QUERY_TASK_RECORD_LIST_PAGING_ERROR.getCode(), QUERY_TASK_RECORD_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
|
||||
} |
||||
|
||||
} |
@ -0,0 +1,206 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.service.TenantService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.*; |
||||
|
||||
|
||||
/** |
||||
* tenant controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("/tenant") |
||||
public class TenantController extends BaseController{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TenantController.class); |
||||
|
||||
|
||||
@Autowired |
||||
private TenantService tenantService; |
||||
|
||||
/** |
||||
* create tenant |
||||
* |
||||
* @param loginUser |
||||
* @param tenantCode |
||||
* @param tenantName |
||||
* @param queueId |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/create") |
||||
@ResponseStatus(HttpStatus.CREATED) |
||||
public Result createTenant(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "tenantCode") String tenantCode, |
||||
@RequestParam(value = "tenantName") String tenantName, |
||||
@RequestParam(value = "queueId") int queueId, |
||||
@RequestParam(value = "desc",required = false) String desc) { |
||||
logger.info("login user {}, create tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}", |
||||
loginUser.getUserName(), tenantCode, tenantName, queueId,desc); |
||||
try { |
||||
Map<String, Object> result = tenantService.createTenant(loginUser,tenantCode,tenantName,queueId,desc); |
||||
return returnDataList(result); |
||||
|
||||
}catch (Exception e){ |
||||
logger.error(CREATE_TENANT_ERROR.getMsg(),e); |
||||
return error(CREATE_TENANT_ERROR.getCode(), CREATE_TENANT_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query tenant list paging |
||||
* |
||||
* @param loginUser |
||||
* @param pageNo |
||||
* @param searchVal |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryTenantlistPaging(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam("pageSize") Integer pageSize){ |
||||
logger.info("login user {}, list paging, pageNo: {}, searchVal: {}, pageSize: {}", |
||||
loginUser.getUserName(),pageNo,searchVal,pageSize); |
||||
try{ |
||||
Map<String, Object> result = checkPageParams(pageNo, pageSize); |
||||
if(result.get(Constants.STATUS) != Status.SUCCESS){ |
||||
return returnDataListPaging(result); |
||||
} |
||||
result = tenantService.queryTenantList(loginUser, searchVal, pageNo, pageSize); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_TENANT_LIST_PAGING_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_TENANT_LIST_PAGING_ERROR.getCode(), Status.QUERY_TENANT_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* tenant list |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryTenantlist(@RequestAttribute(value = Constants.SESSION_USER) User loginUser){ |
||||
logger.info("login user {}, query tenant list"); |
||||
try{ |
||||
Map<String, Object> result = tenantService.queryTenantList(loginUser); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_TENANT_LIST_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_TENANT_LIST_ERROR.getCode(), Status.QUERY_TENANT_LIST_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
|
||||
/** |
||||
* update tenant |
||||
* |
||||
* @param loginUser |
||||
* @param tenantCode |
||||
* @param tenantName |
||||
* @param queueId |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/update") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result updateTenant(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "id") int id, |
||||
@RequestParam(value = "tenantCode") String tenantCode, |
||||
@RequestParam(value = "tenantName") String tenantName, |
||||
@RequestParam(value = "queueId") int queueId, |
||||
@RequestParam(value = "desc",required = false) String desc) { |
||||
logger.info("login user {}, updateProcessInstance tenant, tenantCode: {}, tenantName: {}, queueId: {}, desc: {}", |
||||
loginUser.getUserName(), tenantCode, tenantName, queueId,desc); |
||||
try { |
||||
Map<String, Object> result = tenantService.updateTenant(loginUser,id,tenantCode, tenantName, queueId, desc); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(UPDATE_TENANT_ERROR.getMsg(),e); |
||||
return error(Status.UPDATE_TENANT_ERROR.getCode(),UPDATE_TENANT_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* delete tenant by id |
||||
* |
||||
* @param loginUser |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/delete") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result deleteTenantById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "id") int id) { |
||||
logger.info("login user {}, delete tenant, tenantCode: {},", loginUser.getUserName(), id); |
||||
try { |
||||
Map<String, Object> result = tenantService.deleteTenantById(loginUser,id); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(DELETE_TENANT_BY_ID_ERROR.getMsg(),e); |
||||
return error(Status.DELETE_TENANT_BY_ID_ERROR.getCode(), Status.DELETE_TENANT_BY_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* verify tenant code |
||||
* |
||||
* @param loginUser |
||||
* @param tenantCode |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/verify-tenant-code") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result verifyTenantCode(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="tenantCode") String tenantCode |
||||
) { |
||||
|
||||
try{ |
||||
logger.info("login user {}, verfiy tenant code: {}", |
||||
loginUser.getUserName(),tenantCode); |
||||
return tenantService.verifyTenantCode(tenantCode); |
||||
}catch (Exception e){ |
||||
logger.error(VERIFY_TENANT_CODE_ERROR.getMsg(),e); |
||||
return error(Status.VERIFY_TENANT_CODE_ERROR.getCode(), Status.VERIFY_TENANT_CODE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
} |
@ -0,0 +1,364 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.service.UsersService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.http.HttpStatus; |
||||
import org.springframework.web.bind.annotation.*; |
||||
|
||||
import java.util.Map; |
||||
|
||||
import static cn.escheduler.api.enums.Status.*; |
||||
|
||||
|
||||
/** |
||||
* user controller |
||||
*/ |
||||
@RestController |
||||
@RequestMapping("/users") |
||||
public class UsersController extends BaseController{ |
||||
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(UsersController.class); |
||||
|
||||
|
||||
@Autowired |
||||
private UsersService usersService; |
||||
|
||||
/** |
||||
* create user |
||||
* |
||||
* @param loginUser |
||||
* @param userName |
||||
* @param userPassword |
||||
* @param email |
||||
* @param tenantId |
||||
* @param phone |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/create") |
||||
@ResponseStatus(HttpStatus.CREATED) |
||||
public Result createUser(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "userName") String userName, |
||||
@RequestParam(value = "userPassword") String userPassword, |
||||
@RequestParam(value = "tenantId") int tenantId, |
||||
@RequestParam(value = "email") String email, |
||||
@RequestParam(value = "phone", required = false) String phone) { |
||||
logger.info("login user {}, create user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, proxyUsers: {}", |
||||
loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone); |
||||
|
||||
try { |
||||
Map<String, Object> result = usersService.createUser(loginUser, userName, userPassword, email, tenantId, phone); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(CREATE_USER_ERROR.getMsg(),e); |
||||
return error(CREATE_USER_ERROR.getCode(), CREATE_USER_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* query user list paging |
||||
* |
||||
* @param loginUser |
||||
* @param pageNo |
||||
* @param searchVal |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list-paging") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result queryUserList(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("pageNo") Integer pageNo, |
||||
@RequestParam(value = "searchVal", required = false) String searchVal, |
||||
@RequestParam("pageSize") Integer pageSize){ |
||||
logger.info("login user {}, list user paging, pageNo: {}, searchVal: {}, pageSize: {}", |
||||
loginUser.getUserName(),pageNo,searchVal,pageSize); |
||||
try{ |
||||
Map<String, Object> result = checkPageParams(pageNo, pageSize); |
||||
if(result.get(Constants.STATUS) != Status.SUCCESS){ |
||||
return returnDataListPaging(result); |
||||
} |
||||
result = usersService.queryUserList(loginUser, searchVal, pageNo, pageSize); |
||||
return returnDataListPaging(result); |
||||
}catch (Exception e){ |
||||
logger.error(QUERY_USER_LIST_PAGING_ERROR.getMsg(),e); |
||||
return error(Status.QUERY_USER_LIST_PAGING_ERROR.getCode(), Status.QUERY_USER_LIST_PAGING_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* update user |
||||
* |
||||
* @param loginUser |
||||
* @param id |
||||
* @param userName |
||||
* @param userPassword |
||||
* @param email |
||||
* @param tenantId |
||||
* @param phone |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/update") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result updateUser(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "id") int id, |
||||
@RequestParam(value = "userName") String userName, |
||||
@RequestParam(value = "userPassword") String userPassword, |
||||
@RequestParam(value = "email") String email, |
||||
@RequestParam(value = "tenantId") int tenantId, |
||||
@RequestParam(value = "phone", required = false) String phone) { |
||||
logger.info("login user {}, updateProcessInstance user, userName: {}, email: {}, tenantId: {}, userPassword: {}, phone: {}, proxyUsers: {}", |
||||
loginUser.getUserName(), userName, email, tenantId, Constants.PASSWORD_DEFAULT, phone); |
||||
try { |
||||
Map<String, Object> result = usersService.updateUser(id,userName,userPassword,email,tenantId,phone); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(UPDATE_USER_ERROR.getMsg(),e); |
||||
return error(Status.UPDATE_USER_ERROR.getCode(), Status.UPDATE_USER_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* delete user by id |
||||
* @param loginUser |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/delete") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result delUserById(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "id") int id) { |
||||
logger.info("login user {}, delete user, userId: {},", loginUser.getUserName(), id); |
||||
try { |
||||
Map<String, Object> result = usersService.deleteUserById(loginUser, id); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(DELETE_USER_BY_ID_ERROR.getMsg(),e); |
||||
return error(Status.DELETE_USER_BY_ID_ERROR.getCode(), Status.DELETE_USER_BY_ID_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* grant project |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/grant-project") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result grantProject(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "userId") int userId, |
||||
@RequestParam(value = "projectIds") String projectIds) { |
||||
logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(), userId,projectIds); |
||||
try { |
||||
Map<String, Object> result = usersService.grantProject(loginUser, userId, projectIds); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(GRANT_PROJECT_ERROR.getMsg(),e); |
||||
return error(Status.GRANT_PROJECT_ERROR.getCode(), Status.GRANT_PROJECT_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* grant resource |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/grant-file") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result grantResource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "userId") int userId, |
||||
@RequestParam(value = "resourceIds") String resourceIds) { |
||||
logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,resourceIds); |
||||
try { |
||||
Map<String, Object> result = usersService.grantResources(loginUser, userId, resourceIds); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(GRANT_RESOURCE_ERROR.getMsg(),e); |
||||
return error(Status.GRANT_RESOURCE_ERROR.getCode(), Status.GRANT_RESOURCE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* grant udf function |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/grant-udf-func") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result grantUDFFunc(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "userId") int userId, |
||||
@RequestParam(value = "udfIds") String udfIds) { |
||||
logger.info("login user {}, grant project, userId: {},resourceIds : {}", loginUser.getUserName(), userId,udfIds); |
||||
try { |
||||
Map<String, Object> result = usersService.grantUDFFunction(loginUser, userId, udfIds); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(GRANT_UDF_FUNCTION_ERROR.getMsg(),e); |
||||
return error(Status.GRANT_UDF_FUNCTION_ERROR.getCode(), Status.GRANT_UDF_FUNCTION_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
|
||||
/** |
||||
* grant datasource |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
@PostMapping(value = "/grant-datasource") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result grantDataSource(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value = "userId") int userId, |
||||
@RequestParam(value = "datasourceIds") String datasourceIds) { |
||||
logger.info("login user {}, grant project, userId: {},projectIds : {}", loginUser.getUserName(),userId,datasourceIds); |
||||
try { |
||||
Map<String, Object> result = usersService.grantDataSource(loginUser, userId, datasourceIds); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(GRANT_DATASOURCE_ERROR.getMsg(),e); |
||||
return error(Status.GRANT_DATASOURCE_ERROR.getCode(), Status.GRANT_DATASOURCE_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* get user info |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/get-user-info") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result getUserInfo(@RequestAttribute(value = Constants.SESSION_USER) User loginUser){ |
||||
logger.info("login user {},get user info : {}", loginUser.getUserName()); |
||||
try{ |
||||
Map<String, Object> result = usersService.getUserInfo(loginUser); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(GET_USER_INFO_ERROR.getMsg(),e); |
||||
return error(Status.GET_USER_INFO_ERROR.getCode(), Status.GET_USER_INFO_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* user list no paging |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
@GetMapping(value="/list") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result listUser(@RequestAttribute(value = Constants.SESSION_USER) User loginUser){ |
||||
logger.info("login user {}, user list"); |
||||
try{ |
||||
Map<String, Object> result = usersService.queryUserList(loginUser); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(USER_LIST_ERROR.getMsg(),e); |
||||
return error(Status.USER_LIST_ERROR.getCode(), Status.USER_LIST_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* verify username |
||||
* |
||||
* @param loginUser |
||||
* @param userName |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/verify-user-name") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result verifyUserName(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam(value ="userName") String userName |
||||
) { |
||||
try{ |
||||
|
||||
logger.info("login user {}, verfiy user name: {}", |
||||
loginUser.getUserName(),userName); |
||||
return usersService.verifyUserName(userName); |
||||
}catch (Exception e){ |
||||
logger.error(VERIFY_USERNAME_ERROR.getMsg(),e); |
||||
return error(Status.VERIFY_USERNAME_ERROR.getCode(), Status.VERIFY_USERNAME_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* unauthorized user |
||||
* |
||||
* @param loginUser |
||||
* @param alertgroupId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/unauth-user") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result unauthorizedUser(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("alertgroupId") Integer alertgroupId) { |
||||
try{ |
||||
logger.info("unauthorized user, login user:{}, alert group id:{}", |
||||
loginUser.getUserName(), alertgroupId); |
||||
Map<String, Object> result = usersService.unauthorizedUser(loginUser, alertgroupId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(UNAUTHORIZED_USER_ERROR.getMsg(),e); |
||||
return error(Status.UNAUTHORIZED_USER_ERROR.getCode(), Status.UNAUTHORIZED_USER_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* authorized user |
||||
* |
||||
* @param loginUser |
||||
* @param alertgroupId |
||||
* @return |
||||
*/ |
||||
@GetMapping(value = "/authed-user") |
||||
@ResponseStatus(HttpStatus.OK) |
||||
public Result authorizedUser(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, |
||||
@RequestParam("alertgroupId") Integer alertgroupId) { |
||||
try{ |
||||
logger.info("authorized user , login user:{}, alert group id:{}", |
||||
loginUser.getUserName(), alertgroupId); |
||||
Map<String, Object> result = usersService.authorizedUser(loginUser, alertgroupId); |
||||
return returnDataList(result); |
||||
}catch (Exception e){ |
||||
logger.error(AUTHORIZED_USER_ERROR.getMsg(),e); |
||||
return error(Status.AUTHORIZED_USER_ERROR.getCode(), Status.AUTHORIZED_USER_ERROR.getMsg()); |
||||
} |
||||
} |
||||
|
||||
|
||||
} |
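
Because every endpoint here resolves the session user from a request attribute, the controller can be exercised in isolation with a standalone MockMvc setup, as in the hedged test sketch below (not part of this commit). The no-arg User constructor, its setUserName setter, and the comma-separated projectIds format are assumptions about code not shown in this diff.

// Hedged test sketch for POST /users/grant-project using a mocked UsersService.
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.service.UsersService;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.dao.model.User;
import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;

import java.util.HashMap;
import java.util.Map;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

public class UsersControllerSketchTest {

    @Test
    public void grantProjectReturnsOk() throws Exception {
        UsersService usersService = Mockito.mock(UsersService.class);
        Map<String, Object> success = new HashMap<>();
        success.put(Constants.STATUS, Status.SUCCESS);
        Mockito.when(usersService.grantProject(Mockito.any(User.class), Mockito.anyInt(), Mockito.anyString()))
                .thenReturn(success);

        UsersController controller = new UsersController();
        ReflectionTestUtils.setField(controller, "usersService", usersService);
        MockMvc mockMvc = MockMvcBuilders.standaloneSetup(controller).build();

        User loginUser = new User();          // assumed no-arg constructor
        loginUser.setUserName("admin");       // assumed setter on the dao model

        mockMvc.perform(post("/users/grant-project")
                        .requestAttr(Constants.SESSION_USER, loginUser)
                        .param("userId", "2")
                        .param("projectIds", "1,3,5"))   // assumed comma-separated project ids
                .andExpect(status().isOk());
    }
}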
@ -0,0 +1,55 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.dto; |
||||
|
||||
import cn.escheduler.dao.model.DefinitionGroupByUser; |
||||
|
||||
import java.util.List; |
||||
|
||||
/**
 * process definition count grouped by user
 */
||||
public class DefineUserDto { |
||||
|
||||
private int count; |
||||
|
||||
private List<DefinitionGroupByUser> userList; |
||||
|
||||
public DefineUserDto(List<DefinitionGroupByUser> defineGroupByUsers) { |
||||
|
||||
for(DefinitionGroupByUser define : defineGroupByUsers){ |
||||
count += define.getCount(); |
||||
} |
||||
this.userList = defineGroupByUsers; |
||||
} |
||||
|
||||
public int getCount() { |
||||
return count; |
||||
} |
||||
|
||||
public void setCount(int count) { |
||||
this.count = count; |
||||
} |
||||
|
||||
public List<DefinitionGroupByUser> getUserList() { |
||||
return userList; |
||||
} |
||||
|
||||
public void setUserList(List<DefinitionGroupByUser> userList) { |
||||
this.userList = userList; |
||||
} |
||||
} |
@ -0,0 +1,72 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.dto; |
||||
|
||||
import java.util.Date; |
||||
|
||||
/** |
||||
* schedule parameters |
||||
* 调度参数 |
||||
*/ |
||||
public class ScheduleParam { |
||||
private Date startTime; |
||||
private Date endTime; |
||||
private String crontab; |
||||
|
||||
public ScheduleParam() { |
||||
} |
||||
|
||||
public ScheduleParam(Date startTime, Date endTime, String crontab) { |
||||
this.startTime = startTime; |
||||
this.endTime = endTime; |
||||
this.crontab = crontab; |
||||
} |
||||
|
||||
public Date getStartTime() { |
||||
return startTime; |
||||
} |
||||
|
||||
public void setStartTime(Date startTime) { |
||||
this.startTime = startTime; |
||||
} |
||||
|
||||
public Date getEndTime() { |
||||
return endTime; |
||||
} |
||||
|
||||
public void setEndTime(Date endTime) { |
||||
this.endTime = endTime; |
||||
} |
||||
|
||||
public String getCrontab() { |
||||
return crontab; |
||||
} |
||||
|
||||
public void setCrontab(String crontab) { |
||||
this.crontab = crontab; |
||||
} |
||||
|
||||
|
||||
@Override |
||||
public String toString() { |
||||
return "ScheduleParam{" + |
||||
"startTime=" + startTime + |
||||
", endTime=" + endTime + |
||||
", crontab='" + crontab + '\'' + |
||||
'}'; |
||||
} |
||||
} |
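A ScheduleParam is normally filled in by deserializing the JSON schedule definition sent from the front end. A minimal sketch of that binding, assuming Jackson is on the classpath and that dates arrive as "yyyy-MM-dd HH:mm:ss" (both assumptions; the actual parsing code lives outside this file):

import cn.escheduler.api.dto.ScheduleParam;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.text.SimpleDateFormat;

public class ScheduleParamExample {
    public static void main(String[] args) throws Exception {
        // hypothetical payload; the field names match the properties of ScheduleParam above
        String json = "{\"startTime\":\"2019-01-01 00:00:00\","
                + "\"endTime\":\"2019-12-31 00:00:00\","
                + "\"crontab\":\"0 0 1 * * ? *\"}";

        ObjectMapper mapper = new ObjectMapper();
        // assumed date format -- adjust to whatever the API actually sends
        mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));

        ScheduleParam param = mapper.readValue(json, ScheduleParam.class);
        System.out.println(param);  // ScheduleParam{startTime=..., endTime=..., crontab='0 0 1 * * ? *'}
    }
}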
@ -0,0 +1,135 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.dto; |
||||
|
||||
import cn.escheduler.common.enums.ExecutionStatus; |
||||
import cn.escheduler.dao.model.ExecuteStatusCount; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
/** |
||||
* task count dto |
||||
*/ |
||||
public class TaskCountDto { |
||||
|
||||
/** |
||||
* total count |
||||
*/ |
||||
private int totalCount; |
||||
|
||||
/**
 * task state count list
 */
||||
private List<TaskStateCount> taskCountDtos; |
||||
|
||||
|
||||
public TaskCountDto(List<ExecuteStatusCount> taskInstanceStateCounts) { |
||||
countTaskDtos(taskInstanceStateCounts); |
||||
} |
||||
|
||||
private void countTaskDtos(List<ExecuteStatusCount> taskInstanceStateCounts){ |
||||
int submitted_success = 0; |
||||
int running_exeution = 0; |
||||
int ready_pause = 0; |
||||
int pause = 0; |
||||
int ready_stop = 0; |
||||
int stop = 0; |
||||
int failure = 0; |
||||
int success = 0; |
||||
int need_fault_tolerance = 0; |
||||
int kill = 0; |
||||
int waitting_thread = 0; |
||||
int waitting_depend = 0; |
||||
|
||||
for(ExecuteStatusCount taskInstanceStateCount : taskInstanceStateCounts){ |
||||
ExecutionStatus status = taskInstanceStateCount.getExecutionStatus(); |
||||
totalCount += taskInstanceStateCount.getCount(); |
||||
switch (status){ |
||||
case SUBMITTED_SUCCESS: |
||||
submitted_success += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case RUNNING_EXEUTION: |
||||
running_exeution += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case READY_PAUSE: |
||||
ready_pause += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case PAUSE: |
||||
pause += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case READY_STOP: |
||||
ready_stop += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case STOP: |
||||
stop += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case FAILURE: |
||||
failure += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case SUCCESS: |
||||
success += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case NEED_FAULT_TOLERANCE: |
||||
need_fault_tolerance += taskInstanceStateCount.getCount();
||||
break; |
||||
case KILL: |
||||
kill += taskInstanceStateCount.getCount(); |
||||
break; |
||||
case WAITTING_THREAD: |
||||
waitting_thread += taskInstanceStateCount.getCount();
||||
break; |
||||
case WAITTING_DEPEND: |
||||
waitting_depend += taskInstanceStateCount.getCount();
||||
break; |
||||
|
||||
default: |
||||
break; |
||||
} |
||||
} |
||||
this.taskCountDtos = new ArrayList<>(); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUBMITTED_SUCCESS, submitted_success)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXEUTION, running_exeution)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_PAUSE, ready_pause)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.PAUSE, pause)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_STOP, ready_stop)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.STOP, stop)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.FAILURE, failure)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUCCESS, success)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.NEED_FAULT_TOLERANCE, need_fault_tolerance)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.KILL, kill)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_THREAD, waitting_thread)); |
||||
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_DEPEND, waitting_depend)); |
||||
} |
||||
|
||||
|
||||
public List<TaskStateCount> getTaskCountDtos(){ |
||||
return taskCountDtos; |
||||
} |
||||
|
||||
public void setTaskCountDtos(List<TaskStateCount> taskCountDtos) { |
||||
this.taskCountDtos = taskCountDtos; |
||||
} |
||||
|
||||
public int getTotalCount() { |
||||
return totalCount; |
||||
} |
||||
|
||||
public void setTotalCount(int totalCount) { |
||||
this.totalCount = totalCount; |
||||
} |
||||
} |
@ -0,0 +1,50 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.dto; |
||||
|
||||
import cn.escheduler.common.enums.ExecutionStatus; |
||||
|
||||
/** |
||||
* task state count |
||||
*/ |
||||
public class TaskStateCount { |
||||
|
||||
private int count; |
||||
private ExecutionStatus taskStateType; |
||||
|
||||
public TaskStateCount(ExecutionStatus taskStateType, int count) { |
||||
this.taskStateType = taskStateType; |
||||
this.count = count; |
||||
} |
||||
|
||||
|
||||
public int getCount() { |
||||
return count; |
||||
} |
||||
|
||||
public void setCount(int count) { |
||||
this.count = count; |
||||
} |
||||
|
||||
public ExecutionStatus getTaskStateType() { |
||||
return taskStateType; |
||||
} |
||||
|
||||
public void setTaskStateType(ExecutionStatus taskStateType) { |
||||
this.taskStateType = taskStateType; |
||||
} |
||||
} |
@ -0,0 +1,103 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.dto.gantt; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* gantt DTO |
||||
* 甘特图 DTO |
||||
*/ |
||||
public class GanttDto { |
||||
|
||||
/** |
||||
* height |
||||
* 高度 |
||||
*/ |
||||
private int height; |
||||
|
||||
/** |
||||
* tasks list |
||||
* 任务集合 |
||||
*/ |
||||
private List<Task> tasks = new ArrayList<>(); |
||||
|
||||
/** |
||||
* task name list |
||||
* 任务名称 |
||||
*/ |
||||
private List<String> taskNames; |
||||
|
||||
/** |
||||
* task status map |
||||
* 任务状态 |
||||
*/ |
||||
private Map<String,String> taskStatus; |
||||
|
||||
|
||||
public GanttDto(){ |
||||
this.taskStatus = new HashMap<>(); |
||||
taskStatus.put("success","success"); |
||||
} |
||||
public GanttDto(int height, List<Task> tasks, List<String> taskNames){ |
||||
this(); |
||||
this.height = height; |
||||
this.tasks = tasks; |
||||
this.taskNames = taskNames;
||||
} |
||||
public GanttDto(int height, List<Task> tasks, List<String> taskNames, Map<String, String> taskStatus) { |
||||
this.height = height; |
||||
this.tasks = tasks; |
||||
this.taskNames = taskNames; |
||||
this.taskStatus = taskStatus; |
||||
} |
||||
|
||||
public int getHeight() { |
||||
return height; |
||||
} |
||||
|
||||
public void setHeight(int height) { |
||||
this.height = height; |
||||
} |
||||
|
||||
public List<Task> getTasks() { |
||||
return tasks; |
||||
} |
||||
|
||||
public void setTasks(List<Task> tasks) { |
||||
this.tasks = tasks; |
||||
} |
||||
|
||||
public List<String> getTaskNames() { |
||||
return taskNames; |
||||
} |
||||
|
||||
public void setTaskNames(List<String> taskNames) { |
||||
this.taskNames = taskNames; |
||||
} |
||||
|
||||
public Map<String, String> getTaskStatus() { |
||||
return taskStatus; |
||||
} |
||||
|
||||
public void setTaskStatus(Map<String, String> taskStatus) { |
||||
this.taskStatus = taskStatus; |
||||
} |
||||
} |
@ -0,0 +1,138 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.dto.gantt; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.Date; |
||||
import java.util.List; |
||||
|
||||
/** |
||||
* Task |
||||
* 任务 |
||||
*/ |
||||
public class Task { |
||||
/** |
||||
* task name |
||||
* 任务名称 |
||||
*/ |
||||
private String taskName; |
||||
|
||||
/** |
||||
* task start date |
||||
* 任务开始时间 |
||||
*/ |
||||
private List<Long> startDate = new ArrayList<>(); |
||||
/** |
||||
* task end date |
||||
* 任务结束时间 |
||||
*/ |
||||
private List<Long> endDate = new ArrayList<>(); |
||||
|
||||
/** |
||||
* task execution date |
||||
* 任务执行时间 |
||||
*/ |
||||
private Date executionDate; |
||||
|
||||
/** |
||||
* task iso start |
||||
* 任务开始时间 |
||||
*/ |
||||
private Date isoStart; |
||||
|
||||
/** |
||||
* task iso end |
||||
* 任务结束时间 |
||||
*/ |
||||
private Date isoEnd; |
||||
|
||||
/** |
||||
* task status |
||||
* 执行状态 |
||||
*/ |
||||
private String status; |
||||
|
||||
/** |
||||
* task duration |
||||
* 运行时长 |
||||
*/ |
||||
private String duration; |
||||
|
||||
public String getTaskName() { |
||||
return taskName; |
||||
} |
||||
|
||||
public void setTaskName(String taskName) { |
||||
this.taskName = taskName; |
||||
} |
||||
|
||||
public List<Long> getStartDate() { |
||||
return startDate; |
||||
} |
||||
|
||||
public void setStartDate(List<Long> startDate) { |
||||
this.startDate = startDate; |
||||
} |
||||
|
||||
public List<Long> getEndDate() { |
||||
return endDate; |
||||
} |
||||
|
||||
public void setEndDate(List<Long> endDate) { |
||||
this.endDate = endDate; |
||||
} |
||||
|
||||
public Date getExecutionDate() { |
||||
return executionDate; |
||||
} |
||||
|
||||
public void setExecutionDate(Date executionDate) { |
||||
this.executionDate = executionDate; |
||||
} |
||||
|
||||
public Date getIsoStart() { |
||||
return isoStart; |
||||
} |
||||
|
||||
public void setIsoStart(Date isoStart) { |
||||
this.isoStart = isoStart; |
||||
} |
||||
|
||||
public Date getIsoEnd() { |
||||
return isoEnd; |
||||
} |
||||
|
||||
public void setIsoEnd(Date isoEnd) { |
||||
this.isoEnd = isoEnd; |
||||
} |
||||
|
||||
public String getStatus() { |
||||
return status; |
||||
} |
||||
|
||||
public void setStatus(String status) { |
||||
this.status = status; |
||||
} |
||||
|
||||
public String getDuration() { |
||||
return duration; |
||||
} |
||||
|
||||
public void setDuration(String duration) { |
||||
this.duration = duration; |
||||
} |
||||
} |
@ -0,0 +1,171 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.dto.treeview; |
||||
|
||||
import java.util.Date; |
||||
|
||||
/** |
||||
* Instance |
||||
*/ |
||||
public class Instance { |
||||
|
||||
private int id; |
||||
/** |
||||
* node name |
||||
* 节点名称 |
||||
*/ |
||||
private String name; |
||||
|
||||
/** |
||||
* node type |
||||
* 节点类型 |
||||
*/ |
||||
private String type; |
||||
|
||||
/** |
||||
* node status |
||||
* 状态 |
||||
*/ |
||||
private String state; |
||||
|
||||
/** |
||||
* node start time |
||||
* 开始时间 |
||||
*/ |
||||
private Date startTime; |
||||
|
||||
/** |
||||
* node end time |
||||
* 结束时间 |
||||
*/ |
||||
private Date endTime; |
||||
|
||||
|
||||
|
||||
/** |
||||
* node running on which host |
||||
* 运行机器 |
||||
*/ |
||||
private String host; |
||||
|
||||
/** |
||||
* node duration |
||||
* 运行时长 |
||||
*/ |
||||
private String duration; |
||||
|
||||
private int subflowId; |
||||
|
||||
|
||||
public Instance(){} |
||||
|
||||
public Instance(int id,String name, String type){ |
||||
this.id = id; |
||||
this.name = name; |
||||
this.type = type; |
||||
} |
||||
|
||||
public Instance(int id,String name, String type,String state,Date startTime, Date endTime, String host, String duration,int subflowId) { |
||||
this.id = id; |
||||
this.name = name; |
||||
this.type = type; |
||||
this.state = state; |
||||
this.startTime = startTime; |
||||
this.endTime = endTime; |
||||
this.host = host; |
||||
this.duration = duration; |
||||
this.subflowId = subflowId; |
||||
} |
||||
|
||||
public Instance(int id,String name, String type,String state,Date startTime, Date endTime, String host, String duration) { |
||||
this(id, name, type, state, startTime, endTime,host,duration,0); |
||||
} |
||||
|
||||
|
||||
|
||||
public int getId() { |
||||
return id; |
||||
} |
||||
|
||||
public void setId(int id) { |
||||
this.id = id; |
||||
} |
||||
|
||||
public String getName() { |
||||
return name; |
||||
} |
||||
|
||||
public void setName(String name) { |
||||
this.name = name; |
||||
} |
||||
|
||||
public String getType() { |
||||
return type; |
||||
} |
||||
|
||||
public void setType(String type) { |
||||
this.type = type; |
||||
} |
||||
|
||||
public String getState() { |
||||
return state; |
||||
} |
||||
|
||||
public void setState(String state) { |
||||
this.state = state; |
||||
} |
||||
|
||||
public Date getStartTime() { |
||||
return startTime; |
||||
} |
||||
|
||||
public void setStartTime(Date startTime) { |
||||
this.startTime = startTime; |
||||
} |
||||
|
||||
public Date getEndTime() { |
||||
return endTime; |
||||
} |
||||
|
||||
public void setEndTime(Date endTime) { |
||||
this.endTime = endTime; |
||||
} |
||||
|
||||
public String getHost() { |
||||
return host; |
||||
} |
||||
|
||||
public void setHost(String host) { |
||||
this.host = host; |
||||
} |
||||
|
||||
public String getDuration() { |
||||
return duration; |
||||
} |
||||
|
||||
public void setDuration(String duration) { |
||||
this.duration = duration; |
||||
} |
||||
|
||||
public int getSubflowId() { |
||||
return subflowId; |
||||
} |
||||
|
||||
public void setSubflowId(int subflowId) { |
||||
this.subflowId = subflowId; |
||||
} |
||||
} |
@ -0,0 +1,84 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.dto.treeview; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
/** |
||||
* TreeView |
||||
*/ |
||||
public class TreeViewDto { |
||||
|
||||
/** |
||||
* name |
||||
*/ |
||||
private String name; |
||||
|
||||
/** |
||||
* type |
||||
*/ |
||||
private String type; |
||||
|
||||
|
||||
public String getName() { |
||||
return name; |
||||
} |
||||
|
||||
public void setName(String name) { |
||||
this.name = name; |
||||
} |
||||
|
||||
public String getType() { |
||||
return type; |
||||
} |
||||
|
||||
public void setType(String type) { |
||||
this.type = type; |
||||
} |
||||
|
||||
/** |
||||
* instances list |
||||
* 实例列表 |
||||
*/ |
||||
|
||||
private List<Instance> instances = new ArrayList<>(); |
||||
|
||||
/** |
||||
* children |
||||
*/ |
||||
private List<TreeViewDto> children = new ArrayList<>(); |
||||
|
||||
|
||||
public List<Instance> getInstances() { |
||||
return instances; |
||||
} |
||||
|
||||
public void setInstances(List<Instance> instances) { |
||||
this.instances = instances; |
||||
} |
||||
|
||||
public List<TreeViewDto> getChildren() { |
||||
return children; |
||||
} |
||||
|
||||
public void setChildren(List<TreeViewDto> children) { |
||||
this.children = children; |
||||
} |
||||
|
||||
|
||||
} |
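The tree view is built by nesting TreeViewDto nodes and attaching one Instance per run to each node. A small illustrative sketch (the node names and types below are made up):

import cn.escheduler.api.dto.treeview.Instance;
import cn.escheduler.api.dto.treeview.TreeViewDto;

public class TreeViewExample {
    public static void main(String[] args) {
        TreeViewDto root = new TreeViewDto();
        root.setName("start_node");   // hypothetical task name
        root.setType("SHELL");        // hypothetical task type

        // one run of this node
        root.getInstances().add(new Instance(1, "start_node", "SHELL"));

        // a downstream node hangs off the children list
        TreeViewDto child = new TreeViewDto();
        child.setName("report_node");
        child.setType("SQL");
        root.getChildren().add(child);

        System.out.println(root.getChildren().size());  // 1
    }
}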
@ -0,0 +1,40 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.enums; |
||||
|
||||
/** |
||||
* execute type |
||||
*/ |
||||
public enum ExecuteType { |
||||
|
||||
|
||||
/** |
||||
* operation type
* 1. repeat running  2. recover suspended process  3. start failure task process  4. stop  5. pause
||||
*/ |
||||
NONE,REPEAT_RUNNING, RECOVER_SUSPENDED_PROCESS, START_FAILURE_TASK_PROCESS, STOP, PAUSE; |
||||
|
||||
|
||||
public static ExecuteType getEnum(int value){ |
||||
for (ExecuteType e: ExecuteType.values()) { |
||||
if(e.ordinal() == value) { |
||||
return e; |
||||
} |
||||
} |
||||
return null;//For values out of enum scope
|
||||
} |
||||
} |
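getEnum(int) maps the integer sent by the front end back to a constant through ordinal(), so the numbering in the comment above must stay in declaration order, and out-of-range values come back as null. A short usage sketch:

import cn.escheduler.api.enums.ExecuteType;

public class ExecuteTypeExample {
    public static void main(String[] args) {
        System.out.println(ExecuteType.getEnum(1));   // REPEAT_RUNNING
        System.out.println(ExecuteType.getEnum(4));   // STOP
        System.out.println(ExecuteType.getEnum(99));  // null -- callers must handle this case
    }
}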
@ -0,0 +1,220 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.enums; |
||||
|
||||
/** |
||||
* status enum |
||||
*/ |
||||
public enum Status { |
||||
|
||||
SUCCESS(0, "success"), |
||||
|
||||
REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid"), |
||||
TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid"), |
||||
USER_NAME_EXIST(10003, "user name already exists"), |
||||
USER_NAME_NULL(10004,"user name is null"), |
||||
// DB_OPERATION_ERROR(10005, "database operation error"),
|
||||
HDFS_OPERATION_ERROR(10006, "hdfs operation error"), |
||||
UPDATE_FAILED(10007, "update failed"),
||||
TASK_INSTANCE_HOST_NOT_FOUND(10008, "task instance host is not set"),
||||
TENANT_NAME_EXIST(10009, "tenant name already exists"), |
||||
USER_NOT_EXIST(10010, "user {0} not exists"), |
||||
ALERT_GROUP_NOT_EXIST(10011, "alarm group not found"), |
||||
ALERT_GROUP_EXIST(10012, "alarm group already exists"), |
||||
USER_NAME_PASSWD_ERROR(10013,"user name or password error"), |
||||
LOGIN_SESSION_FAILED(10014,"create session failed!"), |
||||
DATASOURCE_EXIST(10015, "data source name already exists"), |
||||
DATASOURCE_CONNECT_FAILED(10016, "data source connection failed"), |
||||
TENANT_NOT_EXIST(10017, "tenant does not exist"),
||||
PROJECT_NOT_FOUNT(10018, "project {0} not found"),
||||
PROJECT_ALREADY_EXISTS(10019, "project {0} already exists"), |
||||
TASK_INSTANCE_NOT_EXISTS(10020, "task instance {0} does not exist"), |
||||
TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE(10021, "task instance {0} is not sub process instance"), |
||||
SCHEDULE_CRON_NOT_EXISTS(10022, "scheduler crontab {0} does not exist"), |
||||
SCHEDULE_CRON_ONLINE_FORBID_UPDATE(10023, "online status does not allow update operations"),
||||
SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}"), |
||||
MASTER_NOT_EXISTS(10025, "master does not exist"), |
||||
SCHEDULE_STATUS_UNKNOWN(10026, "unknown command: {0}"), |
||||
CREATE_ALERT_GROUP_ERROR(10027,"create alert group error"), |
||||
QUERY_ALL_ALERTGROUP_ERROR(10028,"query all alertgroup error"), |
||||
LIST_PAGING_ALERT_GROUP_ERROR(10029,"list paging alert group error"), |
||||
UPDATE_ALERT_GROUP_ERROR(10030,"update alert group error"),
||||
DELETE_ALERT_GROUP_ERROR(10031,"delete alert group error"), |
||||
ALERT_GROUP_GRANT_USER_ERROR(10032,"alert group grant user error"), |
||||
CREATE_DATASOURCE_ERROR(10033,"create datasource error"), |
||||
UPDATE_DATASOURCE_ERROR(10034,"update datasource error"),
||||
QUERY_DATASOURCE_ERROR(10035,"query datasource error"), |
||||
CONNECT_DATASOURCE_FAILURE(10036,"connect datasource failure"), |
||||
CONNECTION_TEST_FAILURE(10037,"connection test failure"), |
||||
DELETE_DATA_SOURCE_FAILURE(10038,"delete data source failure"), |
||||
VERFIY_DATASOURCE_NAME_FAILURE(10039,"verify datasource name failure"),
||||
UNAUTHORIZED_DATASOURCE(10040,"unauthorized datasource"), |
||||
AUTHORIZED_DATA_SOURCE(10041,"authorized data source"), |
||||
LOGIN_SUCCESS(10042,"login success"), |
||||
USER_LOGIN_FAILURE(10043,"user login failure"), |
||||
LIST_WORKERS_ERROR(10044,"list workers error"), |
||||
LIST_MASTERS_ERROR(10045,"list masters error"), |
||||
UPDATE_PROJECT_ERROR(10046,"update project error"),
||||
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047,"query project details by id error"), |
||||
CREATE_PROJECT_ERROR(10048,"create project error"), |
||||
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049,"login user query project list paging error"), |
||||
DELETE_PROJECT_ERROR(10050,"delete project error"), |
||||
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051,"query unauthorized project error"), |
||||
QUERY_AUTHORIZED_PROJECT(10052,"query authorized project"), |
||||
QUERY_QUEUE_LIST_ERROR(10053,"query queue list error"), |
||||
CREATE_RESOURCE_ERROR(10054,"create resource error"), |
||||
UPDATE_RESOURCE_ERROR(10055,"update resource error"),
||||
QUERY_RESOURCES_LIST_ERROR(10056,"query resources list error"), |
||||
QUERY_RESOURCES_LIST_PAGING(10057,"query resources list paging"), |
||||
DELETE_RESOURCE_ERROR(10058,"delete resource error"), |
||||
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059,"verify resource by name and type error"), |
||||
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060,"view resource file online error"), |
||||
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061,"create resource file online error"), |
||||
RESOURCE_FILE_IS_EMPTY(10062,"resource file is empty"), |
||||
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063,"edit resource file online error"), |
||||
DOWNLOAD_RESOURCE_FILE_ERROR(10064,"download resource file error"), |
||||
CREATE_UDF_FUNCTION_ERROR(10065 ,"create udf function error"), |
||||
VIEW_UDF_FUNCTION_ERROR( 10066,"view udf function error"), |
||||
UPDATE_UDF_FUNCTION_ERROR(10067,"update udf function error"),
||||
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR( 10068,"query udf function list paging error"), |
||||
QUERY_DATASOURCE_BY_TYPE_ERROR( 10069,"query datasource by type error"), |
||||
VERIFY_UDF_FUNCTION_NAME_ERROR( 10070,"verify udf function name error"), |
||||
DELETE_UDF_FUNCTION_ERROR( 10071,"delete udf function error"), |
||||
AUTHORIZED_FILE_RESOURCE_ERROR( 10072,"authorized file resource error"), |
||||
UNAUTHORIZED_FILE_RESOURCE_ERROR( 10073,"unauthorized file resource error"), |
||||
UNAUTHORIZED_UDF_FUNCTION_ERROR( 10074,"unauthorized udf function error"), |
||||
AUTHORIZED_UDF_FUNCTION_ERROR(10075,"authorized udf function error"), |
||||
CREATE_SCHEDULE_ERROR(10076,"create schedule error"), |
||||
UPDATE_SCHEDULE_ERROR(10077,"update schedule error"),
||||
PUBLISH_SCHEDULE_ONLINE_ERROR(10078,"publish schedule online error"), |
||||
OFFLINE_SCHEDULE_ERROR(10079,"offline schedule error"), |
||||
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080,"query schedule list paging error"), |
||||
QUERY_SCHEDULE_LIST_ERROR(10081,"query schedule list error"), |
||||
QUERY_TASK_LIST_PAGING_ERROR(10082,"query task list paging error"), |
||||
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083,"query task record list paging error"), |
||||
CREATE_TENANT_ERROR(10084,"create tenant error"), |
||||
QUERY_TENANT_LIST_PAGING_ERROR(10085,"query tenant list paging error"), |
||||
QUERY_TENANT_LIST_ERROR(10086,"query tenant list error"), |
||||
UPDATE_TENANT_ERROR(10087,"update tenant error"),
||||
DELETE_TENANT_BY_ID_ERROR(10088,"delete tenant by id error"), |
||||
VERIFY_TENANT_CODE_ERROR(10089,"verify tenant code error"), |
||||
CREATE_USER_ERROR(10090,"create user error"), |
||||
QUERY_USER_LIST_PAGING_ERROR(10091,"query user list paging error"), |
||||
UPDATE_USER_ERROR(10092,"update user error"),
||||
DELETE_USER_BY_ID_ERROR(10093,"delete user by id error"), |
||||
GRANT_PROJECT_ERROR(10094,"grant project error"), |
||||
GRANT_RESOURCE_ERROR(10095,"grant resource error"), |
||||
GRANT_UDF_FUNCTION_ERROR(10096,"grant udf function error"), |
||||
GRANT_DATASOURCE_ERROR(10097,"grant datasource error"), |
||||
GET_USER_INFO_ERROR(10098,"get user info error"), |
||||
USER_LIST_ERROR(10099,"user list error"), |
||||
VERIFY_USERNAME_ERROR(10100,"verify username error"), |
||||
UNAUTHORIZED_USER_ERROR(10101,"unauthorized user error"), |
||||
AUTHORIZED_USER_ERROR(10102,"authorized user error"), |
||||
QUERY_TASK_INSTANCE_LOG_ERROR(10103,"view task instance log error"), |
||||
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104,"download task instance log file error"), |
||||
CREATE_PROCESS_DEFINITION(10105,"create process definition"), |
||||
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106,"verify process definition name unique error"), |
||||
UPDATE_PROCESS_DEFINITION_ERROR(10107,"update process definition error"),
||||
RELEASE_PROCESS_DEFINITION_ERROR(10108,"release process definition error"), |
||||
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109,"query detail of process definition error"),
||||
QUERY_PROCCESS_DEFINITION_LIST(10110,"query process definition list"),
||||
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111,"encapsulation treeview structure error"), |
||||
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112,"get tasks list by process definition id error"), |
||||
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113,"query process instance list paging error"), |
||||
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114,"query task list by process instance id error"), |
||||
UPDATE_PROCESS_INSTANCE_ERROR(10115,"update process instance error"),
||||
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116,"query process instance by id error"), |
||||
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117,"delete process instance by id error"), |
||||
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118,"query sub process instance detail info by task id error"), |
||||
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119,"query parent process instance detail info by sub process instance id error"), |
||||
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120,"query process instance all variables error"), |
||||
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121,"encapsulation process instance gantt structure error"), |
||||
QUERY_PROCCESS_DEFINITION_LIST_PAGING_ERROR(10122,"query process definition list paging error"),
||||
SIGN_OUT_ERROR(10123,"sign out error"), |
||||
TENANT_CODE_HAS_ALREADY_EXISTS(10124,"tenant code already exists"),
||||
IP_IS_EMPTY(10125,"ip is empty"), |
||||
SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}"), |
||||
|
||||
|
||||
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found"), |
||||
UDF_FUNCTION_EXISTS(20002, "UDF function already exists"), |
||||
// RESOURCE_EMPTY(20003, "resource file is empty"),
|
||||
RESOURCE_NOT_EXIST(20004, "resource does not exist"),
||||
RESOURCE_EXIST(20005, "resource already exists"), |
||||
RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix does not support online viewing"),
||||
RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit"), |
||||
RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified"), |
||||
UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar"), |
||||
HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail"),
||||
|
||||
|
||||
|
||||
USER_NO_OPERATION_PERM(30001, "user has no operation privilege"), |
||||
USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} does not have permission for project {1}"),
||||
|
||||
|
||||
PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist"), |
||||
PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists"), |
||||
PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist"), |
||||
PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} is not online"),
||||
PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}"), |
||||
PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006, "the status of process instance {0} is {1}, cannot perform {2} operation"),
||||
SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the process instance that the task belongs to does not exist"),
||||
PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit"), |
||||
PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ..."), |
||||
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance"), |
||||
TASK_INSTANCE_STATE_COUNT_ERROR(50011,"task instance state count error"), |
||||
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012,"count process instance state error"), |
||||
COUNT_PROCESS_DEFINITION_USER_ERROR(50013,"count process definition user error"), |
||||
START_PROCESS_INSTANCE_ERROR(50014,"start process instance error"), |
||||
EXECUTE_PROCESS_INSTANCE_ERROR(50015,"execute process instance error"), |
||||
CHECK_PROCESS_DEFINITION_ERROR(50016,"check process definition error"), |
||||
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017,"query recipients and copyers by process definition error"), |
||||
DATA_IS_NOT_VALID(50017,"data %s not valid"), |
||||
DATA_IS_NULL(50018,"data %s is null"), |
||||
PROCESS_NODE_HAS_CYCLE(50019,"process node has cycle"), |
||||
PROCESS_NODE_S_PARAMETER_INVALID(50020,"process node %s parameter invalid"), |
||||
|
||||
|
||||
HDFS_NOT_STARTUP(60001,"hdfs is not started"),
||||
; |
||||
|
||||
private int code; |
||||
private String msg; |
||||
|
||||
private Status(int code, String msg) { |
||||
this.code = code; |
||||
this.msg = msg; |
||||
} |
||||
|
||||
public int getCode() { |
||||
return this.code; |
||||
} |
||||
|
||||
public void setCode(int code) { |
||||
this.code = code; |
||||
} |
||||
|
||||
public String getMsg() { |
||||
return this.msg; |
||||
} |
||||
|
||||
public void setMsg(String msg) { |
||||
this.msg = msg; |
||||
} |
||||
} |
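Most messages above use MessageFormat-style {0}/{1} placeholders, while a few use %s; callers format them before returning the text to the client. A sketch of both, using only getCode()/getMsg() from this enum:

import cn.escheduler.api.enums.Status;

import java.text.MessageFormat;

public class StatusMessageExample {
    public static void main(String[] args) {
        // {0}-style placeholders -> java.text.MessageFormat
        String msg = MessageFormat.format(Status.USER_NOT_EXIST.getMsg(), "test_user");
        System.out.println(Status.USER_NOT_EXIST.getCode() + " : " + msg);
        // 10010 : user test_user not exists

        // %s-style placeholders -> String.format
        System.out.println(String.format(Status.DATA_IS_NULL.getMsg(), "processDefinitionId"));
        // data processDefinitionId is null
    }
}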
@ -0,0 +1,29 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.interceptor; |
||||
|
||||
import com.alibaba.druid.support.http.WebStatFilter; |
||||
|
||||
/* this class annotation for druid stat monitor in development |
||||
@WebFilter(filterName="druidWebStatFilter",urlPatterns="/*", |
||||
initParams={ |
||||
@WebInitParam(name="exclusions",value="*.js,*.gif,*.jpg,*.bmp,*.png,*.css,*.ico,/druid/*") |
||||
}) */ |
||||
public class DruidStatFilter extends WebStatFilter { |
||||
|
||||
|
||||
} |
@ -0,0 +1,34 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.interceptor; |
||||
|
||||
import com.alibaba.druid.support.http.StatViewServlet; |
||||
|
||||
|
||||
/* this class annotation for druid stat monitor in development |
||||
@WebServlet(urlPatterns = "/druid/*", |
||||
initParams={ |
||||
// @WebInitParam(name="allow",value="127.0.0.1"),
|
||||
// @WebInitParam(name="deny",value="192.168.16.111"),
|
||||
@WebInitParam(name="loginUsername",value="admin"), |
||||
@WebInitParam(name="loginPassword",value="escheduler123"), |
||||
@WebInitParam(name="resetEnable",value="true") |
||||
}) */ |
||||
public class DruidStatViewServlet extends StatViewServlet { |
||||
|
||||
|
||||
} |
@ -0,0 +1,111 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.interceptor; |
||||
|
||||
import cn.escheduler.api.service.SessionService; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.dao.mapper.UserMapper; |
||||
import cn.escheduler.dao.model.Session; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.httpclient.HttpStatus; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.web.servlet.HandlerInterceptor; |
||||
import org.springframework.web.servlet.ModelAndView; |
||||
|
||||
import javax.servlet.http.HttpServletRequest; |
||||
import javax.servlet.http.HttpServletResponse; |
||||
|
||||
/** |
||||
* login interceptor, must login first |
||||
*/ |
||||
public class LoginHandlerInterceptor implements HandlerInterceptor { |
||||
private static final Logger logger = LoggerFactory.getLogger(LoginHandlerInterceptor.class); |
||||
|
||||
@Autowired |
||||
private SessionService sessionService; |
||||
|
||||
@Autowired |
||||
private UserMapper userMapper; |
||||
|
||||
/** |
||||
* Intercept the execution of a handler. Called after HandlerMapping determined |
||||
* an appropriate handler object, but before HandlerAdapter invokes the handler. |
||||
* <p>DispatcherServlet processes a handler in an execution chain, consisting |
||||
* of any number of interceptors, with the handler itself at the end. |
||||
* With this method, each interceptor can decide to abort the execution chain, |
||||
* typically sending a HTTP error or writing a custom response. |
||||
* <p><strong>Note:</strong> special considerations apply for asynchronous |
||||
* request processing. For more details see |
||||
* {@link org.springframework.web.servlet.AsyncHandlerInterceptor}. |
||||
* @param request current HTTP request |
||||
* @param response current HTTP response |
||||
* @param handler chosen handler to execute, for type and/or instance evaluation |
||||
* @return {@code true} if the execution chain should proceed with the |
||||
* next interceptor or the handler itself. Else, DispatcherServlet assumes |
||||
* that this interceptor has already dealt with the response itself. |
||||
* @throws Exception in case of errors |
||||
*/ |
||||
@Override |
||||
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) { |
||||
|
||||
Session session = sessionService.getSession(request); |
||||
|
||||
if(logger.isDebugEnabled()){ |
||||
logger.debug("session info : " + session); |
||||
} |
||||
|
||||
if (session == null) { |
||||
response.setStatus(HttpStatus.SC_UNAUTHORIZED); |
||||
logger.info("session info is null "); |
||||
return false; |
||||
} |
||||
|
||||
if(logger.isDebugEnabled()){ |
||||
logger.debug("session id: {}", session.getId()); |
||||
} |
||||
|
||||
//get user object from session
|
||||
User user = userMapper.queryById(session.getUserId()); |
||||
|
||||
if(logger.isDebugEnabled()){ |
||||
logger.info("user info : " + user); |
||||
} |
||||
|
||||
|
||||
if (user == null) { |
||||
response.setStatus(HttpStatus.SC_UNAUTHORIZED); |
||||
return false; |
||||
} |
||||
|
||||
request.setAttribute(Constants.SESSION_USER, user); |
||||
|
||||
return true; |
||||
} |
||||
|
||||
@Override |
||||
public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception { |
||||
|
||||
} |
||||
|
||||
@Override |
||||
public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception { |
||||
|
||||
} |
||||
|
||||
} |
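The interceptor only takes effect once it is registered with Spring MVC. The project's real configuration class is not part of this hunk, so the following is only an illustrative sketch; the class name and the excluded login path are assumptions, and it presumes Spring Boot 2.x where WebMvcConfigurer has default methods (on 1.x, extend WebMvcConfigurerAdapter instead):

import cn.escheduler.api.interceptor.LoginHandlerInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

@Configuration
public class ExampleWebConfig implements WebMvcConfigurer {

    @Bean
    public LoginHandlerInterceptor loginHandlerInterceptor() {
        // declared as a bean so the @Autowired fields inside the interceptor get injected
        return new LoginHandlerInterceptor();
    }

    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(loginHandlerInterceptor())
                .addPathPatterns("/**")
                .excludePathPatterns("/login");  // assumed login endpoint
    }
}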
@ -0,0 +1,134 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.log; |
||||
|
||||
import cn.escheduler.rpc.*; |
||||
import io.grpc.ManagedChannel; |
||||
import io.grpc.ManagedChannelBuilder; |
||||
import io.grpc.StatusRuntimeException; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
|
||||
import java.util.concurrent.TimeUnit; |
||||
|
||||
/** |
||||
* log client |
||||
*/ |
||||
public class LogClient { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(LogClient.class); |
||||
|
||||
private final ManagedChannel channel; |
||||
private final LogViewServiceGrpc.LogViewServiceBlockingStub blockingStub; |
||||
|
||||
/** |
||||
* construct a client connecting to the log server at {@code host:port}
||||
*/ |
||||
public LogClient(String host, int port) { |
||||
this(ManagedChannelBuilder.forAddress(host, port) |
||||
// Channels are secure by default (via SSL/TLS). For the example we disable TLS to avoid
|
||||
// needing certificates.
|
||||
.usePlaintext(true)); |
||||
} |
||||
|
||||
/** |
||||
* construct a client for accessing the log server using the existing channel builder
||||
* |
||||
*/ |
||||
LogClient(ManagedChannelBuilder<?> channelBuilder) { |
||||
/** |
||||
* set max read size |
||||
*/ |
||||
channelBuilder.maxInboundMessageSize(Integer.MAX_VALUE); |
||||
channel = channelBuilder.build(); |
||||
blockingStub = LogViewServiceGrpc.newBlockingStub(channel); |
||||
} |
||||
|
||||
/** |
||||
* shutdown |
||||
* |
||||
* @throws InterruptedException |
||||
*/ |
||||
public void shutdown() throws InterruptedException { |
||||
channel.shutdown().awaitTermination(5, TimeUnit.SECONDS); |
||||
} |
||||
|
||||
/** |
||||
* roll view log |
||||
* |
||||
* @param path |
||||
* @param skipLineNum |
||||
* @param limit |
||||
* @return |
||||
*/ |
||||
public String rollViewLog(String path,int skipLineNum,int limit) { |
||||
logger.info("roll view log : path {},skipLineNum {} ,limit {}", path, skipLineNum, limit); |
||||
LogParameter pathParameter = LogParameter |
||||
.newBuilder() |
||||
.setPath(path) |
||||
.setSkipLineNum(skipLineNum) |
||||
.setLimit(limit) |
||||
.build(); |
||||
RetStrInfo retStrInfo; |
||||
try { |
||||
retStrInfo = blockingStub.rollViewLog(pathParameter); |
||||
return retStrInfo.getMsg(); |
||||
} catch (StatusRuntimeException e) { |
||||
logger.error("roll view log error", e); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* view log |
||||
* |
||||
* @param path |
||||
* @return |
||||
*/ |
||||
public String viewLog(String path) { |
||||
logger.info("view queryLog path {}",path); |
||||
PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); |
||||
RetStrInfo retStrInfo; |
||||
try { |
||||
retStrInfo = blockingStub.viewLog(pathParameter); |
||||
return retStrInfo.getMsg(); |
||||
} catch (StatusRuntimeException e) { |
||||
logger.error("view log error", e); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* get log bytes
||||
* |
||||
* @param path |
||||
* @return |
||||
*/ |
||||
public byte[] getLogBytes(String path) { |
||||
logger.info("get log path {}",path); |
||||
PathParameter pathParameter = PathParameter.newBuilder().setPath(path).build(); |
||||
RetByteInfo retByteInfo; |
||||
try { |
||||
retByteInfo = blockingStub.getLogBytes(pathParameter); |
||||
return retByteInfo.getData().toByteArray(); |
||||
} catch (StatusRuntimeException e) { |
||||
logger.error("get log size error", e); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
} |
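Typical use of LogClient: open a channel to the host that holds the log, fetch or tail it, then shut the channel down. Host, port and path below are placeholders; in real code they come from the task instance record:

import cn.escheduler.api.log.LogClient;

public class LogClientExample {
    public static void main(String[] args) throws InterruptedException {
        LogClient client = new LogClient("192.168.0.1", 50051);  // placeholder host/port
        try {
            // skip 0 lines, read at most 100
            String tail = client.rollViewLog("/tmp/escheduler/task.log", 0, 100);
            System.out.println(tail);  // null if the RPC failed
        } finally {
            client.shutdown();  // release the gRPC channel
        }
    }
}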
@ -0,0 +1,150 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.quartz; |
||||
|
||||
|
||||
import cn.escheduler.common.Constants; |
||||
import cn.escheduler.common.enums.CommandType; |
||||
import cn.escheduler.common.enums.ReleaseState; |
||||
import cn.escheduler.dao.ProcessDao; |
||||
import cn.escheduler.dao.model.Command; |
||||
import cn.escheduler.dao.model.ProcessDefinition; |
||||
import cn.escheduler.dao.model.Schedule; |
||||
import org.quartz.*; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.util.Assert; |
||||
|
||||
import java.util.Date; |
||||
|
||||
import static cn.escheduler.api.quartz.QuartzExecutors.buildJobGroupName; |
||||
import static cn.escheduler.api.quartz.QuartzExecutors.buildJobName; |
||||
|
||||
/** |
||||
* process schedule job |
||||
* <p> |
||||
* {@link Job} |
||||
* </p> |
||||
*/ |
||||
public class ProcessScheduleJob implements Job { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProcessScheduleJob.class); |
||||
|
||||
/** |
||||
* {@link ProcessDao} |
||||
*/ |
||||
private static ProcessDao processDao; |
||||
|
||||
|
||||
/** |
||||
* init |
||||
*/ |
||||
public static void init(ProcessDao processDao) { |
||||
ProcessScheduleJob.processDao = processDao; |
||||
} |
||||
|
||||
/** |
||||
* <p> |
||||
* Called by the <code>{@link Scheduler}</code> when a <code>{@link Trigger}</code> |
||||
* fires that is associated with the <code>Job</code>. |
||||
* </p> |
||||
* |
||||
* <p> |
||||
* The implementation may wish to set a |
||||
* {@link JobExecutionContext#setResult(Object) result} object on the |
||||
* {@link JobExecutionContext} before this method exits. The result itself |
||||
* is meaningless to Quartz, but may be informative to |
||||
* <code>{@link JobListener}s</code> or |
||||
* <code>{@link TriggerListener}s</code> that are watching the job's |
||||
* execution. |
||||
* </p> |
||||
* |
||||
* @throws JobExecutionException if there is an exception while executing the job. |
||||
*/ |
||||
@Override |
||||
public void execute(JobExecutionContext context) throws JobExecutionException { |
||||
|
||||
Assert.notNull(processDao, "please call init() method first"); |
||||
|
||||
JobDataMap dataMap = context.getJobDetail().getJobDataMap(); |
||||
|
||||
int projectId = dataMap.getInt(Constants.PROJECT_ID); |
||||
int scheduleId = dataMap.getInt(Constants.SCHEDULE_ID); |
||||
|
||||
/** |
||||
* The scheduled time the trigger fired for. For instance the scheduled |
||||
* time may have been 10:00:00 but the actual fire time may have been |
||||
* 10:00:03 if the scheduler was too busy. |
||||
* |
||||
* @return Returns the scheduledFireTime. |
||||
* @see #getFireTime() |
||||
*/ |
||||
Date scheduledFireTime = context.getScheduledFireTime(); |
||||
|
||||
/** |
||||
* The actual time the trigger fired. For instance the scheduled time may |
||||
* have been 10:00:00 but the actual fire time may have been 10:00:03 if |
||||
* the scheduler was too busy. |
||||
* |
||||
* @return Returns the fireTime. |
||||
* @see #getScheduledFireTime() |
||||
*/ |
||||
Date fireTime = context.getFireTime(); |
||||
|
||||
logger.info("scheduled fire time :{}, fire time :{}, process id :{}", scheduledFireTime, fireTime, scheduleId); |
||||
|
||||
// query schedule
|
||||
Schedule schedule = processDao.querySchedule(scheduleId); |
||||
if (schedule == null) { |
||||
logger.warn("process schedule does not exist in db,delete schedule job in quartz, projectId:{}, scheduleId:{}", projectId, scheduleId); |
||||
deleteJob(projectId, scheduleId); |
||||
return; |
||||
} |
||||
|
||||
|
||||
ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId()); |
||||
// release state : online/offline
|
||||
if (processDefinition == null || processDefinition.getReleaseState() == ReleaseState.OFFLINE) {
||||
logger.warn("process definition does not exist in db or offline,need not to create command, projectId:{}, processId:{}", projectId, scheduleId); |
||||
return; |
||||
} |
||||
|
||||
Command command = new Command(); |
||||
command.setCommandType(CommandType.START_PROCESS); |
||||
command.setExecutorId(schedule.getUserId()); |
||||
command.setFailureStrategy(schedule.getFailureStrategy()); |
||||
command.setProcessDefinitionId(schedule.getProcessDefinitionId()); |
||||
command.setScheduleTime(scheduledFireTime); |
||||
command.setStartTime(fireTime); |
||||
command.setWarningGroupId(schedule.getWarningGroupId()); |
||||
command.setWarningType(schedule.getWarningType()); |
||||
command.setProcessInstancePriority(schedule.getProcessInstancePriority()); |
||||
|
||||
processDao.createCommand(command); |
||||
} |
||||
|
||||
|
||||
/** |
||||
* delete job |
||||
*/ |
||||
private void deleteJob(int projectId, int scheduleId) { |
||||
String jobName = buildJobName(scheduleId); |
||||
String jobGroupName = buildJobGroupName(projectId); |
||||
QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName); |
||||
} |
||||
} |
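execute() asserts that init(processDao) has been called and reads PROJECT_ID / SCHEDULE_ID from the JobDataMap, so whatever schedules this job must provide both. The project wires this up through QuartzExecutors (its addJob method is not in this hunk); the sketch below only illustrates that contract, with processDao, projectId and scheduleId assumed to be supplied by the caller:

// placed in the same package only so the buildJobName/buildJobGroupName helpers are
// visible regardless of their access modifier -- an assumption of this sketch
package cn.escheduler.api.quartz;

import cn.escheduler.common.Constants;
import cn.escheduler.dao.ProcessDao;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;

public class ProcessScheduleJobWiring {

    public static JobDetail buildJobDetail(ProcessDao processDao, int projectId, int scheduleId) {
        // must run once before any trigger fires, otherwise Assert.notNull(...) in execute() trips
        ProcessScheduleJob.init(processDao);

        return JobBuilder.newJob(ProcessScheduleJob.class)
                .withIdentity(QuartzExecutors.buildJobName(scheduleId),
                              QuartzExecutors.buildJobGroupName(projectId))
                .usingJobData(Constants.PROJECT_ID, projectId)
                .usingJobData(Constants.SCHEDULE_ID, scheduleId)
                .build();
    }
}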
@ -0,0 +1,305 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.quartz; |
||||
|
||||
import cn.escheduler.common.Constants; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import cn.escheduler.dao.model.Schedule; |
||||
import org.apache.commons.lang.StringUtils; |
||||
import org.quartz.*; |
||||
import org.quartz.impl.StdSchedulerFactory; |
||||
import org.quartz.impl.matchers.GroupMatcher; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
|
||||
import java.util.Calendar; |
||||
import java.util.*; |
||||
import java.util.concurrent.locks.ReadWriteLock; |
||||
import java.util.concurrent.locks.ReentrantReadWriteLock; |
||||
|
||||
import static org.quartz.CronScheduleBuilder.cronSchedule; |
||||
import static org.quartz.JobBuilder.newJob; |
||||
import static org.quartz.TriggerBuilder.newTrigger; |
||||
|
||||
/** |
||||
* single Quartz executors instance |
||||
*/ |
||||
public class QuartzExecutors { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(QuartzExecutors.class); |
||||
|
||||
private final ReadWriteLock lock = new ReentrantReadWriteLock(); |
||||
|
||||
|
||||
/** |
||||
* <p> |
||||
* A <code>Scheduler</code> maintains a registry of <code>{@link org.quartz.JobDetail}</code>s |
||||
* and <code>{@link Trigger}</code>s. Once registered, the <code>Scheduler</code> |
||||
* is responsible for executing <code>Job</code> s when their associated |
||||
* <code>Trigger</code> s fire (when their scheduled time arrives). |
||||
* </p> |
||||
* {@link Scheduler} |
||||
*/ |
||||
private static Scheduler scheduler; |
||||
|
||||
private static volatile QuartzExecutors INSTANCE = null; |
||||
|
||||
private QuartzExecutors() {} |
||||
|
||||
/** |
||||
* thread-safe lazy initialization of the singleton (double-checked locking) |
||||
* @return the singleton QuartzExecutors instance |
||||
*/ |
||||
public static QuartzExecutors getInstance() { |
||||
if (INSTANCE == null) { |
||||
synchronized (QuartzExecutors.class) { |
||||
// when several threads pass the first null check at the same time, check again inside the synchronized block so that only one instance is ever created.
|
||||
if (INSTANCE == null) { |
||||
INSTANCE = new QuartzExecutors(); |
||||
//finish QuartzExecutors init
|
||||
INSTANCE.init(); |
||||
} |
||||
} |
||||
} |
||||
return INSTANCE; |
||||
} |
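// Note: the double-checked locking above is only safe because INSTANCE is declared volatile,
// which prevents other threads from observing a partially constructed QuartzExecutors.
// Illustrative use (assuming the caller bootstraps the scheduler at application startup):
//   QuartzExecutors.getInstance().start();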
||||
|
||||
|
||||
/** |
||||
* init |
||||
* |
||||
* <p> |
||||
* Returns a client-usable handle to a <code>Scheduler</code>. |
||||
* </p> |
||||
*/ |
||||
private void init() { |
||||
try { |
||||
SchedulerFactory schedulerFactory = new StdSchedulerFactory(Constants.QUARTZ_PROPERTIES_PATH); |
||||
scheduler = schedulerFactory.getScheduler(); |
||||
|
||||
} catch (SchedulerException e) { |
||||
logger.error(e.getMessage(),e); |
||||
System.exit(1); |
||||
} |
||||
|
||||
} |
||||
|
||||
/** |
||||
* Whether the scheduler has been started. |
||||
* |
||||
* <p> |
||||
* Note: This only reflects whether <code>{@link #start()}</code> has ever |
||||
* been called on this Scheduler, so it will return <code>true</code> even |
||||
* if the <code>Scheduler</code> is currently in standby mode or has been |
||||
* since shutdown. |
||||
* </p> |
||||
* |
||||
* @see Scheduler#start() |
||||
*/ |
||||
public void start() throws SchedulerException { |
||||
if (!scheduler.isStarted()){ |
||||
scheduler.start(); |
||||
logger.info("Quartz service started" ); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* stop all scheduled tasks |
||||
* |
||||
* Halts the <code>Scheduler</code>'s firing of <code>{@link Trigger}s</code>, |
||||
* and cleans up all resources associated with the Scheduler. Equivalent to |
||||
* <code>shutdown(false)</code>. |
||||
* |
||||
* <p> |
||||
* The scheduler cannot be re-started. |
||||
* </p> |
||||
* |
||||
*/ |
||||
public void shutdown() throws SchedulerException { |
||||
if (!scheduler.isShutdown()) { |
||||
// don't wait for the task to complete
|
||||
scheduler.shutdown(); |
||||
logger.info("Quartz service stopped, and halt all tasks"); |
||||
} |
||||
} |
||||
|
||||
|
||||
/** |
||||
* add a job and its cron trigger; if the job already exists, it is reused and its trigger is updated |
||||
* |
||||
* @param clazz job class name |
||||
* @param jobName job name |
||||
* @param jobGroupName job group name |
||||
* @param startDate job start date |
||||
* @param endDate job end date |
||||
* @param cronExpression cron expression |
||||
* @param jobDataMap job parameters data map |
||||
* @return |
||||
*/ |
||||
public void addJob(Class<? extends Job> clazz, String jobName, String jobGroupName, Date startDate, Date endDate, |
||||
String cronExpression, |
||||
Map<String, Object> jobDataMap) { |
||||
lock.writeLock().lock(); |
||||
try { |
||||
|
||||
JobKey jobKey = new JobKey(jobName, jobGroupName); |
||||
JobDetail jobDetail; |
||||
// add the job (if it already exists, reuse it and merge the new job data map)
|
||||
if (scheduler.checkExists(jobKey)) { |
||||
|
||||
jobDetail = scheduler.getJobDetail(jobKey); |
||||
if (jobDataMap != null) { |
||||
jobDetail.getJobDataMap().putAll(jobDataMap); |
||||
} |
||||
} else { |
||||
jobDetail = newJob(clazz).withIdentity(jobKey).build(); |
||||
|
||||
if (jobDataMap != null) { |
||||
jobDetail.getJobDataMap().putAll(jobDataMap); |
||||
} |
||||
|
||||
scheduler.addJob(jobDetail, false, true); |
||||
|
||||
logger.info("Add job, job name: {}, group name: {}", |
||||
jobName, jobGroupName); |
||||
} |
||||
|
||||
TriggerKey triggerKey = new TriggerKey(jobName, jobGroupName); |
||||
/** |
||||
* Instructs the <code>{@link Scheduler}</code> that upon a mis-fire |
||||
* situation, the <code>{@link CronTrigger}</code> wants to have its |
||||
* next-fire-time updated to the next time in the schedule after the |
||||
* current time (taking into account any associated <code>{@link Calendar}</code>), |
||||
* but it does not want to be fired now. |
||||
*/ |
||||
CronTrigger cronTrigger = newTrigger().withIdentity(triggerKey).startAt(startDate).endAt(endDate) |
||||
.withSchedule(cronSchedule(cronExpression).withMisfireHandlingInstructionDoNothing()) |
||||
.forJob(jobDetail).build(); |
||||
|
||||
if (scheduler.checkExists(triggerKey)) { |
||||
// update the trigger when the schedule cron expression changes
|
||||
CronTrigger oldCronTrigger = (CronTrigger) scheduler.getTrigger(triggerKey); |
||||
String oldCronExpression = oldCronTrigger.getCronExpression(); |
||||
|
||||
if (!StringUtils.equalsIgnoreCase(cronExpression,oldCronExpression)) { |
||||
// reschedule job trigger
|
||||
scheduler.rescheduleJob(triggerKey, cronTrigger); |
||||
logger.info("reschedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", |
||||
jobName, jobGroupName, cronExpression, startDate, endDate); |
||||
} |
||||
} else { |
||||
scheduler.scheduleJob(cronTrigger); |
||||
logger.info("schedule job trigger, triggerName: {}, triggerGroupName: {}, cronExpression: {}, startDate: {}, endDate: {}", |
||||
jobName, jobGroupName, cronExpression, startDate, endDate); |
||||
} |
||||
|
||||
} catch (Exception e) { |
||||
logger.error("add job failed", e); |
||||
throw new RuntimeException("add job failed:"+e.getMessage()); |
||||
} finally { |
||||
lock.writeLock().unlock(); |
||||
} |
||||
} |
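// Minimal usage sketch for addJob (illustrative only; the job class name and the Schedule
// accessors getStartTime/getEndTime/getCrontab are assumptions, not taken from this file):
//   Map<String, Object> dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule);
//   QuartzExecutors.getInstance().addJob(SomeQuartzJob.class,
//       QuartzExecutors.buildJobName(scheduleId), QuartzExecutors.buildJobGroupName(projectId),
//       schedule.getStartTime(), schedule.getEndTime(), schedule.getCrontab(), dataMap);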
||||
|
||||
|
||||
/** |
||||
* delete job |
||||
* |
||||
* @param jobName |
||||
* @param jobGroupName |
||||
* @return true if the Job was found and deleted. |
||||
*/ |
||||
public boolean deleteJob(String jobName, String jobGroupName) { |
||||
lock.writeLock().lock(); |
||||
try { |
||||
logger.info("try to delete job, job name: {}, job group name: {},", jobName, jobGroupName); |
||||
return scheduler.deleteJob(new JobKey(jobName, jobGroupName)); |
||||
} catch (SchedulerException e) { |
||||
logger.error(String.format("delete job : %s failed",jobName), e); |
||||
} finally { |
||||
lock.writeLock().unlock(); |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* delete all jobs in job group |
||||
* <p> |
||||
* Note that while this bulk operation is likely more efficient than |
||||
* invoking <code>deleteJob(JobKey jobKey)</code> several |
||||
* times, it may have the adverse effect of holding data locks for a |
||||
* single long duration of time (rather than lots of small durations |
||||
* of time). |
||||
* </p> |
||||
* |
||||
* @param jobGroupName |
||||
* |
||||
* @return true if all of the Jobs were found and deleted, false if |
||||
* one or more were not deleted. |
||||
*/ |
||||
public boolean deleteAllJobs(String jobGroupName) { |
||||
lock.writeLock().lock(); |
||||
try { |
||||
logger.info("try to delete all jobs in job group: {}", jobGroupName); |
||||
List<JobKey> jobKeys = new ArrayList<>(); |
||||
jobKeys.addAll(scheduler.getJobKeys(GroupMatcher.groupEndsWith(jobGroupName))); |
||||
|
||||
return scheduler.deleteJobs(jobKeys); |
||||
} catch (SchedulerException e) { |
||||
logger.error(String.format("delete all jobs in job group: %s failed",jobGroupName), e); |
||||
} finally { |
||||
lock.writeLock().unlock(); |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* build job name |
||||
*/ |
||||
public static String buildJobName(int processId) { |
||||
StringBuilder sb = new StringBuilder(30); |
||||
sb.append(Constants.QUARTZ_JOB_PRIFIX).append(Constants.UNDERLINE).append(processId); |
||||
return sb.toString(); |
||||
} |
||||
|
||||
/** |
||||
* build job group name |
||||
*/ |
||||
public static String buildJobGroupName(int projectId) { |
||||
StringBuilder sb = new StringBuilder(30); |
||||
sb.append(Constants.QUARTZ_JOB_GROUP_PRIFIX).append(Constants.UNDERLINE).append(projectId); |
||||
return sb.toString(); |
||||
} |
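// For example, assuming QUARTZ_JOB_PRIFIX is "job", QUARTZ_JOB_GROUP_PRIFIX is "jobgroup" and
// UNDERLINE is "_" (none of these constant values are shown in this diff), buildJobName(10)
// would yield "job_10" and buildJobGroupName(3) would yield "jobgroup_3".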
||||
|
||||
/** |
||||
* add params to map |
||||
* |
||||
* @param projectId |
||||
* @param scheduleId |
||||
* @param schedule |
||||
* @return |
||||
*/ |
||||
public static Map<String, Object> buildDataMap(int projectId, int scheduleId, Schedule schedule) { |
||||
Map<String, Object> dataMap = new HashMap<>(3); |
||||
dataMap.put(Constants.PROJECT_ID, projectId); |
||||
dataMap.put(Constants.SCHEDULE_ID, scheduleId); |
||||
dataMap.put(Constants.SCHEDULE, JSONUtils.toJson(schedule)); |
||||
|
||||
return dataMap; |
||||
} |
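// Only PROJECT_ID and SCHEDULE_ID are stored as plain integers; the Schedule itself is stored
// as a JSON string, so the executing job is expected to deserialize it again, e.g.
//   JSONUtils.parseObject(dataMap.getString(Constants.SCHEDULE), Schedule.class);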
||||
|
||||
} |
@ -0,0 +1,294 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.AlertType; |
||||
import cn.escheduler.common.enums.UserType; |
||||
import cn.escheduler.dao.mapper.AlertGroupMapper; |
||||
import cn.escheduler.dao.mapper.UserAlertGroupMapper; |
||||
import cn.escheduler.dao.model.AlertGroup; |
||||
import cn.escheduler.dao.model.User; |
||||
import cn.escheduler.dao.model.UserAlertGroup; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* alert group service |
||||
*/ |
||||
@Service |
||||
public class AlertGroupService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(AlertGroupService.class); |
||||
|
||||
@Autowired |
||||
private AlertGroupMapper alertGroupMapper; |
||||
|
||||
@Autowired |
||||
private UserAlertGroupMapper userAlertGroupMapper; |
||||
|
||||
/** |
||||
* query alert group list |
||||
* |
||||
* @return |
||||
*/ |
||||
public HashMap<String, Object> queryAlertgroup() { |
||||
|
||||
HashMap<String, Object> result = new HashMap<>(5); |
||||
List<AlertGroup> alertGroups = alertGroupMapper.queryAllGroupList(); |
||||
result.put(Constants.DATA_LIST, alertGroups); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query alert group list with pagination |
||||
* |
||||
* @param loginUser |
||||
* @param searchVal |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
Integer count = alertGroupMapper.countAlertGroupPaging(searchVal); |
||||
|
||||
PageInfo<AlertGroup> pageInfo = new PageInfo<>(pageNo, pageSize); |
||||
|
||||
List<AlertGroup> scheduleList = alertGroupMapper.queryAlertGroupPaging(searchVal, pageInfo.getStart(), pageSize); |
||||
|
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(scheduleList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* create alert group |
||||
* |
||||
* @param loginUser |
||||
* @param groupName |
||||
* @param groupType |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> createAlertgroup(User loginUser, String groupName, AlertType groupType, String desc) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
//only admin can operate
|
||||
if (checkAdmin(loginUser, result)){ |
||||
return result; |
||||
} |
||||
|
||||
AlertGroup alertGroup = new AlertGroup(); |
||||
Date now = new Date(); |
||||
|
||||
alertGroup.setGroupName(groupName); |
||||
alertGroup.setGroupType(groupType); |
||||
alertGroup.setDesc(desc); |
||||
alertGroup.setCreateTime(now); |
||||
alertGroup.setUpdateTime(now); |
||||
|
||||
// insert
|
||||
int insert = alertGroupMapper.insert(alertGroup); |
||||
|
||||
if (insert > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.CREATE_ALERT_GROUP_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* check user is admin or not |
||||
* |
||||
* @param user |
||||
* @return |
||||
*/ |
||||
public boolean isAdmin(User user) { |
||||
return user.getUserType() == UserType.ADMIN_USER; |
||||
} |
||||
|
||||
/** |
||||
* update alert group |
||||
* |
||||
* @param loginUser |
||||
* @param id |
||||
* @param groupName |
||||
* @param groupType |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> updateAlertgroup(User loginUser, int id, String groupName, AlertType groupType, String desc) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
if (checkAdmin(loginUser, result)){ |
||||
return result; |
||||
} |
||||
|
||||
|
||||
AlertGroup alertGroup = alertGroupMapper.queryById(id); |
||||
|
||||
if (alertGroup == null) { |
||||
putMsg(result, Status.ALERT_GROUP_NOT_EXIST); |
||||
return result; |
||||
|
||||
} |
||||
|
||||
Date now = new Date(); |
||||
|
||||
if (StringUtils.isNotEmpty(groupName)) { |
||||
alertGroup.setGroupName(groupName); |
||||
} |
||||
|
||||
if (groupType != null) { |
||||
alertGroup.setGroupType(groupType); |
||||
} |
||||
alertGroup.setDesc(desc); |
||||
alertGroup.setUpdateTime(now); |
||||
// update alert group
|
||||
alertGroupMapper.update(alertGroup); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* delete alert group by id |
||||
* |
||||
* @param loginUser |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> delAlertgroupById(User loginUser, int id) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
result.put(Constants.STATUS, false); |
||||
|
||||
//only admin can operate
|
||||
if (checkAdmin(loginUser, result)){ |
||||
return result; |
||||
} |
||||
|
||||
|
||||
alertGroupMapper.delete(id); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* grant user |
||||
* |
||||
* @param loginUser |
||||
* @param alertgroupId |
||||
* @param userIds |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> grantUser(User loginUser, int alertgroupId, String userIds) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
result.put(Constants.STATUS, false); |
||||
|
||||
//only admin can operate
|
||||
if (checkAdmin(loginUser, result)){ |
||||
return result; |
||||
} |
||||
|
||||
userAlertGroupMapper.deleteByAlertgroupId(alertgroupId); |
||||
if (StringUtils.isEmpty(userIds)) { |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
String[] userIdsArr = userIds.split(","); |
||||
|
||||
for (String userId : userIdsArr) { |
||||
Date now = new Date(); |
||||
UserAlertGroup userAlertGroup = new UserAlertGroup(); |
||||
userAlertGroup.setAlertgroupId(alertgroupId); |
||||
userAlertGroup.setUserId(Integer.parseInt(userId)); |
||||
userAlertGroup.setCreateTime(now); |
||||
userAlertGroup.setUpdateTime(now); |
||||
userAlertGroupMapper.insert(userAlertGroup); |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
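// Illustrative call: grantUser(adminUser, 1, "2,3,4") replaces the members of alert group 1
// with users 2, 3 and 4; passing an empty userIds string simply clears the group.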
||||
|
||||
/** |
||||
* verify group name exists |
||||
* |
||||
* @param loginUser |
||||
* @param groupName |
||||
* @return |
||||
*/ |
||||
public Result verifyGroupName(User loginUser, String groupName) { |
||||
Result result = new Result(); |
||||
AlertGroup alertGroup = alertGroupMapper.queryByGroupName(groupName); |
||||
if (alertGroup != null) { |
||||
logger.error("group {} has exist, can't create again.", groupName); |
||||
result.setCode(Status.ALERT_GROUP_EXIST.getCode()); |
||||
result.setMsg(Status.ALERT_GROUP_EXIST.getMsg()); |
||||
} else { |
||||
result.setCode(Status.SUCCESS.getCode()); |
||||
result.setMsg(Status.SUCCESS.getMsg()); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* check whether the login user is an admin; if not, put a permission error into the result |
||||
* @param loginUser |
||||
* @param result |
||||
* @return |
||||
*/ |
||||
private boolean checkAdmin(User loginUser, Map<String, Object> result) { |
||||
if (!isAdmin(loginUser)) { |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return true; |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* put message |
||||
* |
||||
* @param result |
||||
* @param status |
||||
*/ |
||||
private void putMsg(Map<String, Object> result, Status status) { |
||||
result.put(Constants.STATUS, status); |
||||
result.put(Constants.MSG, status.getMsg()); |
||||
} |
||||
} |
@ -0,0 +1,89 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.common.graph.DAG; |
||||
import cn.escheduler.common.model.TaskNode; |
||||
import cn.escheduler.common.model.TaskNodeRelation; |
||||
import cn.escheduler.common.process.ProcessDag; |
||||
import cn.escheduler.common.utils.CollectionUtils; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import cn.escheduler.dao.model.ProcessData; |
||||
import cn.escheduler.dao.model.ProcessInstance; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
/** |
||||
* base DAG service |
||||
*/ |
||||
public class BaseDAGService extends BaseService{ |
||||
|
||||
|
||||
/** |
||||
* process instance to DAG |
||||
* |
||||
* @param processInstance |
||||
* @return |
||||
* @throws Exception |
||||
*/ |
||||
public static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) throws Exception { |
||||
|
||||
String processDefinitionJson = processInstance.getProcessInstanceJson(); |
||||
|
||||
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); |
||||
|
||||
List<TaskNode> taskNodeList = processData.getTasks(); |
||||
|
||||
List<TaskNodeRelation> taskNodeRelations = new ArrayList<>(); |
||||
|
||||
// traverse the task nodes and build predecessor relationships
|
||||
for (TaskNode taskNode : taskNodeList) { |
||||
String preTasks = taskNode.getPreTasks(); |
||||
List<String> preTasksList = JSONUtils.toList(preTasks, String.class); |
||||
|
||||
// if the list of previous tasks is not empty
|
||||
if (preTasksList != null) { |
||||
for (String depNode : preTasksList) { |
||||
taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName())); |
||||
} |
||||
} |
||||
} |
||||
|
||||
ProcessDag processDag = new ProcessDag(); |
||||
processDag.setEdges(taskNodeRelations); |
||||
processDag.setNodes(taskNodeList); |
||||
|
||||
|
||||
// generate detail Dag, to be executed
|
||||
DAG<String, TaskNode, TaskNodeRelation> dag = new DAG<>(); |
||||
|
||||
if (CollectionUtils.isNotEmpty(processDag.getNodes())) { |
||||
for (TaskNode node : processDag.getNodes()) { |
||||
dag.addNode(node.getName(), node); |
||||
} |
||||
} |
||||
|
||||
if (CollectionUtils.isNotEmpty(processDag.getEdges())) { |
||||
for (TaskNodeRelation edge : processDag.getEdges()) { |
||||
dag.addEdge(edge.getStartNode(), edge.getEndNode()); |
||||
} |
||||
} |
||||
|
||||
return dag; |
||||
} |
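// Illustrative shape of the process definition JSON this method expects (field names follow the
// accessors used above; the exact JSON keys and values are illustrative, not taken from the repo):
//   {"tasks":[{"name":"A","preTasks":"[]"},{"name":"B","preTasks":"[\"A\"]"}]}
// which produces a DAG with nodes A and B and a single edge A -> B.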
||||
} |
@ -0,0 +1,113 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.UserType; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
|
||||
import javax.servlet.http.Cookie; |
||||
import javax.servlet.http.HttpServletRequest; |
||||
import java.text.MessageFormat; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* base service |
||||
*/ |
||||
public class BaseService { |
||||
|
||||
/** |
||||
* check admin |
||||
* |
||||
* @param user |
||||
* @return |
||||
*/ |
||||
protected boolean isAdmin(User user) { |
||||
return user.getUserType() == UserType.ADMIN_USER; |
||||
} |
||||
|
||||
/** |
||||
* check admin |
||||
* |
||||
* @param loginUser |
||||
* @param result |
||||
* @return |
||||
*/ |
||||
protected boolean checkAdmin(User loginUser, Map<String, Object> result) { |
||||
//only admin can operate
|
||||
if (!isAdmin(loginUser)) { |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return true; |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* put message to map |
||||
* |
||||
* @param result |
||||
* @param status |
||||
* @param statusParams |
||||
*/ |
||||
protected void putMsg(Map<String, Object> result, Status status, Object... statusParams) { |
||||
result.put(Constants.STATUS, status); |
||||
if (statusParams != null && statusParams.length > 0) { |
||||
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); |
||||
} else { |
||||
result.put(Constants.MSG, status.getMsg()); |
||||
} |
||||
} |
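// For example, if a status message were defined as "project {0} not found", then
// putMsg(result, someStatus, "demo") would store "project demo not found" under Constants.MSG
// (the message text here is illustrative, not an actual Status constant).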
||||
|
||||
/** |
||||
* put message to result object |
||||
* |
||||
* @param result |
||||
* @param status |
||||
*/ |
||||
protected void putMsg(Result result, Status status, Object... statusParams) { |
||||
result.setCode(status.getCode()); |
||||
|
||||
if (statusParams != null && statusParams.length > 0) { |
||||
result.setMsg(MessageFormat.format(status.getMsg(), statusParams)); |
||||
} else { |
||||
result.setMsg(status.getMsg()); |
||||
} |
||||
|
||||
} |
||||
|
||||
/** |
||||
* get cookie info by name |
||||
* @param request |
||||
* @param name |
||||
* @return get cookie info |
||||
*/ |
||||
public static Cookie getCookie(HttpServletRequest request, String name) { |
||||
Cookie[] cookies = request.getCookies(); |
||||
if (cookies != null && cookies.length > 0) { |
||||
for (Cookie cookie : cookies) { |
||||
if (StringUtils.equalsIgnoreCase(name, cookie.getName())) { |
||||
return cookie; |
||||
} |
||||
} |
||||
} |
||||
|
||||
return null; |
||||
} |
||||
} |
@ -0,0 +1,209 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
|
||||
import cn.escheduler.api.dto.DefineUserDto; |
||||
import cn.escheduler.api.dto.TaskCountDto; |
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.common.utils.DateUtils; |
||||
import cn.escheduler.dao.mapper.ProcessDefinitionMapper; |
||||
import cn.escheduler.dao.mapper.ProcessInstanceMapper; |
||||
import cn.escheduler.dao.mapper.ProjectMapper; |
||||
import cn.escheduler.dao.mapper.TaskInstanceMapper; |
||||
import cn.escheduler.dao.model.DefinitionGroupByUser; |
||||
import cn.escheduler.dao.model.ExecuteStatusCount; |
||||
import cn.escheduler.dao.model.Project; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.text.MessageFormat; |
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* data analysis service |
||||
*/ |
||||
@Service |
||||
public class DataAnalysisService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisService.class); |
||||
|
||||
@Autowired |
||||
ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
ProjectService projectService; |
||||
|
||||
@Autowired |
||||
TaskInstanceMapper taskInstanceMapper; |
||||
|
||||
@Autowired |
||||
ProcessInstanceMapper processInstanceMapper; |
||||
|
||||
@Autowired |
||||
ProcessDefinitionMapper processDefinitionMapper; |
||||
|
||||
/** |
||||
* count task instance states within a project and date range |
||||
* |
||||
* @param loginUser |
||||
* @param projectId |
||||
* @param startDate |
||||
* @param endDate |
||||
* @return |
||||
*/ |
||||
public Map<String,Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if(projectId != 0){ |
||||
Project project = projectMapper.queryById(projectId); |
||||
result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId)); |
||||
|
||||
if (getResultStatus(result)){ |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* find all tasks visible to the user in the given project and |
||||
* count them grouped by task state (running, failed, finished, waiting, total) |
||||
*/ |
||||
Date start = null; |
||||
Date end = null; |
||||
|
||||
try { |
||||
start = DateUtils.getScheduleDate(startDate); |
||||
end = DateUtils.getScheduleDate(endDate); |
||||
} catch (Exception e) { |
||||
logger.error(e.getMessage(),e); |
||||
putErrorRequestParamsMsg(result); |
||||
return result; |
||||
} |
||||
|
||||
List<ExecuteStatusCount> taskInstanceStateCounts = |
||||
taskInstanceMapper.countTaskInstanceStateByUser(loginUser.getId(), |
||||
loginUser.getUserType(), start, end, projectId); |
||||
|
||||
// build the DTO only when the query returned data, otherwise the constructor would dereference null
|
||||
if (taskInstanceStateCounts != null) { |
||||
TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts); |
||||
result.put(Constants.DATA_LIST, taskCountResult); |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.TASK_INSTANCE_STATE_COUNT_ERROR); |
||||
} |
||||
return result; |
||||
} |
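// Illustrative call (the date format is whatever DateUtils.getScheduleDate accepts, assumed to
// be "yyyy-MM-dd HH:mm:ss" here):
//   countTaskStateByProject(loginUser, 0, "2019-01-01 00:00:00", "2019-01-31 23:59:59");
// a projectId of 0 skips the per-project auth check; the mapper then decides the query scope.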
||||
|
||||
private void putErrorRequestParamsMsg(Map<String, Object> result) { |
||||
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); |
||||
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); |
||||
} |
||||
|
||||
/** |
||||
* count process instance states within a project and date range |
||||
* |
||||
* @param loginUser |
||||
* @param projectId |
||||
* @param startDate |
||||
* @param endDate |
||||
* @return |
||||
*/ |
||||
public Map<String,Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if(projectId != 0){ |
||||
Project project = projectMapper.queryById(projectId); |
||||
result = projectService.checkProjectAndAuth(loginUser, project, String.valueOf(projectId)); |
||||
|
||||
if (getResultStatus(result)){ |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
Date start = null; |
||||
Date end = null; |
||||
try { |
||||
start = DateUtils.getScheduleDate(startDate); |
||||
end = DateUtils.getScheduleDate(endDate); |
||||
} catch (Exception e) { |
||||
logger.error(e.getMessage(),e); |
||||
putErrorRequestParamsMsg(result); |
||||
return result; |
||||
} |
||||
List<ExecuteStatusCount> processInstanceStateCounts = |
||||
processInstanceMapper.countInstanceStateByUser(loginUser.getId(), |
||||
loginUser.getUserType(), start, end, projectId ); |
||||
|
||||
// build the DTO only when the query returned data, otherwise the constructor would dereference null
|
||||
if (processInstanceStateCounts != null) { |
||||
TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts); |
||||
result.put(Constants.DATA_LIST, taskCountResult); |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.COUNT_PROCESS_INSTANCE_STATE_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* count process definitions grouped by user |
||||
* |
||||
* @param loginUser |
||||
* @param projectId |
||||
* @return |
||||
*/ |
||||
public Map<String,Object> countDefinitionByUser(User loginUser, int projectId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(loginUser.getId(), loginUser.getUserType(), projectId); |
||||
|
||||
DefineUserDto dto = new DefineUserDto(defineGroupByUsers); |
||||
result.put(Constants.DATA_LIST, dto); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* |
||||
* @param result |
||||
* @param status |
||||
*/ |
||||
private void putMsg(Map<String, Object> result, Status status) { |
||||
result.put(Constants.STATUS, status); |
||||
result.put(Constants.MSG, status.getMsg()); |
||||
} |
||||
|
||||
/** |
||||
* check whether the previous check failed (result status is not SUCCESS) |
||||
* @param result |
||||
* @return |
||||
*/ |
||||
private boolean getResultStatus(Map<String, Object> result) { |
||||
Status resultEnum = (Status) result.get(Constants.STATUS); |
||||
return resultEnum != Status.SUCCESS; |
||||
} |
||||
} |
@ -0,0 +1,603 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.DbType; |
||||
import cn.escheduler.common.job.db.*; |
||||
import cn.escheduler.dao.mapper.DataSourceMapper; |
||||
import cn.escheduler.dao.mapper.DatasourceUserMapper; |
||||
import cn.escheduler.dao.mapper.ProjectMapper; |
||||
import cn.escheduler.dao.model.DataSource; |
||||
import cn.escheduler.dao.model.Resource; |
||||
import cn.escheduler.dao.model.User; |
||||
import com.alibaba.fastjson.JSONObject; |
||||
import com.alibaba.fastjson.TypeReference; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
|
||||
import java.sql.Connection; |
||||
import java.sql.DriverManager; |
||||
import java.util.*; |
||||
|
||||
/** |
||||
* datasource service |
||||
*/ |
||||
@Service |
||||
public class DataSourceService extends BaseService{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class); |
||||
|
||||
public static final String NAME = "name"; |
||||
public static final String NOTE = "note"; |
||||
public static final String TYPE = "type"; |
||||
public static final String HOST = "host"; |
||||
public static final String PORT = "port"; |
||||
public static final String DATABASE = "database"; |
||||
public static final String USER_NAME = "userName"; |
||||
public static final String PASSWORD = "password"; |
||||
public static final String OTHER = "other"; |
||||
|
||||
@Autowired |
||||
private ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
private DataSourceMapper dataSourceMapper; |
||||
|
||||
@Autowired |
||||
private ProjectService projectService; |
||||
|
||||
@Autowired |
||||
private DatasourceUserMapper datasourceUserMapper; |
||||
|
||||
/** |
||||
* create data source |
||||
* |
||||
* @param loginUser |
||||
* @param name |
||||
* @param desc |
||||
* @param type |
||||
* @param parameter |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
// check whether the datasource name is available
|
||||
if (checkName(name, result)) { |
||||
return result; |
||||
} |
||||
Boolean isConnection = checkConnection(type, parameter); |
||||
if (!isConnection) { |
||||
logger.info("connect failed, type:{}, parameter:{}", type, parameter); |
||||
putMsg(result, Status.DATASOURCE_CONNECT_FAILED); |
||||
return result; |
||||
} |
||||
|
||||
BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter); |
||||
if (datasource == null) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, parameter); |
||||
return result; |
||||
} |
||||
|
||||
// build datasource
|
||||
DataSource dataSource = new DataSource(); |
||||
Date now = new Date(); |
||||
|
||||
dataSource.setName(name.trim()); |
||||
dataSource.setNote(desc); |
||||
dataSource.setUserId(loginUser.getId()); |
||||
dataSource.setUserName(loginUser.getUserName()); |
||||
dataSource.setType(type); |
||||
dataSource.setConnectionParams(parameter); |
||||
dataSource.setCreateTime(now); |
||||
dataSource.setUpdateTime(now); |
||||
dataSourceMapper.insert(dataSource); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* update datasource |
||||
* |
||||
* @param loginUser |
||||
* @param name |
||||
* @param desc |
||||
* @param type |
||||
* @param parameter |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
// determine whether the data source exists
|
||||
DataSource dataSource = dataSourceMapper.queryById(id); |
||||
if (dataSource == null) { |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
|
||||
// check whether the new name is available
|
||||
if(!name.trim().equals(dataSource.getName()) && checkName(name, result)){ |
||||
return result; |
||||
} |
||||
|
||||
Boolean isConnection = checkConnection(type, parameter); |
||||
if (!isConnection) { |
||||
logger.info("connect failed, type:{}, parameter:{}", type, parameter); |
||||
putMsg(result, Status.DATASOURCE_CONNECT_FAILED); |
||||
return result; |
||||
} |
||||
Date now = new Date(); |
||||
|
||||
dataSource.setName(name.trim()); |
||||
dataSource.setNote(desc); |
||||
dataSource.setUserName(loginUser.getUserName()); |
||||
dataSource.setType(type); |
||||
dataSource.setConnectionParams(parameter); |
||||
dataSource.setUpdateTime(now); |
||||
dataSourceMapper.update(dataSource); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
private boolean checkName(String name, Map<String, Object> result) { |
||||
List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim()); |
||||
if (queryDataSource != null && queryDataSource.size() > 0) { |
||||
putMsg(result, Status.DATASOURCE_EXIST); |
||||
return true; |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query datasource detail by id |
||||
*/ |
||||
public Map<String, Object> queryDataSource(int id) { |
||||
|
||||
Map<String, Object> result = new HashMap<String, Object>(5); |
||||
DataSource dataSource = dataSourceMapper.queryById(id); |
||||
if (dataSource == null) { |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
// type
|
||||
String dataSourceType = dataSource.getType().toString(); |
||||
// name
|
||||
String dataSourceName = dataSource.getName(); |
||||
// desc
|
||||
String desc = dataSource.getNote(); |
||||
// parameter
|
||||
String parameter = dataSource.getConnectionParams(); |
||||
|
||||
BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter); |
||||
String database = datasourceForm.getDatabase(); |
||||
// jdbc connection params
|
||||
String other = datasourceForm.getOther(); |
||||
String address = datasourceForm.getAddress(); |
||||
|
||||
String[] hostsPorts = getHostsAndPort(address); |
||||
// ip host
|
||||
String host = hostsPorts[0]; |
||||
// port
|
||||
String port = hostsPorts[1]; |
||||
String separator = ""; |
||||
|
||||
switch (dataSource.getType()) { |
||||
case HIVE: |
||||
separator = ";"; |
||||
break; |
||||
case MYSQL: |
||||
separator = "&"; |
||||
break; |
||||
case POSTGRESQL: |
||||
separator = "&"; |
||||
break; |
||||
default: |
||||
separator = "&"; |
||||
break; |
||||
} |
||||
|
||||
Map<String, String> otherMap = new LinkedHashMap<String, String>(); |
||||
if (other != null) { |
||||
String[] configs = other.split(separator); |
||||
for (String config : configs) { |
||||
otherMap.put(config.split("=")[0], config.split("=")[1]); |
||||
} |
||||
|
||||
} |
||||
|
||||
Map<String, Object> map = new HashMap<>(10); |
||||
map.put(NAME, dataSourceName); |
||||
map.put(NOTE, desc); |
||||
map.put(TYPE, dataSourceType); |
||||
map.put(HOST, host); |
||||
map.put(PORT, port); |
||||
map.put(DATABASE, database); |
||||
map.put(USER_NAME, datasourceForm.getUser()); |
||||
map.put(PASSWORD, datasourceForm.getPassword()); |
||||
map.put(OTHER, otherMap); |
||||
result.put(Constants.DATA_LIST, map); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query datasource list by keyword |
||||
* |
||||
* @param loginUser |
||||
* @param searchVal |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
Integer count = getTotalCount(loginUser); |
||||
|
||||
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize); |
||||
pageInfo.setTotalCount(count); |
||||
List<DataSource> datasourceList = getDataSources(loginUser, searchVal, pageSize, pageInfo); |
||||
|
||||
pageInfo.setLists(datasourceList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* get list paging |
||||
* |
||||
* @param loginUser |
||||
* @param searchVal |
||||
* @param pageSize |
||||
* @param pageInfo |
||||
* @return |
||||
*/ |
||||
private List<DataSource> getDataSources(User loginUser, String searchVal, Integer pageSize, PageInfo pageInfo) { |
||||
if (isAdmin(loginUser)) { |
||||
return dataSourceMapper.queryAllDataSourcePaging(searchVal, pageInfo.getStart(), pageSize); |
||||
} |
||||
return dataSourceMapper.queryDataSourcePaging(loginUser.getId(), searchVal, |
||||
pageInfo.getStart(), pageSize); |
||||
} |
||||
|
||||
/** |
||||
* get datasource total num |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
private Integer getTotalCount(User loginUser) { |
||||
if (isAdmin(loginUser)) { |
||||
return dataSourceMapper.countAllDatasource(); |
||||
} |
||||
return dataSourceMapper.countUserDatasource(loginUser.getId()); |
||||
} |
||||
|
||||
/** |
||||
* query data resource list |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryDataSourceList(User loginUser, Integer type) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
List<DataSource> datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type); |
||||
|
||||
result.put(Constants.DATA_LIST, datasourceList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* verify datasource exists |
||||
* |
||||
* @param loginUser |
||||
* @param name |
||||
* @return |
||||
*/ |
||||
public Result verifyDataSourceName(User loginUser, String name) { |
||||
Result result = new Result(); |
||||
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name); |
||||
if (dataSourceList != null && dataSourceList.size() > 0) { |
||||
logger.error("datasource name:{} has exist, can't create again.", name); |
||||
putMsg(result, Status.DATASOURCE_EXIST); |
||||
} else { |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* get connection |
||||
* |
||||
* @param dbType |
||||
* @param parameter |
||||
* @return |
||||
*/ |
||||
private Connection getConnection(DbType dbType, String parameter) { |
||||
Connection connection = null; |
||||
BaseDataSource datasource = null; |
||||
try { |
||||
switch (dbType) { |
||||
case POSTGRESQL: |
||||
datasource = JSONObject.parseObject(parameter, PostgreDataSource.class); |
||||
Class.forName(Constants.ORG_POSTGRESQL_DRIVER); |
||||
break; |
||||
case MYSQL: |
||||
datasource = JSONObject.parseObject(parameter, MySQLDataSource.class); |
||||
Class.forName(Constants.COM_MYSQL_JDBC_DRIVER); |
||||
break; |
||||
case HIVE: |
||||
datasource = JSONObject.parseObject(parameter, HiveDataSource.class); |
||||
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER); |
||||
break; |
||||
case SPARK: |
||||
datasource = JSONObject.parseObject(parameter, SparkDataSource.class); |
||||
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER); |
||||
break; |
||||
default: |
||||
break; |
||||
} |
||||
if(datasource != null){ |
||||
connection = DriverManager.getConnection(datasource.getJdbcUrl(), datasource.getUser(), datasource.getPassword()); |
||||
} |
||||
} catch (Exception e) { |
||||
logger.error(e.getMessage(),e); |
||||
} |
||||
return connection; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* check connection |
||||
* |
||||
* @param type |
||||
* @param parameter |
||||
* @return |
||||
*/ |
||||
public boolean checkConnection(DbType type, String parameter) { |
||||
Boolean isConnection = false; |
||||
Connection con = getConnection(type, parameter); |
||||
if (con != null) { |
||||
isConnection = true; |
||||
} |
||||
return isConnection; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* test connection |
||||
* |
||||
* @param loginUser |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
public boolean connectionTest(User loginUser, int id) { |
||||
DataSource dataSource = dataSourceMapper.queryById(id); |
||||
return checkConnection(dataSource.getType(), dataSource.getConnectionParams()); |
||||
} |
||||
|
||||
/** |
||||
* build datasource connection parameters |
||||
* |
||||
* @param name |
||||
* @param desc |
||||
* @param type |
||||
* @param host |
||||
* @param port |
||||
* @param database |
||||
* @param userName |
||||
* @param password |
||||
* @param other |
||||
* @return |
||||
*/ |
||||
public String buildParameter(String name, String desc, DbType type, String host, String port, String database, String userName, String password, String other) { |
||||
|
||||
String address = buildAddress(type, host, port); |
||||
String jdbcUrl = address + "/" + database; |
||||
String separator = ""; |
||||
if (Constants.MYSQL.equals(type.name()) || Constants.POSTGRESQL.equals(type.name())) { |
||||
separator = "&"; |
||||
} else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) { |
||||
separator = ";"; |
||||
} |
||||
|
||||
Map<String, Object> parameterMap = new LinkedHashMap<String, Object>(6); |
||||
parameterMap.put(Constants.ADDRESS, address); |
||||
parameterMap.put(Constants.DATABASE, database); |
||||
parameterMap.put(Constants.JDBC_URL, jdbcUrl); |
||||
parameterMap.put(Constants.USER, userName); |
||||
parameterMap.put(Constants.PASSWORD, password); |
||||
if (other != null && !"".equals(other)) { |
||||
Map map = JSONObject.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() { |
||||
}); |
||||
if (map.size() > 0) { |
||||
Set<String> keys = map.keySet(); |
||||
StringBuilder otherSb = new StringBuilder(); |
||||
for (String key : keys) { |
||||
otherSb.append(String.format("%s=%s%s", key, map.get(key), separator)); |
||||
|
||||
} |
||||
otherSb.deleteCharAt(otherSb.length() - 1); |
||||
parameterMap.put(Constants.OTHER, otherSb); |
||||
} |
||||
|
||||
} |
||||
|
||||
logger.info("parameters map-----" + JSONObject.toJSONString(parameterMap)); |
||||
return JSONObject.toJSONString(parameterMap); |
||||
|
||||
|
||||
} |
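// Illustrative result for a MySQL datasource (assuming Constants.JDBC_MYSQL is "jdbc:mysql://"
// and the Constants map keys serialize as the lowercase names shown; both are assumptions):
//   buildParameter("ds", "test", DbType.MYSQL, "127.0.0.1", "3306", "demo", "root", "pwd", "{\"useSSL\":\"false\"}")
//   -> {"address":"jdbc:mysql://127.0.0.1:3306","database":"demo",
//       "jdbcUrl":"jdbc:mysql://127.0.0.1:3306/demo","user":"root","password":"pwd","other":"useSSL=false"}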
||||
|
||||
private String buildAddress(DbType type, String host, String port) { |
||||
StringBuilder sb = new StringBuilder(); |
||||
if (Constants.MYSQL.equals(type.name())) { |
||||
sb.append(Constants.JDBC_MYSQL); |
||||
sb.append(host).append(":").append(port); |
||||
} else if (Constants.POSTGRESQL.equals(type.name())) { |
||||
sb.append(Constants.JDBC_POSTGRESQL); |
||||
sb.append(host).append(":").append(port); |
||||
} else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) { |
||||
sb.append(Constants.JDBC_HIVE_2); |
||||
String[] hostArray = host.split(","); |
||||
if (hostArray.length > 0) { |
||||
for (String zkHost : hostArray) { |
||||
sb.append(String.format("%s:%s,", zkHost, port)); |
||||
} |
||||
sb.deleteCharAt(sb.length() - 1); |
||||
} |
||||
} |
||||
|
||||
return sb.toString(); |
||||
} |
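// Illustrative outputs (the JDBC URL prefixes come from Constants and are assumed here):
//   buildAddress(DbType.MYSQL, "127.0.0.1", "3306")    -> "jdbc:mysql://127.0.0.1:3306"
//   buildAddress(DbType.HIVE,  "zk1,zk2,zk3", "10000") -> "jdbc:hive2://zk1:10000,zk2:10000,zk3:10000"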
||||
|
||||
/** |
||||
* delete datasource |
||||
* |
||||
* @param loginUser |
||||
* @param datasourceId |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager",rollbackFor = Exception.class) |
||||
public Result delete(User loginUser, int datasourceId) { |
||||
Result result = new Result(); |
||||
try { |
||||
//query datasource by id
|
||||
DataSource dataSource = dataSourceMapper.queryById(datasourceId); |
||||
if(dataSource == null){ |
||||
logger.error("resource id {} not exist", datasourceId); |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
if(loginUser.getId() != dataSource.getUserId()){ |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return result; |
||||
} |
||||
dataSourceMapper.deleteDataSourceById(datasourceId); |
||||
datasourceUserMapper.deleteByDatasourceId(datasourceId); |
||||
putMsg(result, Status.SUCCESS); |
||||
} catch (Exception e) { |
||||
logger.error("delete datasource fail",e); |
||||
throw new RuntimeException("delete datasource fail"); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* unauthorized datasource |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> unauthDatasource(User loginUser, Integer userId) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
// only admin can operate
|
||||
if (!isAdmin(loginUser)) { |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query all data sources except userId |
||||
*/ |
||||
List<DataSource> resultList = new ArrayList<>(); |
||||
List<DataSource> datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId); |
||||
Set<DataSource> datasourceSet = null; |
||||
if (datasourceList != null && datasourceList.size() > 0) { |
||||
datasourceSet = new HashSet<>(datasourceList); |
||||
|
||||
List<DataSource> authedDataSourceList = dataSourceMapper.authedDatasource(userId); |
||||
|
||||
Set<DataSource> authedDataSourceSet = null; |
||||
if (authedDataSourceList != null && authedDataSourceList.size() > 0) { |
||||
authedDataSourceSet = new HashSet<>(authedDataSourceList); |
||||
datasourceSet.removeAll(authedDataSourceSet); |
||||
|
||||
} |
||||
resultList = new ArrayList<>(datasourceSet); |
||||
} |
||||
result.put(Constants.DATA_LIST, resultList); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* authorized datasource |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> authedDatasource(User loginUser, Integer userId) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
if (!isAdmin(loginUser)) { |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return result; |
||||
} |
||||
|
||||
List<DataSource> authedDatasourceList = dataSourceMapper.authedDatasource(userId); |
||||
result.put(Constants.DATA_LIST, authedDatasourceList); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* get host and port by address |
||||
* |
||||
* @param address |
||||
* @return |
||||
*/ |
||||
private String[] getHostsAndPort(String address) { |
||||
String[] result = new String[2]; |
||||
String[] tmpArray = address.split("//"); |
||||
String hostsAndPorts = tmpArray[tmpArray.length - 1]; |
||||
StringBuilder hosts = new StringBuilder(""); |
||||
String[] hostPortArray = hostsAndPorts.split(","); |
||||
String port = hostPortArray[0].split(":")[1]; |
||||
for (String hostPort : hostPortArray) { |
||||
hosts.append(hostPort.split(":")[0]).append(","); |
||||
} |
||||
hosts.deleteCharAt(hosts.length() - 1); |
||||
result[0] = hosts.toString(); |
||||
result[1] = port; |
||||
return result; |
||||
} |
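// Illustrative round-trip: for the address "jdbc:hive2://host1:10000,host2:10000" this returns
// ["host1,host2", "10000"]; note the port is taken from the first host:port pair only.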
||||
} |
@ -0,0 +1,495 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
|
||||
import cn.escheduler.api.enums.ExecuteType; |
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.common.enums.*; |
||||
import cn.escheduler.common.utils.DateUtils; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import cn.escheduler.dao.ProcessDao; |
||||
import cn.escheduler.dao.mapper.ProcessDefinitionMapper; |
||||
import cn.escheduler.dao.mapper.ProcessInstanceMapper; |
||||
import cn.escheduler.dao.mapper.ProjectMapper; |
||||
import cn.escheduler.dao.model.*; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.text.ParseException; |
||||
import java.util.*; |
||||
|
||||
import static cn.escheduler.common.Constants.*; |
||||
|
||||
/** |
||||
* executor service |
||||
*/ |
||||
@Service |
||||
public class ExecutorService extends BaseService{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ExecutorService.class); |
||||
|
||||
@Autowired |
||||
private ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
private ProjectService projectService; |
||||
|
||||
@Autowired |
||||
private ProcessDefinitionMapper processDefinitionMapper; |
||||
|
||||
@Autowired |
||||
private ProcessDefinitionService processDefinitionService; |
||||
|
||||
|
||||
@Autowired |
||||
private ProcessInstanceMapper processInstanceMapper; |
||||
|
||||
|
||||
@Autowired |
||||
private ProcessDao processDao; |
||||
|
||||
/** |
||||
* execute process instance |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param processDefinitionId process Definition Id |
||||
* @param cronTime cron time |
||||
* @param commandType command type |
||||
* @param failureStrategy failure strategy |
||||
* @param startNodeList start node list |
||||
* @param taskDependType node dependency type |
||||
* @param warningType warning type |
||||
* @param warningGroupId notify group id |
||||
* @param receivers receivers |
||||
* @param receiversCc receivers cc |
||||
* @param timeout timeout |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> execProcessInstance(User loginUser, String projectName, |
||||
int processDefinitionId, String cronTime, CommandType commandType, |
||||
FailureStrategy failureStrategy, String startNodeList, |
||||
TaskDependType taskDependType, WarningType warningType, int warningGroupId, |
||||
String receivers, String receiversCc, RunMode runMode, |
||||
Priority processInstancePriority, Integer timeout) throws ParseException { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
// check that the timeout is within the allowed range
|
||||
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { |
||||
putMsg(result,Status.TASK_TIMEOUT_PARAMS_ERROR); |
||||
return result; |
||||
} |
||||
Project project = projectMapper.queryByName(projectName); |
||||
Map<String, Object> checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); |
||||
if (checkResultAndAuth != null){ |
||||
return checkResultAndAuth; |
||||
} |
||||
|
||||
// check process define release state
|
||||
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineId(processDefinitionId); |
||||
result = checkProcessDefinitionValid(processDefinition, processDefinitionId); |
||||
if(result.get(Constants.STATUS) != Status.SUCCESS){ |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* create command |
||||
*/ |
||||
int create = this.createCommand(commandType, processDefinitionId, |
||||
taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), |
||||
warningGroupId, runMode,processInstancePriority); |
||||
if(create > 0 ){ |
||||
/** |
||||
* update the recipients and cc recipients of the process definition according to its id |
||||
*/ |
||||
processDefinitionMapper.updateReceiversAndCcById(receivers,receiversCc,processDefinitionId); |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
|
||||
|
||||
/** |
||||
* check whether the process definition can be executed |
||||
* |
||||
* @param processDefinition |
||||
* @param processDefineId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId){ |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if (processDefinition == null) { |
||||
// check process definition exists
|
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST,processDefineId); |
||||
} else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { |
||||
// check process definition online
|
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE,processDefineId); |
||||
}else{ |
||||
result.put(Constants.STATUS, Status.SUCCESS); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
|
||||
|
||||
/** |
||||
* perform an action on a process instance: pause, stop, rerun, recover from pause, or start from failed tasks |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processInstanceId |
||||
* @param executeType |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); |
||||
if (processInstance == null) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); |
||||
return result; |
||||
} |
||||
|
||||
ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); |
||||
result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId()); |
||||
if (result.get(Constants.STATUS) != Status.SUCCESS) { |
||||
return result; |
||||
} |
||||
|
||||
checkResult = checkExecuteType(processInstance, executeType); |
||||
Status status = (Status) checkResult.get(Constants.STATUS); |
||||
if (status != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
switch (executeType) { |
||||
case REPEAT_RUNNING: |
||||
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING); |
||||
break; |
||||
case RECOVER_SUSPENDED_PROCESS: |
||||
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS); |
||||
break; |
||||
case START_FAILURE_TASK_PROCESS: |
||||
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS); |
||||
break; |
||||
case STOP: |
||||
if (processInstance.getState() == ExecutionStatus.READY_STOP) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); |
||||
} else { |
||||
processInstance.setCommandType(CommandType.STOP); |
||||
processInstance.addHistoryCmd(CommandType.STOP); |
||||
processDao.updateProcessInstance(processInstance); |
||||
result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_STOP); |
||||
} |
||||
break; |
||||
case PAUSE: |
||||
if (processInstance.getState() == ExecutionStatus.READY_PAUSE) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); |
||||
} else { |
||||
processInstance.setCommandType(CommandType.PAUSE); |
||||
processInstance.addHistoryCmd(CommandType.PAUSE); |
||||
processDao.updateProcessInstance(processInstance); |
||||
result = updateProcessInstanceState(processInstanceId, ExecutionStatus.READY_PAUSE); |
||||
} |
||||
break; |
||||
default: |
||||
logger.error("unknown execute type : {}", executeType); |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); |
||||
|
||||
break; |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* Check whether the state of the process instance matches the requested type of operation |
||||
* |
||||
* @param processInstance |
||||
* @param executeType |
||||
* @return |
||||
*/ |
||||
private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
ExecutionStatus executionStatus = processInstance.getState(); |
||||
boolean checkResult = false; |
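// pause/stop require a running instance, rerun requires a finished one, start-failure-task requires a failed one, and recover-suspended requires a paused one |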
||||
switch (executeType) { |
||||
case PAUSE: |
||||
case STOP: |
||||
if (executionStatus.typeIsRunning()) { |
||||
checkResult = true; |
||||
} |
||||
break; |
||||
case REPEAT_RUNNING: |
||||
if (executionStatus.typeIsFinished()) { |
||||
checkResult = true; |
||||
} |
||||
break; |
||||
case START_FAILURE_TASK_PROCESS: |
||||
if (executionStatus.typeIsFailure()) { |
||||
checkResult = true; |
||||
} |
||||
break; |
||||
case RECOVER_SUSPENDED_PROCESS: |
||||
if (executionStatus.typeIsPause()) { |
||||
checkResult = true; |
||||
} |
break; |
||||
default: |
||||
break; |
||||
} |
||||
if (!checkResult) { |
||||
putMsg(result,Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); |
||||
} else { |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* update process instance state |
||||
* |
||||
* @param processInstanceId |
||||
* @param executionStatus |
||||
* @return |
||||
*/ |
||||
private Map<String, Object> updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
int update = processDao.updateProcessInstanceState(processInstanceId, executionStatus); |
||||
if (update > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* insert a command; used by the web page to rerun a process instance or recover it from pause / failure |
||||
* |
||||
* @param loginUser |
||||
* @param instanceId |
||||
* @param processDefinitionId |
||||
* @param commandType |
||||
* @return |
||||
*/ |
||||
private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Command command = new Command(); |
||||
command.setCommandType(commandType); |
||||
command.setProcessDefinitionId(processDefinitionId); |
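// the command parameter is a one-entry JSON object carrying the id of the instance to recover / rerun |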
||||
command.setCommandParam(String.format("{\"%s\":%d}", |
||||
CMDPARAM_RECOVER_PROCESS_ID_STRING, instanceId)); |
||||
command.setExecutorId(loginUser.getId()); |
||||
|
||||
if(!processDao.verifyIsNeedCreateCommand(command)){ |
||||
putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND,processDefinitionId); |
||||
return result; |
||||
} |
||||
|
||||
int create = processDao.createCommand(command); |
||||
|
||||
if (create > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* check that all sub process definitions are online before starting the process definition |
||||
* @param processDefineId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) { |
||||
Map<String, Object> result = new HashMap<String, Object>(); |
||||
|
||||
if (processDefineId == 0){ |
||||
logger.error("process definition id is null"); |
||||
putMsg(result,Status.REQUEST_PARAMS_NOT_VALID_ERROR,"process definition id"); |
||||
} |
||||
List<String> ids = new ArrayList<>(); |
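// collect the ids of all sub process definitions, recursively |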
||||
processDao.recurseFindSubProcessId(processDefineId, ids); |
||||
if (ids.size() > 0){ |
||||
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(ids); |
||||
if (processDefinitionList != null && processDefinitionList.size() > 0){ |
||||
for (ProcessDefinition processDefinition : processDefinitionList){ |
||||
/** |
||||
* if any sub process definition is not online, return directly |
||||
*/ |
||||
if (processDefinition.getReleaseState() != ReleaseState.ONLINE){ |
||||
putMsg(result,Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); |
||||
logger.info("not release process definition id: {} , name : {}", |
||||
processDefinition.getId(), processDefinition.getName()); |
||||
return result; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query recipients and cc recipients by process definition id |
||||
* |
||||
* @param processDefineId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> getReceiverCc(int processDefineId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
ProcessDefinition processDefinition = processDefinitionMapper.queryByDefineId(processDefineId); |
||||
if (processDefinition == null){ |
||||
throw new RuntimeException("processDefineId is not exists"); |
||||
} |
||||
String receivers = processDefinition.getReceivers(); |
||||
String receiversCc = processDefinition.getReceiversCc(); |
||||
Map<String,String> dataMap = new HashMap<>(); |
||||
dataMap.put(Constants.RECEIVERS,receivers); |
||||
dataMap.put(Constants.RECEIVERS_CC,receiversCc); |
||||
|
||||
result.put(Constants.DATA_LIST, dataMap); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* create command |
||||
* |
||||
* @param commandType |
||||
* @param processDefineId |
||||
* @param nodeDep |
||||
* @param failureStrategy |
||||
* @param startNodeList |
||||
* @param schedule |
||||
* @param warningType |
||||
* @param executorId |
||||
* @param warningGroupId |
||||
* @param runMode |
||||
* @return |
||||
* @throws ParseException |
||||
*/ |
||||
private int createCommand(CommandType commandType, int processDefineId, |
||||
TaskDependType nodeDep, FailureStrategy failureStrategy, |
||||
String startNodeList, String schedule, WarningType warningType, |
||||
int executorId, int warningGroupId, |
||||
RunMode runMode,Priority processInstancePriority) throws ParseException { |
||||
|
||||
/** |
||||
* instantiate command schedule instance |
||||
*/ |
||||
Command command = new Command(); |
||||
|
||||
Map<String,String> cmdParam = new HashMap<>(); |
||||
if(commandType == null){ |
||||
command.setCommandType(CommandType.START_PROCESS); |
||||
}else{ |
||||
command.setCommandType(commandType); |
||||
} |
||||
command.setProcessDefinitionId(processDefineId); |
||||
if(nodeDep != null){ |
||||
command.setTaskDependType(nodeDep); |
||||
} |
||||
if(failureStrategy != null){ |
||||
command.setFailureStrategy(failureStrategy); |
||||
} |
||||
|
||||
if(StringUtils.isNotEmpty(startNodeList)){ |
||||
cmdParam.put(CMDPARAM_START_NODE_NAMES, startNodeList); |
||||
} |
||||
if(warningType != null){ |
||||
command.setWarningType(warningType); |
||||
} |
||||
command.setCommandParam(JSONUtils.toJson(cmdParam)); |
||||
command.setExecutorId(executorId); |
||||
command.setWarningGroupId(warningGroupId); |
||||
command.setProcessInstancePriority(processInstancePriority); |
||||
|
||||
Date start = null; |
||||
Date end = null; |
||||
if(StringUtils.isNotEmpty(schedule)){ |
||||
String[] interval = schedule.split(","); |
||||
if(interval.length == 2){ |
||||
start = DateUtils.getScheduleDate(interval[0]); |
||||
end = DateUtils.getScheduleDate(interval[1]); |
||||
} |
||||
} |
||||
|
||||
if(commandType == CommandType.COMPLEMENT_DATA){ |
||||
runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode; |
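// complement data: serial mode creates a single command covering the whole [start, end] range, parallel mode creates one command per day |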
||||
if(runMode == RunMode.RUN_MODE_SERIAL){ |
||||
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); |
||||
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end)); |
||||
command.setCommandParam(JSONUtils.toJson(cmdParam)); |
||||
return processDao.createCommand(command); |
||||
}else if (runMode == RunMode.RUN_MODE_PARALLEL){ |
||||
int runCount = 0; |
||||
while(!start.after(end)){ |
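// each generated command covers a single day (start date == end date), then the date advances by one day |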
||||
runCount += 1; |
||||
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start)); |
||||
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start)); |
||||
command.setCommandParam(JSONUtils.toJson(cmdParam)); |
||||
processDao.createCommand(command); |
||||
start = DateUtils.getSomeDay(start, 1); |
||||
} |
||||
return runCount; |
||||
} |
||||
}else{ |
||||
command.setCommandParam(JSONUtils.toJson(cmdParam)); |
||||
return processDao.createCommand(command); |
||||
} |
||||
|
||||
return 0; |
||||
} |
||||
|
||||
/** |
||||
* check result and auth |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param project |
||||
* @return |
||||
*/ |
||||
private Map<String, Object> checkResultAndAuth(User loginUser, String projectName, Project project) { |
||||
// check project auth
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status status = (Status) checkResult.get(Constants.STATUS); |
||||
if (status != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
} |
@ -0,0 +1,86 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.log.LogClient; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.Constants; |
||||
import cn.escheduler.dao.ProcessDao; |
||||
import cn.escheduler.dao.model.TaskInstance; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
/** |
||||
* log service |
||||
*/ |
||||
@Service |
||||
public class LoggerService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(LoggerService.class); |
||||
|
||||
@Autowired |
||||
private ProcessDao processDao; |
||||
|
||||
/** |
||||
* view log |
||||
* |
||||
* @param taskInstId |
||||
* @param skipLineNum |
||||
* @param limit |
||||
* @return |
||||
*/ |
||||
public Result queryLog(int taskInstId, int skipLineNum, int limit) { |
||||
|
||||
TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); |
||||
String host = taskInstance.getHost(); |
||||
if(StringUtils.isEmpty(host)){ |
||||
return new Result(Status.TASK_INSTANCE_HOST_NOT_FOUND.getCode(), Status.TASK_INSTANCE_HOST_NOT_FOUND.getMsg()); |
||||
} |
||||
logger.info("log host : {} , logPath : {} , logServer port : {}",host,taskInstance.getLogPath(),Constants.RPC_PORT); |
||||
|
||||
Result result = new Result(Status.SUCCESS.getCode(), Status.SUCCESS.getMsg()); |
||||
|
||||
if(host != null){ |
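// fetch the log from the worker host via the log RPC port, skipping skipLineNum lines and returning at most limit lines |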
||||
LogClient logClient = new LogClient(host, Constants.RPC_PORT); |
||||
String log = logClient.rollViewLog(taskInstance.getLogPath(),skipLineNum,limit); |
||||
result.setData(log); |
||||
logger.info(log); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* get the whole log content as bytes |
||||
* |
||||
* @param taskInstId |
||||
* @return |
||||
*/ |
||||
public byte[] getLogBytes(int taskInstId) { |
||||
TaskInstance taskInstance = processDao.findTaskInstanceById(taskInstId); |
||||
if (taskInstance == null){ |
||||
throw new RuntimeException("task instance is null"); |
||||
} |
||||
String host = taskInstance.getHost(); |
||||
LogClient logClient = new LogClient(host, Constants.RPC_PORT); |
||||
return logClient.getLogBytes(taskInstance.getLogPath()); |
||||
} |
||||
} |
@ -0,0 +1,730 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.dto.treeview.Instance; |
||||
import cn.escheduler.api.dto.treeview.TreeViewDto; |
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.common.enums.Flag; |
||||
import cn.escheduler.common.enums.ReleaseState; |
||||
import cn.escheduler.common.enums.TaskType; |
||||
import cn.escheduler.common.graph.DAG; |
||||
import cn.escheduler.common.model.TaskNode; |
||||
import cn.escheduler.common.model.TaskNodeRelation; |
||||
import cn.escheduler.common.process.ProcessDag; |
||||
import cn.escheduler.common.process.Property; |
||||
import cn.escheduler.common.thread.Stopper; |
||||
import cn.escheduler.common.utils.CollectionUtils; |
||||
import cn.escheduler.common.utils.DateUtils; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import cn.escheduler.dao.ProcessDao; |
||||
import cn.escheduler.dao.mapper.*; |
||||
import cn.escheduler.dao.model.*; |
||||
import com.alibaba.fastjson.JSON; |
||||
import com.fasterxml.jackson.core.JsonProcessingException; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
|
||||
import java.util.*; |
||||
import java.util.concurrent.ConcurrentHashMap; |
||||
|
||||
import static cn.escheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR; |
||||
import static cn.escheduler.api.service.SchedulerService.deleteSchedule; |
||||
import static cn.escheduler.api.utils.CheckUtils.checkOtherParams; |
||||
import static cn.escheduler.api.utils.CheckUtils.checkTaskNodeParameters; |
||||
import static cn.escheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID; |
||||
|
||||
/** |
||||
* process definition service |
||||
*/ |
||||
@Service |
||||
public class ProcessDefinitionService extends BaseDAGService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionService.class); |
||||
|
||||
|
||||
@Autowired |
||||
private ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
private ProjectService projectService; |
||||
|
||||
@Autowired |
||||
private ProcessDefinitionMapper processDefineMapper; |
||||
|
||||
@Autowired |
||||
private ProcessInstanceMapper processInstanceMapper; |
||||
|
||||
|
||||
@Autowired |
||||
private TaskInstanceMapper taskInstanceMapper; |
||||
|
||||
@Autowired |
||||
private ScheduleMapper scheduleMapper; |
||||
|
||||
@Autowired |
||||
private ProcessDao processDao; |
||||
|
||||
/** |
||||
* create process definition |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param name |
||||
* @param processDefinitionJson |
||||
* @param desc |
||||
* @param locations |
||||
* @param connects |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> createProcessDefinition(User loginUser, String projectName, String name, |
||||
String processDefinitionJson, String desc, String locations, String connects) throws JsonProcessingException { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
// check project auth
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultStatus = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultStatus != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
ProcessDefinition processDefine = new ProcessDefinition(); |
||||
Date now = new Date(); |
||||
|
||||
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); |
||||
Map<String, Object> checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); |
||||
if (checkProcessJson.get(Constants.STATUS) != Status.SUCCESS) { |
||||
return result; |
||||
} |
||||
|
||||
processDefine.setName(name); |
||||
processDefine.setReleaseState(ReleaseState.OFFLINE); |
||||
processDefine.setProjectId(project.getId()); |
||||
processDefine.setUserId(loginUser.getId()); |
||||
processDefine.setProcessDefinitionJson(processDefinitionJson); |
||||
processDefine.setDesc(desc); |
||||
processDefine.setLocations(locations); |
||||
processDefine.setConnects(connects); |
||||
|
||||
//custom global params
|
||||
List<Property> globalParamsList = processData.getGlobalParams(); |
||||
if (globalParamsList != null && globalParamsList.size() > 0) { |
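// de-duplicate the user-defined global params by passing them through a Set |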
||||
Set<Property> globalParamsSet = new HashSet<>(globalParamsList); |
||||
globalParamsList = new ArrayList<>(globalParamsSet); |
||||
processDefine.setGlobalParamList(globalParamsList); |
||||
} |
||||
processDefine.setCreateTime(now); |
||||
processDefine.setUpdateTime(now); |
||||
processDefine.setFlag(Flag.YES); |
||||
processDefineMapper.insert(processDefine); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query process definition list |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryProccessDefinitionList(User loginUser, String projectName) { |
||||
|
||||
HashMap<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultStatus = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultStatus != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(project.getId()); |
||||
result.put(Constants.DATA_LIST, resourceList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query process definition list with paging |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param searchVal |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultStatus = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultStatus != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
Integer count = processDefineMapper.countDefineNumber(project.getId(), userId, searchVal); |
||||
|
||||
PageInfo pageInfo = new PageInfo<ProcessData>(pageNo, pageSize); |
||||
List<ProcessDefinition> resourceList = processDefineMapper.queryDefineListPaging(project.getId(), |
||||
searchVal, userId, pageInfo.getStart(), pageSize); |
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(resourceList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query detail of process definition |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryProccessDefinitionById(User loginUser, String projectName, Integer processId) { |
||||
|
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultStatus = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultStatus != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processId); |
||||
if (processDefinition == null) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processId); |
||||
} else { |
||||
result.put(Constants.DATA_LIST, processDefinition); |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* update process definition |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param id |
||||
* @param name |
||||
* @param processDefinitionJson |
||||
* @param desc |
||||
* @param locations |
||||
* @param connects |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> updateProccessDefinition(User loginUser, String projectName, int id, String name, |
||||
String processDefinitionJson, String desc, |
||||
String locations, String connects) throws JsonProcessingException { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
Project project = projectMapper.queryByName(projectName); |
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultStatus = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultStatus != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); |
||||
Map<String, Object> checkProcessJson = checkProcessNodeList(processData, processDefinitionJson); |
||||
if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) { |
||||
return result; |
||||
} |
||||
ProcessDefinition processDefinition = processDao.findProcessDefineById(id); |
||||
if (processDefinition == null) { |
||||
// check process definition exists
|
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id); |
||||
return result; |
||||
} else if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { |
||||
// a definition that is online cannot be edited
|
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefinition.getName()); |
||||
return result; |
||||
} else { |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
|
||||
ProcessDefinition processDefine = processDao.findProcessDefineById(id); |
||||
Date now = new Date(); |
||||
|
||||
processDefine.setId(id); |
||||
processDefine.setName(name); |
||||
processDefine.setReleaseState(ReleaseState.OFFLINE); |
||||
processDefine.setProjectId(project.getId()); |
||||
processDefine.setUserId(loginUser.getId()); |
||||
processDefine.setProcessDefinitionJson(processDefinitionJson); |
||||
processDefine.setDesc(desc); |
||||
processDefine.setLocations(locations); |
||||
processDefine.setConnects(connects); |
||||
|
||||
//custom global params
|
||||
List<Property> globalParamsList = processData.getGlobalParams(); |
||||
if (globalParamsList != null && globalParamsList.size() > 0) { |
||||
Set<Property> userDefParamsSet = new HashSet<>(globalParamsList); |
||||
globalParamsList = new ArrayList<>(userDefParamsSet); |
||||
processDefine.setGlobalParamList(globalParamsList); |
||||
} |
||||
processDefine.setUpdateTime(now); |
||||
processDefine.setFlag(Flag.YES); |
||||
if (processDefineMapper.update(processDefine) > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
} else { |
||||
putMsg(result, UPDATE_PROCESS_DEFINITION_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* verify that the process definition name is unique |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param name |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> verifyProccessDefinitionName(User loginUser, String projectName, String name) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), name); |
||||
if (processDefinition == null) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.PROCESS_INSTANCE_EXIST, name); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* release process definition: online / offline |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param id |
||||
* @param releaseState |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager", rollbackFor = Exception.class) |
||||
public Map<String, Object> releaseProcessDefinition(User loginUser, String projectName, int id, int releaseState) { |
||||
HashMap<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
ReleaseState state = ReleaseState.getEnum(releaseState); |
||||
|
||||
switch (state) { |
||||
case ONLINE: { |
||||
processDefineMapper.updateProcessDefinitionReleaseState(id, state); |
||||
break; |
||||
} |
||||
case OFFLINE: { |
||||
processDefineMapper.updateProcessDefinitionReleaseState(id, state); |
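// taking a definition offline also sets each of its schedules offline and calls deleteSchedule for it |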
||||
List<Schedule> scheduleList = scheduleMapper.selectAllByProcessDefineArray(new int[]{id}); |
||||
|
||||
for(Schedule schedule:scheduleList){ |
||||
logger.info("set schedule offline, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id); |
||||
// set status
|
||||
schedule.setReleaseState(ReleaseState.OFFLINE); |
||||
scheduleMapper.update(schedule); |
||||
deleteSchedule(project.getId(), id); |
||||
} |
||||
break; |
||||
} |
||||
default: { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "releaseState"); |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* check that the task nodes of the process definition meet the specifications |
||||
* |
||||
* @param processData |
||||
* @param processDefinitionJson |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> checkProcessNodeList(ProcessData processData, String processDefinitionJson) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
try { |
||||
if (processData == null) { |
||||
logger.error("process data is null"); |
||||
putMsg(result,Status.DATA_IS_NOT_VALID, processDefinitionJson); |
||||
return result; |
||||
} |
||||
|
||||
// check whether the task nodes are valid
|
||||
List<TaskNode> taskNodes = processData.getTasks(); |
||||
|
||||
if (taskNodes == null) { |
||||
logger.error("process node info is empty"); |
||||
putMsg(result, Status.DATA_IS_NULL, processDefinitionJson); |
||||
return result; |
||||
} |
||||
|
||||
// check has cycle
|
||||
if (graphHasCycle(taskNodes)) { |
||||
logger.error("process DAG has cycle"); |
||||
putMsg(result, Status.PROCESS_NODE_HAS_CYCLE); |
||||
return result; |
||||
} |
||||
|
||||
// check whether the parameters of each task node are valid
|
||||
for (TaskNode taskNode : taskNodes) { |
||||
if (!checkTaskNodeParameters(taskNode.getParams(), taskNode.getType())) { |
||||
logger.error("task node {} parameter invalid", taskNode.getName()); |
||||
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName()); |
||||
return result; |
||||
} |
||||
|
||||
// check extra params
|
||||
checkOtherParams(taskNode.getExtras()); |
||||
} |
||||
putMsg(result,Status.SUCCESS); |
||||
} catch (Exception e) { |
||||
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); |
||||
result.put(Constants.MSG, e.getMessage()); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* get task node details based on process definition |
||||
*/ |
||||
public Map<String, Object> getTaskNodeListByDefinitionId(Integer defineId) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(defineId); |
||||
if (processDefinition == null) { |
||||
logger.info("process define not exists"); |
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition.getId()); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
String processDefinitionJson = processDefinition.getProcessDefinitionJson(); |
||||
|
||||
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); |
||||
|
||||
List<TaskNode> taskNodeList = (processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks(); |
||||
|
||||
result.put(Constants.DATA_LIST, taskNodeList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
|
||||
} |
||||
|
||||
/** |
||||
* get task node details based on process definition |
||||
*/ |
||||
public Map<String, Object> getTaskNodeListByDefinitionIdList(String defineIdList) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
|
||||
Map<Integer, List<TaskNode>> taskNodeMap = new HashMap<>(); |
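// defineIdList is expected to be a comma-separated list of process definition ids |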
||||
String[] idList = defineIdList.split(","); |
||||
List<String> definitionIdList = Arrays.asList(idList); |
||||
List<ProcessDefinition> processDefinitionList = processDefineMapper.queryDefinitionListByIdList(definitionIdList); |
||||
if (processDefinitionList == null || processDefinitionList.size() ==0) { |
||||
logger.info("process definition not exists"); |
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList); |
||||
return result; |
||||
} |
||||
|
||||
for(ProcessDefinition processDefinition : processDefinitionList){ |
||||
String processDefinitionJson = processDefinition.getProcessDefinitionJson(); |
||||
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); |
||||
List<TaskNode> taskNodeList = (processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks(); |
||||
taskNodeMap.put(processDefinition.getId(), taskNodeList); |
||||
} |
||||
|
||||
result.put(Constants.DATA_LIST, taskNodeMap); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
|
||||
} |
||||
|
||||
/** |
||||
* Encapsulates the TreeView structure |
||||
* |
||||
* @param processId |
||||
* @param limit |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> viewTree(Integer processId, Integer limit) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processId); |
||||
if (processDefinition == null) { |
||||
logger.info("process define not exists"); |
||||
throw new RuntimeException("process define not exists"); |
||||
} |
||||
DAG<String, TaskNode, TaskNodeRelation> dag = genDagGraph(processDefinition); |
||||
/** |
||||
* nodes that are running |
||||
*/ |
||||
Map<String, List<TreeViewDto>> runningNodeMap = new ConcurrentHashMap<>(); |
||||
|
||||
/** |
||||
* nodes that are waiting to run |
||||
*/ |
||||
Map<String, List<TreeViewDto>> waitingRunningNodeMap = new ConcurrentHashMap<>(); |
||||
|
||||
/** |
||||
* List of process instances |
||||
*/ |
||||
List<ProcessInstance> processInstanceList = processInstanceMapper.queryByProcessDefineId(processId, limit); |
||||
|
||||
|
||||
if (limit > processInstanceList.size()) { |
||||
limit = processInstanceList.size(); |
||||
} |
||||
|
||||
TreeViewDto parentTreeViewDto = new TreeViewDto(); |
||||
parentTreeViewDto.setName("DAG"); |
||||
parentTreeViewDto.setType(""); |
||||
// the root of the tree is the process definition itself, since the TreeView is built per process definition
|
||||
|
||||
for (int i = limit - 1; i >= 0; i--) { |
||||
ProcessInstance processInstance = processInstanceList.get(i); |
||||
|
||||
Date endTime = processInstance.getEndTime() == null ? new Date() : processInstance.getEndTime(); |
||||
parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), "", processInstance.getState().toString() |
||||
, processInstance.getStartTime(), endTime, processInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime()))); |
||||
} |
||||
|
||||
List<TreeViewDto> parentTreeViewDtoList = new ArrayList<>(); |
||||
parentTreeViewDtoList.add(parentTreeViewDto); |
||||
// encapsulate the task instances, starting from the begin nodes of the DAG
|
||||
for (String startNode : dag.getBeginNode()) { |
||||
runningNodeMap.put(startNode, parentTreeViewDtoList); |
||||
} |
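// walk the DAG level by level: process the current "running" nodes, queue their successors in waitingRunningNodeMap, then repeat until no nodes remain |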
||||
|
||||
while (Stopper.isRunning()) { |
||||
Set<String> postNodeList = null; |
||||
Iterator<Map.Entry<String, List<TreeViewDto>>> iter = runningNodeMap.entrySet().iterator(); |
||||
while (iter.hasNext()) { |
||||
Map.Entry<String, List<TreeViewDto>> en = iter.next(); |
||||
String nodeName = en.getKey(); |
||||
parentTreeViewDtoList = en.getValue(); |
||||
|
||||
TreeViewDto treeViewDto = new TreeViewDto(); |
||||
treeViewDto.setName(nodeName); |
||||
TaskNode taskNode = dag.getNode(nodeName); |
||||
treeViewDto.setType(taskNode.getType()); |
||||
|
||||
|
||||
//set treeViewDto instances
|
||||
for (int i = limit - 1; i >= 0; i--) { |
||||
ProcessInstance processInstance = processInstanceList.get(i); |
||||
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName); |
||||
if (taskInstance == null) { |
||||
treeViewDto.getInstances().add(new Instance(-1, "not running", "null")); |
||||
} else { |
||||
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); |
||||
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime(); |
||||
|
||||
int subProcessId = 0; |
||||
/** |
||||
* if the task is a sub process, return its sub process definition id; otherwise sub id = 0 |
||||
*/ |
||||
if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) { |
||||
String taskJson = taskInstance.getTaskJson(); |
||||
taskNode = JSON.parseObject(taskJson, TaskNode.class); |
||||
subProcessId = Integer.parseInt(JSON.parseObject( |
||||
taskNode.getParams()).getString(CMDPARAM_SUB_PROCESS_DEFINE_ID)); |
||||
} |
||||
treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString() |
||||
, taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId)); |
||||
} |
||||
} |
||||
for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) { |
||||
pTreeViewDto.getChildren().add(treeViewDto); |
||||
} |
||||
postNodeList = dag.getSubsequentNodes(nodeName); |
||||
if (postNodeList != null && postNodeList.size() > 0) { |
||||
for (String nextNodeName : postNodeList) { |
||||
List<TreeViewDto> treeViewDtoList = waitingRunningNodeMap.get(nextNodeName); |
||||
if (treeViewDtoList != null && treeViewDtoList.size() > 0) { |
||||
treeViewDtoList.add(treeViewDto); |
||||
waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); |
||||
} else { |
||||
treeViewDtoList = new ArrayList<>(); |
||||
treeViewDtoList.add(treeViewDto); |
||||
waitingRunningNodeMap.put(nextNodeName, treeViewDtoList); |
||||
} |
||||
} |
||||
} |
||||
runningNodeMap.remove(nodeName); |
||||
} |
||||
|
||||
if (waitingRunningNodeMap == null || waitingRunningNodeMap.size() == 0) { |
||||
break; |
||||
} else { |
||||
runningNodeMap.putAll(waitingRunningNodeMap); |
||||
waitingRunningNodeMap.clear(); |
||||
} |
||||
} |
||||
result.put(Constants.DATA_LIST, parentTreeViewDto); |
||||
result.put(Constants.STATUS, Status.SUCCESS); |
||||
result.put(Constants.MSG, Status.SUCCESS.getMsg()); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* Generate the DAG Graph based on the process definition id |
||||
* |
||||
* @param processDefinition |
||||
* @return |
||||
* @throws Exception |
||||
*/ |
||||
private DAG<String, TaskNode, TaskNodeRelation> genDagGraph(ProcessDefinition processDefinition) throws Exception { |
||||
|
||||
String processDefinitionJson = processDefinition.getProcessDefinitionJson(); |
||||
|
||||
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); |
||||
|
||||
List<TaskNode> taskNodeList = processData.getTasks(); |
||||
|
||||
processDefinition.setGlobalParamList(processData.getGlobalParams()); |
||||
|
||||
|
||||
List<TaskNodeRelation> taskNodeRelations = new ArrayList<>(); |
||||
|
||||
// Traverse node information and build relationships
|
||||
for (TaskNode taskNode : taskNodeList) { |
||||
String preTasks = taskNode.getPreTasks(); |
||||
List<String> preTasksList = JSONUtils.toList(preTasks, String.class); |
||||
|
||||
// If the dependency is not empty
|
||||
if (preTasksList != null) { |
||||
for (String depNode : preTasksList) { |
||||
taskNodeRelations.add(new TaskNodeRelation(depNode, taskNode.getName())); |
||||
} |
||||
} |
||||
} |
||||
|
||||
ProcessDag processDag = new ProcessDag(); |
||||
processDag.setEdges(taskNodeRelations); |
||||
processDag.setNodes(taskNodeList); |
||||
|
||||
|
||||
// Generate concrete Dag to be executed
|
||||
return genDagGraph(processDag); |
||||
|
||||
|
||||
} |
||||
|
||||
/** |
||||
* Generate the DAG of process |
||||
* |
||||
* @return DAG |
||||
*/ |
||||
private DAG<String, TaskNode, TaskNodeRelation> genDagGraph(ProcessDag processDag) { |
||||
DAG<String, TaskNode, TaskNodeRelation> dag = new DAG<>(); |
||||
|
||||
/** |
||||
* Add the nodes |
||||
*/ |
||||
if (CollectionUtils.isNotEmpty(processDag.getNodes())) { |
||||
for (TaskNode node : processDag.getNodes()) { |
||||
dag.addNode(node.getName(), node); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Add the edges |
||||
*/ |
||||
if (CollectionUtils.isNotEmpty(processDag.getEdges())) { |
||||
for (TaskNodeRelation edge : processDag.getEdges()) { |
||||
dag.addEdge(edge.getStartNode(), edge.getEndNode()); |
||||
} |
||||
} |
||||
|
||||
return dag; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* whether the graph has a cycle |
||||
* |
||||
* @param taskNodeResponseList |
||||
* @return |
||||
*/ |
||||
private boolean graphHasCycle(List<TaskNode> taskNodeResponseList) { |
||||
DAG<String, TaskNode, String> graph = new DAG<>(); |
||||
|
||||
// Fill the vertices
|
||||
for (TaskNode taskNodeResponse : taskNodeResponseList) { |
||||
graph.addNode(taskNodeResponse.getName(), taskNodeResponse); |
||||
} |
||||
|
||||
// Fill edge relations
|
||||
for (TaskNode taskNodeResponse : taskNodeResponseList) { |
||||
||||
List<String> preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(),String.class); |
||||
if (CollectionUtils.isNotEmpty(preTasks)) { |
||||
for (String preTask : preTasks) { |
||||
graph.addEdge(preTask, taskNodeResponse.getName()); |
||||
} |
||||
} |
||||
} |
||||
|
||||
return graph.hasCycle(); |
||||
} |
||||
|
||||
} |
||||
|
@ -0,0 +1,587 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.dto.gantt.GanttDto; |
||||
import cn.escheduler.api.dto.gantt.Task; |
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.DependResult; |
||||
import cn.escheduler.common.enums.ExecutionStatus; |
||||
import cn.escheduler.common.enums.Flag; |
||||
import cn.escheduler.common.enums.TaskType; |
||||
import cn.escheduler.common.graph.DAG; |
||||
import cn.escheduler.common.model.TaskNode; |
||||
import cn.escheduler.common.model.TaskNodeRelation; |
||||
import cn.escheduler.common.process.Property; |
||||
import cn.escheduler.common.utils.CollectionUtils; |
||||
import cn.escheduler.common.utils.DateUtils; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import cn.escheduler.common.utils.ParameterUtils; |
||||
import cn.escheduler.common.utils.placeholder.BusinessTimeUtils; |
||||
import cn.escheduler.dao.ProcessDao; |
||||
import cn.escheduler.dao.mapper.ProcessDefinitionMapper; |
||||
import cn.escheduler.dao.mapper.ProcessInstanceMapper; |
||||
import cn.escheduler.dao.mapper.ProjectMapper; |
||||
import cn.escheduler.dao.mapper.TaskInstanceMapper; |
||||
import cn.escheduler.dao.model.*; |
||||
import com.alibaba.fastjson.JSON; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.io.BufferedReader; |
||||
import java.io.ByteArrayInputStream; |
||||
import java.io.IOException; |
||||
import java.io.InputStreamReader; |
||||
import java.nio.charset.Charset; |
||||
import java.text.ParseException; |
||||
import java.util.*; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import static cn.escheduler.common.Constants.*; |
||||
|
||||
/** |
||||
* process instance service |
||||
*/ |
||||
@Service |
||||
public class ProcessInstanceService extends BaseDAGService { |
||||
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class); |
||||
|
||||
@Autowired |
||||
ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
ProjectService projectService; |
||||
|
||||
@Autowired |
||||
ProcessDao processDao; |
||||
|
||||
@Autowired |
||||
ProcessInstanceMapper processInstanceMapper; |
||||
|
||||
@Autowired |
||||
ProcessDefinitionMapper processDefineMapper; |
||||
|
||||
@Autowired |
||||
ProcessDefinitionService processDefinitionService; |
||||
|
||||
@Autowired |
||||
ExecutorService execService; |
||||
|
||||
@Autowired |
||||
TaskInstanceMapper taskInstanceMapper; |
||||
|
||||
@Autowired |
||||
LoggerService loggerService; |
||||
|
||||
/** |
||||
* query process instance by id |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryProcessInstanceById(User loginUser, String projectName, Integer processId) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId); |
||||
result.put(Constants.DATA_LIST, processInstance); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* paging query process instance list, filtering according to project, process definition, time range, keyword, process status |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processDefineId |
||||
* @param startDate |
||||
* @param endDate |
||||
* @param searchVal |
||||
* @param stateType |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, |
||||
String startDate, String endDate, |
||||
String searchVal, ExecutionStatus stateType, String host, |
||||
Integer pageNo, Integer pageSize) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
int[] statusArray = null; |
||||
String statesStr = null; |
||||
// filter by state
|
||||
if (stateType != null) { |
||||
statusArray = new int[]{stateType.ordinal()}; |
||||
} |
||||
if (statusArray != null) { |
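// Arrays.toString renders "[n]"; strip the brackets so the mappers receive a plain comma-separated state string |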
||||
statesStr = Arrays.toString(statusArray).replace("[", "").replace("]", ""); |
||||
} |
||||
|
||||
Date start = null; |
||||
Date end = null; |
||||
try { |
||||
if (StringUtils.isNotEmpty(startDate)) { |
||||
start = DateUtils.getScheduleDate(startDate); |
||||
} |
||||
if (StringUtils.isNotEmpty(endDate)) { |
||||
end = DateUtils.getScheduleDate(endDate); |
||||
} |
||||
} catch (Exception e) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate"); |
||||
return result; |
||||
} |
||||
Integer count = processInstanceMapper.countProcessInstance(project.getId(), processDefineId, statesStr, |
||||
host, start, end, searchVal); |
||||
|
||||
PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize); |
||||
List<ProcessInstance> processInstanceList = processInstanceMapper.queryProcessInstanceListPaging( |
||||
project.getId(), processDefineId, searchVal, statesStr, host, start, end, pageInfo.getStart(), pageSize); |
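// heavyweight json / layout fields are excluded from the paged result below |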
||||
|
||||
Set<String> exclusionSet = new HashSet<String>(){{ |
||||
add(Constants.CLASS); |
||||
add("locations"); |
||||
add("connects"); |
||||
add("processInstanceJson"); |
||||
}}; |
||||
|
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(CollectionUtils.getListByExclusion(processInstanceList, exclusionSet)); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
|
||||
/** |
||||
* query task list by process instance id |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processId); |
||||
List<TaskInstance> taskInstanceList = processDao.findValidTaskListByProcessId(processId); |
||||
addDependResultForTaskList(taskInstanceList); |
||||
Map<String, Object> resultMap = new HashMap<>(); |
||||
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); |
||||
resultMap.put(TASK_LIST, taskInstanceList); |
||||
result.put(Constants.DATA_LIST, resultMap); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* add dependent result for dependent task |
||||
* @param taskInstanceList |
||||
*/ |
||||
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException { |
||||
for(TaskInstance taskInstance: taskInstanceList){ |
||||
if(taskInstance.getTaskType().toUpperCase().equals(TaskType.DEPENDENT.toString())){ |
||||
Result logResult = loggerService.queryLog( |
||||
taskInstance.getId(), 0, 4098); |
||||
if(logResult.getCode() == Status.SUCCESS.ordinal()){ |
||||
String log = (String) logResult.getData(); |
||||
Map<String, DependResult> resultMap = parseLogForDependentResult(log); |
||||
taskInstance.setDependentResult(JSONUtils.toJson(resultMap)); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
public Map<String,DependResult> parseLogForDependentResult(String log) throws IOException { |
||||
Map<String, DependResult> resultMap = new HashMap<>(); |
||||
if(StringUtils.isEmpty(log)){ |
||||
return resultMap; |
||||
} |
||||
|
||||
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(Charset.forName("utf8"))), Charset.forName("utf8"))); |
||||
String line; |
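// only lines containing the dependent-result marker are parsed; the expected shape is "<prefix>:||<dependency name>,<DependResult>" |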
||||
while ((line = br.readLine()) != null) { |
||||
if(line.contains(DEPENDENT_SPLIT)){ |
||||
String[] tmpStringArray = line.split(":\\|\\|"); |
||||
if(tmpStringArray.length != 2){ |
||||
continue; |
||||
} |
||||
String dependResultString = tmpStringArray[1]; |
||||
String[] dependStringArray = dependResultString.split(","); |
||||
if(dependStringArray.length != 2){ |
||||
continue; |
||||
} |
||||
String key = dependStringArray[0].trim(); |
||||
DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim()); |
||||
resultMap.put(key, dependResult); |
||||
} |
||||
} |
||||
return resultMap; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query sub process instance detail info by task id |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param taskId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
TaskInstance taskInstance = processDao.findTaskInstanceById(taskId); |
||||
if (taskInstance == null) { |
||||
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); |
||||
return result; |
||||
} |
||||
if (!taskInstance.isSubProcess()) { |
||||
putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); |
||||
return result; |
||||
} |
||||
|
||||
ProcessInstance subWorkflowInstance = processDao.findSubProcessInstance( |
||||
taskInstance.getProcessInstanceId(), taskInstance.getId()); |
||||
if (subWorkflowInstance == null) { |
||||
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId); |
||||
return result; |
||||
} |
||||
Map<String, Object> dataMap = new HashMap<>(); |
||||
dataMap.put("subProcessInstanceId", subWorkflowInstance.getId()); |
||||
result.put(Constants.DATA_LIST, dataMap); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* update process instance |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processInstanceId |
||||
* @param processInstanceJson |
||||
* @param scheduleTime |
||||
* @param syncDefine |
||||
* @param flag |
||||
* @param locations |
||||
* @param connects |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> updateProcessInstance(User loginUser, String projectName, Integer processInstanceId, |
||||
String processInstanceJson, String scheduleTime, Boolean syncDefine, |
||||
Flag flag, String locations, String connects) throws ParseException { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
//check project permission
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
//check process instance exists
|
||||
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); |
||||
if (processInstance == null) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); |
||||
return result; |
||||
} |
||||
|
||||
//check process instance status
|
||||
if (!processInstance.getState().typeIsFinished()) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, "update"); |
||||
return result; |
||||
} |
||||
Date schedule = null; |
||||
if (scheduleTime != null) { |
||||
schedule = DateUtils.getScheduleDate(scheduleTime); |
||||
} else { |
||||
schedule = processInstance.getScheduleTime(); |
||||
} |
||||
String globalParams = null; |
||||
String originDefParams = null; |
||||
if (StringUtils.isNotEmpty(processInstanceJson)) { |
||||
ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class); |
||||
//check workflow json is valid
|
||||
Map<String, Object> checkFlowJson = processDefinitionService.checkProcessNodeList(processData, processInstanceJson); |
||||
if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) { |
||||
return checkFlowJson; // propagate the concrete validation error instead of an empty result |
||||
} |
||||
|
||||
originDefParams = JSONUtils.toJson(processData.getGlobalParams()); |
||||
List<Property> globalParamList = processData.getGlobalParams(); |
||||
Map<String, String> globalParamMap = globalParamList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); |
||||
globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, |
||||
processInstance.getCmdTypeIfComplement(), schedule); |
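// Hedged note: curingGlobalParams is understood to resolve date/placeholder expressions in the |
// global parameters against the schedule time and complement command type, so the instance |
// stores concrete values rather than raw expressions. |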
||||
} |
||||
int update = processDao.updateProcessInstance(processInstanceId, processInstanceJson, |
||||
globalParams, schedule, flag, locations, connects); |
||||
int updateDefine = 1; |
||||
if (syncDefine && StringUtils.isNotEmpty(processInstanceJson)) { |
||||
ProcessDefinition processDefinition = processDao.findProcessDefineById(processInstance.getProcessDefinitionId()); |
||||
processDefinition.setProcessDefinitionJson(processInstanceJson); |
||||
processDefinition.setGlobalParams(originDefParams); |
||||
processDefinition.setLocations(locations); |
||||
processDefinition.setConnects(connects); |
||||
updateDefine = processDefineMapper.update(processDefinition); |
||||
} |
||||
if (update > 0 && updateDefine > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR); |
||||
} |
||||
|
||||
|
||||
return result; |
||||
|
||||
} |
||||
|
||||
/** |
||||
* query parent process instance detail info by sub process instance id |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param subId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
ProcessInstance subInstance = processDao.findProcessInstanceDetailById(subId); |
||||
if (subInstance == null) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId); |
||||
return result; |
||||
} |
||||
if (subInstance.getIsSubProcess() == Flag.NO) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName()); |
||||
return result; |
||||
} |
||||
|
||||
ProcessInstance parentWorkflowInstance = processDao.findParentProcessInstance(subId); |
||||
if (parentWorkflowInstance == null) { |
||||
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
Map<String, Object> dataMap = new HashMap<>(); |
||||
dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId()); |
||||
result.put(Constants.DATA_LIST, dataMap); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* delete process instance by id, at the same time,delete task instance and their mapping relation data |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param workflowId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer workflowId) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
ProcessInstance processInstance = processDao.findProcessInstanceDetailById(workflowId); |
||||
if (processInstance == null) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, workflowId); |
||||
return result; |
||||
} |
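// Cascade clean-up: delete the instance itself, any sub-process instances it spawned, and the |
// parent/sub mapping rows; only the first delete's row count decides success below. |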
||||
int delete = processDao.deleteWorkProcessInstanceById(workflowId); |
||||
processDao.deleteAllSubWorkProcessByParentId(workflowId); |
||||
processDao.deleteWorkProcessMapByParentId(workflowId); |
||||
|
||||
if (delete > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* view process instance variables |
||||
* |
||||
* @param processInstanceId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> viewVariables(Integer processInstanceId) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); |
||||
|
||||
Map<String, String> timeParams = BusinessTimeUtils |
||||
.getBusinessTime(processInstance.getCmdTypeIfComplement(), |
||||
processInstance.getScheduleTime()); |
||||
|
||||
|
||||
String workflowInstanceJson = processInstance.getProcessInstanceJson(); |
||||
|
||||
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class); |
||||
|
||||
String userDefinedParams = processInstance.getGlobalParams(); |
||||
|
||||
// global params
|
||||
List<Property> globalParams = new ArrayList<>(); |
||||
|
||||
if (userDefinedParams != null && userDefinedParams.length() > 0) { |
||||
globalParams = JSON.parseArray(userDefinedParams, Property.class); |
||||
} |
||||
|
||||
|
||||
List<TaskNode> taskNodeList = workflowData.getTasks(); |
||||
|
||||
// global param string
|
||||
String globalParamStr = JSON.toJSONString(globalParams); |
||||
globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams); |
||||
globalParams = JSON.parseArray(globalParamStr, Property.class); |
||||
for (Property property : globalParams) { |
||||
timeParams.put(property.getProp(), property.getValue()); |
||||
} |
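// timeParams now holds both the built-in business-time values and the resolved global |
// parameters, so the per-task local params below can substitute either kind of placeholder. |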
||||
|
||||
// local params
|
||||
Map<String, List<Property>> localUserDefParams = new HashMap<>(); |
||||
for (TaskNode taskNode : taskNodeList) { |
||||
String parameter = taskNode.getParams(); |
||||
Map<String, String> map = JSONUtils.toMap(parameter); |
||||
String localParams = map.get(LOCAL_PARAMS); |
||||
if (localParams != null && !localParams.isEmpty()) { |
||||
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams); |
||||
List<Property> localParamsList = JSON.parseArray(localParams, Property.class); |
||||
if (localParamsList.size() > 0) { |
||||
localUserDefParams.put(taskNode.getName(), localParamsList); |
||||
} |
||||
} |
||||
|
||||
} |
||||
|
||||
Map<String, Object> resultMap = new HashMap<>(); |
||||
|
||||
resultMap.put(GLOBAL_PARAMS, globalParams); |
||||
resultMap.put(LOCAL_PARAMS, localUserDefParams); |
||||
|
||||
result.put(Constants.DATA_LIST, resultMap); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* encapsulation gantt structure |
||||
* |
||||
* @param processInstanceId |
||||
* @return |
||||
* @throws Exception |
||||
*/ |
||||
public Map<String, Object> viewGantt(Integer processInstanceId) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); |
||||
|
||||
if (processInstance == null) { |
||||
throw new RuntimeException("workflow instance is null"); |
||||
} |
||||
|
||||
GanttDto ganttDto = new GanttDto(); |
||||
|
||||
DAG<String, TaskNode, TaskNodeRelation> dag = processInstance2DAG(processInstance); |
||||
//topological sort
|
||||
List<String> nodeList = dag.topologicalSort(); |
||||
|
||||
ganttDto.setTaskNames(nodeList); |
||||
|
||||
List<Task> taskList = new ArrayList<>(); |
||||
for (String node : nodeList) { |
||||
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node); |
||||
if (taskInstance == null) { |
||||
continue; |
||||
} |
||||
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); |
||||
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime(); |
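// Missing start/end times fall back to "now", presumably so tasks that are still running or not |
// yet started render as valid, non-negative bars in the Gantt view. |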
||||
Task task = new Task(); |
||||
task.setTaskName(taskInstance.getName()); |
||||
task.getStartDate().add(startTime.getTime()); |
||||
task.getEndDate().add(endTime.getTime()); |
||||
task.setIsoStart(startTime); |
||||
task.setIsoEnd(endTime); |
||||
task.setStatus(taskInstance.getState().toString()); |
||||
task.setExecutionDate(taskInstance.getStartTime()); |
||||
task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime())); |
||||
taskList.add(task); |
||||
} |
||||
ganttDto.setTasks(taskList); |
||||
|
||||
result.put(Constants.DATA_LIST, ganttDto); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
} |
@ -0,0 +1,370 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.common.enums.UserType; |
||||
import cn.escheduler.dao.mapper.ProjectMapper; |
||||
import cn.escheduler.dao.mapper.ProjectUserMapper; |
||||
import cn.escheduler.dao.mapper.UserMapper; |
||||
import cn.escheduler.dao.model.Project; |
||||
import cn.escheduler.dao.model.ProjectUser; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.util.*; |
||||
|
||||
import static cn.escheduler.api.utils.CheckUtils.checkDesc; |
||||
|
||||
/** |
||||
* project service |
||||
*/ |
||||
@Service |
||||
public class ProjectService extends BaseService{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ProjectService.class); |
||||
|
||||
@Autowired |
||||
private UserMapper userMapper; |
||||
|
||||
@Autowired |
||||
private UsersService userService; |
||||
|
||||
@Autowired |
||||
private ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
private ProjectUserMapper projectUserMapper; |
||||
|
||||
/** |
||||
* create project |
||||
* |
||||
* @param loginUser |
||||
* @param name |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> createProject(User loginUser, String name, String desc) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Map<String, Object> descCheck = checkDesc(desc); |
||||
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) { |
||||
return descCheck; |
||||
} |
||||
|
||||
/** |
||||
* only general users can create projects. administrators have no corresponding tenants and can only view |
||||
* 管理员没有对应的租户,只能查看,只有普通用户才可以创建项目 |
||||
*/ |
||||
if (!userService.isGeneral(loginUser)) { |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return result; |
||||
} |
||||
|
||||
Project project = projectMapper.queryByName(name); |
||||
if (project != null) { |
||||
putMsg(result, Status.PROJECT_ALREADY_EXISTS, name); |
||||
return result; |
||||
} |
||||
project = new Project(); |
||||
Date now = new Date(); |
||||
|
||||
project.setName(name); |
||||
project.setDesc(desc); |
||||
project.setUserId(loginUser.getId()); |
||||
project.setUserName(loginUser.getUserName()); |
||||
project.setCreateTime(now); |
||||
project.setUpdateTime(now); |
||||
|
||||
if (projectMapper.insert(project) > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.CREATE_PROJECT_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query project details by id |
||||
* |
||||
* @param projectId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryById(Integer projectId) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryById(projectId); |
||||
|
||||
if (project != null) { |
||||
result.put(Constants.DATA_LIST, project); |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.PROJECT_NOT_FOUNT, projectId); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* check project and authorization |
||||
* 检查项目权限 |
||||
* |
||||
* @param loginUser |
||||
* @param project |
||||
* @param projectName |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
if (project == null) { |
||||
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName); |
||||
} else if (!checkReadPermission(loginUser, project)) { |
||||
// check read permission
|
||||
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName); |
||||
} else { |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
|
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* admin can view all projects |
||||
* 如果是管理员,则所有项目都可见 |
||||
* |
||||
* @param loginUser |
||||
* @param pageSize |
||||
* @param pageNo |
||||
* @param searchVal |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
int count = 0; |
||||
PageInfo pageInfo = new PageInfo<Project>(pageNo, pageSize); |
||||
List<Project> projectList = null; |
||||
if (loginUser.getUserType() == UserType.ADMIN_USER) { |
||||
count = projectMapper.countAllProjects(searchVal); |
||||
projectList = projectMapper.queryAllProjectListPaging(pageInfo.getStart(), pageSize, searchVal); |
||||
for (Project project : projectList) { |
||||
project.setPerm(cn.escheduler.common.Constants.DEFAULT_ADMIN_PERMISSION); |
||||
} |
||||
|
||||
} else { |
||||
count = projectMapper.countProjects(loginUser.getId(), searchVal); |
||||
projectList = projectMapper.queryProjectListPaging(loginUser.getId(), |
||||
pageInfo.getStart(), pageSize, searchVal); |
||||
} |
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(projectList); |
||||
result.put(Constants.COUNT, count); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* delete project by id |
||||
* |
||||
* @param loginUser |
||||
* @param projectId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> deleteProject(User loginUser, Integer projectId) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryById(projectId); |
||||
Map<String, Object> checkResult = getCheckResult(loginUser, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
int delete = projectMapper.delete(projectId); |
||||
if (delete > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.DELETE_PROJECT_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* get check result |
||||
* |
||||
* @param loginUser |
||||
* @param project |
||||
* @return |
||||
*/ |
||||
private Map<String, Object> getCheckResult(User loginUser, Project project) { |
||||
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, project, project.getName()); |
||||
Status status = (Status) checkResult.get(Constants.STATUS); |
||||
if (status != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
/** |
||||
* update project |
||||
* |
||||
* @param loginUser |
||||
* @param projectId |
||||
* @param projectName |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
Project project = projectMapper.queryById(projectId); |
||||
Map<String, Object> checkResult = getCheckResult(loginUser, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
project.setName(projectName); |
||||
project.setDesc(desc); |
||||
project.setUpdateTime(new Date()); |
||||
|
||||
int update = projectMapper.update(project); |
||||
if (update > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.UPDATE_PROJECT_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query unauthorized project |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
/** |
||||
* query all project list except specified userId |
||||
*/ |
||||
List<Project> projectList = projectMapper.queryProjectExceptUserId(userId); |
||||
List<Project> resultList = new ArrayList<>(); |
||||
Set<Project> projectSet = null; |
||||
if (projectList != null && projectList.size() > 0) { |
||||
projectSet = new HashSet<>(projectList); |
||||
|
||||
List<Project> authedProjectList = projectMapper.authedProject(userId); |
||||
|
||||
resultList = getUnauthorizedProjects(projectSet, authedProjectList); |
||||
} |
||||
result.put(Constants.DATA_LIST, resultList); |
||||
putMsg(result,Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* get unauthorized project |
||||
* |
||||
* @param projectSet |
||||
* @param authedProjectList |
||||
* @return |
||||
*/ |
||||
private List<Project> getUnauthorizedProjects(Set<Project> projectSet, List<Project> authedProjectList) { |
||||
List<Project> resultList; |
||||
Set<Project> authedProjectSet = null; |
||||
if (authedProjectList != null && authedProjectList.size() > 0) { |
||||
authedProjectSet = new HashSet<>(authedProjectList); |
||||
projectSet.removeAll(authedProjectSet); |
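// Set difference: whatever remains in projectSet is not authorized for the user. This relies on |
// Project implementing equals()/hashCode() consistently (an assumption made here). |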
||||
|
||||
} |
||||
resultList = new ArrayList<>(projectSet); |
||||
return resultList; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query authorized project |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
List<Project> projects = projectMapper.authedProject(userId); |
||||
result.put(Constants.DATA_LIST, projects); |
||||
putMsg(result,Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* check whether have read permission |
||||
* |
||||
* @param user |
||||
* @param project |
||||
* @return |
||||
*/ |
||||
private boolean checkReadPermission(User user, Project project) { |
||||
int permissionId = queryPermission(user, project); |
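// Permissions are assumed to be a bit mask, so the read bit is tested with a bitwise AND, |
// e.g. perm 7 (read|write|execute) -> (7 & READ_PERMISSION) != 0, perm 0 -> no access. |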
||||
return (permissionId & cn.escheduler.common.Constants.READ_PERMISSION) != 0; |
||||
} |
||||
|
||||
/** |
||||
* query permission id |
||||
* |
||||
* @param user |
||||
* @param project |
||||
* @return |
||||
*/ |
||||
private int queryPermission(User user, Project project) { |
||||
if (user.getUserType() == UserType.ADMIN_USER) { |
||||
return cn.escheduler.common.Constants.READ_PERMISSION; |
||||
} |
||||
|
||||
if (project.getUserId() == user.getId()) { |
||||
return cn.escheduler.common.Constants.ALL_PERMISSIONS; |
||||
} |
||||
|
||||
ProjectUser projectUser = projectUserMapper.query(project.getId(), user.getId()); |
||||
|
||||
if (projectUser == null) { |
||||
return 0; |
||||
} |
||||
|
||||
return projectUser.getPerm(); |
||||
|
||||
} |
||||
|
||||
} |
@ -0,0 +1,60 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.dao.mapper.QueueMapper; |
||||
import cn.escheduler.dao.model.Queue; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* queue service |
||||
*/ |
||||
@Service |
||||
public class QueueService extends BaseService{ |
||||
|
||||
|
||||
@Autowired |
||||
private QueueMapper queueMapper; |
||||
|
||||
/** |
||||
* query queue list |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryList(User loginUser) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
List<Queue> queueList = queueMapper.queryAllQueue(); |
||||
result.put(Constants.DATA_LIST, queueList); |
||||
putMsg(result,Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
} |
@ -0,0 +1,825 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.ResourceType; |
||||
import cn.escheduler.common.utils.FileUtils; |
||||
import cn.escheduler.common.utils.HadoopUtils; |
||||
import cn.escheduler.common.utils.PropertyUtils; |
||||
import cn.escheduler.dao.mapper.*; |
||||
import cn.escheduler.dao.model.Resource; |
||||
import cn.escheduler.dao.model.UdfFunc; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.collections.BeanMap; |
||||
import org.apache.commons.lang.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
import org.springframework.web.multipart.MultipartFile; |
||||
|
||||
import java.text.MessageFormat; |
||||
import java.util.*; |
||||
|
||||
import static cn.escheduler.api.enums.Status.UPDATE_RESOURCE_ERROR; |
||||
import static cn.escheduler.common.Constants.*; |
||||
|
||||
/** |
||||
* resources service |
||||
*/ |
||||
@Service |
||||
public class ResourcesService extends BaseService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ResourcesService.class); |
||||
|
||||
@Autowired |
||||
private ResourceMapper resourcesMapper; |
||||
|
||||
@Autowired |
||||
private UdfFuncMapper udfFunctionMapper; |
||||
|
||||
@Autowired |
||||
private TenantMapper tenantMapper; |
||||
|
||||
@Autowired |
||||
private UserMapper userMapper; |
||||
|
||||
@Autowired |
||||
private ResourcesUserMapper resourcesUserMapper; |
||||
|
||||
/** |
||||
* create resource |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @param name |
||||
* @param desc |
||||
* @param file |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager",rollbackFor = Exception.class) |
||||
public Result createResource(User loginUser, |
||||
String name, |
||||
String desc, |
||||
ResourceType type, |
||||
MultipartFile file) { |
||||
Result result = new Result(); |
||||
|
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
putMsg(result, Status.HDFS_NOT_STARTUP); |
||||
return result; |
||||
} |
||||
// file is empty
|
||||
if (file.isEmpty()) { |
||||
logger.error("file is empty: {}", file.getOriginalFilename()); |
||||
putMsg(result, Status.RESOURCE_FILE_IS_EMPTY); |
||||
return result; |
||||
} |
||||
|
||||
// file suffix
|
||||
String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); |
||||
String nameSuffix = FileUtils.suffix(name); |
||||
|
||||
// determine file suffix
|
||||
if (!StringUtils.equals(fileSuffix, nameSuffix)) { |
||||
/** |
||||
* rename file suffix and original suffix must be consistent |
||||
* 重命名的后缀必须与原文件后缀一致 |
||||
*/ |
||||
logger.error("rename file suffix and original suffix must be consistent: {}", file.getOriginalFilename()); |
||||
putMsg(result, Status.RESOURCE_SUFFIX_FORBID_CHANGE); |
||||
return result; |
||||
} |
||||
//
|
||||
//If resource type is UDF, only jar packages are allowed to be uploaded, and the suffix must be .jar
|
||||
if (Constants.UDF.equals(type.name())) { |
||||
if (!JAR.equalsIgnoreCase(fileSuffix)) { |
||||
logger.error(Status.UDF_RESOURCE_SUFFIX_NOT_JAR.getMsg()); |
||||
putMsg(result, Status.UDF_RESOURCE_SUFFIX_NOT_JAR); |
||||
return result; |
||||
} |
||||
} |
||||
if (file.getSize() > Constants.maxFileSize) { |
||||
logger.error("file size is too large: {}", file.getOriginalFilename()); |
||||
putMsg(result, Status.RESOURCE_SIZE_EXCEED_LIMIT); |
||||
return result; |
||||
} |
||||
|
||||
// check whether the resource name already exists |
||||
Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal()); |
||||
if (resource != null) { |
||||
logger.error("resource {} has exist, can't recreate", name); |
||||
putMsg(result, Status.RESOURCE_EXIST); |
||||
return result; |
||||
} |
||||
|
||||
Date now = new Date(); |
||||
|
||||
resource = new Resource(name,file.getOriginalFilename(),desc,loginUser.getId(),type,file.getSize(),now,now); |
||||
|
||||
try { |
||||
resourcesMapper.insert(resource); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
Map dataMap = new BeanMap(resource); |
||||
Map<String, Object> resultMap = new HashMap<String, Object>(); |
||||
for (Object key : dataMap.keySet()) { |
||||
if (!"class".equalsIgnoreCase(key.toString())) { |
||||
resultMap.put(key.toString(), dataMap.get(key)); |
||||
} |
||||
} |
||||
result.setData(resultMap); |
||||
} catch (Exception e) { |
||||
logger.error("resource already exists, can't recreate ", e); |
||||
putMsg(result, Status.CREATE_RESOURCE_ERROR); |
||||
return result; |
||||
} |
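// Note on intent (assumption): the method is @Transactional, so the RuntimeException thrown |
// below when the HDFS upload fails rolls back the resource row inserted above, keeping the |
// database and HDFS consistent. |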
||||
|
||||
// fail upload
|
||||
if (!upload(loginUser, name, file, type)) { |
||||
logger.error("upload resource: {} file: {} failed.", name, file.getOriginalFilename()); |
||||
putMsg(result, Status.HDFS_OPERATION_ERROR); |
||||
throw new RuntimeException(String.format("upload resource: %s file: %s failed.", name, file.getOriginalFilename())); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
|
||||
|
||||
/** |
||||
* update resource |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @param name |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager",rollbackFor = Exception.class) |
||||
public Result updateResource(User loginUser, |
||||
int resourceId, |
||||
String name, |
||||
String desc, |
||||
ResourceType type) { |
||||
Result result = new Result(); |
||||
|
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
putMsg(result, Status.HDFS_NOT_STARTUP); |
||||
return result; |
||||
} |
||||
|
||||
Resource resource = resourcesMapper.queryResourceById(resourceId); |
if (resource == null) { |
putMsg(result, Status.RESOURCE_NOT_EXIST); |
return result; |
} |
// read the original alias only after the null check to avoid a NullPointerException |
String originResourceName = resource.getAlias(); |
||||
if (loginUser.getId() != resource.getUserId()) { |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
if (name.equals(resource.getAlias()) && desc.equals(resource.getDesc())) { |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
//check whether a resource with the new name already exists |
||||
if (!resource.getAlias().equals(name)) { |
||||
Resource needUpdateResource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal()); |
||||
if (needUpdateResource != null) { |
||||
logger.error("resource {} already exists, can't recreate", name); |
||||
putMsg(result, Status.RESOURCE_EXIST); |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
// update resource data |
||||
Date now = new Date(); |
||||
resource.setAlias(name); |
||||
resource.setDesc(desc); |
||||
resource.setUpdateTime(now); |
||||
|
||||
try { |
||||
resourcesMapper.update(resource); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
Map dataMap = new BeanMap(resource); |
||||
Map<String, Object> resultMap = new HashMap<>(5); |
||||
for (Object key : dataMap.keySet()) { |
||||
if (!Constants.CLASS.equalsIgnoreCase(key.toString())) { |
||||
resultMap.put(key.toString(), dataMap.get(key)); |
||||
} |
||||
} |
||||
result.setData(resultMap); |
||||
} catch (Exception e) { |
||||
logger.error(UPDATE_RESOURCE_ERROR.getMsg(), e); |
||||
putMsg(result, Status.UPDATE_RESOURCE_ERROR); |
||||
return result; |
||||
} |
||||
// if name unchanged, return directly without moving on HDFS
|
||||
if (originResourceName.equals(name)) { |
||||
return result; |
||||
} |
||||
|
||||
// hdfs move
|
||||
// query tenant by user id
|
||||
User user = userMapper.queryDetailsById(resource.getUserId()); |
||||
String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); |
||||
// get file hdfs path
|
||||
// delete hdfs file by type
|
||||
String originHdfsFileName = ""; |
||||
String destHdfsFileName = ""; |
||||
if (resource.getType().equals(ResourceType.FILE)) { |
||||
originHdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, originResourceName); |
||||
destHdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, name); |
||||
} else if (resource.getType().equals(ResourceType.UDF)) { |
||||
originHdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, originResourceName); |
||||
destHdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, name); |
||||
} |
||||
try { |
||||
if (HadoopUtils.getInstance().exists(originHdfsFileName)) { |
||||
logger.info("hdfs copy {} -> {}", originHdfsFileName, destHdfsFileName); |
||||
HadoopUtils.getInstance().copy(originHdfsFileName, destHdfsFileName, true, true); |
||||
} else { |
||||
logger.error("{} not exist", originHdfsFileName); |
||||
putMsg(result,Status.RESOURCE_NOT_EXIST); |
||||
} |
||||
} catch (Exception e) { |
||||
logger.error(MessageFormat.format("hdfs copy {0} -> {1} fail", originHdfsFileName, destHdfsFileName), e); |
||||
putMsg(result,Status.HDFS_COPY_FAIL); |
||||
} |
||||
|
||||
return result; |
||||
|
||||
} |
||||
|
||||
/** |
||||
* query resources list paging |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @param searchVal |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryResourceListPaging(User loginUser, ResourceType type, String searchVal, Integer pageNo, Integer pageSize) { |
||||
|
||||
HashMap<String, Object> result = new HashMap<>(5); |
||||
Integer count = 0; |
||||
List<Resource> resourceList = new ArrayList<>(); |
||||
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize); |
||||
if (isAdmin(loginUser)) { |
||||
count = resourcesMapper.countAllResourceNumberByType(type.ordinal()); |
||||
resourceList = resourcesMapper.queryAllResourceListPaging(type.ordinal(), searchVal, |
||||
pageInfo.getStart(), pageSize); |
||||
} else { |
||||
count = resourcesMapper.countResourceNumberByType(loginUser.getId(), type.ordinal()); |
||||
resourceList = resourcesMapper.queryResourceAuthoredPaging(loginUser.getId(), type.ordinal(), searchVal, |
||||
pageInfo.getStart(), pageSize); |
||||
} |
||||
|
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(resourceList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result,Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* upload file to hdfs |
||||
* |
||||
* @param loginUser |
||||
* @param name |
||||
* @param file |
||||
*/ |
||||
private boolean upload(User loginUser, String name, MultipartFile file, ResourceType type) { |
||||
// save to local
|
||||
String fileSuffix = FileUtils.suffix(file.getOriginalFilename()); |
||||
String nameSuffix = FileUtils.suffix(name); |
||||
|
||||
// determine file suffix
|
||||
if (!StringUtils.equals(fileSuffix, nameSuffix)) { |
||||
return false; |
||||
} |
||||
// query tenant
|
||||
String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); |
||||
// random file name
|
||||
String localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); |
||||
|
||||
|
||||
// save file to hdfs, and delete original file
|
||||
String hdfsFilename = ""; |
||||
String resourcePath = ""; |
||||
if (type.equals(ResourceType.FILE)) { |
||||
hdfsFilename = HadoopUtils.getHdfsFilename(tenantCode, name); |
||||
resourcePath = HadoopUtils.getHdfsDir(tenantCode); |
||||
} else if (type.equals(ResourceType.UDF)) { |
||||
hdfsFilename = HadoopUtils.getHdfsUdfFilename(tenantCode, name); |
||||
resourcePath = HadoopUtils.getHdfsUdfDir(tenantCode); |
||||
} |
||||
try { |
||||
if (HadoopUtils.getInstance().exists(resourcePath)) { |
||||
cn.escheduler.api.utils.FileUtils.copyFile(file, localFilename); |
||||
HadoopUtils.getInstance().copyLocalToHdfs(localFilename, hdfsFilename, true, true); |
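// Hedged assumption about the two boolean flags: delete the local temp file after the copy and |
// overwrite any existing HDFS file, which would also explain why the temp file is not removed here. |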
||||
} else { |
||||
logger.error("{} is not exist", resourcePath); |
||||
return false; |
||||
} |
||||
} catch (Exception e) { |
||||
logger.error(e.getMessage(), e); |
||||
return false; |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
/** |
||||
* query resource list |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryResourceList(User loginUser, ResourceType type) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
List<Resource> resourceList = resourcesMapper.queryResourceListAuthored(loginUser.getId(), type.ordinal()); |
||||
result.put(Constants.DATA_LIST, resourceList); |
||||
putMsg(result,Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* delete resource |
||||
* |
||||
* @param loginUser |
||||
* @param resourceId |
||||
*/ |
||||
@Transactional(value = "TransactionManager",rollbackFor = Exception.class) |
||||
public Result delete(User loginUser, int resourceId) throws Exception { |
||||
Result result = new Result(); |
||||
|
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
putMsg(result, Status.HDFS_NOT_STARTUP); |
||||
return result; |
||||
} |
||||
|
||||
//get resource and hdfs path
|
||||
Resource resource = resourcesMapper.queryResourceById(resourceId); |
||||
if (resource == null) { |
||||
logger.error("resource file not exist, resource id {}", resourceId); |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
if (loginUser.getId() != resource.getUserId()) { |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return result; |
||||
} |
||||
|
||||
String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); |
||||
String hdfsFilename = ""; |
||||
|
||||
// delete hdfs file by type
|
||||
hdfsFilename = getHdfsFileName(resource, tenantCode, hdfsFilename); |
||||
|
||||
//delete data in database
|
||||
resourcesMapper.delete(resourceId); |
||||
resourcesUserMapper.deleteByResourceId(resourceId); |
||||
//delete file on hdfs
|
||||
HadoopUtils.getInstance().delete(hdfsFilename, false); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* verify resource by name and type |
||||
* |
||||
* @param name |
||||
* @return |
||||
*/ |
||||
public Result verifyResourceName(String name, ResourceType type) { |
||||
Result result = new Result(); |
||||
Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal()); |
||||
if (resource != null) { |
||||
logger.error("resource type:{} name:{} has exist, can't create again.", type, name); |
||||
putMsg(result, Status.RESOURCE_EXIST); |
||||
} else { |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* view resource file online |
||||
* |
||||
* @param resourceId |
||||
* @return |
||||
*/ |
||||
public Result readResource(int resourceId, int skipLineNum, int limit) { |
||||
Result result = new Result(); |
||||
|
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
putMsg(result, Status.HDFS_NOT_STARTUP); |
||||
return result; |
||||
} |
||||
|
||||
// get resource by id
|
||||
Resource resource = resourcesMapper.queryResourceById(resourceId); |
||||
if (resource == null) { |
||||
logger.error("resouce file not exist, resource id {}", resourceId); |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
//check preview or not by file suffix
|
||||
String nameSuffix = FileUtils.suffix(resource.getAlias()); |
||||
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); |
||||
if (StringUtils.isNotEmpty(resourceViewSuffixs)) { |
||||
List<String> strList = Arrays.asList(resourceViewSuffixs.split(",")); |
||||
if (!strList.contains(nameSuffix)) { |
||||
logger.error("resouce suffix {} not support view, resource id {}", nameSuffix, resourceId); |
||||
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
User user = userMapper.queryDetailsById(resource.getUserId()); |
||||
String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); |
||||
// hdfs path
|
||||
String hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); |
||||
logger.info("resource hdfs path is {} ", hdfsFileName); |
||||
try { |
||||
List<String> content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit); |
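// skipLineNum and limit act as a line offset and a line cap for the preview (inferred from the |
// names), so the UI can page through large files instead of loading them in full. |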
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
Map<String, Object> map = new HashMap<>(); |
||||
map.put(ALIAS, resource.getAlias()); |
||||
map.put(CONTENT, StringUtils.join(content.toArray(), "\n")); |
||||
result.setData(map); |
||||
} catch (Exception e) { |
||||
logger.error(String.format("Resource %s read failed", hdfsFileName), e); |
||||
putMsg(result, Status.HDFS_OPERATION_ERROR); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* create resource file online |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @param fileName |
||||
* @param fileSuffix |
||||
* @param desc |
||||
* @param content |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager",rollbackFor = Exception.class) |
||||
public Result onlineCreateResource(User loginUser, ResourceType type, String fileName, String fileSuffix, String desc, String content) { |
||||
Result result = new Result(); |
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
putMsg(result, Status.HDFS_NOT_STARTUP); |
||||
return result; |
||||
} |
||||
|
||||
//check file suffix
|
||||
String nameSuffix = fileSuffix.trim(); |
||||
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); |
||||
if (StringUtils.isNotEmpty(resourceViewSuffixs)) { |
||||
List<String> strList = Arrays.asList(resourceViewSuffixs.split(",")); |
||||
if (!strList.contains(nameSuffix)) { |
||||
logger.error("resouce suffix {} not support create", nameSuffix); |
||||
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
String name = fileName.trim() + "." + nameSuffix; |
||||
|
||||
//check file already exists
|
||||
Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal()); |
||||
if (resource != null) { |
||||
logger.error("resource {} has exist, can't recreate .", name); |
||||
putMsg(result, Status.RESOURCE_EXIST); |
||||
return result; |
||||
} |
||||
|
||||
// save data
|
||||
Date now = new Date(); |
||||
resource = new Resource(name,name,desc,loginUser.getId(),type,content.getBytes().length,now,now); |
||||
|
||||
resourcesMapper.insert(resource); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
Map dataMap = new BeanMap(resource); |
||||
Map<String, Object> resultMap = new HashMap<>(5); |
||||
for (Object key : dataMap.keySet()) { |
||||
if (!Constants.CLASS.equalsIgnoreCase(key.toString())) { |
||||
resultMap.put(key.toString(), dataMap.get(key)); |
||||
} |
||||
} |
||||
result.setData(resultMap); |
||||
|
||||
String tenantCode = tenantMapper.queryById(loginUser.getTenantId()).getTenantCode(); |
||||
|
||||
result = uploadContentToHdfs(name, tenantCode, content); |
||||
if (!result.getCode().equals(Status.SUCCESS.getCode())) { |
||||
throw new RuntimeException(result.getMsg()); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* update resource content |
||||
* |
||||
* @param resourceId |
||||
* @return |
||||
*/ |
||||
public Result updateResourceContent(int resourceId, String content) { |
||||
Result result = new Result(); |
||||
|
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
putMsg(result, Status.HDFS_NOT_STARTUP); |
||||
return result; |
||||
} |
||||
|
||||
Resource resource = resourcesMapper.queryResourceById(resourceId); |
||||
if (resource == null) { |
||||
logger.error("read file not exist, resource id {}", resourceId); |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
//check can edit by file suffix
|
||||
String nameSuffix = FileUtils.suffix(resource.getAlias()); |
||||
String resourceViewSuffixs = FileUtils.getResourceViewSuffixs(); |
||||
if (StringUtils.isNotEmpty(resourceViewSuffixs)) { |
||||
List<String> strList = Arrays.asList(resourceViewSuffixs.split(",")); |
||||
if (!strList.contains(nameSuffix)) { |
||||
logger.error("resouce suffix {} not support updateProcessInstance, resource id {}", nameSuffix, resourceId); |
||||
putMsg(result, Status.RESOURCE_SUFFIX_NOT_SUPPORT_VIEW); |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
User user = userMapper.queryDetailsById(resource.getUserId()); |
||||
String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); |
||||
|
||||
result = uploadContentToHdfs(resource.getAlias(), tenantCode, content); |
||||
if (!result.getCode().equals(Status.SUCCESS.getCode())) { |
||||
throw new RuntimeException(result.getMsg()); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* write content to a local temp file, then upload it to HDFS, replacing any existing file |
* |
* @param resourceName |
||||
* @param tenantCode |
||||
* @param content |
||||
* @return |
||||
*/ |
||||
private Result uploadContentToHdfs(String resourceName, String tenantCode, String content) { |
||||
Result result = new Result(); |
||||
String localFilename = ""; |
||||
String hdfsFileName = ""; |
||||
try { |
||||
localFilename = FileUtils.getUploadFilename(tenantCode, UUID.randomUUID().toString()); |
||||
|
||||
if (!FileUtils.writeContent2File(content, localFilename)) { |
||||
// write file fail
|
||||
logger.error("file {} fail, content is {}", localFilename, content); |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
|
||||
// get file hdfs path
|
||||
hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resourceName); |
||||
String resourcePath = HadoopUtils.getHdfsDir(tenantCode); |
||||
logger.info("resource hdfs path is {} ", hdfsFileName); |
||||
|
||||
HadoopUtils hadoopUtils = HadoopUtils.getInstance(); |
||||
if (hadoopUtils.exists(resourcePath)) { |
||||
if (hadoopUtils.exists(hdfsFileName)) { |
||||
hadoopUtils.delete(hdfsFileName, false); |
||||
} |
||||
|
||||
hadoopUtils.copyLocalToHdfs(localFilename, hdfsFileName, true, true); |
||||
} else { |
||||
logger.error("{} is not exist", resourcePath); |
||||
result.setCode(Status.HDFS_OPERATION_ERROR.getCode()); |
||||
result.setMsg(String.format("%s is not exist", resourcePath)); |
||||
} |
||||
} catch (Exception e) { |
||||
logger.error(e.getMessage(), e); |
||||
result.setCode(Status.HDFS_OPERATION_ERROR.getCode()); |
||||
result.setMsg(String.format("copy %s to hdfs %s fail", localFilename, hdfsFileName)); |
||||
return result; |
||||
} |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* download file |
||||
* |
||||
* @param resourceId |
||||
* @return |
||||
*/ |
||||
public org.springframework.core.io.Resource downloadResource(int resourceId) throws Exception { |
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
throw new RuntimeException("hdfs not startup"); |
||||
} |
||||
|
||||
Resource resource = resourcesMapper.queryResourceById(resourceId); |
||||
if (resource == null) { |
||||
logger.error("download file not exist, resource id {}", resourceId); |
||||
return null; |
||||
} |
||||
User user = userMapper.queryDetailsById(resource.getUserId()); |
||||
String tenantCode = tenantMapper.queryById(user.getTenantId()).getTenantCode(); |
||||
|
||||
String hdfsFileName = ""; |
||||
hdfsFileName = getHdfsFileName(resource, tenantCode, hdfsFileName); |
||||
|
||||
String localFileName = FileUtils.getDownloadFilename(resource.getAlias()); |
||||
logger.info("resource hdfs path is {} ", hdfsFileName); |
||||
|
||||
HadoopUtils.getInstance().copyHdfsToLocal(hdfsFileName, localFileName, false, true); |
||||
org.springframework.core.io.Resource file = cn.escheduler.api.utils.FileUtils.file2Resource(localFileName); |
||||
return file; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* unauthorized file |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> unauthorizedFile(User loginUser, Integer userId) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
List<Resource> resourceList = resourcesMapper.queryResourceExceptUserId(userId); |
||||
Set<Resource> resourceSet = new HashSet<>(); // start empty so the DATA_LIST put below never sees null |
||||
if (resourceList != null && resourceList.size() > 0) { |
||||
resourceSet = new HashSet<>(resourceList); |
||||
|
||||
List<Resource> authedResourceList = resourcesMapper.queryAuthorizedResourceList(userId); |
||||
|
||||
getAuthorizedResourceList(resourceSet, authedResourceList); |
||||
} |
||||
result.put(Constants.DATA_LIST, new ArrayList<>(resourceSet)); |
||||
putMsg(result,Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
|
||||
|
||||
/** |
||||
* unauthorized udf function |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> unauthorizedUDFFunction(User loginUser, Integer userId) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
//only admin can operate
|
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
List<UdfFunc> udfFuncList = udfFunctionMapper.queryUdfFuncExceptUserId(userId); |
||||
List<UdfFunc> resultList = new ArrayList<>(); |
||||
Set<UdfFunc> udfFuncSet = null; |
||||
if (udfFuncList != null && udfFuncList.size() > 0) { |
||||
udfFuncSet = new HashSet<>(udfFuncList); |
||||
|
||||
List<UdfFunc> authedUDFFuncList = udfFunctionMapper.authedUdfFunc(userId); |
||||
|
||||
getAuthorizedResourceList(udfFuncSet, authedUDFFuncList); |
||||
resultList = new ArrayList<>(udfFuncSet); |
||||
} |
||||
result.put(Constants.DATA_LIST, resultList); |
||||
putMsg(result,Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
|
||||
|
||||
/** |
||||
* authorized udf function |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> authorizedUDFFunction(User loginUser, Integer userId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
List<UdfFunc> udfFuncs = udfFunctionMapper.authedUdfFunc(userId); |
||||
result.put(Constants.DATA_LIST, udfFuncs); |
||||
putMsg(result,Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* authorized file |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> authorizedFile(User loginUser, Integer userId) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if (checkAdmin(loginUser, result)){ |
||||
return result; |
||||
} |
||||
List<Resource> authedResources = resourcesMapper.queryAuthorizedResourceList(userId); |
||||
|
||||
result.put(Constants.DATA_LIST, authedResources); |
||||
putMsg(result,Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* get hdfs file name |
||||
* |
||||
* @param resource |
||||
* @param tenantCode |
||||
* @param hdfsFileName |
||||
* @return |
||||
*/ |
||||
private String getHdfsFileName(Resource resource, String tenantCode, String hdfsFileName) { |
||||
if (resource.getType().equals(ResourceType.FILE)) { |
||||
hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); |
||||
} else if (resource.getType().equals(ResourceType.UDF)) { |
||||
hdfsFileName = HadoopUtils.getHdfsUdfFilename(tenantCode, resource.getAlias()); |
||||
} |
||||
return hdfsFileName; |
||||
} |
||||
|
||||
/** |
||||
* get authorized resource list |
||||
* |
||||
* @param resourceSet |
||||
* @param authedResourceList |
||||
*/ |
||||
private void getAuthorizedResourceList(Set<?> resourceSet, List<?> authedResourceList) { |
||||
Set<?> authedResourceSet = null; |
||||
if (authedResourceList != null && authedResourceList.size() > 0) { |
||||
authedResourceSet = new HashSet<>(authedResourceList); |
||||
resourceSet.removeAll(authedResourceSet); |
||||
|
||||
} |
||||
} |
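
The unauthorized* methods above and this helper all rely on the same set-difference idea: build a set of all candidate items, then removeAll the already-authorized ones. A minimal standalone sketch of that pattern follows, using plain strings instead of the Resource/UdfFunc entities; for the real entities the same approach only works if they implement equals and hashCode consistently, which is assumed here.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SetDifferenceSketch {
    public static void main(String[] args) {
        // all candidate items (stand-in for "every resource except the user's own")
        Set<String> all = new HashSet<>(Arrays.asList("a.sh", "b.sql", "c.jar"));
        // items the user is already authorized for (stand-in for queryAuthorizedResourceList)
        List<String> authed = Arrays.asList("b.sql");

        // same pattern as getAuthorizedResourceList: drop the authorized ones,
        // what remains is the unauthorized set
        all.removeAll(new HashSet<>(authed));

        System.out.println(all); // e.g. [a.sh, c.jar] (iteration order not guaranteed)
    }
}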
||||
|
||||
} |
@ -0,0 +1,488 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
|
||||
import cn.escheduler.api.dto.ScheduleParam; |
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.quartz.ProcessScheduleJob; |
||||
import cn.escheduler.api.quartz.QuartzExecutors; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.common.enums.FailureStrategy; |
||||
import cn.escheduler.common.enums.Priority; |
||||
import cn.escheduler.common.enums.ReleaseState; |
||||
import cn.escheduler.common.enums.WarningType; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import cn.escheduler.dao.ProcessDao; |
||||
import cn.escheduler.dao.mapper.MasterServerMapper; |
||||
import cn.escheduler.dao.mapper.ProcessDefinitionMapper; |
||||
import cn.escheduler.dao.mapper.ProjectMapper; |
||||
import cn.escheduler.dao.mapper.ScheduleMapper; |
||||
import cn.escheduler.dao.model.*; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
|
||||
import java.io.IOException; |
||||
import java.util.*; |
||||
|
||||
/** |
||||
* scheduler service |
||||
*/ |
||||
@Service |
||||
public class SchedulerService extends BaseService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(SchedulerService.class); |
||||
|
||||
@Autowired |
||||
private ProjectService projectService; |
||||
|
||||
@Autowired |
||||
private ExecutorService executorService; |
||||
|
||||
@Autowired |
||||
private ProcessDao processDao; |
||||
|
||||
@Autowired |
||||
private MasterServerMapper masterServerMapper; |
||||
|
||||
@Autowired |
||||
private ScheduleMapper scheduleMapper; |
||||
|
||||
@Autowired |
||||
private ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
private ProcessDefinitionMapper processDefinitionMapper; |
||||
|
||||
/** |
||||
* save schedule |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processDefineId |
||||
* @param schedule |
||||
* @param warningType |
||||
* @param warningGroupId |
||||
* @param failureStrategy |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager", rollbackFor = Exception.class) |
||||
public Map<String, Object> insertSchedule(User loginUser, String projectName, Integer processDefineId, String schedule, WarningType warningType, |
||||
int warningGroupId, FailureStrategy failureStrategy, |
||||
String receivers, String receiversCc,Priority processInstancePriority) throws IOException { |
||||
|
||||
Map<String, Object> result = new HashMap<String, Object>(5); |
||||
|
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
// check project auth
|
||||
Map<String, Object> checkResult = checkAuth(loginUser, projectName, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
// check work flow define release state
|
||||
ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId); |
||||
result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId); |
||||
if (result.get(Constants.STATUS) != Status.SUCCESS) { |
||||
return result; |
||||
} |
||||
|
||||
Schedule scheduleObj = new Schedule(); |
||||
Date now = new Date(); |
||||
|
||||
scheduleObj.setProjectName(projectName); |
||||
scheduleObj.setProcessDefinitionId(processDefinition.getId()); |
||||
scheduleObj.setProcessDefinitionName(processDefinition.getName()); |
||||
|
||||
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class); |
||||
scheduleObj.setStartTime(scheduleParam.getStartTime()); |
||||
scheduleObj.setEndTime(scheduleParam.getEndTime()); |
||||
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { |
||||
logger.error("crontab {} is not a valid cron expression", scheduleParam.getCrontab());
||||
|
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab()); |
||||
return result; |
||||
} |
||||
scheduleObj.setCrontab(scheduleParam.getCrontab()); |
||||
scheduleObj.setWarningType(warningType); |
||||
scheduleObj.setWarningGroupId(warningGroupId); |
||||
scheduleObj.setFailureStrategy(failureStrategy); |
||||
scheduleObj.setCreateTime(now); |
||||
scheduleObj.setUpdateTime(now); |
||||
scheduleObj.setUserId(loginUser.getId()); |
||||
scheduleObj.setUserName(loginUser.getUserName()); |
||||
scheduleObj.setReleaseState(ReleaseState.OFFLINE); |
||||
scheduleObj.setProcessInstancePriority(processInstancePriority); |
||||
scheduleMapper.insert(scheduleObj); |
||||
|
||||
/** |
||||
* updateProcessInstance receivers and cc by process definition id |
||||
*/ |
||||
processDefinitionMapper.updateReceiversAndCcById(receivers, receiversCc, processDefineId); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
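
insertSchedule rejects any crontab that org.quartz.CronExpression cannot parse. A small hedged sketch of that check in isolation; note that Quartz expects a 6- or 7-field expression with a leading seconds field, so a classic 5-field unix crontab fails validation.

import org.quartz.CronExpression;

public class CronCheckSketch {
    public static void main(String[] args) {
        // Quartz cron has 6 or 7 fields with a leading seconds field,
        // e.g. "every day at 01:30:00"
        String quartzStyle = "0 30 1 * * ?";
        // a classic 5-field unix crontab is rejected by Quartz
        String unixStyle = "30 1 * * *";

        System.out.println(CronExpression.isValidExpression(quartzStyle)); // true
        System.out.println(CronExpression.isValidExpression(unixStyle));   // false
    }
}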
||||
|
||||
|
||||
/** |
||||
* updateProcessInstance schedule |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param id |
||||
* @param scheduleExpression |
||||
* @param warningType |
||||
* @param warningGroupId |
||||
* @param failureStrategy |
||||
* @param scheduleStatus |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager", rollbackFor = Exception.class) |
||||
public Map<String, Object> updateSchedule(User loginUser, String projectName, Integer id, String scheduleExpression, WarningType warningType, |
||||
int warningGroupId, FailureStrategy failureStrategy, |
||||
String receivers, String receiversCc, ReleaseState scheduleStatus, |
||||
Priority processInstancePriority) throws IOException { |
||||
Map<String, Object> result = new HashMap<String, Object>(5); |
||||
|
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
// check project auth
|
||||
Map<String, Object> checkResult = checkAuth(loginUser, projectName, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
// check schedule exists
|
||||
Schedule schedule = scheduleMapper.queryById(id); |
||||
|
||||
if (schedule == null) { |
||||
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); |
||||
return result; |
||||
} |
||||
|
||||
ProcessDefinition processDefinition = processDao.findProcessDefineById(schedule.getProcessDefinitionId()); |
||||
if (processDefinition == null) { |
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId()); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* scheduling on-line status forbid modification |
||||
*/ |
||||
if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) { |
||||
return result; |
||||
} |
||||
|
||||
Date now = new Date(); |
||||
|
||||
// updateProcessInstance param
|
||||
if (StringUtils.isNotEmpty(scheduleExpression)) { |
||||
ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class); |
||||
schedule.setStartTime(scheduleParam.getStartTime()); |
||||
schedule.setEndTime(scheduleParam.getEndTime()); |
||||
if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) { |
||||
putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab()); |
||||
return result; |
||||
} |
||||
schedule.setCrontab(scheduleParam.getCrontab()); |
||||
} |
||||
|
||||
if (warningType != null) { |
||||
schedule.setWarningType(warningType); |
||||
} |
||||
|
||||
schedule.setWarningGroupId(warningGroupId); |
||||
|
||||
if (failureStrategy != null) { |
||||
schedule.setFailureStrategy(failureStrategy); |
||||
} |
||||
|
||||
if (scheduleStatus != null) { |
||||
schedule.setReleaseState(scheduleStatus); |
||||
} |
||||
schedule.setUpdateTime(now); |
||||
schedule.setProcessInstancePriority(processInstancePriority); |
||||
scheduleMapper.update(schedule); |
||||
|
||||
/** |
||||
* updateProcessInstance recipients and cc by process definition ID |
||||
*/ |
||||
processDefinitionMapper.updateReceiversAndCcById(receivers, receiversCc, schedule.getProcessDefinitionId()); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* set schedule online or offline |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param id |
||||
* @param scheduleStatus |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager", rollbackFor = Exception.class) |
||||
public Map<String, Object> setScheduleState(User loginUser, String projectName, Integer id, ReleaseState scheduleStatus) { |
||||
|
||||
Map<String, Object> result = new HashMap<String, Object>(5); |
||||
|
||||
Project project = projectMapper.queryByName(projectName); |
||||
Map<String, Object> checkResult = checkAuth(loginUser, projectName, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
// check schedule exists
|
||||
Schedule scheduleObj = scheduleMapper.queryById(id); |
||||
|
||||
if (scheduleObj == null) { |
||||
putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id); |
||||
return result; |
||||
} |
||||
// check schedule release state
|
||||
if(scheduleObj.getReleaseState() == scheduleStatus){ |
||||
logger.info("schedule release is already {},needn't to change schedule id: {} from {} to {}", |
||||
scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus); |
||||
putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus); |
||||
return result; |
||||
} |
||||
ProcessDefinition processDefinition = processDao.findProcessDefineById(scheduleObj.getProcessDefinitionId()); |
||||
if (processDefinition == null) { |
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId()); |
||||
return result; |
||||
} |
||||
|
||||
if(scheduleStatus == ReleaseState.ONLINE){ |
||||
// check process definition release state
|
||||
if(processDefinition.getReleaseState() != ReleaseState.ONLINE){ |
||||
logger.info("not release process definition id: {} , name : {}", |
||||
processDefinition.getId(), processDefinition.getName()); |
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, scheduleObj.getProcessDefinitionId()); |
||||
return result; |
||||
} |
||||
// check sub process definition release state
|
||||
List<String> subProcessDefineIds = new ArrayList<>(); |
||||
processDao.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds); |
||||
if (subProcessDefineIds.size() > 0){ |
||||
List<ProcessDefinition> subProcessDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(subProcessDefineIds); |
||||
if (subProcessDefinitionList != null && subProcessDefinitionList.size() > 0){ |
||||
for (ProcessDefinition subProcessDefinition : subProcessDefinitionList){ |
||||
/** |
||||
* if any sub process definition is not online, return directly
||||
*/ |
||||
if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE){ |
||||
logger.info("not release process definition id: {} , name : {}", |
||||
subProcessDefinition.getId(), subProcessDefinition.getName()); |
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId()); |
||||
return result; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
// check master server exists
|
||||
List<MasterServer> masterServers = masterServerMapper.queryAllMaster(); |
||||
|
||||
if (masterServers.size() == 0) {
    putMsg(result, Status.MASTER_NOT_EXISTS);
    // no master is available to take the schedule, so stop here instead of
    // letting the SUCCESS at the end of the method overwrite this error
    return result;
}
||||
|
||||
// set status
|
||||
scheduleObj.setReleaseState(scheduleStatus); |
||||
|
||||
scheduleMapper.update(scheduleObj); |
||||
|
||||
try { |
||||
switch (scheduleStatus) { |
||||
case ONLINE: { |
||||
logger.info("Call master client set schedule online, project id: {}, flow id: {},host: {}, port: {}", project.getId(), processDefinition.getId(), masterServers); |
||||
setSchedule(project.getId(), id); |
||||
break; |
||||
} |
||||
case OFFLINE: { |
||||
logger.info("Call master client set schedule offline, project id: {}, flow id: {},host: {}, port: {}", project.getId(), processDefinition.getId(), masterServers); |
||||
deleteSchedule(project.getId(), id); |
||||
break; |
||||
} |
||||
default: { |
||||
putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString()); |
||||
return result; |
||||
} |
||||
} |
||||
} catch (Exception e) { |
||||
result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure"); |
||||
throw new RuntimeException(result.get(Constants.MSG).toString()); |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
|
||||
/** |
||||
* query schedule |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processDefineId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) { |
||||
|
||||
HashMap<String, Object> result = new HashMap<>(); |
||||
|
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
// check project auth
|
||||
Map<String, Object> checkResult = checkAuth(loginUser, projectName, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
ProcessDefinition processDefinition = processDao.findProcessDefineById(processDefineId); |
||||
if (processDefinition == null) { |
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); |
||||
return result; |
||||
} |
||||
|
||||
Integer count = scheduleMapper.countByProcessDefineId(processDefineId, searchVal); |
||||
|
||||
PageInfo pageInfo = new PageInfo<Schedule>(pageNo, pageSize); |
||||
|
||||
List<Schedule> scheduleList = scheduleMapper.queryByProcessDefineIdPaging(processDefinition.getId(), searchVal, pageInfo.getStart(), pageSize); |
||||
|
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(scheduleList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query schedule list |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryScheduleList(User loginUser, String projectName) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
// check project auth
|
||||
Map<String, Object> checkResult = checkAuth(loginUser, projectName, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
List<Schedule> schedules = scheduleMapper.querySchedulerListByProjectName(projectName); |
||||
|
||||
result.put(Constants.DATA_LIST, schedules); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* set schedule |
||||
* |
||||
* @see |
||||
*/ |
||||
public void setSchedule(int projectId, int scheduleId) throws RuntimeException{ |
||||
logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId); |
||||
|
||||
|
||||
Schedule schedule = processDao.querySchedule(scheduleId);
if (schedule == null) {
    // without this return the code below would dereference a null schedule
    logger.warn("process schedule info does not exist, schedule id: {}", scheduleId);
    return;
}
||||
|
||||
Date startDate = schedule.getStartTime(); |
||||
Date endDate = schedule.getEndTime(); |
||||
|
||||
String jobName = QuartzExecutors.buildJobName(scheduleId); |
||||
String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); |
||||
|
||||
Map<String, Object> dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule); |
||||
|
||||
QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, jobName, jobGroupName, startDate, endDate, |
||||
schedule.getCrontab(), dataMap); |
||||
|
||||
} |
||||
|
||||
/** |
||||
* delete schedule |
||||
*/ |
||||
public static void deleteSchedule(int projectId, int processId) throws RuntimeException{ |
||||
logger.info("delete schedules of project id:{}, flow id:{}", projectId, processId); |
||||
|
||||
String jobName = QuartzExecutors.buildJobName(processId); |
||||
String jobGroupName = QuartzExecutors.buildJobGroupName(projectId); |
||||
|
||||
if(!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)){ |
||||
logger.warn("set offline failure:projectId:{},processId:{}",projectId,processId); |
||||
throw new RuntimeException("set offline failure");
||||
} |
||||
|
||||
} |
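
setSchedule and deleteSchedule delegate to the project's QuartzExecutors wrapper, whose internals are not shown in this file. As a rough sketch of what such a wrapper presumably does with the plain Quartz 2.x API; the job and group names below are made-up placeholders, not the wrapper's real naming scheme.

import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;

public class QuartzScheduleSketch {
    public static void main(String[] args) throws SchedulerException {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        scheduler.start();

        // identity mirrors what buildJobName / buildJobGroupName are assumed to produce
        JobDetail job = JobBuilder.newJob(NoopJob.class)
                .withIdentity("job_1", "jobgroup_10")
                .build();

        // cron trigger; the real wrapper also bounds it by the schedule's start/end window
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("trigger_1", "jobgroup_10")
                .withSchedule(CronScheduleBuilder.cronSchedule("0 30 1 * * ?"))
                .build();

        scheduler.scheduleJob(job, trigger);                          // roughly what addJob wraps
        scheduler.deleteJob(JobKey.jobKey("job_1", "jobgroup_10"));   // roughly what deleteJob wraps

        scheduler.shutdown(true);
    }

    public static class NoopJob implements Job {
        @Override
        public void execute(JobExecutionContext context) {
            // placeholder job body
        }
    }
}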
||||
|
||||
/** |
||||
* check valid |
||||
* |
||||
* @param result |
||||
* @param bool |
||||
* @param status |
||||
* @return |
||||
*/ |
||||
private boolean checkValid(Map<String, Object> result, boolean bool, Status status) { |
||||
// if the condition holds, record the given status and tell the caller to return
|
||||
if (bool) { |
||||
putMsg(result, status); |
||||
return true; |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* check project permission for the login user
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param project |
||||
* @return |
||||
*/ |
||||
private Map<String, Object> checkAuth(User loginUser, String projectName, Project project) { |
||||
// check project auth
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
return null; |
||||
} |
||||
} |
@ -0,0 +1,83 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.dao.mapper.MasterServerMapper; |
||||
import cn.escheduler.dao.mapper.WorkerServerMapper; |
||||
import cn.escheduler.dao.model.MasterServer; |
||||
import cn.escheduler.dao.model.User; |
||||
import cn.escheduler.dao.model.WorkerServer; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* server service |
||||
*/ |
||||
@Service |
||||
public class ServerService extends BaseService{ |
||||
|
||||
|
||||
@Autowired |
||||
MasterServerMapper masterServerMapper; |
||||
|
||||
@Autowired |
||||
WorkerServerMapper workerServerMapper; |
||||
|
||||
/** |
||||
* query master list |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
public Map<String,Object> queryMaster(User loginUser) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if (checkAdmin(loginUser, result)){ |
||||
return result; |
||||
} |
||||
|
||||
List<MasterServer> masterList = masterServerMapper.queryAllMaster(); |
||||
result.put(Constants.DATA_LIST, masterList); |
||||
putMsg(result,Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query worker list |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
public Map<String,Object> queryWorker(User loginUser) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (checkAdmin(loginUser, result)){ |
||||
return result; |
||||
} |
||||
|
||||
List<WorkerServer> workerList = workerServerMapper.queryAllWorker(); |
||||
result.put(Constants.DATA_LIST, workerList); |
||||
putMsg(result,Status.SUCCESS); |
||||
return result; |
||||
} |
||||
} |
@ -0,0 +1,133 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
|
||||
import cn.escheduler.api.controller.BaseController; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.dao.mapper.SessionMapper; |
||||
import cn.escheduler.dao.model.Session; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.lang.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import javax.servlet.http.Cookie; |
||||
import javax.servlet.http.HttpServletRequest; |
||||
import java.util.Date; |
||||
import java.util.UUID; |
||||
|
||||
/** |
||||
* session service |
||||
*/ |
||||
@Service |
||||
public class SessionService extends BaseService{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(SessionService.class); |
||||
|
||||
@Autowired |
||||
private SessionMapper sessionMapper; |
||||
|
||||
/** |
||||
* get user session from request |
||||
* |
||||
* @param request |
||||
* @return |
||||
*/ |
||||
public Session getSession(HttpServletRequest request) { |
||||
String sessionId = request.getHeader(Constants.SESSION_ID); |
||||
|
||||
if(StringUtils.isBlank(sessionId)) { |
||||
Cookie cookie = getCookie(request, Constants.SESSION_ID); |
||||
|
||||
if (cookie != null) { |
||||
sessionId = cookie.getValue(); |
||||
} |
||||
} |
||||
|
||||
if(StringUtils.isBlank(sessionId)) { |
||||
return null; |
||||
} |
||||
|
||||
String ip = BaseController.getClientIpAddress(request); |
||||
logger.info("get session: {}, ip: {}", sessionId, ip); |
||||
|
||||
return sessionMapper.queryByIdAndIp(sessionId, ip); |
||||
} |
||||
|
||||
/** |
||||
* create session |
||||
* |
||||
* @param user |
||||
* @param ip |
||||
* @return |
||||
*/ |
||||
public String createSession(User user, String ip) { |
||||
// check whether this user already has a session from this ip
|
||||
Session session = sessionMapper.queryByUserIdAndIp(user.getId(), ip); |
||||
Date now = new Date(); |
||||
|
||||
/** |
||||
* if the user already logged in and the session is still valid, return it directly
||||
*/ |
||||
if (session != null) { |
||||
if (now.getTime() - session.getLastLoginTime().getTime() <= Constants.SESSION_TIME_OUT * 1000) { |
||||
/** |
||||
* updateProcessInstance the latest login time |
||||
*/ |
||||
sessionMapper.update(session.getId(), now); |
||||
|
||||
return session.getId(); |
||||
|
||||
} else { |
||||
/** |
||||
* session expired, then delete this session first |
||||
*/ |
||||
sessionMapper.deleteById(session.getId()); |
||||
} |
||||
} |
||||
|
||||
// assign new session
|
||||
session = new Session(); |
||||
|
||||
session.setId(UUID.randomUUID().toString()); |
||||
session.setIp(ip); |
||||
session.setUserId(user.getId()); |
||||
session.setLastLoginTime(now); |
||||
|
||||
sessionMapper.insert(session); |
||||
|
||||
return session.getId(); |
||||
} |
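
The validity test in createSession is a plain millisecond comparison against Constants.SESSION_TIME_OUT. A self-contained sketch of that arithmetic, assuming the timeout constant is expressed in seconds (which the "* 1000" conversion suggests); the value below is an illustrative placeholder.

import java.util.Date;

public class SessionTimeoutSketch {
    // stand-in for Constants.SESSION_TIME_OUT (assumed to be in seconds)
    private static final long SESSION_TIME_OUT_SECONDS = 7200;

    public static boolean stillValid(Date lastLoginTime, Date now) {
        // same comparison as createSession: elapsed millis vs timeout converted to millis
        return now.getTime() - lastLoginTime.getTime() <= SESSION_TIME_OUT_SECONDS * 1000;
    }

    public static void main(String[] args) {
        Date lastLogin = new Date(System.currentTimeMillis() - 3600 * 1000); // 1 hour ago
        System.out.println(stillValid(lastLogin, new Date())); // true with a 2-hour timeout
    }
}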
||||
|
||||
/** |
||||
* sign out |
||||
* |
||||
* @param ip |
||||
* @param loginUser |
||||
*/ |
||||
public void signOut(String ip, User loginUser) { |
||||
/** |
||||
* query session by user id and ip |
||||
*/ |
||||
Session session = sessionMapper.queryByUserIdAndIp(loginUser.getId(), ip);
if (session == null) {
    // nothing to delete if the user has no session on this ip
    return;
}
//delete session
sessionMapper.deleteById(session.getId());
||||
} |
||||
} |
@ -0,0 +1,132 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.common.enums.ExecutionStatus; |
||||
import cn.escheduler.common.utils.CollectionUtils; |
||||
import cn.escheduler.common.utils.DateUtils; |
||||
import cn.escheduler.dao.ProcessDao; |
||||
import cn.escheduler.dao.mapper.ProjectMapper; |
||||
import cn.escheduler.dao.mapper.TaskInstanceMapper; |
||||
import cn.escheduler.dao.model.ProcessInstance; |
||||
import cn.escheduler.dao.model.Project; |
||||
import cn.escheduler.dao.model.TaskInstance; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.text.MessageFormat; |
||||
import java.util.*; |
||||
|
||||
/** |
||||
* task instance service |
||||
*/ |
||||
@Service |
||||
public class TaskInstanceService extends BaseService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TaskInstanceService.class); |
||||
|
||||
@Autowired |
||||
ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
ProjectService projectService; |
||||
|
||||
@Autowired |
||||
ProcessDao processDao; |
||||
|
||||
@Autowired |
||||
TaskInstanceMapper taskInstanceMapper; |
||||
|
||||
|
||||
/** |
||||
* query task list by project, process instance, task name, task start time, task end time, task status, keyword paging |
||||
* |
||||
* @param loginUser |
||||
* @param projectName |
||||
* @param processInstanceId |
||||
* @param taskName |
||||
* @param startDate |
||||
* @param endDate |
||||
* @param searchVal |
||||
* @param stateType |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String,Object> queryTaskListPaging(User loginUser, String projectName, |
||||
Integer processInstanceId, String taskName, String startDate, String endDate, |
||||
String searchVal, ExecutionStatus stateType,String host, |
||||
Integer pageNo, Integer pageSize) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status status = (Status) checkResult.get(Constants.STATUS); |
||||
if (status != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
int[] statusArray = null; |
||||
String statesStr = null; |
||||
// filter by status
|
||||
if(stateType != null){ |
||||
statusArray = new int[]{stateType.ordinal()}; |
||||
} |
||||
if(statusArray != null){ |
||||
statesStr = Arrays.toString(statusArray).replace("[", "").replace("]",""); |
||||
} |
||||
|
||||
Date start = null; |
||||
Date end = null; |
||||
try { |
||||
if(StringUtils.isNotEmpty(startDate)){ |
||||
start = DateUtils.getScheduleDate(startDate); |
||||
} |
||||
if(StringUtils.isNotEmpty( endDate)){ |
||||
end = DateUtils.getScheduleDate(endDate); |
||||
} |
||||
} catch (Exception e) { |
||||
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); |
||||
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate")); |
||||
return result; |
||||
} |
||||
Integer count = taskInstanceMapper.countTaskInstance(project.getId(), processInstanceId, taskName, statesStr, |
||||
host,start, end, searchVal); |
||||
|
||||
PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize); |
||||
Set<String> exclusionSet = new HashSet<String>(){{ |
||||
add(Constants.CLASS); |
||||
add("taskJson"); |
||||
}}; |
||||
List<TaskInstance> taskInstanceList = taskInstanceMapper.queryTaskInstanceListPaging( |
||||
project.getId(), processInstanceId, searchVal, taskName, statesStr, host, start, end, pageInfo.getStart(), pageSize); |
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceList,exclusionSet)); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
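
Every paging query in these services passes pageInfo.getStart() to the mapper as the SQL offset. Assuming PageInfo computes the start row as (pageNo - 1) * pageSize, which the usage here suggests but the class itself is not shown, the offset arithmetic is just this:

public class PagingOffsetSketch {
    // assumed equivalent of PageInfo.getStart()
    static int start(int pageNo, int pageSize) {
        return (pageNo - 1) * pageSize;
    }

    public static void main(String[] args) {
        // page 3 with 20 rows per page -> rows 40..59
        System.out.println(start(3, 20)); // 40
    }
}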
||||
} |
@ -0,0 +1,82 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.dao.TaskRecordDao; |
||||
import cn.escheduler.dao.model.TaskRecord; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* task record service |
||||
*/ |
||||
@Service |
||||
public class TaskRecordService extends BaseService{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TaskRecordService.class); |
||||
|
||||
/** |
||||
* query task record list paging |
||||
* |
||||
* @param taskName |
||||
* @param startDate |
||||
* @param taskDate |
||||
* @param sourceTable |
||||
* @param destTable |
||||
* @param endDate |
||||
* @param state |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String,Object> queryTaskRecordListPaging(String taskName, String startDate, |
||||
String taskDate, String sourceTable, |
||||
String destTable, String endDate, |
||||
String state, Integer pageNo, Integer pageSize) { |
||||
Map<String, Object> result = new HashMap<>(10); |
||||
PageInfo pageInfo = new PageInfo<TaskRecord>(pageNo, pageSize); |
||||
|
||||
Map<String, String> map = new HashMap<>(10); |
||||
map.put("taskName", taskName); |
||||
map.put("taskDate", taskDate); |
||||
map.put("state", state); |
||||
map.put("sourceTable", sourceTable); |
||||
map.put("targetTable", destTable); |
||||
map.put("startTime", startDate); |
||||
map.put("endTime", endDate); |
||||
map.put("offset", pageInfo.getStart().toString()); |
||||
map.put("pageSize", pageInfo.getPageSize().toString()); |
||||
|
||||
int count = TaskRecordDao.countTaskRecord(map); |
||||
List<TaskRecord> recordList = TaskRecordDao.queryAllTaskRecord(map); |
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(recordList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
|
||||
} |
||||
} |
@ -0,0 +1,284 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.utils.HadoopUtils; |
||||
import cn.escheduler.common.utils.PropertyUtils; |
||||
import cn.escheduler.dao.mapper.TenantMapper; |
||||
import cn.escheduler.dao.model.Tenant; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
|
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* tenant service |
||||
*/ |
||||
@Service |
||||
public class TenantService extends BaseService{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TenantService.class); |
||||
|
||||
@Autowired |
||||
private TenantMapper tenantMapper; |
||||
|
||||
/** |
||||
* create tenant |
||||
* |
||||
* @param loginUser |
||||
* @param tenantCode |
||||
* @param tenantName |
||||
* @param queueId |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager",rollbackFor = Exception.class) |
||||
public Map<String,Object> createTenant(User loginUser, |
||||
String tenantCode, |
||||
String tenantName, |
||||
int queueId, |
||||
String desc) throws Exception { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
result.put(Constants.STATUS, false); |
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
if (!checkTenant(tenantCode)){ |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, tenantCode); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
Tenant tenant = new Tenant(); |
||||
Date now = new Date(); |
||||
|
||||
tenant.setTenantCode(tenantCode); |
||||
tenant.setTenantName(tenantName); |
||||
tenant.setQueueId(queueId); |
||||
tenant.setDesc(desc); |
||||
tenant.setCreateTime(now); |
||||
tenant.setUpdateTime(now); |
||||
|
||||
// save
|
||||
tenantMapper.insert(tenant); |
||||
|
||||
// if hdfs startup
|
||||
if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources"; |
||||
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); |
||||
/** |
||||
* init resource path and udf path |
||||
*/ |
||||
HadoopUtils.getInstance().mkdir(resourcePath); |
||||
HadoopUtils.getInstance().mkdir(udfsPath); |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
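
createTenant provisions a per-tenant directory tree on HDFS through the project's HadoopUtils helper. A hedged sketch of the equivalent calls with the raw Hadoop FileSystem API; the /escheduler base path and the fs.defaultFS value are placeholders, not values taken from this codebase.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsMkdirSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // must point at the target cluster; the value here is a placeholder
        conf.set("fs.defaultFS", "hdfs://localhost:8020");

        try (FileSystem fs = FileSystem.get(conf)) {
            String tenantCode = "tenant_a"; // example tenant code
            Path resources = new Path("/escheduler/" + tenantCode + "/resources");
            Path udfs = new Path("/escheduler/" + tenantCode + "/udfs");

            // roughly what HadoopUtils.getInstance().mkdir is assumed to do
            fs.mkdirs(resources);
            fs.mkdirs(udfs);
        }
    }
}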
||||
|
||||
|
||||
|
||||
/** |
||||
* query tenant list paging |
||||
* |
||||
* @param loginUser |
||||
* @param searchVal |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String,Object> queryTenantList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
Integer count = tenantMapper.countTenantPaging(searchVal); |
||||
|
||||
PageInfo<Tenant> pageInfo = new PageInfo<>(pageNo, pageSize); |
||||
|
||||
List<Tenant> scheduleList = tenantMapper.queryTenantPaging(searchVal, pageInfo.getStart(), pageSize); |
||||
|
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(scheduleList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* updateProcessInstance tenant |
||||
* |
||||
* @param loginUser |
||||
* @param tenantCode |
||||
* @param tenantName |
||||
* @param queueId |
||||
* @param desc |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> updateTenant(User loginUser,int id,String tenantCode, String tenantName, int queueId, String desc) throws Exception { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
result.put(Constants.STATUS, false); |
||||
|
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
Tenant tenant = tenantMapper.queryById(id); |
||||
|
||||
if (tenant == null){ |
||||
putMsg(result, Status.TENANT_NOT_EXIST, id);
||||
return result; |
||||
} |
||||
|
||||
// updateProcessInstance tenant
|
||||
/** |
||||
* if the tenant code is modified, the original resource needs to be copied to the new tenant. |
||||
*/ |
||||
if (!tenant.getTenantCode().equals(tenantCode)){ |
||||
Tenant newTenant = tenantMapper.queryByTenantCode(tenantCode); |
||||
if (newTenant == null){ |
||||
// if hdfs startup
|
||||
if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
String resourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + tenantCode + "/resources"; |
||||
String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode); |
||||
//init hdfs resource
|
||||
HadoopUtils.getInstance().mkdir(resourcePath); |
||||
HadoopUtils.getInstance().mkdir(udfsPath); |
||||
} |
||||
}else { |
||||
putMsg(result, Status.TENANT_CODE_HAS_ALREADY_EXISTS); |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
Date now = new Date(); |
||||
|
||||
if (StringUtils.isNotEmpty(tenantCode)){ |
||||
tenant.setTenantCode(tenantCode); |
||||
} |
||||
|
||||
if (StringUtils.isNotEmpty(tenantName)){ |
||||
tenant.setTenantName(tenantName); |
||||
} |
||||
|
||||
if (queueId != 0){ |
||||
tenant.setQueueId(queueId); |
||||
} |
||||
tenant.setDesc(desc); |
||||
tenant.setUpdateTime(now); |
||||
tenantMapper.update(tenant); |
||||
|
||||
result.put(Constants.STATUS, Status.SUCCESS); |
||||
result.put(Constants.MSG, Status.SUCCESS.getMsg()); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* delete tenant |
||||
* |
||||
* @param loginUser |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> deleteTenantById(User loginUser, int id) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
Tenant tenant = tenantMapper.queryById(id);

if (tenant == null) {
    putMsg(result, Status.TENANT_NOT_EXIST);
    return result;
}

String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode();
HadoopUtils.getInstance().delete(tenantPath, true);
||||
|
||||
tenantMapper.deleteById(id); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query tenant list |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryTenantList(User loginUser) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
if (checkAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
List<Tenant> resourceList = tenantMapper.queryAllTenant(); |
||||
result.put(Constants.DATA_LIST, resourceList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* verify tenant code |
||||
* |
||||
* @param tenantCode |
||||
* @return |
||||
*/ |
||||
public Result verifyTenantCode(String tenantCode) { |
||||
Result result=new Result(); |
||||
Tenant tenant= tenantMapper.queryByTenantCode(tenantCode); |
||||
if (tenant != null) { |
||||
logger.error("tenant {} has exist, can't create again.", tenantCode); |
||||
putMsg(result, Status.TENANT_NAME_EXIST); |
||||
}else{ |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* check whether the tenant code is still available (no existing tenant uses it)
||||
* |
||||
* @param tenantCode |
||||
* @return |
||||
*/ |
||||
private boolean checkTenant(String tenantCode) { |
||||
return tenantMapper.queryByTenantCode(tenantCode) == null;
||||
} |
||||
} |
@ -0,0 +1,329 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.UdfType; |
||||
import cn.escheduler.common.utils.PropertyUtils; |
||||
import cn.escheduler.dao.mapper.ResourceMapper; |
||||
import cn.escheduler.dao.mapper.UDFUserMapper; |
||||
import cn.escheduler.dao.mapper.UdfFuncMapper; |
||||
import cn.escheduler.dao.model.Resource; |
||||
import cn.escheduler.dao.model.UdfFunc; |
||||
import cn.escheduler.dao.model.User; |
||||
import org.apache.commons.lang.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
|
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
/** |
||||
* udf function service |
||||
*/ |
||||
@Service |
||||
public class UdfFuncService extends BaseService{ |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(UdfFuncService.class); |
||||
|
||||
@Autowired |
||||
private ResourceMapper resourceMapper; |
||||
|
||||
@Autowired |
||||
private UdfFuncMapper udfFuncMapper; |
||||
|
||||
@Autowired |
||||
private UDFUserMapper udfUserMapper; |
||||
|
||||
|
||||
/** |
||||
* create udf function |
||||
* |
||||
* @param loginUser |
||||
* @param funcName |
||||
* @param argTypes |
||||
* @param database |
||||
* @param desc |
||||
* @param type |
||||
* @param resourceId |
||||
* @return |
||||
*/ |
||||
public Result createUdfFunction(User loginUser, |
||||
String funcName, |
||||
String className, |
||||
String argTypes, |
||||
String database, |
||||
String desc, |
||||
UdfType type, |
||||
int resourceId) { |
||||
Result result = new Result(); |
||||
|
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
putMsg(result, Status.HDFS_NOT_STARTUP); |
||||
return result; |
||||
} |
||||
|
||||
// verify udf func name exist
|
||||
UdfFunc udfFunc = udfFuncMapper.queryUdfFuncByName(funcName); |
||||
if (udfFunc != null) { |
||||
logger.error("udf func {} has exist, can't recreate", funcName); |
||||
putMsg(result, Status.UDF_FUNCTION_EXISTS); |
||||
return result; |
||||
} |
||||
|
||||
Resource resource = resourceMapper.queryResourceById(resourceId); |
||||
if (resource == null) { |
||||
logger.error("resourceId {} is not exist", resourceId); |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
|
||||
//save data
|
||||
UdfFunc udf = new UdfFunc(); |
||||
Date now = new Date(); |
||||
udf.setUserId(loginUser.getId()); |
||||
udf.setFuncName(funcName); |
||||
udf.setClassName(className); |
||||
if (StringUtils.isNotEmpty(argTypes)) { |
||||
udf.setArgTypes(argTypes); |
||||
} |
||||
if (StringUtils.isNotEmpty(database)) {
||||
udf.setDatabase(database); |
||||
} |
||||
udf.setDesc(desc); |
||||
udf.setResourceId(resourceId); |
||||
udf.setResourceName(resource.getAlias()); |
||||
udf.setType(type); |
||||
|
||||
udf.setCreateTime(now); |
||||
udf.setUpdateTime(now); |
||||
|
||||
udfFuncMapper.insert(udf); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
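
createUdfFunction only stores metadata (function name, class name, jar resource); nothing in this class shows how that metadata is consumed. If, as seems likely, it is later turned into a Hive registration statement, the statement would look roughly like the sketch below; the function name, class name, and HDFS jar path are illustrative only.

public class UdfRegisterSqlSketch {
    // hedged guess at how stored udf metadata could become a Hive statement
    static String buildCreateFunctionSql(String funcName, String className, String hdfsJarPath) {
        return String.format(
                "CREATE TEMPORARY FUNCTION %s AS '%s' USING JAR '%s'",
                funcName, className, hdfsJarPath);
    }

    public static void main(String[] args) {
        System.out.println(buildCreateFunctionSql(
                "str_upper", "com.example.udf.StrUpper", "hdfs:///escheduler/udfs/str_upper.jar"));
    }
}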
||||
|
||||
|
||||
/** |
||||
* query udf function |
||||
*/ |
||||
public Map<String, Object> queryUdfFuncDetail(int id) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
UdfFunc udfFunc = udfFuncMapper.queryUdfById(id); |
||||
if (udfFunc == null) { |
||||
putMsg(result, Status.RESOURCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
result.put(Constants.DATA_LIST, udfFunc); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* updateProcessInstance udf function |
||||
* |
||||
* @param funcName |
||||
* @param argTypes |
||||
* @param database |
||||
* @param desc |
||||
* @param type |
||||
* @param resourceId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> updateUdfFunc(int udfFuncId, |
||||
String funcName, |
||||
String className, |
||||
String argTypes, |
||||
String database, |
||||
String desc, |
||||
UdfType type, |
||||
int resourceId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
// verify udfFunc is exist
|
||||
UdfFunc udf = udfFuncMapper.queryUdfById(udfFuncId); |
||||
|
||||
// if hdfs not startup
|
||||
if (!PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
logger.error("hdfs startup state: {}", PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)); |
||||
putMsg(result, Status.HDFS_NOT_STARTUP); |
||||
return result; |
||||
} |
||||
|
||||
if (udf == null) { |
||||
result.put(Constants.STATUS, Status.UDF_FUNCTION_NOT_EXIST); |
||||
result.put(Constants.MSG, Status.UDF_FUNCTION_NOT_EXIST.getMsg()); |
||||
return result; |
||||
} |
||||
|
||||
// verify udfFuncName is exist
|
||||
if (!funcName.equals(udf.getFuncName())) { |
||||
UdfFunc udfFunc = udfFuncMapper.queryUdfFuncByName(funcName); |
||||
if (udfFunc != null) { |
||||
logger.error("UdfFunc {} has exist, can't create again.", funcName); |
||||
result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS); |
||||
result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg()); |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
Resource resource = resourceMapper.queryResourceById(resourceId); |
||||
if (resource == null) { |
||||
logger.error("resourceId {} is not exist", resourceId); |
||||
result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST); |
||||
result.put(Constants.MSG, Status.RESOURCE_NOT_EXIST.getMsg()); |
||||
return result; |
||||
} |
||||
Date now = new Date(); |
||||
udf.setFuncName(funcName); |
||||
udf.setClassName(className); |
||||
if (StringUtils.isNotEmpty(argTypes)) { |
||||
udf.setArgTypes(argTypes); |
||||
} |
||||
if (StringUtils.isNotEmpty(database)) {
||||
udf.setDatabase(database); |
||||
} |
||||
udf.setDesc(desc); |
||||
udf.setResourceId(resourceId); |
||||
udf.setResourceName(resource.getAlias()); |
||||
udf.setType(type); |
||||
|
||||
|
||||
// keep the original create time on update; only refresh the update time
||||
udf.setUpdateTime(now); |
||||
|
||||
udfFuncMapper.update(udf); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* query udf function list paging |
||||
* |
||||
* @param loginUser |
||||
* @param searchVal |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
Integer count = getTotalCount(loginUser); |
||||
|
||||
PageInfo pageInfo = new PageInfo<Resource>(pageNo, pageSize); |
||||
pageInfo.setTotalCount(count); |
||||
List<UdfFunc> udfFuncList = getUdfFuncs(loginUser, searchVal, pageSize, pageInfo); |
||||
|
||||
pageInfo.setLists(udfFuncList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* get udf functions |
||||
* |
||||
* @param loginUser |
||||
* @param searchVal |
||||
* @param pageSize |
||||
* @param pageInfo |
||||
* @return |
||||
*/ |
||||
private List<UdfFunc> getUdfFuncs(User loginUser, String searchVal, Integer pageSize, PageInfo pageInfo) { |
||||
if (isAdmin(loginUser)) { |
||||
return udfFuncMapper.queryAllUdfFuncPaging(searchVal, pageInfo.getStart(), pageSize); |
||||
} |
||||
return udfFuncMapper.queryUdfFuncPaging(loginUser.getId(), searchVal, |
||||
pageInfo.getStart(), pageSize); |
||||
} |
||||
|
||||
/** |
||||
* udf function total |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
private Integer getTotalCount(User loginUser) { |
||||
if (isAdmin(loginUser)) { |
||||
return udfFuncMapper.countAllUdfFunc(); |
||||
} |
||||
return udfFuncMapper.countUserUdfFunc(loginUser.getId()); |
||||
} |
||||
|
||||
/** |
||||
* query data resource by type |
||||
* |
||||
* @param loginUser |
||||
* @param type |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryResourceList(User loginUser, Integer type) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(loginUser.getId(), type); |
||||
|
||||
result.put(Constants.DATA_LIST, udfFuncList); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* delete udf function |
||||
* |
||||
* @param id |
||||
*/ |
||||
@Transactional(value = "TransactionManager", rollbackFor = Exception.class) |
||||
public Result delete(int id) { |
||||
Result result = new Result(); |
||||
|
||||
udfFuncMapper.delete(id); |
||||
udfUserMapper.deleteByUdfFuncId(id); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* verify udf function by name |
||||
* |
||||
* @param name |
||||
* @return |
||||
*/ |
||||
public Result verifyUdfFuncByName(String name) { |
||||
Result result = new Result(); |
||||
UdfFunc udfFunc = udfFuncMapper.queryUdfFuncByName(name); |
||||
if (udfFunc != null) { |
||||
logger.error("UDF function name:{} has exist, can't create again.", name); |
||||
putMsg(result, Status.UDF_FUNCTION_EXISTS); |
||||
} else { |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
} |
@ -0,0 +1,632 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.service; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.CheckUtils; |
||||
import cn.escheduler.api.utils.Constants; |
||||
import cn.escheduler.api.utils.PageInfo; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.enums.UserType; |
||||
import cn.escheduler.common.utils.CollectionUtils; |
||||
import cn.escheduler.common.utils.EncryptionUtils; |
||||
import cn.escheduler.common.utils.HadoopUtils; |
||||
import cn.escheduler.common.utils.PropertyUtils; |
||||
import cn.escheduler.dao.mapper.*; |
||||
import cn.escheduler.dao.model.*; |
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
|
||||
import java.util.*; |
||||
|
||||
/** |
||||
* user service |
||||
*/ |
||||
@Service |
||||
public class UsersService extends BaseService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(UsersService.class); |
||||
|
||||
@Autowired |
||||
private UserMapper userMapper; |
||||
|
||||
@Autowired |
||||
private TenantMapper tenantMapper; |
||||
|
||||
@Autowired |
||||
private ProjectUserMapper projectUserMapper; |
||||
|
||||
@Autowired |
||||
private ResourcesUserMapper resourcesUserMapper; |
||||
|
||||
@Autowired |
||||
private ResourceMapper resourceMapper; |
||||
|
||||
@Autowired |
||||
private DatasourceUserMapper datasourceUserMapper; |
||||
|
||||
@Autowired |
||||
private UDFUserMapper udfUserMapper; |
||||
|
||||
@Autowired |
||||
private AlertGroupMapper alertGroupMapper; |
||||
|
||||
|
||||
/** |
||||
* create user; only the system administrator has permission |
||||
* |
||||
* @param loginUser |
||||
* @param userName |
||||
* @param userPassword |
||||
* @param email |
||||
* @param tenantId |
||||
* @param phone |
||||
* @return |
||||
*/ |
||||
@Transactional(value = "TransactionManager", rollbackFor = Exception.class) |
||||
public Map<String, Object> createUser(User loginUser, |
||||
String userName, |
||||
String userPassword, |
||||
String email, |
||||
int tenantId, |
||||
String phone) throws Exception { |
||||
|
||||
Map<String, Object> result = CheckUtils.checkUserParams(userName, userPassword, email, phone); |
||||
if (result.get(Constants.STATUS) != Status.SUCCESS) { |
||||
return result; |
||||
} |
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
if (check(result, checkTenant(tenantId), Status.TENANT_NOT_EXIST, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
User user = new User(); |
||||
Date now = new Date(); |
||||
|
||||
user.setUserName(userName); |
||||
user.setUserPassword(EncryptionUtils.getMd5(userPassword)); |
||||
user.setEmail(email); |
||||
user.setTenantId(tenantId); |
||||
user.setPhone(phone); |
||||
// create general users, administrator users are currently built-in
|
||||
user.setUserType(UserType.GENERAL_USER); |
||||
user.setCreateTime(now); |
||||
user.setUpdateTime(now); |
||||
|
||||
// save user
|
||||
userMapper.insert(user); |
||||
|
||||
Tenant tenant = tenantMapper.queryById(tenantId); |
||||
// if hdfs startup
|
||||
if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
String userPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode() + "/home/" + user.getId(); |
||||
|
||||
HadoopUtils.getInstance().mkdir(userPath); |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
|
||||
} |
||||
|
||||
/** |
||||
* query user |
||||
* |
||||
* @param name |
||||
* @param password |
||||
* @return |
||||
*/ |
||||
public User queryUser(String name, String password) { |
||||
String md5 = EncryptionUtils.getMd5(password); |
||||
return userMapper.queryForCheck(name, md5); |
||||
} |
||||
|
||||
/** |
||||
* check general user or not |
||||
* |
||||
* @param user |
||||
* @return |
||||
*/ |
||||
public boolean isGeneral(User user) { |
||||
return user.getUserType() == UserType.GENERAL_USER; |
||||
} |
||||
|
||||
/** |
||||
* query user list |
||||
* |
||||
* @param loginUser |
||||
* @param searchVal |
||||
* @param pageNo |
||||
* @param pageSize |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
Integer count = userMapper.countUserPaging(searchVal); |
||||
|
||||
PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize); |
||||
|
||||
List<User> scheduleList = userMapper.queryUserPaging(searchVal, pageInfo.getStart(), pageSize); |
||||
|
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(scheduleList); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* update user |
||||
* |
||||
* @param userId |
||||
* @param userName |
||||
* @param userPassword |
||||
* @param email |
||||
* @param tenantId |
||||
* @param phone |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> updateUser(int userId, String userName, String userPassword, String email, int tenantId, String phone) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
result.put(Constants.STATUS, false); |
||||
|
||||
User user = userMapper.queryById(userId); |
||||
|
||||
if (user == null) { |
||||
putMsg(result, Status.USER_NOT_EXIST, userId); |
||||
return result; |
||||
} |
||||
|
||||
Date now = new Date(); |
||||
|
||||
if (StringUtils.isNotEmpty(userName)) { |
||||
user.setUserName(userName); |
||||
} |
||||
|
||||
if (StringUtils.isNotEmpty(userPassword)) { |
||||
user.setUserPassword(EncryptionUtils.getMd5(userPassword)); |
||||
} |
||||
|
||||
if (StringUtils.isNotEmpty(email)) { |
||||
user.setEmail(email); |
||||
} |
||||
user.setPhone(phone); |
||||
user.setUpdateTime(now); |
||||
|
||||
//if user switches the tenant, the user's resources need to be copied to the new tenant
|
||||
if (user.getTenantId() != tenantId) { |
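// tenant switch: copy the user's file and UDF resources to the new tenant's HDFS directories,
// then drop the old HDFS home directory and create an empty one under the new tenant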
||||
Tenant oldTenant = tenantMapper.queryById(user.getTenantId()); |
||||
//query tenant
|
||||
Tenant newTenant = tenantMapper.queryById(tenantId); |
||||
if (newTenant != null) { |
||||
// if hdfs startup
|
||||
if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
String newTenantCode = newTenant.getTenantCode(); |
||||
String oldResourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + oldTenant.getTenantCode() + "/resources"; |
||||
String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode()); |
||||
|
||||
|
||||
String newResourcePath = HadoopUtils.getHdfsDataBasePath() + "/" + newTenantCode + "/resources"; |
||||
String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode); |
||||
|
||||
//file resources list
|
||||
List<Resource> fileResourcesList = resourceMapper.queryResourceCreatedByUser(userId, 0); |
||||
if (CollectionUtils.isNotEmpty(fileResourcesList)) { |
||||
for (Resource resource : fileResourcesList) { |
||||
HadoopUtils.getInstance().copy(oldResourcePath + "/" + resource.getAlias(), newResourcePath, false, true); |
||||
} |
||||
} |
||||
|
||||
//udf resources
|
||||
List<Resource> udfResourceList = resourceMapper.queryResourceCreatedByUser(userId, 1); |
||||
if (CollectionUtils.isNotEmpty(udfResourceList)) { |
||||
for (Resource resource : udfResourceList) { |
||||
HadoopUtils.getInstance().copy(oldUdfsPath + "/" + resource.getAlias(), newUdfsPath, false, true); |
||||
} |
||||
} |
||||
|
||||
//Delete the user from the old tenant directory
|
||||
String oldUserPath = HadoopUtils.getHdfsDataBasePath() + "/" + oldTenant.getTenantCode() + "/home/" + userId; |
||||
HadoopUtils.getInstance().delete(oldUserPath, true); |
||||
|
||||
|
||||
//create user in the new tenant directory
|
||||
String newUserPath = HadoopUtils.getHdfsDataBasePath() + "/" + newTenant.getTenantCode() + "/home/" + user.getId(); |
||||
HadoopUtils.getInstance().mkdir(newUserPath); |
||||
} |
||||
} |
||||
user.setTenantId(tenantId); |
||||
} |
||||
|
||||
// update user
|
||||
userMapper.update(user); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* delete user |
||||
* |
||||
* @param loginUser |
||||
* @param id |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> deleteUserById(User loginUser, int id) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
//only admin can operate
|
||||
if (!isAdmin(loginUser)) { |
||||
putMsg(result, Status.USER_NO_OPERATION_PERM); |
||||
return result; |
||||
} |
||||
|
||||
// query the user's tenant, remove the HDFS home directory if HDFS is enabled, then delete the user
|
||||
User user = userMapper.queryTenantCodeByUserId(id); |
||||
|
||||
|
||||
if (PropertyUtils.getBoolean(cn.escheduler.common.Constants.HDFS_STARTUP_STATE)){ |
||||
String userPath = HadoopUtils.getHdfsDataBasePath() + "/" + user.getTenantCode() + "/home/" + id; |
||||
|
||||
HadoopUtils.getInstance().delete(userPath, true); |
||||
} |
||||
|
||||
userMapper.delete(id); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* grant project |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @param projectIds |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
result.put(Constants.STATUS, false); |
||||
|
||||
//only admin can operate
|
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
//remove all existing project associations for the user first; they are re-created below from projectIds
|
||||
projectUserMapper.deleteByUserId(userId); |
||||
|
||||
if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS, Constants.MSG)) { |
||||
return result; |
||||
} |
||||
|
||||
String[] projectIdArr = projectIds.split(","); |
||||
|
||||
for (String projectId : projectIdArr) { |
||||
Date now = new Date(); |
||||
ProjectUser projectUser = new ProjectUser(); |
||||
projectUser.setUserId(userId); |
||||
projectUser.setProjectId(Integer.parseInt(projectId)); |
||||
projectUser.setPerm(7); |
||||
projectUser.setCreateTime(now); |
||||
projectUser.setUpdateTime(now); |
||||
projectUserMapper.insert(projectUser); |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* grant resource |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @param resourceIds |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
//only admin can operate
|
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
resourcesUserMapper.deleteByUserId(userId); |
||||
|
||||
if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS, Constants.MSG)) { |
||||
return result; |
||||
} |
||||
|
||||
String[] resourcesIdArr = resourceIds.split(","); |
||||
|
||||
for (String resourceId : resourcesIdArr) { |
||||
Date now = new Date(); |
||||
ResourcesUser resourcesUser = new ResourcesUser(); |
||||
resourcesUser.setUserId(userId); |
||||
resourcesUser.setResourcesId(Integer.parseInt(resourceId)); |
||||
resourcesUser.setPerm(7); |
||||
resourcesUser.setCreateTime(now); |
||||
resourcesUser.setUpdateTime(now); |
||||
resourcesUserMapper.insert(resourcesUser); |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* grant udf function |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @param udfIds |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
|
||||
//only admin can operate
|
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
udfUserMapper.deleteByUserId(userId); |
||||
|
||||
if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS, Constants.MSG)) { |
||||
return result; |
||||
} |
||||
|
||||
String[] resourcesIdArr = udfIds.split(","); |
||||
|
||||
for (String udfId : udfIdArr) { |
||||
Date now = new Date(); |
||||
UDFUser udfUser = new UDFUser(); |
||||
udfUser.setUserId(userId); |
||||
udfUser.setUdfId(Integer.parseInt(udfId)); |
||||
udfUser.setPerm(7); |
||||
udfUser.setCreateTime(now); |
||||
udfUser.setUpdateTime(now); |
||||
udfUserMapper.insert(udfUser); |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* grant datasource |
||||
* |
||||
* @param loginUser |
||||
* @param userId |
||||
* @param datasourceIds |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
result.put(Constants.STATUS, false); |
||||
|
||||
//only admin can operate
|
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
datasourceUserMapper.deleteByUserId(userId); |
||||
|
||||
if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS, Constants.MSG)) { |
||||
return result; |
||||
} |
||||
|
||||
String[] datasourceIdArr = datasourceIds.split(","); |
||||
|
||||
for (String datasourceId : datasourceIdArr) { |
||||
Date now = new Date(); |
||||
|
||||
DatasourceUser datasourceUser = new DatasourceUser(); |
||||
datasourceUser.setUserId(userId); |
||||
datasourceUser.setDatasourceId(Integer.parseInt(datasourceId)); |
||||
datasourceUser.setPerm(7); |
||||
datasourceUser.setCreateTime(now); |
||||
datasourceUser.setUpdateTime(now); |
||||
datasourceUserMapper.insert(datasourceUser); |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query user info |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> getUserInfo(User loginUser) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
User user = null; |
||||
if (loginUser.getUserType() == UserType.ADMIN_USER) { |
||||
user = loginUser; |
||||
} else { |
||||
user = userMapper.queryDetailsById(loginUser.getId()); |
||||
|
||||
List<AlertGroup> alertGroups = alertGroupMapper.queryByUserId(loginUser.getId()); |
||||
|
||||
StringBuilder sb = new StringBuilder(); |
||||
|
||||
if (alertGroups != null && alertGroups.size() > 0) { |
||||
for (int i = 0; i < alertGroups.size() - 1; i++) { |
||||
sb.append(alertGroups.get(i).getGroupName() + ","); |
||||
} |
||||
sb.append(alertGroups.get(alertGroups.size() - 1).getGroupName()); |
||||
user.setAlertGroup(sb.toString()); |
||||
} |
||||
} |
||||
|
||||
result.put(Constants.DATA_LIST, user); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query user list |
||||
* |
||||
* @param loginUser |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> queryUserList(User loginUser) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
//only admin can operate
|
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
List<User> userList = userMapper.queryAllUsers(); |
||||
result.put(Constants.DATA_LIST, userList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* verify user name exists |
||||
* |
||||
* @param userName |
||||
* @return |
||||
*/ |
||||
public Result verifyUserName(String userName) { |
||||
|
||||
Result result = new Result(); |
||||
User user = userMapper.queryByUserName(userName); |
||||
if (user != null) { |
||||
logger.error("user {} has exist, can't create again.", userName); |
||||
|
||||
putMsg(result, Status.USER_NAME_EXIST); |
||||
} else { |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* unauthorized user |
||||
* |
||||
* @param loginUser |
||||
* @param alertgroupId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(5); |
||||
//only admin can operate
|
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
|
||||
List<User> userList = userMapper.queryAllUsers(); |
||||
List<User> resultUsers = new ArrayList<>(); |
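// unauthorized users = all users minus the users already bound to this alert group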
||||
Set<User> userSet = null; |
||||
if (userList != null && userList.size() > 0) { |
||||
userSet = new HashSet<>(userList); |
||||
|
||||
List<User> authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId); |
||||
|
||||
Set<User> authedUserSet = null; |
||||
if (authedUserList != null && authedUserList.size() > 0) { |
||||
authedUserSet = new HashSet<>(authedUserList); |
||||
userSet.removeAll(authedUserSet); |
||||
} |
||||
resultUsers = new ArrayList<>(userSet); |
||||
} |
||||
result.put(Constants.DATA_LIST, resultUsers); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* authorized user |
||||
* |
||||
* @param loginUser |
||||
* @param alertgroupId |
||||
* @return |
||||
*/ |
||||
public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) { |
||||
Map<String, Object> result = new HashMap<>(5); |
||||
//only admin can operate
|
||||
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM, Constants.STATUS)) { |
||||
return result; |
||||
} |
||||
List<User> userList = userMapper.queryUserListByAlertGroupId(alertgroupId); |
||||
result.put(Constants.DATA_LIST, userList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* generic check: when the given condition holds, record the error status and message in the result map |
||||
* |
||||
* @param result |
||||
* @param bool true when the check fails |
||||
* @param userNoOperationPerm the error status to record when the check fails |
||||
* @param status the result-map key under which the error message is stored |
||||
* @return |
||||
*/ |
||||
private boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm, String status) { |
||||
//the check failed: record the error status and message
|
||||
if (bool) { |
||||
result.put(Constants.STATUS, userNoOperationPerm); |
||||
result.put(status, userNoOperationPerm.getMsg()); |
||||
return true; |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* check whether the tenant does not exist |
* |
* @param tenantId |
||||
* @return |
||||
*/ |
||||
private boolean checkTenant(int tenantId) { |
||||
return tenantMapper.queryById(tenantId) == null; |
||||
} |
||||
} |
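/*
 * Illustrative call site (hypothetical, not part of this patch), assuming the service is injected by Spring:
 *
 *   Map<String, Object> result = usersService.createUser(loginUser, "test_user", "pwd12345", "test@example.com", 1, "13800000000");
 *   if (result.get(Constants.STATUS) == Status.SUCCESS) { ... }
 */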
@ -0,0 +1,161 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.utils; |
||||
|
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.common.task.AbstractParameters; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import cn.escheduler.common.utils.TaskParametersUtils; |
||||
import org.apache.commons.lang.StringUtils; |
||||
|
||||
import java.text.MessageFormat; |
||||
import java.util.HashMap; |
||||
import java.util.Map; |
||||
import java.util.regex.Pattern; |
||||
|
||||
|
||||
|
||||
/** |
||||
* check utils |
||||
*/ |
||||
public class CheckUtils { |
||||
|
||||
|
||||
/** |
||||
* check username |
||||
* |
||||
* @param userName |
||||
*/ |
||||
public static boolean checkUserName(String userName) { |
||||
return regexChecks(userName, cn.escheduler.common.Constants.REGEX_USER_NAME); |
||||
} |
||||
|
||||
/** |
||||
* check email |
||||
* |
||||
* @param email |
||||
*/ |
||||
public static boolean checkEmail(String email) { |
||||
return email.length() > 5 && email.length() <= 40 && regexChecks(email, cn.escheduler.common.Constants.REGEX_MAIL_NAME) ; |
||||
} |
||||
|
||||
/** |
||||
* check project description |
||||
* |
||||
* @param desc |
||||
*/ |
||||
public static Map<String, Object> checkDesc(String desc) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (StringUtils.isNotEmpty(desc) && desc.length() > 200) { |
||||
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); |
||||
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "desc length")); |
||||
}else{ |
||||
result.put(Constants.STATUS, Status.SUCCESS); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* check extra info |
||||
* |
||||
* @param otherParams |
||||
*/ |
||||
public static boolean checkOtherParams(String otherParams) { |
||||
return StringUtils.isNotEmpty(otherParams) && !JSONUtils.checkJsonVaild(otherParams); |
||||
} |
||||
|
||||
/** |
||||
* check password |
||||
* |
||||
* @param password |
||||
*/ |
||||
public static boolean checkPassword(String password) { |
||||
return StringUtils.isNotEmpty(password) && password.length() >= 2 && password.length() <= 20; |
||||
} |
||||
|
||||
/** |
||||
* check phone |
||||
* |
||||
* @param phone |
||||
*/ |
||||
public static boolean checkPhone(String phone) { |
||||
return StringUtils.isNotEmpty(phone) && phone.length() > 18; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* check task node parameter |
||||
* |
||||
* @param parameter |
||||
* @param taskType |
||||
* @return |
||||
*/ |
||||
public static boolean checkTaskNodeParameters(String parameter, String taskType) { |
||||
AbstractParameters abstractParameters = TaskParametersUtils.getParameters(taskType, parameter); |
||||
|
||||
if (abstractParameters != null) { |
||||
return abstractParameters.checkParameters(); |
||||
} |
||||
|
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* check params |
||||
* @param userName |
||||
* @param password |
||||
* @param email |
||||
* @param phone |
||||
* @return |
||||
*/ |
||||
public static Map<String, Object> checkUserParams(String userName, String password, String email, String phone){ |
||||
Map<String, Object> result = new HashMap<>(); |
||||
try{ |
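// note: the boolean results of the checks below are not inspected here;
// the catch branch is only reached if one of the calls throws (e.g. on a null argument)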
||||
CheckUtils.checkUserName(userName); |
||||
CheckUtils.checkEmail(email); |
||||
CheckUtils.checkPassword(password); |
||||
CheckUtils.checkPhone(phone); |
||||
result.put(Constants.STATUS, Status.SUCCESS); |
||||
result.put(Constants.MSG, Status.SUCCESS.getMsg()); |
||||
}catch (Exception e){ |
||||
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR); |
||||
result.put(Constants.MSG, e.getMessage()); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/** |
||||
* regex match |
||||
* |
||||
* @param str |
||||
* @param pattern |
||||
* @return |
||||
*/ |
||||
private static boolean regexChecks(String str, Pattern pattern) { |
||||
if (org.apache.commons.lang3.StringUtils.isEmpty(str)) { |
||||
return false; |
||||
} |
||||
|
||||
return pattern.matcher(str).matches(); |
||||
} |
||||
|
||||
} |
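/*
 * Minimal usage sketch (hypothetical caller, not part of this patch):
 *
 *   Map<String, Object> check = CheckUtils.checkUserParams("admin", "admin123", "admin@example.com", "13800000000");
 *   if (check.get(Constants.STATUS) != Status.SUCCESS) {
 *       // reject the request; the reason is stored under Constants.MSG
 *   }
 */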
@ -0,0 +1,119 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.utils; |
||||
|
||||
/** |
||||
* web application constants |
||||
*/ |
||||
public class Constants { |
||||
|
||||
/** |
||||
* status |
||||
*/ |
||||
public static final String STATUS = "status"; |
||||
|
||||
/** |
||||
* message |
||||
*/ |
||||
public static final String MSG = "msg"; |
||||
|
||||
/** |
||||
* data total |
||||
* total number of records |
||||
*/ |
||||
public static final String COUNT = "count"; |
||||
|
||||
/** |
||||
* page size |
||||
* number of records per page |
||||
*/ |
||||
public static final String PAGE_SIZE = "pageSize"; |
||||
|
||||
/** |
||||
* current page no |
||||
* current page number |
||||
*/ |
||||
public static final String PAGE_NUMBER = "pageNo"; |
||||
|
||||
/** |
||||
* result |
||||
*/ |
||||
public static final String RESULT = "result"; |
||||
|
||||
/** |
||||
* data list |
||||
*/ |
||||
public static final String DATA_LIST = "data"; |
||||
|
||||
public static final String TOTAL_LIST = "totalList"; |
||||
|
||||
public static final String CURRENT_PAGE = "currentPage"; |
||||
|
||||
public static final String TOTAL_PAGE = "totalPage"; |
||||
|
||||
public static final String TOTAL = "total"; |
||||
|
||||
/** |
||||
* session user |
||||
*/ |
||||
public static final String SESSION_USER = "session.user"; |
||||
|
||||
public static final String SESSION_ID = "sessionId"; |
||||
|
||||
public static final String PASSWORD_DEFAULT = "******"; |
||||
|
||||
/** |
||||
* driver |
||||
*/ |
||||
public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver"; |
||||
public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver"; |
||||
public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver"; |
||||
|
||||
/** |
||||
* database type |
||||
*/ |
||||
public static final String MYSQL = "MYSQL"; |
||||
public static final String POSTGRESQL = "POSTGRESQL"; |
||||
public static final String HIVE = "HIVE"; |
||||
public static final String SPARK = "SPARK"; |
||||
|
||||
/** |
||||
* jdbc url |
||||
*/ |
||||
public static final String JDBC_MYSQL = "jdbc:mysql://"; |
||||
public static final String JDBC_POSTGRESQL = "jdbc:postgresql://"; |
||||
public static final String JDBC_HIVE_2 = "jdbc:hive2://"; |
||||
|
||||
|
||||
public static final String ADDRESS = "address"; |
||||
public static final String DATABASE = "database"; |
||||
public static final String JDBC_URL = "jdbcUrl"; |
||||
public static final String USER = "user"; |
||||
public static final String PASSWORD = "password"; |
||||
public static final String OTHER = "other"; |
||||
|
||||
|
||||
/** |
||||
* session timeout |
||||
*/ |
||||
public static final int SESSION_TIME_OUT = 7200; |
||||
public static final int maxFileSize = 1024 * 1024 * 1024; |
||||
public static final String UDF = "UDF"; |
||||
public static final String CLASS = "class"; |
||||
public static final String RECEIVERS = "receivers"; |
||||
public static final String RECEIVERS_CC = "receiversCc"; |
||||
} |
@ -0,0 +1,79 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.utils; |
||||
|
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.core.io.Resource; |
||||
import org.springframework.core.io.UrlResource; |
||||
import org.springframework.web.multipart.MultipartFile; |
||||
|
||||
import java.io.File; |
||||
import java.io.IOException; |
||||
import java.net.MalformedURLException; |
||||
import java.nio.file.Files; |
||||
import java.nio.file.Path; |
||||
import java.nio.file.Paths; |
||||
|
||||
/** |
||||
* file utils |
||||
*/ |
||||
public class FileUtils { |
||||
private static final Logger logger = LoggerFactory.getLogger(FileUtils.class); |
||||
|
||||
/** |
||||
* copy source file to target file |
||||
* |
||||
* @param file |
||||
* @param destFilename |
||||
*/ |
||||
|
||||
public static void copyFile(MultipartFile file, String destFilename) { |
||||
try { |
||||
|
||||
File destFile = new File(destFilename); |
||||
File destParentDir = new File(destFile.getParent()); |
||||
|
||||
if (!destParentDir.exists()) { |
||||
org.apache.commons.io.FileUtils.forceMkdir(destParentDir); |
||||
} |
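// Files.copy without REPLACE_EXISTING throws FileAlreadyExistsException if the target file already exists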
||||
|
||||
Files.copy(file.getInputStream(), Paths.get(destFilename)); |
||||
} catch (IOException e) { |
||||
logger.error(String.format("failed to copy file , {} is empty file", file.getOriginalFilename()), e); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* file to resource |
||||
* |
||||
* @param filename |
||||
* @return |
||||
*/ |
||||
public static Resource file2Resource(String filename) throws MalformedURLException { |
||||
Path file = Paths.get(filename); |
||||
|
||||
Resource resource = new UrlResource(file.toUri()); |
||||
if (resource.exists() || resource.isReadable()) { |
||||
return resource; |
||||
} else { |
||||
logger.error("file can not read : {}", filename); |
||||
|
||||
} |
||||
return null; |
||||
} |
||||
} |
@ -0,0 +1,117 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.utils; |
||||
|
||||
import java.util.List; |
||||
|
||||
/** |
||||
* page info |
||||
* |
||||
* @param <T> |
||||
*/ |
||||
public class PageInfo<T> { |
||||
|
||||
/** |
||||
* list |
||||
*/ |
||||
private List<T> lists; |
||||
/** |
||||
* total count |
||||
*/ |
||||
private Integer totalCount = 0; |
||||
/** |
||||
* page size |
||||
*/ |
||||
private Integer pageSize = 20; |
||||
/** |
||||
* current page |
||||
*/ |
||||
private Integer currentPage = 0; |
||||
/** |
||||
* query start offset: (currentPage - 1) * pageSize |
||||
*/ |
||||
private Integer pageNo; |
||||
|
||||
public PageInfo(Integer currentPage,Integer pageSize){ |
||||
if(currentPage==null){ |
||||
currentPage=1; |
||||
} |
||||
this.pageNo=(currentPage-1)*pageSize; |
||||
this.pageSize=pageSize; |
||||
this.currentPage=currentPage; |
||||
} |
||||
|
||||
public Integer getStart() { |
||||
return pageNo; |
||||
} |
||||
|
||||
public void setStart(Integer start) { |
||||
this.pageNo = start; |
||||
} |
||||
|
||||
public Integer getTotalPage() { |
||||
if (pageSize==null||pageSize == 0) { |
||||
pageSize = 7; |
||||
} |
||||
if (this.totalCount % this.pageSize == 0) { |
||||
return (this.totalCount / this.pageSize)==0?1:(this.totalCount / this.pageSize); |
||||
} |
||||
return (this.totalCount / this.pageSize + 1); |
||||
} |
||||
|
||||
public List<T> getLists() { |
||||
return lists; |
||||
} |
||||
|
||||
public void setLists(List<T> lists) { |
||||
this.lists = lists; |
||||
} |
||||
|
||||
public Integer getTotalCount() { |
||||
if (totalCount==null) { |
||||
totalCount = 0; |
||||
} |
||||
return totalCount; |
||||
} |
||||
|
||||
public void setTotalCount(Integer totalCount) { |
||||
this.totalCount = totalCount; |
||||
} |
||||
|
||||
public Integer getPageSize() { |
||||
if (pageSize==null||pageSize == 0) { |
||||
pageSize = 7; |
||||
} |
||||
return pageSize; |
||||
} |
||||
|
||||
public void setPageSize(Integer pageSize) { |
||||
this.pageSize = pageSize; |
||||
} |
||||
|
||||
public void setCurrentPage(Integer currentPage) { |
||||
this.currentPage = currentPage; |
||||
} |
||||
|
||||
public Integer getCurrentPage() { |
||||
if (currentPage==null||currentPage <= 0) { |
||||
this.currentPage = 1; |
||||
} |
||||
return this.currentPage; |
||||
} |
||||
|
||||
} |
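/*
 * Worked example (hypothetical values, not part of this patch):
 *   new PageInfo<User>(3, 20) -> getStart() == 40, i.e. the offset for a "LIMIT 40, 20" style query;
 *   after setTotalCount(45) with pageSize 20, getTotalPage() == 3 (two full pages plus 5 remaining rows).
 */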
@ -0,0 +1,82 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.utils; |
||||
|
||||
/** |
||||
* result |
||||
* |
||||
* @param <T> |
||||
*/ |
||||
public class Result<T> { |
||||
/** |
||||
* status |
||||
* status code |
||||
*/ |
||||
private Integer code; |
||||
|
||||
/** |
||||
* message |
||||
* message text |
||||
*/ |
||||
private String msg; |
||||
|
||||
/** |
||||
* data |
||||
*/ |
||||
private T data; |
||||
|
||||
public Result(){} |
||||
|
||||
public Result(Integer code , String msg){ |
||||
this.code = code; |
||||
this.msg = msg; |
||||
} |
||||
|
||||
public Integer getCode() { |
||||
return code; |
||||
} |
||||
|
||||
public void setCode(Integer code) { |
||||
this.code = code; |
||||
} |
||||
|
||||
public String getMsg() { |
||||
return msg; |
||||
} |
||||
|
||||
public void setMsg(String msg) { |
||||
this.msg = msg; |
||||
} |
||||
|
||||
public T getData() { |
||||
return data; |
||||
} |
||||
|
||||
public void setData(T data) { |
||||
this.data = data; |
||||
} |
||||
|
||||
|
||||
@Override |
||||
public String toString() { |
||||
return "Status{" + |
||||
"code='" + code + '\'' + |
||||
", msg='" + msg + '\'' + |
||||
", data=" + data + |
||||
'}'; |
||||
} |
||||
} |
@ -0,0 +1,42 @@
|
||||
<!-- Logback configuration. See http://logback.qos.ch/manual/index.html --> |
||||
<configuration scan="true" scanPeriod="120 seconds"> |
||||
<logger name="org.apache.zookeeper" level="WARN"/> |
||||
<logger name="org.apache.hbase" level="WARN"/> |
||||
<logger name="org.apache.hadoop" level="WARN"/> |
||||
|
||||
<property name="log.base" value="logs" /> |
||||
|
||||
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> |
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
</appender> |
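<!-- note: STDOUT is defined above but not attached to <root> below; add <appender-ref ref="STDOUT"/> there if console output is wanted -->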
||||
|
||||
<appender name="APISERVERLOGFILE" class="ch.qos.logback.core.rolling.RollingFileAppender"> |
||||
<!-- Log level filter --> |
||||
<filter class="ch.qos.logback.classic.filter.ThresholdFilter"> |
||||
<level>INFO</level> |
||||
</filter> |
||||
<file>${log.base}/escheduler-api-server.log</file> |
||||
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> |
||||
<fileNamePattern>${log.base}/escheduler-api-server.%d{yyyy-MM-dd_HH}.%i.log</fileNamePattern> |
||||
<maxHistory>168</maxHistory> |
||||
<maxFileSize>64MB</maxFileSize> |
||||
</rollingPolicy> |
||||
|
||||
<encoder> |
||||
<pattern> |
||||
[%level] %date{yyyy-MM-dd HH:mm:ss.SSS} %logger{96}:[%line] - %msg%n |
||||
</pattern> |
||||
<charset>UTF-8</charset> |
||||
</encoder> |
||||
|
||||
</appender> |
||||
|
||||
<root level="INFO"> |
||||
<appender-ref ref="APISERVERLOGFILE" /> |
||||
</root> |
||||
</configuration> |
@ -0,0 +1,16 @@
|
||||
# server port |
||||
server.port=12345 |
||||
|
||||
# session config |
||||
server.session.timeout=7200 |
||||
|
||||
|
||||
server.context-path=/escheduler/ |
||||
|
||||
# file size limit for upload |
||||
spring.http.multipart.max-file-size=1024MB |
||||
spring.http.multipart.max-request-size=1024MB |
||||
|
||||
#post content |
||||
server.max-http-post-size=5000000 |
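# note: the value is in bytes (5000000 B is roughly 4.8 MB)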
||||
|
@ -0,0 +1,95 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import org.junit.Assert; |
||||
import org.junit.Before; |
||||
import org.junit.Test; |
||||
import org.junit.runner.RunWith; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.boot.test.context.SpringBootTest; |
||||
import org.springframework.http.MediaType; |
||||
import org.springframework.test.context.junit4.SpringRunner; |
||||
import org.springframework.test.web.servlet.MockMvc; |
||||
import org.springframework.test.web.servlet.MvcResult; |
||||
import org.springframework.test.web.servlet.setup.MockMvcBuilders; |
||||
import org.springframework.util.LinkedMultiValueMap; |
||||
import org.springframework.util.MultiValueMap; |
||||
import org.springframework.web.context.WebApplicationContext; |
||||
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; |
||||
|
||||
@RunWith(SpringRunner.class) |
||||
@SpringBootTest |
||||
public class DataAnalysisControllerTest { |
||||
private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); |
||||
|
||||
private MockMvc mockMvc; |
||||
|
||||
@Autowired |
||||
private WebApplicationContext webApplicationContext; |
||||
|
||||
@Before |
||||
public void setUp() { |
||||
mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); |
||||
} |
||||
|
||||
@Test |
||||
public void countTaskState() throws Exception { |
||||
|
||||
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); |
||||
paramsMap.add("startDate","2019-02-01 00:00:00"); |
||||
paramsMap.add("endDate","2019-02-28 00:00:00"); |
||||
paramsMap.add("projectId","21"); |
||||
|
||||
MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/task-state-count") |
||||
.header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82") |
||||
.params(paramsMap)) |
||||
.andExpect(status().isOk()) |
||||
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) |
||||
.andReturn(); |
||||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); |
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); |
||||
logger.info(mvcResult.getResponse().getContentAsString()); |
||||
} |
||||
|
||||
@Test |
||||
public void countProcessInstanceState() throws Exception { |
||||
|
||||
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); |
||||
paramsMap.add("startDate","2019-02-01 00:00:00"); |
||||
paramsMap.add("endDate","2019-02-28 00:00:00"); |
||||
paramsMap.add("projectId","21"); |
||||
|
||||
MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/process-state-count") |
||||
.header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82") |
||||
.params(paramsMap)) |
||||
.andExpect(status().isOk()) |
||||
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) |
||||
.andReturn(); |
||||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); |
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); |
||||
logger.info(mvcResult.getResponse().getContentAsString()); |
||||
} |
||||
} |
@ -0,0 +1,94 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import org.junit.Assert; |
||||
import org.junit.Before; |
||||
import org.junit.Test; |
||||
import org.junit.runner.RunWith; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.boot.test.context.SpringBootTest; |
||||
import org.springframework.http.MediaType; |
||||
import org.springframework.test.context.junit4.SpringRunner; |
||||
import org.springframework.test.web.servlet.MockMvc; |
||||
import org.springframework.test.web.servlet.MvcResult; |
||||
import org.springframework.test.web.servlet.setup.MockMvcBuilders; |
||||
import org.springframework.util.LinkedMultiValueMap; |
||||
import org.springframework.util.MultiValueMap; |
||||
import org.springframework.web.context.WebApplicationContext; |
||||
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; |
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; |
||||
|
||||
@RunWith(SpringRunner.class) |
||||
@SpringBootTest |
||||
public class DataSourceControllerTest { |
||||
private static Logger logger = LoggerFactory.getLogger(DataSourceControllerTest.class); |
||||
|
||||
private MockMvc mockMvc; |
||||
|
||||
@Autowired |
||||
private WebApplicationContext webApplicationContext; |
||||
|
||||
@Before |
||||
public void setUp() { |
||||
mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); |
||||
} |
||||
|
||||
|
||||
@Test |
||||
public void queryDataSource() throws Exception { |
||||
MvcResult mvcResult = mockMvc.perform(get("/datasources/list").header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82").param("type","HIVE")) |
||||
.andExpect(status().isOk()) |
||||
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) |
||||
.andReturn(); |
||||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); |
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); |
||||
logger.info(mvcResult.getResponse().getContentAsString()); |
||||
} |
||||
|
||||
@Test |
||||
public void connectDataSource() throws Exception { |
||||
|
||||
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); |
||||
paramsMap.add("name","hvie数据源"); |
||||
paramsMap.add("type","HIVE"); |
||||
paramsMap.add("host","192.168.xx.xx"); |
||||
paramsMap.add("port","10000"); |
||||
paramsMap.add("database","default"); |
||||
paramsMap.add("userName","hive"); |
||||
paramsMap.add("password",""); |
||||
paramsMap.add("other",""); |
||||
MvcResult mvcResult = mockMvc.perform(post("/datasources/connect") |
||||
.header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82") |
||||
.params(paramsMap)) |
||||
.andExpect(status().isOk()) |
||||
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) |
||||
.andReturn(); |
||||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); |
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); |
||||
logger.info(mvcResult.getResponse().getContentAsString()); |
||||
} |
||||
|
||||
} |
@ -0,0 +1,69 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import org.junit.Assert; |
||||
import org.junit.Before; |
||||
import org.junit.Test; |
||||
import org.junit.runner.RunWith; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.boot.test.context.SpringBootTest; |
||||
import org.springframework.http.MediaType; |
||||
import org.springframework.test.context.junit4.SpringRunner; |
||||
import org.springframework.test.web.servlet.MockMvc; |
||||
import org.springframework.test.web.servlet.MvcResult; |
||||
import org.springframework.test.web.servlet.setup.MockMvcBuilders; |
||||
import org.springframework.web.context.WebApplicationContext; |
||||
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; |
||||
|
||||
@RunWith(SpringRunner.class) |
||||
@SpringBootTest |
||||
public class ExecutorControllerTest { |
||||
private static Logger logger = LoggerFactory.getLogger(ExecutorControllerTest.class); |
||||
|
||||
private MockMvc mockMvc; |
||||
|
||||
@Autowired |
||||
private WebApplicationContext webApplicationContext; |
||||
|
||||
@Before |
||||
public void setUp() { |
||||
mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); |
||||
} |
||||
|
||||
@Test |
||||
public void startCheckProcessDefinition() throws Exception { |
||||
|
||||
MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/executors/start-check","project_test1") |
||||
.header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82") |
||||
.param("processDefinitionId","226")) |
||||
.andExpect(status().isOk()) |
||||
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) |
||||
.andReturn(); |
||||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); |
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); |
||||
logger.info(mvcResult.getResponse().getContentAsString()); |
||||
} |
||||
} |
@ -0,0 +1,77 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import org.junit.Assert; |
||||
import org.junit.Before; |
||||
import org.junit.Test; |
||||
import org.junit.runner.RunWith; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.boot.test.context.SpringBootTest; |
||||
import org.springframework.http.MediaType; |
||||
import org.springframework.test.context.junit4.SpringRunner; |
||||
import org.springframework.test.web.servlet.MockMvc; |
||||
import org.springframework.test.web.servlet.MvcResult; |
||||
import org.springframework.test.web.servlet.setup.MockMvcBuilders; |
||||
import org.springframework.util.LinkedMultiValueMap; |
||||
import org.springframework.util.MultiValueMap; |
||||
import org.springframework.web.context.WebApplicationContext; |
||||
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; |
||||
|
||||
@RunWith(SpringRunner.class) |
||||
@SpringBootTest |
||||
public class LoggerControllerTest { |
||||
private static Logger logger = LoggerFactory.getLogger(DataAnalysisControllerTest.class); |
||||
|
||||
private MockMvc mockMvc; |
||||
|
||||
@Autowired |
||||
private WebApplicationContext webApplicationContext; |
||||
|
||||
@Before |
||||
public void setUp() { |
||||
mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); |
||||
} |
||||
|
||||
@Test |
||||
public void queryLog() throws Exception { |
||||
|
||||
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); |
||||
paramsMap.add("taskInstId","6007"); |
||||
paramsMap.add("skipLineNum","0"); |
||||
paramsMap.add("limit","1000"); |
||||
|
||||
MvcResult mvcResult = mockMvc.perform(get("/log/detail") |
||||
.header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82") |
||||
.params(paramsMap)) |
||||
.andExpect(status().isOk()) |
||||
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) |
||||
.andReturn(); |
||||
|
||||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); |
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); |
||||
logger.info(mvcResult.getResponse().getContentAsString()); |
||||
} |
||||
} |
@ -0,0 +1,74 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
package cn.escheduler.api.controller; |
||||
|
||||
import cn.escheduler.api.enums.Status; |
||||
import cn.escheduler.api.utils.Result; |
||||
import cn.escheduler.common.utils.JSONUtils; |
||||
import org.junit.Assert; |
||||
import org.junit.Before; |
||||
import org.junit.Test; |
||||
import org.junit.runner.RunWith; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.boot.test.context.SpringBootTest; |
||||
import org.springframework.http.MediaType; |
||||
import org.springframework.test.context.junit4.SpringRunner; |
||||
import org.springframework.test.web.servlet.MockMvc; |
||||
import org.springframework.test.web.servlet.MvcResult; |
||||
import org.springframework.test.web.servlet.setup.MockMvcBuilders; |
||||
import org.springframework.util.LinkedMultiValueMap; |
||||
import org.springframework.util.MultiValueMap; |
||||
import org.springframework.web.context.WebApplicationContext; |
||||
|
||||
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; |
||||
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; |
||||
|
||||
|
||||
@RunWith(SpringRunner.class) |
||||
@SpringBootTest |
||||
public class LoginControllerTest { |
||||
private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class); |
||||
|
||||
private MockMvc mockMvc; |
||||
|
||||
@Autowired |
||||
private WebApplicationContext webApplicationContext; |
||||
|
||||
@Before |
||||
public void setUp() { |
||||
mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build(); |
||||
} |
||||
@Test |
||||
public void login() throws Exception { |
||||
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); |
||||
paramsMap.add("userName","admin"); |
||||
paramsMap.add("userPassword","admin123"); |
||||
|
||||
MvcResult mvcResult = mockMvc.perform(get("/login") |
||||
.params(paramsMap)) |
||||
.andExpect(status().isOk()) |
||||
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8)) |
||||
.andReturn(); |
||||
|
||||
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); |
||||
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue()); |
||||
logger.info(mvcResult.getResponse().getContentAsString()); |
||||
} |
||||
} |
@ -0,0 +1,102 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ProcessDefinitionControllerTest {
    private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void createProcessDefinition() throws Exception {
        //String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-50438\",\"name\":\"shell_01\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo \\\"123\\\"\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{\"self\":\"NO_DEP_PRE\",\"outer\":{\"strategy\":\"NONE\",\"taskList\":[]}},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"preTasks\":[]}]}";
        String json = "{\n" +
                "    \"globalParams\": [ ],\n" +
                "    \"tasks\": [\n" +
                "        {\n" +
                "            \"type\": \"SHELL\",\n" +
                "            \"id\": \"tasks-50438\",\n" +
                "            \"name\": \"shell_01\",\n" +
                "            \"params\": {\n" +
                "                \"resourceList\": [ ],\n" +
                "                \"localParams\": [ ],\n" +
                "                \"rawScript\": \"echo \\\"123\\\"\"\n" +
                "            },\n" +
                "            \"desc\": \"\",\n" +
                "            \"runFlag\": \"NORMAL\",\n" +
                "            \"dependence\": {\n" +
                "                \"self\": \"NO_DEP_PRE\",\n" +
                "                \"outer\": {\n" +
                "                    \"strategy\": \"NONE\",\n" +
                "                    \"taskList\": [ ]\n" +
                "                }\n" +
                "            },\n" +
                "            \"maxRetryTimes\": \"0\",\n" +
                "            \"retryInterval\": \"1\",\n" +
                "            \"preTasks\": [ ]\n" +
                "        }\n" +
                "    ]\n" +
                "}";
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("name", "shell_process_01_test");
        paramsMap.add("processDefinitionJson", json);

        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/process/save", "project_test1")
                .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82")
                .params(paramsMap))
                .andExpect(status().isCreated())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
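    // A hedged sketch, not in the original commit: the definition JSON above
    // could be assembled with fastjson (already on this test classpath) instead
    // of a hand-concatenated string, so quoting mistakes cannot creep in. The
    // keys simply mirror the string literal used by createProcessDefinition().
    private String buildShellProcessDefinitionJson() {
        com.alibaba.fastjson.JSONObject params = new com.alibaba.fastjson.JSONObject();
        params.put("resourceList", new com.alibaba.fastjson.JSONArray());
        params.put("localParams", new com.alibaba.fastjson.JSONArray());
        params.put("rawScript", "echo \"123\"");

        com.alibaba.fastjson.JSONObject outer = new com.alibaba.fastjson.JSONObject();
        outer.put("strategy", "NONE");
        outer.put("taskList", new com.alibaba.fastjson.JSONArray());

        com.alibaba.fastjson.JSONObject dependence = new com.alibaba.fastjson.JSONObject();
        dependence.put("self", "NO_DEP_PRE");
        dependence.put("outer", outer);

        com.alibaba.fastjson.JSONObject task = new com.alibaba.fastjson.JSONObject();
        task.put("type", "SHELL");
        task.put("id", "tasks-50438");
        task.put("name", "shell_01");
        task.put("params", params);
        task.put("desc", "");
        task.put("runFlag", "NORMAL");
        task.put("dependence", dependence);
        task.put("maxRetryTimes", "0");
        task.put("retryInterval", "1");
        task.put("preTasks", new com.alibaba.fastjson.JSONArray());

        com.alibaba.fastjson.JSONArray tasks = new com.alibaba.fastjson.JSONArray();
        tasks.add(task);

        com.alibaba.fastjson.JSONObject definition = new com.alibaba.fastjson.JSONObject();
        definition.put("globalParams", new com.alibaba.fastjson.JSONArray());
        definition.put("tasks", tasks);
        return definition.toJSONString();
    }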
}
@ -0,0 +1,72 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ProcessInstanceControllerTest {
    private static Logger logger = LoggerFactory.getLogger(ProcessInstanceControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void queryTaskListByProcessId() throws Exception {
        MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/instance/task-list-by-process-id", "project_test1")
                .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82")
                .param("processInstanceId", "1370"))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
@ -0,0 +1,75 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ProjectControllerTest {
    private static Logger logger = LoggerFactory.getLogger(ProjectControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void createProject() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        paramsMap.add("projectName", "project_test1");
        paramsMap.add("desc", "the test project");

        MvcResult mvcResult = mockMvc.perform(post("/projects/create")
                .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82")
                .params(paramsMap))
                .andExpect(status().isCreated())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
@ -0,0 +1,68 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class QueueControllerTest {
    private static Logger logger = LoggerFactory.getLogger(QueueControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void queryList() throws Exception {
        MvcResult mvcResult = mockMvc.perform(get("/queue/list")
                .header("sessionId", "d199af92-d8e0-4f1b-bacb-5b3414567e5c"))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
@ -0,0 +1,74 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.ResourceType;
import cn.escheduler.common.utils.JSONUtils;
import com.alibaba.fastjson.JSONObject;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ResourcesControllerTest {
    private static Logger logger = LoggerFactory.getLogger(ResourcesControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void queryResourceList() throws Exception {
        MvcResult mvcResult = mockMvc.perform(get("/resources/list")
                .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82")
                .param("type", ResourceType.FILE.name()))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        JSONObject object = (JSONObject) JSONObject.parse(mvcResult.getResponse().getContentAsString());

        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
@ -0,0 +1,67 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class SchedulerControllerTest {
    private static Logger logger = LoggerFactory.getLogger(SchedulerControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void queryScheduleList() throws Exception {
        MvcResult mvcResult = mockMvc.perform(post("/projects/{projectName}/schedule/list", "project_test1")
                .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82"))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
@ -0,0 +1,81 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class TaskInstanceControllerTest {
    private static Logger logger = LoggerFactory.getLogger(TaskInstanceControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void queryTaskListPaging() throws Exception {
        MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
        //paramsMap.add("processInstanceId","1380");
        paramsMap.add("searchVal", "");
        paramsMap.add("taskName", "");
        //paramsMap.add("stateType","");
        paramsMap.add("startDate", "2019-02-26 19:48:00");
        paramsMap.add("endDate", "2019-02-26 19:48:22");
        paramsMap.add("pageNo", "1");
        paramsMap.add("pageSize", "20");

        MvcResult mvcResult = mockMvc.perform(get("/projects/{projectName}/task-instance/list-paging", "project_test1")
                .header("sessionId", "08fae8bf-fe2d-4fc0-8129-23c37fbfac82")
                .params(paramsMap))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
@ -0,0 +1,69 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class TenantControllerTest {
    private static Logger logger = LoggerFactory.getLogger(TenantControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void queryTenantList() throws Exception {
        MvcResult mvcResult = mockMvc.perform(get("/tenant/list")
                .header("sessionId", "d199af92-d8e0-4f1b-bacb-5b3414567e5c"))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
@ -0,0 +1,68 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.controller;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@RunWith(SpringRunner.class)
@SpringBootTest
public class UsersControllerTest {
    private static Logger logger = LoggerFactory.getLogger(UsersControllerTest.class);

    private MockMvc mockMvc;

    @Autowired
    private WebApplicationContext webApplicationContext;

    @Before
    public void setUp() {
        mockMvc = MockMvcBuilders.webAppContextSetup(webApplicationContext).build();
    }

    @Test
    public void queryList() throws Exception {
        MvcResult mvcResult = mockMvc.perform(get("/users/list")
                .header("sessionId", "d199af92-d8e0-4f1b-bacb-5b3414567e5c"))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
                .andReturn();

        Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
        logger.info(mvcResult.getResponse().getContentAsString());
    }
}
@ -0,0 +1,51 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.Map;

@RunWith(SpringRunner.class)
@SpringBootTest
public class DataAnalysisServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceTest.class);

    @Autowired
    private DataAnalysisService dataAnalysisService;

    @Test
    public void countDefinitionByUser() {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);
        Map<String, Object> map = dataAnalysisService.countDefinitionByUser(loginUser, 21);
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
    }
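    // A small sketch, not in the original commit: most service tests in this
    // change rebuild the same stub user (id 27, GENERAL_USER) inline; a factory
    // like this keeps that setup in one place. The id is environment data that
    // has to exist in the test database, which this sketch cannot verify.
    private User generalUser(int id) {
        User user = new User();
        user.setId(id);
        user.setUserType(UserType.GENERAL_USER);
        return user;
    }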
}
@ -0,0 +1,51 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.DbType;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.Map;

@RunWith(SpringRunner.class)
@SpringBootTest
public class DataSourceServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceTest.class);

    @Autowired
    private DataSourceService dataSourceService;

    @Test
    public void queryDataSourceList() {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);
        Map<String, Object> map = dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal());
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
    }
}
@ -0,0 +1,65 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ExecutorServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class);

    @Autowired
    private ExecutorService executorService;

    @Test
    public void startCheckByProcessDefinedId() {
        Map<String, Object> map = executorService.startCheckByProcessDefinedId(214);
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
    }

    @Test
    public void putMsgWithParamsTest() {
        Map<String, Object> map = new HashMap<>(5);
        putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS);
        logger.info(map.toString());
    }

    void putMsgWithParams(Map<String, Object> result, Status status, Object... statusParams) {
        result.put(Constants.STATUS, status);
        if (statusParams != null && statusParams.length > 0) {
            result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.put(Constants.MSG, status.getMsg());
        }
    }
}
@ -0,0 +1,49 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Result;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest
public class LoggerServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(LoggerServiceTest.class);

    @Autowired
    private LoggerService loggerService;

    @Test
    public void queryLog() {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);
        Result result = loggerService.queryLog(6007, 0, 100);
        Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
    }
}
@ -0,0 +1,66 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User;
import com.alibaba.fastjson.JSON;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.Map;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ProcessDefinitionServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceTest.class);

    @Autowired
    ProcessDefinitionService processDefinitionService;

    @Test
    public void queryProccessDefinitionList() throws Exception {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);

        Map<String, Object> map = processDefinitionService.queryProccessDefinitionList(loginUser, "project_test1");
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
        logger.info(JSON.toJSONString(map));
    }

    @Test
    public void queryProcessDefinitionListPagingTest() throws Exception {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);
        Map<String, Object> map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "", 1, 5, 0);

        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
        logger.info(JSON.toJSONString(map));
    }
}
@ -0,0 +1,78 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.DependResult;
import cn.escheduler.common.enums.ExecutionStatus;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User;
import com.alibaba.fastjson.JSON;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.io.IOException;
import java.util.Map;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ProcessInstanceServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceServiceTest.class);

    @Autowired
    ProcessInstanceService processInstanceService;

    @Test
    public void viewVariables() throws Exception {
        Map<String, Object> map = processInstanceService.viewVariables(1389);
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
        logger.info(JSON.toJSONString(map));
    }

    @Test
    public void testDependResult() {
        String logString = "[INFO] 2019-03-19 17:11:08.475 cn.escheduler.server.worker.log.TaskLogger:[172] - [taskAppId=TASK_223_10739_452334] dependent item complete :|| 223-ALL-day-last1Day,SUCCESS\n" +
                "[INFO] 2019-03-19 17:11:08.476 cn.escheduler.server.worker.runner.TaskScheduleThread:[172] - task : 223_10739_452334 exit status code : 0\n" +
                "[root@node2 current]# ";
        try {
            Map<String, DependResult> resultMap =
                    processInstanceService.parseLogForDependentResult(logString);
            Assert.assertEquals(1, resultMap.size());
        } catch (IOException e) {
            // the sample log above should always be parseable
            Assert.fail(e.getMessage());
        }
    }

    @Test
    public void queryProcessInstanceList() throws Exception {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);
        Map<String, Object> map = processInstanceService.queryProcessInstanceList(loginUser, "project_test1", 0, "", "", "", ExecutionStatus.FAILURE, "", 1, 5);

        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
        logger.info(JSON.toJSONString(map));
    }
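    // A hedged follow-up to testDependResult(), not in the original commit: if
    // the parser keys its result map by the dependent item name, the single
    // entry should look like this. The key ("223-ALL-day-last1Day") and the
    // SUCCESS value are read off the sample log line above; treating them as
    // the map's key and value is an assumption about parseLogForDependentResult.
    private void assertDependentItemSuccess(Map<String, DependResult> resultMap) {
        Assert.assertEquals(DependResult.SUCCESS, resultMap.get("223-ALL-day-last1Day"));
    }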
}
@ -0,0 +1,52 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.ResourceType;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.Map;

@RunWith(SpringRunner.class)
@SpringBootTest
public class ResourcesServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(ResourcesServiceTest.class);

    @Autowired
    private ResourcesService resourcesService;

    @Test
    public void queryResourceList() {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);

        Map<String, Object> map = resourcesService.queryResourceList(loginUser, ResourceType.FILE);
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
    }
}
@ -0,0 +1,57 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.common.enums.ReleaseState;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.Project;
import cn.escheduler.dao.model.User;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.Map;

@RunWith(SpringRunner.class)
@SpringBootTest
public class SchedulerServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceTest.class);

    @Autowired
    private SchedulerService schedulerService;

    @Test
    public void testSetScheduleState() {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);
        Project project = new Project();
        project.setName("project_test1");
        project.setId(21);

        Map<String, Object> map = schedulerService.setScheduleState(loginUser, project.getName(), 44, ReleaseState.ONLINE);
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
    }
}
@ -0,0 +1,50 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User;
import org.apache.commons.lang3.StringUtils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest
public class SessionServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(SessionServiceTest.class);

    @Autowired
    private SessionService sessionService;

    @Test
    public void createSession() {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);

        String session = sessionService.createSession(loginUser, "127.0.0.1");
        Assert.assertTrue(StringUtils.isNotEmpty(session));
    }
}
@ -0,0 +1,57 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.escheduler.api.service;

import cn.escheduler.api.enums.Status;
import cn.escheduler.api.utils.Constants;
import cn.escheduler.api.utils.PageInfo;
import cn.escheduler.common.enums.UserType;
import cn.escheduler.dao.model.User;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

import java.util.Map;

@RunWith(SpringRunner.class)
@SpringBootTest
public class TaskInstanceServiceTest {
    private static final Logger logger = LoggerFactory.getLogger(TaskInstanceServiceTest.class);

    @Autowired
    private TaskInstanceService taskInstanceService;

    @Test
    public void queryTaskListPaging() {
        User loginUser = new User();
        loginUser.setId(27);
        loginUser.setUserType(UserType.GENERAL_USER);

        Map<String, Object> map = taskInstanceService.queryTaskListPaging(loginUser, "project_test1", 0, "",
                "2019-02-26 19:48:00", "2019-02-26 19:48:22", "", null, "", 1, 20);
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
        PageInfo pageInfo = (PageInfo) map.get("data");
        logger.info(pageInfo.getLists().toString());
    }
}
@ -0,0 +1,52 @@
|
/* |
 * Licensed to the Apache Software Foundation (ASF) under one or more |
 * contributor license agreements. See the NOTICE file distributed with |
 * this work for additional information regarding copyright ownership. |
 * The ASF licenses this file to You under the Apache License, Version 2.0 |
 * (the "License"); you may not use this file except in compliance with |
 * the License. You may obtain a copy of the License at |
 * |
 *    http://www.apache.org/licenses/LICENSE-2.0 |
 * |
 * Unless required by applicable law or agreed to in writing, software |
 * distributed under the License is distributed on an "AS IS" BASIS, |
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
 * See the License for the specific language governing permissions and |
 * limitations under the License. |
 */ |
package cn.escheduler.api.service; |
|
import cn.escheduler.api.enums.Status; |
import cn.escheduler.api.utils.Constants; |
import cn.escheduler.common.enums.UserType; |
import cn.escheduler.dao.model.User; |
import org.junit.Assert; |
import org.junit.Test; |
import org.junit.runner.RunWith; |
import org.slf4j.Logger; |
import org.slf4j.LoggerFactory; |
import org.springframework.beans.factory.annotation.Autowired; |
import org.springframework.boot.test.context.SpringBootTest; |
import org.springframework.test.context.junit4.SpringRunner; |
|
import java.util.Map; |
|
@RunWith(SpringRunner.class) |
@SpringBootTest |
public class TenantServiceTest { |
    private static final Logger logger = LoggerFactory.getLogger(TenantServiceTest.class); |
|
    @Autowired |
    private TenantService tenantService; |
|
    @Test |
    public void queryTenantList(){ |
|
        User loginUser = new User(); |
        loginUser.setUserType(UserType.ADMIN_USER); |
        Map<String, Object> map = tenantService.queryTenantList(loginUser); |
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); |
        logger.info(map.toString()); |
|
    } |
} |
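Every test in this batch repeats the same assertion against Constants.STATUS. A tiny helper such as the sketch below, which is hypothetical and relies only on the Status, Constants, Assert and Map types already imported, would keep that pattern in one place and give failures a clearer message.

// Hypothetical helper for the shared "expect SUCCESS" pattern.
private static void assertSuccess(Map<String, Object> result) {
    Assert.assertEquals("service call should return SUCCESS",
            Status.SUCCESS, result.get(Constants.STATUS));
}

Usage would then read assertSuccess(tenantService.queryTenantList(loginUser));.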
@ -0,0 +1,58 @@
|
/* |
 * Licensed to the Apache Software Foundation (ASF) under one or more |
 * contributor license agreements. See the NOTICE file distributed with |
 * this work for additional information regarding copyright ownership. |
 * The ASF licenses this file to You under the Apache License, Version 2.0 |
 * (the "License"); you may not use this file except in compliance with |
 * the License. You may obtain a copy of the License at |
 * |
 *    http://www.apache.org/licenses/LICENSE-2.0 |
 * |
 * Unless required by applicable law or agreed to in writing, software |
 * distributed under the License is distributed on an "AS IS" BASIS, |
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
 * See the License for the specific language governing permissions and |
 * limitations under the License. |
 */ |
package cn.escheduler.api.service; |
|
import cn.escheduler.api.enums.Status; |
import cn.escheduler.api.utils.Constants; |
import cn.escheduler.api.utils.PageInfo; |
import cn.escheduler.common.enums.UserType; |
import cn.escheduler.dao.model.User; |
import org.junit.Assert; |
import org.junit.Test; |
import org.junit.runner.RunWith; |
import org.slf4j.Logger; |
import org.slf4j.LoggerFactory; |
import org.springframework.beans.factory.annotation.Autowired; |
import org.springframework.boot.test.context.SpringBootTest; |
import org.springframework.test.context.junit4.SpringRunner; |
|
import java.util.Map; |
|
|
@RunWith(SpringRunner.class) |
@SpringBootTest |
public class UdfFuncServiceTest { |
    private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceTest.class); |
|
    @Autowired |
    private UdfFuncService udfFuncService; |
|
    @Test |
    public void queryUdfFuncListPaging(){ |
|
        User loginUser = new User(); |
        loginUser.setId(19); |
        loginUser.setUserType(UserType.GENERAL_USER); |
|
        Map<String, Object> map = udfFuncService.queryUdfFuncListPaging(loginUser, "", 1, 10); |
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); |
|
        PageInfo pageInfo = (PageInfo) map.get("data"); |
        logger.info(pageInfo.getLists().toString()); |
|
    } |
} |
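queryUdfFuncListPaging is exercised only with an empty search string. A follow-up call with a non-empty value, as sketched below, would also cover the filtered path; it assumes, without confirmation from this diff, that the second argument is a search keyword and that an empty result page still reports SUCCESS.

// Sketch only: same signature as the call above, with a non-empty search value.
Map<String, Object> filtered = udfFuncService.queryUdfFuncListPaging(loginUser, "str", 1, 10);
Assert.assertEquals(Status.SUCCESS, filtered.get(Constants.STATUS));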
@ -0,0 +1,53 @@
|
/* |
 * Licensed to the Apache Software Foundation (ASF) under one or more |
 * contributor license agreements. See the NOTICE file distributed with |
 * this work for additional information regarding copyright ownership. |
 * The ASF licenses this file to You under the Apache License, Version 2.0 |
 * (the "License"); you may not use this file except in compliance with |
 * the License. You may obtain a copy of the License at |
 * |
 *    http://www.apache.org/licenses/LICENSE-2.0 |
 * |
 * Unless required by applicable law or agreed to in writing, software |
 * distributed under the License is distributed on an "AS IS" BASIS, |
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
 * See the License for the specific language governing permissions and |
 * limitations under the License. |
 */ |
package cn.escheduler.api.service; |
|
import cn.escheduler.api.enums.Status; |
import cn.escheduler.api.utils.Constants; |
import cn.escheduler.common.enums.UserType; |
import cn.escheduler.dao.model.User; |
import org.junit.Assert; |
import org.junit.Test; |
import org.junit.runner.RunWith; |
import org.slf4j.Logger; |
import org.slf4j.LoggerFactory; |
import org.springframework.beans.factory.annotation.Autowired; |
import org.springframework.boot.test.context.SpringBootTest; |
import org.springframework.test.context.junit4.SpringRunner; |
|
import java.util.Map; |
|
@RunWith(SpringRunner.class) |
@SpringBootTest |
public class UsersServiceTest { |
    private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class); |
|
    @Autowired |
    private UsersService usersService; |
|
    @Test |
    public void getUserInfo(){ |
|
        User loginUser = new User(); |
        loginUser.setId(19); |
        loginUser.setUserType(UserType.GENERAL_USER); |
        Map<String, Object> map = usersService.getUserInfo(loginUser); |
        Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); |
        logger.info(map.toString()); |
|
    } |
} |
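These tests depend on seeded database rows: user ids 19 and 27, the project_test1 project, and the 2019-02-26 time window must already exist for the assertions to pass. Attaching a message to the status assertion, as in the sketch below, makes that dependency explicit when the seed data is missing; nothing beyond the JUnit 4 Assert overload that takes a message is assumed.

// Sketch: the getUserInfo() assertion with an explanatory failure message.
Map<String, Object> map = usersService.getUserInfo(loginUser);
Assert.assertEquals("getUserInfo should succeed for the seeded test user (id 19)",
        Status.SUCCESS, map.get(Constants.STATUS));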