Browse Source

Merge pull request #476 from lgcareer/branch-1.0.2

Determine whether the file exists in HDFS when reading a file
pull/2/head
lgcareer 6 years ago committed by GitHub
parent
commit
893af254f7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 1
      escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java
  2. 27
      escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java

1
escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java

@@ -175,6 +175,7 @@ public enum Status {
UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar"), UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar"),
HDFS_COPY_FAIL(20009, "hdfs copy {0} -> {1} fail"), HDFS_COPY_FAIL(20009, "hdfs copy {0} -> {1} fail"),
RESOURCE_FILE_EXIST(20010, "resource file {0} already exists in hdfs,please delete it or change name!"), RESOURCE_FILE_EXIST(20010, "resource file {0} already exists in hdfs,please delete it or change name!"),
RESOURCE_FILE_NOT_EXIST(20011, "resource file {0} not exists in hdfs!"),

27
escheduler-api/src/main/java/cn/escheduler/api/service/ResourcesService.java

@@ -515,13 +515,19 @@ public class ResourcesService extends BaseService {
String hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias()); String hdfsFileName = HadoopUtils.getHdfsFilename(tenantCode, resource.getAlias());
logger.info("resource hdfs path is {} ", hdfsFileName); logger.info("resource hdfs path is {} ", hdfsFileName);
try { try {
List<String> content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit); if(HadoopUtils.getInstance().exists(hdfsFileName)){
List<String> content = HadoopUtils.getInstance().catFile(hdfsFileName, skipLineNum, limit);
putMsg(result, Status.SUCCESS);
Map<String, Object> map = new HashMap<>();
map.put(ALIAS, resource.getAlias());
map.put(CONTENT, StringUtils.join(content.toArray(), "\n"));
result.setData(map);
}else{
logger.error("read file {} not exist in hdfs", hdfsFileName);
putMsg(result, Status.RESOURCE_FILE_NOT_EXIST);
}
putMsg(result, Status.SUCCESS);
Map<String, Object> map = new HashMap<>();
map.put(ALIAS, resource.getAlias());
map.put(CONTENT, StringUtils.join(content.toArray(), "\n"));
result.setData(map);
} catch (Exception e) { } catch (Exception e) {
logger.error(String.format("Resource %s read failed", hdfsFileName), e); logger.error(String.format("Resource %s read failed", hdfsFileName), e);
putMsg(result, Status.HDFS_OPERATION_ERROR); putMsg(result, Status.HDFS_OPERATION_ERROR);
@@ -565,17 +571,14 @@ public class ResourcesService extends BaseService {
String name = fileName.trim() + "." + nameSuffix; String name = fileName.trim() + "." + nameSuffix;
//check file already exists result = verifyResourceName(name,type,loginUser);
Resource resource = resourcesMapper.queryResourceByNameAndType(name, type.ordinal()); if (!result.getCode().equals(Status.SUCCESS.getCode())) {
if (resource != null) {
logger.error("resource {} has exist, can't recreate .", name);
putMsg(result, Status.RESOURCE_EXIST);
return result; return result;
} }
// save data // save data
Date now = new Date(); Date now = new Date();
resource = new Resource(name,name,desc,loginUser.getId(),type,content.getBytes().length,now,now); Resource resource = new Resource(name,name,desc,loginUser.getId(),type,content.getBytes().length,now,now);
resourcesMapper.insert(resource); resourcesMapper.insert(resource);

Loading…
Cancel
Save