
[Improvement-11013][dolphinscheduler-common] YarnHAAdminUtils#getActiveRMName function Add HTTPS Hadoop environment support (#11017)

* [Improvement-11013][dolphinscheduler-common] YarnHAAdminUtils#getActiveRMName function Add HTTPS Hadoop environment support

* [Improvement-11013][dolphinscheduler-common] add test code

* [Improvement-11013][dolphinscheduler-common] code format

* [Improvement-11013][dolphinscheduler-common] add hadooputils test code

* [Improvement-11013][dolphinscheduler-common] remove extra blank lines

* [Improvement-11013][dolphinscheduler-common] adjusts test code

* [Improvement-11013][dolphinscheduler-common] remove useless import

* [Improvement-11013][dolphinscheduler-common] add test code

* [Improvement-11013][dolphinscheduler-common] shortened string

* [Improvement-11013][dolphinscheduler-common] format test code style

Co-authored-by: liyangyang <liyangyang@bizseer.com>
Branch: 3.1.0-release
Author: liyangyang (committed 2 years ago via GitHub)
Commit: f13e7a95ae
  1. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java (26 changes)
  2. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java (5 changes)
  3. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java (73 changes)

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java (26 changes)

@@ -130,8 +130,8 @@ public class HadoopUtils implements Closeable, StorageOperate {
             String defaultFS = configuration.get(Constants.FS_DEFAULT_FS);
-            if (StringUtils.isBlank(defaultFS)){
-                defaultFS= PropertyUtils.getString(Constants.FS_DEFAULT_FS);
+            if (StringUtils.isBlank(defaultFS)) {
+                defaultFS = PropertyUtils.getString(Constants.FS_DEFAULT_FS);
             }
             //first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file
@@ -615,12 +615,6 @@ public class HadoopUtils implements Closeable, StorageOperate {
      */
     public static String getAppAddress(String appAddress, String rmHa) {
-        //get active ResourceManager
-        String activeRM = YarnHAAdminUtils.getActiveRMName(rmHa);
-        if (StringUtils.isEmpty(activeRM)) {
-            return null;
-        }
         String[] split1 = appAddress.split(Constants.DOUBLE_SLASH);
@@ -637,6 +631,13 @@ public class HadoopUtils implements Closeable, StorageOperate {
         String end = Constants.COLON + split2[1];
+        //get active ResourceManager
+        String activeRM = YarnHAAdminUtils.getActiveRMName(start, rmHa);
+        if (StringUtils.isEmpty(activeRM)) {
+            return null;
+        }
         return start + activeRM + end;
     }
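Taken together, the two hunks above move the active-ResourceManager lookup to a point where the protocol prefix has already been parsed out of appAddress, so "http://" or "https://" can be forwarded to the HA lookup. The following is a minimal sketch of that flow, not the committed code: the parsing around the hunks is reconstructed from context, and the active RM name is passed in directly instead of being resolved via YarnHAAdminUtils.getActiveRMName(start, rmHa), to keep the example free of HTTP calls.

public class AppAddressSketch {

    public static String getAppAddress(String appAddress, String activeRM) {
        // split "https://ds1:8088/..." into protocol and host:port parts
        String[] split1 = appAddress.split("//");    // Constants.DOUBLE_SLASH in HadoopUtils
        if (split1.length != 2) {
            return null;
        }
        String start = split1[0] + "//";              // "http://" or "https://"
        String[] split2 = split1[1].split(":");       // Constants.COLON in HadoopUtils
        if (split2.length != 2) {
            return null;
        }
        String end = ":" + split2[1];                 // e.g. ":8088/ws/v1/cluster/apps/%s"

        // the real method resolves activeRM at this point and bails out when no node is active
        if (activeRM == null || activeRM.isEmpty()) {
            return null;
        }
        return start + activeRM + end;
    }

    public static void main(String[] args) {
        // assuming ds2 is the node reported ACTIVE by the cluster-info endpoint
        System.out.println(getAppAddress("https://ds1:8088/ws/v1/cluster/apps/%s", "ds2"));
        // prints: https://ds2:8088/ws/v1/cluster/apps/%s
    }
}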
@@ -658,13 +659,16 @@ public class HadoopUtils implements Closeable, StorageOperate {
     private static final class YarnHAAdminUtils {
         /**
-         * get active resourcemanager
+         * get active resourcemanager node
+         * @param protocol http protocol
+         * @param rmIds yarn ha ids
+         * @return yarn active node
          */
-        public static String getActiveRMName(String rmIds) {
+        public static String getActiveRMName(String protocol, String rmIds) {
             String[] rmIdArr = rmIds.split(Constants.COMMA);
-            String yarnUrl = "http://%s:" + HADOOP_RESOURCE_MANAGER_HTTP_ADDRESS_PORT_VALUE + "/ws/v1/cluster/info";
+            String yarnUrl = protocol + "%s:" + HADOOP_RESOURCE_MANAGER_HTTP_ADDRESS_PORT_VALUE + "/ws/v1/cluster/info";
             try {
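This hunk is the heart of the change: the cluster-info URL template no longer hard-codes "http://" but builds on the protocol handed in by getAppAddress, so HTTPS-only Hadoop clusters can be probed as well. A hedged sketch of what the lookup does with that template follows; the real YarnHAAdminUtils relies on HttpUtils.get, Constants.COMMA and JSON parsing, so the httpGet helper, the literal port 8088 and the substring check below are stand-ins, not the committed implementation.

public class ActiveRmLookupSketch {

    // HADOOP_RESOURCE_MANAGER_HTTP_ADDRESS_PORT_VALUE in the real code (configurable there)
    static final int RM_HTTP_PORT = 8088;

    // stands in for HttpUtils.get(url); the real call performs an HTTP(S) request
    static String httpGet(String url) {
        return null;
    }

    public static String getActiveRMName(String protocol, String rmIds) {
        String[] rmIdArr = rmIds.split(",");    // Constants.COMMA in the real code
        // the protocol ("http://" or "https://") now comes from the caller
        String yarnUrl = protocol + "%s:" + RM_HTTP_PORT + "/ws/v1/cluster/info";
        try {
            for (String rmId : rmIdArr) {
                String body = httpGet(String.format(yarnUrl, rmId));
                // the real code parses the JSON response; "haState":"ACTIVE" marks the active node
                if (body != null && body.contains("\"haState\":\"ACTIVE\"")) {
                    return rmId;
                }
            }
        } catch (Exception e) {
            // the real code logs the failure and falls through to null
        }
        return null;
    }
}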

dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/CommonUtilsTest.java (5 changes)

@@ -36,9 +36,10 @@ import org.slf4j.LoggerFactory;
  * configuration test
  */
 @RunWith(PowerMockRunner.class)
-@PrepareForTest(value = { PropertyUtils.class, UserGroupInformation.class})
+@PrepareForTest(value = {PropertyUtils.class, UserGroupInformation.class})
 public class CommonUtilsTest {
     private static final Logger logger = LoggerFactory.getLogger(CommonUtilsTest.class);
     @Test
     public void getSystemEnvPath() {
         String envPath;
@@ -48,7 +49,7 @@ public class CommonUtilsTest {
     @Test
     public void isDevelopMode() {
-        logger.info("develop mode: {}",CommonUtils.isDevelopMode());
+        logger.info("develop mode: {}", CommonUtils.isDevelopMode());
         Assert.assertTrue(true);
     }

dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HadoopUtilsTest.java (73 changes, new file)

@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.dolphinscheduler.common.utils;
+
+import org.apache.dolphinscheduler.spi.enums.ResourceType;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.powermock.modules.junit4.PowerMockRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * hadoop utils test
+ */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(value = {HadoopUtils.class})
+@SuppressStaticInitializationFor("org.apache.dolphinscheduler.common.utils.HttpUtils")
+public class HadoopUtilsTest {
+
+    private static final Logger logger = LoggerFactory.getLogger(HadoopUtilsTest.class);
+
+    @Test
+    public void getHdfsTenantDir() {
+        logger.info(HadoopUtils.getHdfsTenantDir("1234"));
+        Assert.assertTrue(true);
+    }
+
+    @Test
+    public void getHdfsUdfFileName() {
+        logger.info(HadoopUtils.getHdfsUdfFileName("admin", "file_name"));
+        Assert.assertTrue(true);
+    }
+
+    @Test
+    public void getHdfsResourceFileName() {
+        logger.info(HadoopUtils.getHdfsResourceFileName("admin", "file_name"));
+        Assert.assertTrue(true);
+    }
+
+    @Test
+    public void getHdfsFileName() {
+        logger.info(HadoopUtils.getHdfsFileName(ResourceType.FILE, "admin", "file_name"));
+        Assert.assertTrue(true);
+    }
+
+    @Test
+    public void getAppAddress() {
+        PowerMockito.mockStatic(HttpUtils.class);
+        PowerMockito.when(HttpUtils.get("http://ds1:8088/ws/v1/cluster/info")).thenReturn("{\"clusterInfo\":{\"state\":\"STARTED\",\"haState\":\"ACTIVE\"}}");
+        logger.info(HadoopUtils.getAppAddress("http://ds1:8088/ws/v1/cluster/apps/%s", "ds1,ds2"));
+        Assert.assertTrue(true);
+    }
+}
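The new getAppAddress test only exercises the HTTP path. A hypothetical companion case for an HTTPS cluster, written against the same mocking pattern and meant to sit inside HadoopUtilsTest next to getAppAddress(), could look like the following; the https URLs simply mirror the existing mock and are not part of this commit.

    @Test
    public void getAppAddressHttps() {
        PowerMockito.mockStatic(HttpUtils.class);
        // same mocked cluster-info payload as above, served over https (hypothetical)
        PowerMockito.when(HttpUtils.get("https://ds1:8088/ws/v1/cluster/info"))
                .thenReturn("{\"clusterInfo\":{\"state\":\"STARTED\",\"haState\":\"ACTIVE\"}}");
        logger.info(HadoopUtils.getAppAddress("https://ds1:8088/ws/v1/cluster/apps/%s", "ds1,ds2"));
        Assert.assertTrue(true);
    }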