Browse Source

Merge pull request #1 in PG/plugin-repository-hdfs from ~RINOUX/plugin-repository-hdfs:master to master

* commit 'a3926e8a46faee5970b7eaf8f816951871b13a02':
  update
  readme.md edited online with Bitbucket
  readme.md edited online with Bitbucket
  update
  REPORT-10696 HDFS插件
release/10.0
rinoux 6 years ago
parent
commit
4ff29d5d9c
  1. BIN
      demo.png
  2. BIN
      img/class_fields.png
  3. BIN
      img/db_fields.png
  4. BIN
      img/set.png
  5. 3
      plugin.xml
  6. 36
      readme.md
  7. 57
      src/com/fr/plugin/hdfs/repository/HDFSFactoryProvider.java
  8. 44
      src/com/fr/plugin/hdfs/repository/core/HDFSConfig.java
  9. 47
      src/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java
  10. 6
      src/com/fr/plugin/hdfs/repository/core/HDFSResourceRepository.java
  11. 18
      src/com/fr/plugin/hdfs/repository/decision/HDFSFileServerComponent.java
  12. 38
      src/com/fr/plugin/hdfs/repository/decision/HDFSFileServerOption.java
  13. 179
      src/com/fr/plugin/hdfs/repository/decision/js/bundle.js

BIN
demo.png

Binary file not shown.

After

Width:  |  Height:  |  Size: 268 KiB

BIN
img/class_fields.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 28 KiB

BIN
img/db_fields.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 110 KiB

BIN
img/set.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 33 KiB

3
plugin.xml

@@ -14,6 +14,9 @@
<main-package>com.fr.plugin</main-package>
<!--功能记录点类-->
<function-recorder class="com.fr.plugin.hdfs.repository.HDFSFactoryProvider"/>
<extra-decision>
<SystemOptionProvider class="com.fr.plugin.hdfs.repository.decision.HDFSFileServerOption"/>
</extra-decision>
<extra-core>
<ResourceRepositoryFactoryProvider class="com.fr.plugin.hdfs.repository.HDFSFactoryProvider"/>
</extra-core>

36
readme.md

@@ -1,32 +1,8 @@
# 开发和测试使用说明
- 1、hdfs仓库插件是平台文件服务器的扩展,首先需要正常方式安装插件;
- 2、在没有平台可视化设置之前,可以通过添加数据库字段来开启hdfs;
> 在fine_conf_entity添加HDFS的仓库配置:
![字段](./img/db_fields.png)
> 图中的`ResourceModuleConfig.profiles.HDFS_TEST.workroots.[你的机器id(可以拷贝表中LOCAL_ENV的)]`为hdfs的工作路径。
- 安装插件;
- 进入平台,管理系统/智能运维/集群配置/文件服务器;
- 选择HDFS协议,添加主机、端口等配置,如果设置了kerberos认证,还需要设置principal和keyTab文件路径。以及文件服务器在hdfs上的工作路径;
- 测试连接并保存;
> `host`设置为Hadoop的hdfs地址,比如默认localhost,`port`为hdfs端口(默认9000)。
> 如果有用户组设置还需要对应用户名 `username`
> 在fine_conf_entity添加数据库字段设置仓库为hdfs:
![设置](./img/set.png)
> 在fine_conf_classname:
添加
![profile](./img/class_fields.png)
- 3、启动设计器或者平台
- 4、其它说明:
改数据库太麻烦,可以直接在适当的地方(比如com.fr.io.ResourceRepositoryActivator.doExtra()方法末尾)添加代码
``` Map<String, Object> hdfsConfig = new HashMap<String, Object>();
hdfsConfig.put("host", "localhost");
hdfsConfig.put("port", 9000);
try {
ProfileFactory.create("HDFS", "HDFS_TEST", "/app", true, hdfsConfig).install().apply();
} catch (RepositoryException e) {
e.printStackTrace();
}
>> 示例
![demo](demo.png)

57
src/com/fr/plugin/hdfs/repository/HDFSFactoryProvider.java

@@ -1,26 +1,11 @@
package com.fr.plugin.hdfs.repository;
import com.fr.event.Event;
import com.fr.event.EventDispatcher;
import com.fr.event.Listener;
import com.fr.io.base.exception.RepositoryException;
import com.fr.io.base.provider.RepositoryFactoryProvider;
import com.fr.io.context.ResourceModuleContext;
import com.fr.io.context.info.InstalledComponent;
import com.fr.io.context.info.RepositoryApplyPolicy;
import com.fr.io.fun.AbstractRepositoryFactoryProvider;
import com.fr.log.FineLoggerFactory;
import com.fr.plugin.context.PluginContext;
import com.fr.plugin.hdfs.repository.core.HDFSConfig;
import com.fr.plugin.hdfs.repository.core.HDFSRepositoryFactory;
import com.fr.plugin.observer.PluginEventType;
import com.fr.plugin.transform.ExecuteFunctionRecord;
import com.fr.plugin.transform.FunctionRecorder;
import com.fr.stable.project.ProjectConstants;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Created by rinoux on 2018/8/10.
@@ -30,48 +15,6 @@ public class HDFSFactoryProvider extends AbstractRepositoryFactoryProvider<HDFSC
private static HDFSRepositoryFactory factory = new HDFSRepositoryFactory();
/* static {
Listener<PluginContext> stopListener = new Listener<PluginContext>() {
@Override
public void on(Event event, PluginContext param) {
if (param.getMarker().getPluginID().equals("com.fr.plugin.hdfs.repository")) {
try {
if (ResourceModuleContext.getCurrentRepo().getIdentity().equals(HDFSRepositoryFactory.IDENTITY)) {
ResourceModuleContext.apply(ProjectConstants.LOCAL_ENV, RepositoryApplyPolicy.EXCLUSIVE);
}
Map<String, InstalledComponent> all = ResourceModuleContext.getInstaller().findAll();
List<InstalledComponent> components = new ArrayList<InstalledComponent>(all.values());
for (InstalledComponent component : components) {
if (component.getFactory().getIdentity().equals(HDFSRepositoryFactory.IDENTITY)) {
ResourceModuleContext.discard(component.getRepoName());
ResourceModuleContext.uninstall(component.getRepoName());
}
}
} catch (RepositoryException e) {
FineLoggerFactory.getLogger().error(e.getMessage());
} finally {
ResourceModuleContext.removeFactory(HDFSRepositoryFactory.IDENTITY);
}
}
}
};
Listener<PluginContext> startListener = new Listener<PluginContext>() {
@Override
public void on(Event event, PluginContext param) {
ResourceModuleContext.getFactoryLoader().add(factory);
}
};
EventDispatcher.listen(PluginEventType.BeforeStop, stopListener);
EventDispatcher.listen(PluginEventType.BeforeUnInstall, stopListener);
EventDispatcher.listen(PluginEventType.AfterActive, startListener);
EventDispatcher.listen(PluginEventType.AfterInstall, startListener);
}*/
@Override
@ExecuteFunctionRecord
public RepositoryFactoryProvider<HDFSConfig> getFactory() {

44
src/com/fr/plugin/hdfs/repository/core/HDFSConfig.java

@@ -3,6 +3,9 @@ package com.fr.plugin.hdfs.repository.core;
import com.fr.config.holder.Conf;
import com.fr.config.holder.factory.Holders;
import com.fr.io.config.CommonRepoConfig;
import com.fr.io.context.info.GetConfig;
import com.fr.io.context.info.SetConfig;
import com.fr.stable.StringUtils;
/**
* Created by rinoux on 2018/8/10.
@@ -11,6 +14,45 @@ public class HDFSConfig extends CommonRepoConfig {
public HDFSConfig() {
super(HDFSRepositoryFactory.IDENTITY);
}
/**
 * Kerberos client principal (the identity used to log in to HDFS).
 */
private Conf<String> principal = Holders.simple(StringUtils.EMPTY);
// TODO: 2018/8/10 config fields — NOTE(review): leftover scaffold TODO; the fields below appear complete, consider removing.
/**
 * Path to the Kerberos keyTab file used with the principal above.
 */
private Conf<String> keyTab = Holders.simple(StringUtils.EMPTY);
/**
 * Returns the configured Kerberos principal, or empty string if unset.
 *
 * @return the Kerberos client principal
 */
@GetConfig("principal")
public String getPrincipal() {
return principal.get();
}
/**
 * Stores the Kerberos principal in the config holder.
 *
 * @param principal the Kerberos client principal
 */
@SetConfig("principal")
public void setPrincipal(String principal) {
this.principal.set(principal);
}
/**
 * Returns the configured keyTab file path, or empty string if unset.
 *
 * @return path to the Kerberos keyTab file
 */
@GetConfig("keyTab")
public String getKeyTab() {
return keyTab.get();
}
/**
 * Stores the keyTab file path in the config holder.
 *
 * @param keyTab path to the Kerberos keyTab file
 */
@SetConfig("keyTab")
public void setKeyTab(String keyTab) {
this.keyTab.set(keyTab);
}
/**
 * Clones this config, deep-copying the Conf holders so the clone does not
 * share mutable state with this instance.
 *
 * @return an independent copy of this config
 * @throws CloneNotSupportedException propagated from super.clone()
 */
@Override
public HDFSConfig clone() throws CloneNotSupportedException {
HDFSConfig cloned = (HDFSConfig) super.clone();
// Unchecked casts: Conf.clone() presumably declares a wider return type — TODO confirm against the Conf API.
cloned.principal = (Conf<String>) this.principal.clone();
cloned.keyTab = (Conf<String>) this.keyTab.clone();
return cloned;
}
}

47
src/com/fr/plugin/hdfs/repository/core/HDFSRepositoryFactory.java

@@ -1,6 +1,5 @@
package com.fr.plugin.hdfs.repository.core;
import com.fr.general.GeneralUtils;
import com.fr.io.base.provider.impl.ConfigRepositoryFactory;
import com.fr.io.context.info.RepositoryProfile;
import com.fr.io.repository.ResourceRepository;
@@ -9,6 +8,8 @@ import com.fr.stable.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.security.UserGroupInformation;
import java.io.IOException;
import java.net.URI;
@@ -17,7 +18,8 @@ import java.net.URI;
* Created by rinoux on 2018/8/10.
*/
public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
static final String IDENTITY = "HDFS";
public static final String IDENTITY = "HDFS";
private static final String HDFS_SCHEMA = "hdfs://";
private static final String DEFAULT_HOST = "localhost";
private static final String DEFAULT_PORT = "9000";
@@ -64,7 +66,6 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
private FileSystem createFileSystem(HDFSConfig config) {
FileSystem fs;
Configuration conf = createConfiguration();
//根据配置生成url
@@ -72,16 +73,26 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
String port = config.getPort() == 0 ? DEFAULT_PORT : String.valueOf(config.getPort());
String hdfsUrl = HDFS_SCHEMA.concat(host).concat(":").concat(port);
String user = config.getUsername();
String principal = config.getPrincipal();
//开启了kerberos验证
if (kerberosAuthenticated(config)) {
try {
UserGroupInformation.setConfiguration(conf);//UserGroupInformation初始化
UserGroupInformation.loginUserFromKeytab(config.getPrincipal(), config.getKeyTab());
} catch (IOException e) {
FineLoggerFactory.getLogger().error(e.getMessage());
}
}
try {
if (StringUtils.isNotEmpty(user)) {
fs = FileSystem.get(URI.create(hdfsUrl), conf, user);
if (StringUtils.isNotEmpty(principal)) {
return FileSystem.get(URI.create(hdfsUrl), conf, principal);
} else {
fs = FileSystem.get(URI.create(hdfsUrl), conf);
return FileSystem.get(URI.create(hdfsUrl), conf);
}
return fs;
} catch (Exception e) {
throw new RuntimeException(e);
}
@@ -94,19 +105,21 @@ public class HDFSRepositoryFactory extends ConfigRepositoryFactory<HDFSConfig> {
* @return 配置
*/
private Configuration createConfiguration() {
Configuration conf = new Configuration(true);
final Configuration conf = new Configuration();
//使用GeneralUtils才能加载到FileSystem实现类
conf.setClassLoader(new ClassLoader() {
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
return GeneralUtils.classForName(name);
}
});
//设置hdfs协议实现
conf.setClass("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class, FileSystem.class);
conf.setClass("fs.hdfs.impl", DistributedFileSystem.class, FileSystem.class);
//使用GeneralUtils才能加载到FileSystem实现类
conf.setClassLoader(this.getClass().getClassLoader());
return conf;
}
private boolean kerberosAuthenticated(HDFSConfig config) {
return StringUtils.isNotEmpty(config.getKeyTab()) && StringUtils.isNotEmpty(config.getPrincipal());
}
}

6
src/com/fr/plugin/hdfs/repository/core/HDFSResourceRepository.java

@@ -291,6 +291,12 @@ public class HDFSResourceRepository extends BaseResourceRepository {
}
}
// NOTE(review): signals that disk-size figures reported by this HDFS-backed
// repository are estimates rather than exact values — confirm against the
// BaseResourceRepository contract for this flag.
@Override
public boolean isAccurateDiskSize() {
return false;
}
@Override
public String getIdentity() {
return HDFSRepositoryFactory.IDENTITY;

18
src/com/fr/plugin/hdfs/repository/decision/HDFSFileServerComponent.java

@@ -0,0 +1,18 @@
package com.fr.plugin.hdfs.repository.decision;
import com.fr.web.struct.Component;
import com.fr.web.struct.browser.RequestClient;
import com.fr.web.struct.category.ScriptPath;
/**
 * Client component that supplies the browser-side script bundle for the
 * HDFS file-server settings card in the decision platform.
 * <p>
 * Created by rinoux on 2018/11/26.
 */
public class HDFSFileServerComponent extends Component {
    /**
     * Shared singleton instance. Declared {@code final} so the published
     * instance cannot be reassigned (the original field was mutable).
     */
    public static final HDFSFileServerComponent KEY = new HDFSFileServerComponent();

    /**
     * Points the framework at the JS bundle implementing the HDFS card.
     *
     * @param req the requesting client
     * @return script path of the bundled HDFS settings card
     */
    @Override
    public ScriptPath script(RequestClient req) {
        return ScriptPath.build("com/fr/plugin/hdfs/repository/decision/js/bundle.js");
    }
}

38
src/com/fr/plugin/hdfs/repository/decision/HDFSFileServerOption.java

@@ -0,0 +1,38 @@
package com.fr.plugin.hdfs.repository.decision;
import com.fr.decision.fun.SystemOptionProvider;
import com.fr.decision.fun.impl.AbstractSystemOptionProvider;
import com.fr.decision.web.MainComponent;
import com.fr.stable.fun.mark.API;
import com.fr.web.struct.Atom;
/**
 * System option that registers the HDFS file-server settings page with the
 * decision platform, attaching the client bundle to the main component.
 * <p>
 * Created by rinoux on 2018/11/26.
 */
@API(level = SystemOptionProvider.CURRENT_LEVEL)
public class HDFSFileServerOption extends AbstractSystemOptionProvider {
// Unique identifier of this system option.
@Override
public String id() {
return "hdfs_file_server";
}
// NOTE(review): returns null — the option will have no display name; confirm whether an i18n text is intended here.
@Override
public String displayName() {
return null;
}
// Sort position among sibling system options — presumably lower sorts first; TODO confirm.
@Override
public int sortIndex() {
return 2;
}
// The component this option attaches to (the platform main component).
@Override
public Atom attach() {
return MainComponent.KEY;
}
// The client-side component providing the HDFS settings card script.
@Override
public Atom client() {
return HDFSFileServerComponent.KEY;
}
}

179
src/com/fr/plugin/hdfs/repository/decision/js/bundle.js

@@ -0,0 +1,179 @@
// Adds an "HDFS" entry to the cluster file-server type list.
BI.config("dec.constant.intelligence.cluster.file.server", function (items) {
items.push({
value: "HDFS", // hash value shown in the address bar
id: "decision-intelligence-cluster-file-hdfs", // id
text: BI.i18nText(""), // display text — NOTE(review): empty i18n key, the entry will render without a label; confirm the intended key
cardType: "dec.intelligence.cluster.file.hdfs",
});
return items;
});
// Settings card for the HDFS file server: host, port, principal and keyTab
// inputs backed by the "dec.model.intelligence.cluster.file.hdfs" Fix model.
!(function () {
var LABEL_WIDTH = 116, EDITOR_WIDTH = 300;
var HDFS = BI.inherit(BI.Widget, {
props: {
// NOTE(review): baseCls looks copied from the FTP card ("dec-cluster-ftp") — confirm whether an HDFS-specific class is intended.
baseCls: "dec-cluster-ftp",
value: {}
},
// Bind the Fix model holding host/port/principal/keyTab state.
_store: function () {
return BI.Models.getModel("dec.model.intelligence.cluster.file.hdfs", {
value: this.options.value
});
},
watch: {},
render: function () {
var self = this, o = this.options;
return {
type: "bi.vertical",
tgap: 15,
items: [
{
// Host input row; ref kept for validation error display.
type: "dec.label.editor.item",
textCls: "dec-font-weight-bold",
textWidth: LABEL_WIDTH,
editorWidth: EDITOR_WIDTH,
watermark: BI.i18nText("Dec-Please_Input"),
text: BI.i18nText("Dec-Basic_Host"),
value: this.model.host,
ref: function (_ref) {
self.hostRow = _ref;
},
listeners: [{
eventName: BI.Editor.EVENT_CHANGE,
action: function () {
self.store.setHost(this.getValue());
}
}]
}, {
// Port input row; ref kept for validation error display.
type: "dec.label.editor.item",
textCls: "dec-font-weight-bold",
textWidth: LABEL_WIDTH,
editorWidth: EDITOR_WIDTH,
watermark: BI.i18nText("Dec-Please_Input"),
text: BI.i18nText("Dec-Server_Port"),
value: this.model.port,
ref: function (_ref) {
self.portRow = _ref;
},
listeners: [{
eventName: BI.Editor.EVENT_CHANGE,
action: function () {
self.store.setPort(this.getValue());
}
}]
},
{
// Kerberos principal input row (not validated below).
type: "dec.label.editor.item",
textCls: "dec-font-weight-bold",
textWidth: LABEL_WIDTH,
editorWidth: EDITOR_WIDTH,
watermark: BI.i18nText("Principal"),
text: BI.i18nText("Principal"),
value: this.model.principal,
listeners: [{
eventName: BI.Editor.EVENT_CHANGE,
action: function () {
self.store.setPrincipal(this.getValue());
}
}]
},
{
// keyTab file path input row (not validated below).
type: "dec.label.editor.item",
textCls: "dec-font-weight-bold",
textWidth: LABEL_WIDTH,
editorWidth: EDITOR_WIDTH,
watermark: BI.i18nText("keyTab文件路径"),
text: BI.i18nText("keyTab"),
value: this.model.keyTab,
listeners: [{
eventName: BI.Editor.EVENT_CHANGE,
action: function () {
self.store.setKeyTab(this.getValue());
}
}]
}]
};
},
// Collects the card's current values; port is coerced to an integer.
getValue: function () {
return {
host: this.model.host,
port: BI.parseInt(this.model.port),
principal: this.model.principal,
keyTab: this.model.keyTab
};
},
// Validates host and port, showing inline errors on the captured rows.
// NOTE(review): the format check runs before the empty check, so an empty
// host/port first shows "format" and then "null" — confirm intended order.
validation: function () {
var valid = true, host = this.model.host, port = this.model.port;
if (!(/^[a-zA-Z0-9._-]+(\.[a-zA-Z]{2,6})?$/.test(host))) {
this.hostRow.showError(BI.i18nText("Dec-Error_Format"));
valid = false;
}
if (BI.isEmpty(host)) {
this.hostRow.showError(BI.i18nText("Dec-Error_Null"));
valid = false;
}
if (!(/^[0-9]+$/.test(port))) {
this.portRow.showError(BI.i18nText("Dec-Error_Format"));
valid = false;
}
if (BI.isEmptyString(port)) {
this.portRow.showError(BI.i18nText("Dec-Error_Null"));
valid = false;
}
return valid;
}
});
BI.shortcut("dec.intelligence.cluster.file.hdfs", HDFS);
})();
// Backing Fix model for the HDFS settings card: seeds state from the passed-in
// value and exposes setters used by the card's change listeners.
!(function () {
var Model = BI.inherit(Fix.Model, {
state: function () {
var val = this.options.value;
return {
host: val.host,
port: val.port,
principal: val.principal,
keyTab: val.keyTab
};
},
computed: {
// NOTE(review): encodingArray is not referenced by the HDFS card and
// depends on DecCst.EncodeConstants — looks copied from another
// file-server card; confirm before removing.
encodingArray: function () {
return BI.map(DecCst.EncodeConstants.ENCODING_ARRAY, function (i, v) {
return {
value: v
};
});
}
},
actions: {
setHost: function (v) {
this.model.host = v;
},
setPort: function (v) {
this.model.port = v;
},
setPrincipal: function (v) {
this.model.principal = v;
},
setKeyTab: function (v) {
this.model.keyTab = v;
}
}
});
BI.model("dec.model.intelligence.cluster.file.hdfs", Model);
})();
Loading…
Cancel
Save