diff --git a/docs/configs/docsdev.js b/docs/configs/docsdev.js index e005b7a63d..6e8a3d58c0 100644 --- a/docs/configs/docsdev.js +++ b/docs/configs/docsdev.js @@ -271,6 +271,10 @@ export default { title: 'HIVE', link: '/en-us/docs/dev/user_doc/guide/datasource/hive.html', }, + { + title: 'SNOWFLAKE', + link: '/en-us/docs/dev/user_doc/guide/datasource/snowflake.html', + }, { title: 'KYUUBI', link: '/en-us/docs/dev/user_doc/guide/datasource/kyuubi.html', @@ -972,6 +976,10 @@ export default { title: 'HIVE', link: '/zh-cn/docs/dev/user_doc/guide/datasource/hive.html', }, + { + title: 'SNOWFLAKE', + link: '/zh-cn/docs/dev/user_doc/guide/datasource/snowflake.html', + }, { title: 'KYUUBI', link: '/zh-cn/docs/dev/user_doc/guide/datasource/kyuubi.html', diff --git a/docs/docs/en/guide/datasource/snowflake.md b/docs/docs/en/guide/datasource/snowflake.md new file mode 100644 index 0000000000..db6f61dcce --- /dev/null +++ b/docs/docs/en/guide/datasource/snowflake.md @@ -0,0 +1,23 @@ +# SNOWFLAKE + +## Use Snowflake + +![snowflake](../../../../img/new_ui/dev/datasource/snowflake.png) + +## Datasource Parameters + +| **Datasource** | **Description** | +|----------------------------|--------------------------------------------------------------| +| Datasource | Select SNOWFLAKE. | +| Datasource name | Enter the name of the DataSource. | +| Description | Enter a description of the DataSource. | +| IP/Host Name | Enter the SNOWFLAKE service IP or host name. | +| Port | Enter the SNOWFLAKE service port. | +| Username | Set the username for the SNOWFLAKE connection. | +| Password | Set the password for the SNOWFLAKE connection. | +| Database name | Enter the database name of the SNOWFLAKE connection. | +| Datawarehouse name | Enter the data warehouse name of the SNOWFLAKE connection. | +| Jdbc connection parameters | Parameter settings for the SNOWFLAKE connection, in JSON format (see the example below). |
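+
+For example, additional Snowflake JDBC properties can be supplied as a JSON object such as `{"schema": "PUBLIC", "role": "SYSADMIN"}`; these are illustrative values (`schema` and `role` are standard Snowflake JDBC connection properties), adjust them to your account.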
+ diff --git a/docs/docs/zh/guide/datasource/snowflake.md b/docs/docs/zh/guide/datasource/snowflake.md new file mode 100644 index 0000000000..9ef6007cd5 --- /dev/null +++ b/docs/docs/zh/guide/datasource/snowflake.md @@ -0,0 +1,17 @@ +# SNOWFLAKE 数据源 + +![snowflake](../../../../img/new_ui/dev/datasource/snowflake.png) + +## 数据源参数 + +- 数据源:选择 SNOWFLAKE 数据源 +- 数据源名称:输入数据源的名称 +- 描述:输入数据源的描述 +- IP 主机名:输入连接 SNOWFLAKE 数据源的 IP +- 端口:输入连接 SNOWFLAKE 数据源的端口 +- 用户名:设置连接 SNOWFLAKE 数据源的用户名 +- 密码:设置连接 SNOWFLAKE 数据源的密码 +- 数据库名:输入连接 SNOWFLAKE 数据源的数据库名称 +- 数据仓库:输入连接 SNOWFLAKE 数据源的数据仓库名称 +- Jdbc 连接参数:用于 SNOWFLAKE 数据源连接的参数设置,以 JSON 形式填写 + diff --git a/docs/img/new_ui/dev/datasource/snowflake.png b/docs/img/new_ui/dev/datasource/snowflake.png new file mode 100644 index 0000000000..1ba54cf98b Binary files /dev/null and b/docs/img/new_ui/dev/datasource/snowflake.png differ diff --git a/dolphinscheduler-bom/pom.xml b/dolphinscheduler-bom/pom.xml index a0ac6be911..09955f7725 100644 --- a/dolphinscheduler-bom/pom.xml +++ b/dolphinscheduler-bom/pom.xml @@ -107,6 +107,7 @@ 1.12.300 2.21.0 1.0.0-beta.19 + 3.13.10 2.18.0 2.8.0 6.0.0 @@ -865,6 +866,12 @@ casdoor-spring-boot-starter ${casdoor.version} + + + net.snowflake + snowflake-jdbc + ${snowflake-jdbc.version} + diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/DataSourceConstants.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/DataSourceConstants.java index 3812759e3d..25ea5bea48 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/DataSourceConstants.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/constants/DataSourceConstants.java @@ -40,7 +40,7 @@ public class DataSourceConstants { public static final String COM_DAMENG_JDBC_DRIVER = "dm.jdbc.driver.DmDriver"; public static final String ORG_APACHE_KYUUBI_JDBC_DRIVER = "org.apache.kyuubi.jdbc.KyuubiHiveDriver"; public static final String COM_OCEANBASE_JDBC_DRIVER = "com.oceanbase.jdbc.Driver"; - + public static final String NET_SNOWFLAKE_JDBC_DRIVER = "net.snowflake.client.jdbc.SnowflakeDriver"; /** * validation Query */ @@ -57,6 +57,7 @@ public class DataSourceConstants { public static final String ATHENA_VALIDATION_QUERY = "select 1"; public static final String TRINO_VALIDATION_QUERY = "select 1"; public static final String DAMENG_VALIDATION_QUERY = "select 1"; + public static final String SNOWFLAKE_VALIDATION_QUERY = "select 1"; public static final String KYUUBI_VALIDATION_QUERY = "select 1"; @@ -80,6 +81,7 @@ public class DataSourceConstants { public static final String JDBC_TRINO = "jdbc:trino://"; public static final String JDBC_DAMENG = "jdbc:dm://"; public static final String JDBC_OCEANBASE = "jdbc:oceanbase://"; + public static final String JDBC_SNOWFLAKE = "jdbc:snowflake://"; /** * database type diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-all/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-all/pom.xml index 21eacfe9f4..e84fab766f 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-all/pom.xml +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-all/pom.xml @@ -112,6 +112,11 @@ dolphinscheduler-datasource-dameng ${project.version} + + org.apache.dolphinscheduler + dolphinscheduler-datasource-snowflake + ${project.version} + org.apache.dolphinscheduler dolphinscheduler-datasource-ssh diff --git
a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/pom.xml b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/pom.xml new file mode 100644 index 0000000000..26cb50c884 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/pom.xml @@ -0,0 +1,50 @@ + + + + 4.0.0 + + org.apache.dolphinscheduler + dolphinscheduler-datasource-plugin + dev-SNAPSHOT + + + dolphinscheduler-datasource-snowflake + jar + ${project.artifactId} + + + + + org.apache.dolphinscheduler + dolphinscheduler-spi + provided + + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + + + + net.snowflake + snowflake-jdbc + + + diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannel.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannel.java new file mode 100644 index 0000000000..da21967f4b --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannel.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceClient; +import org.apache.dolphinscheduler.spi.enums.DbType; + +public class SnowflakeDataSourceChannel implements DataSourceChannel { + + @Override + public DataSourceClient createDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + return new SnowflakeDataSourceClient(baseConnectionParam, dbType); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactory.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactory.java new file mode 100644 index 0000000000..0d0c97ecd6 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactory.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannelFactory; + +import com.google.auto.service.AutoService; + +@AutoService(DataSourceChannelFactory.class) +public class SnowflakeDataSourceChannelFactory implements DataSourceChannelFactory { + + @Override + public String getName() { + return "snowflake"; + } + + @Override + public DataSourceChannel create() { + return new SnowflakeDataSourceChannel(); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceClient.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceClient.java new file mode 100644 index 0000000000..2649cc0c4d --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceClient.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake; + +import org.apache.dolphinscheduler.plugin.datasource.api.client.CommonDataSourceClient; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +public class SnowflakeDataSourceClient extends CommonDataSourceClient { + + public SnowflakeDataSourceClient(BaseConnectionParam baseConnectionParam, DbType dbType) { + super(baseConnectionParam, dbType); + } + +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeConnectionParam.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeConnectionParam.java new file mode 100644 index 0000000000..9a7ac81ac1 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeConnectionParam.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake.param; + +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; + +public class SnowflakeConnectionParam extends BaseConnectionParam { + + @Override + public String toString() { + return "SnowFlakeConnectionParam{" + + "user='" + user + '\'' + + ", password='" + password + '\'' + + ", address='" + address + '\'' + + ", database='" + database + '\'' + + ", jdbcUrl='" + jdbcUrl + '\'' + + ", driverLocation='" + driverLocation + '\'' + + ", driverClassName='" + driverClassName + '\'' + + ", validationQuery='" + validationQuery + '\'' + + ", other='" + other + '\'' + + '}'; + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDatasourceParamDTO.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDatasourceParamDTO.java new file mode 100644 index 0000000000..27a72a45cd --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDatasourceParamDTO.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake.param; + +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.spi.enums.DbType; + +public class SnowflakeDatasourceParamDTO extends BaseDataSourceParamDTO { + + @Override + public String toString() { + return "SnowFlakeDatasourceParamDTO{" + + "name='" + name + '\'' + + ", note='" + note + '\'' + + ", host='" + host + '\'' + + ", port=" + port + + ", database='" + database + '\'' + + ", userName='" + userName + '\'' + + ", password='" + password + '\'' + + ", other='" + other + '\'' + + '}'; + } + + @Override + public DbType getType() { + return DbType.SNOWFLAKE; + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDatasourceProcessor.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDatasourceProcessor.java new file mode 100644 index 0000000000..02229ed6b3 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/main/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDatasourceProcessor.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake.param; + +import org.apache.dolphinscheduler.common.constants.Constants; +import org.apache.dolphinscheduler.common.constants.DataSourceConstants; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.AbstractDataSourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.BaseDataSourceParamDTO; +import org.apache.dolphinscheduler.plugin.datasource.api.datasource.DataSourceProcessor; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.apache.commons.collections4.MapUtils; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; + +import lombok.extern.slf4j.Slf4j; + +import com.google.auto.service.AutoService; +@AutoService(DataSourceProcessor.class) +@Slf4j +public class SnowflakeDatasourceProcessor extends AbstractDataSourceProcessor { + + @Override + public BaseDataSourceParamDTO castDatasourceParamDTO(String paramJson) { + return JSONUtils.parseObject(paramJson, SnowflakeDatasourceParamDTO.class); + } + + @Override + public BaseDataSourceParamDTO createDatasourceParamDTO(String connectionJson) { + SnowflakeConnectionParam snowFlakeConnectionParam = + (SnowflakeConnectionParam) createConnectionParams(connectionJson); + String[] tmpArray = snowFlakeConnectionParam.getAddress().split(Constants.DOUBLE_SLASH); + StringBuilder hosts = new StringBuilder(); + String[] hostPortArray = tmpArray[tmpArray.length - 1].split(Constants.COMMA); + for (String hostPort : hostPortArray) { + hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA); + } + hosts.deleteCharAt(hosts.length() - 1); + SnowflakeDatasourceParamDTO snowflakeDatasourceParamDTO = new SnowflakeDatasourceParamDTO(); + snowflakeDatasourceParamDTO.setPort(Integer.parseInt(hostPortArray[0].split(Constants.COLON)[1])); + snowflakeDatasourceParamDTO.setHost(hosts.toString()); + snowflakeDatasourceParamDTO.setDatabase(snowFlakeConnectionParam.getDatabase()); + snowflakeDatasourceParamDTO.setUserName(snowFlakeConnectionParam.getUser()); + snowflakeDatasourceParamDTO.setOther(snowFlakeConnectionParam.getOther()); + + return snowflakeDatasourceParamDTO; + } + + @Override + public BaseConnectionParam createConnectionParams(BaseDataSourceParamDTO datasourceParam) { + SnowflakeDatasourceParamDTO snowflakeParam = (SnowflakeDatasourceParamDTO) datasourceParam; + StringBuilder address = new StringBuilder(); + address.append(DataSourceConstants.JDBC_SNOWFLAKE); + for (String zkHost : datasourceParam.getHost().split(",")) { + address.append(String.format("%s:%s,", zkHost, datasourceParam.getPort())); + } + address.deleteCharAt(address.length() - 1); + String jdbcUrl = address.toString() + "/" + datasourceParam.getDatabase(); + SnowflakeConnectionParam snowFlakeConnectionParam = new SnowflakeConnectionParam(); + snowFlakeConnectionParam.setUser(snowflakeParam.getUserName()); + snowFlakeConnectionParam.setPassword(PasswordUtils.encodePassword(snowflakeParam.getPassword())); + snowFlakeConnectionParam.setOther(snowflakeParam.getOther()); + snowFlakeConnectionParam.setAddress(address.toString()); + snowFlakeConnectionParam.setJdbcUrl(jdbcUrl); + 
snowFlakeConnectionParam.setDriverClassName(getDatasourceDriver()); + snowFlakeConnectionParam.setValidationQuery(getValidationQuery()); + return snowFlakeConnectionParam; + } + + @Override + public ConnectionParam createConnectionParams(String connectionJson) { + return JSONUtils.parseObject(connectionJson, SnowflakeConnectionParam.class); + } + + @Override + public String getDatasourceDriver() { + return DataSourceConstants.NET_SNOWFLAKE_JDBC_DRIVER; + } + + @Override + public String getValidationQuery() { + return DataSourceConstants.SNOWFLAKE_VALIDATION_QUERY; + } + + @Override + public String getJdbcUrl(ConnectionParam connectionParam) { + SnowflakeConnectionParam snowFlakeConnectionParam = (SnowflakeConnectionParam) connectionParam; + if (MapUtils.isNotEmpty(snowFlakeConnectionParam.getOther())) { + return String.format("%s?%s", snowFlakeConnectionParam.getJdbcUrl(), snowFlakeConnectionParam.getOther()); + } + return snowFlakeConnectionParam.getJdbcUrl(); + } + + @Override + public Connection getConnection(ConnectionParam connectionParam) throws ClassNotFoundException, SQLException { + SnowflakeConnectionParam snowFlakeConnectionParam = (SnowflakeConnectionParam) connectionParam; + Class.forName(getDatasourceDriver()); + return DriverManager.getConnection(getJdbcUrl(connectionParam), + snowFlakeConnectionParam.getUser(), + PasswordUtils.decodePassword(snowFlakeConnectionParam.getPassword())); + } + @Override + public DataSourceProcessor create() { + return new SnowflakeDatasourceProcessor(); + } + @Override + public DbType getDbType() { + return DbType.SNOWFLAKE; + } + +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactoryTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactoryTest.java new file mode 100644 index 0000000000..1c0b6ff472 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelFactoryTest.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake; + +import org.apache.dolphinscheduler.spi.datasource.DataSourceChannel; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +public class SnowflakeDataSourceChannelFactoryTest { + + @Test + public void testCreate() { + SnowflakeDataSourceChannelFactory sourceChannelFactory = new SnowflakeDataSourceChannelFactory(); + DataSourceChannel dataSourceChannel = sourceChannelFactory.create(); + Assertions.assertNotNull(dataSourceChannel); + } + + @Test + public void testGetName() { + SnowflakeDataSourceChannelFactory sourceChannelFactory = new SnowflakeDataSourceChannelFactory(); + Assertions.assertEquals(sourceChannelFactory.getName(), "snowflake"); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelTest.java new file mode 100644 index 0000000000..42e5f056b2 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceChannelTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake; + +import org.apache.dolphinscheduler.plugin.datasource.snowflake.param.SnowflakeConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class SnowflakeDataSourceChannelTest { + + @Test + public void testCreateDataSourceClient() { + SnowflakeDataSourceChannel sourceChannel = Mockito.mock(SnowflakeDataSourceChannel.class); + SnowflakeDataSourceClient dataSourceClient = Mockito.mock(SnowflakeDataSourceClient.class); + Mockito.when(sourceChannel.createDataSourceClient(Mockito.any(), Mockito.any())).thenReturn(dataSourceClient); + Assertions + .assertNotNull(sourceChannel.createDataSourceClient(new SnowflakeConnectionParam(), DbType.SNOWFLAKE)); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceClientTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceClientTest.java new file mode 100644 index 0000000000..b7da5385f9 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/SnowflakeDataSourceClientTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake; + +import java.sql.Connection; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class SnowflakeDataSourceClientTest { + + @Mock + private SnowflakeDataSourceClient snowflakeDataSourceClient; + + @Test + public void testCheckClient() { + snowflakeDataSourceClient.checkClient(); + Mockito.verify(snowflakeDataSourceClient).checkClient(); + } + + @Test + public void testGetConnection() { + Connection connection = Mockito.mock(Connection.class); + Mockito.when(snowflakeDataSourceClient.getConnection()).thenReturn(connection); + Assertions.assertNotNull(snowflakeDataSourceClient.getConnection()); + + } + +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDataSourceProcessorTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDataSourceProcessorTest.java new file mode 100644 index 0000000000..501c556c59 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/param/SnowflakeDataSourceProcessorTest.java @@ -0,0 +1,187 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake.param; + +import org.apache.dolphinscheduler.common.constants.DataSourceConstants; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils; +import org.apache.dolphinscheduler.plugin.datasource.api.utils.PasswordUtils; +import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.MockedStatic; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class SnowflakeDataSourceProcessorTest { + + private SnowflakeDatasourceProcessor snowflakeDataSourceProcessor = new SnowflakeDatasourceProcessor(); + + @Test + public void testCheckDatasourceParam() { + SnowflakeDatasourceParamDTO snowflakeDatasourceParamDTO = new SnowflakeDatasourceParamDTO(); + snowflakeDatasourceParamDTO.setHost("localhost"); + snowflakeDatasourceParamDTO.setDatabase("default"); + Map other = new HashMap<>(); + other.put("serverTimezone", "Asia/Shanghai"); + snowflakeDatasourceParamDTO.setOther(other); + DataSourceUtils.checkDatasourceParam(snowflakeDatasourceParamDTO); + Assertions.assertTrue(true); + } + + @Test + public void testBuildConnectionParams() { + SnowflakeDatasourceParamDTO snowflakeDatasourceParamDTO = new SnowflakeDatasourceParamDTO(); + snowflakeDatasourceParamDTO.setHost("localhost"); + snowflakeDatasourceParamDTO.setDatabase("default"); + snowflakeDatasourceParamDTO.setUserName("root"); + snowflakeDatasourceParamDTO.setPort(3306); + snowflakeDatasourceParamDTO.setPassword("123456"); + try ( + MockedStatic mockedStaticPasswordUtils = Mockito.mockStatic(PasswordUtils.class); + MockedStatic mockedStaticCommonUtils = Mockito.mockStatic(CommonUtils.class)) { + mockedStaticPasswordUtils.when(() -> PasswordUtils.encodePassword(Mockito.anyString())) + .thenReturn("123456"); + mockedStaticCommonUtils.when(CommonUtils::getKerberosStartupState).thenReturn(false); + ConnectionParam connectionParam = DataSourceUtils.buildConnectionParams(snowflakeDatasourceParamDTO); + Assertions.assertNotNull(connectionParam); + } + } + + @Test + public void testBuildConnectionParams2() { + SnowflakeDatasourceParamDTO snowflakeDatasourceParamDTO = new SnowflakeDatasourceParamDTO(); + snowflakeDatasourceParamDTO.setHost("localhost"); + snowflakeDatasourceParamDTO.setDatabase("default"); + snowflakeDatasourceParamDTO.setUserName("root"); + snowflakeDatasourceParamDTO.setPort(3306); + snowflakeDatasourceParamDTO.setPassword("123456"); + ConnectionParam connectionParam = + DataSourceUtils.buildConnectionParams(DbType.SNOWFLAKE, + JSONUtils.toJsonString(snowflakeDatasourceParamDTO)); + Assertions.assertNotNull(connectionParam); + } + + @Test + public void testCreateConnectionParams() { + Map props = new HashMap<>(); + props.put("serverTimezone", "utc"); + SnowflakeDatasourceParamDTO snowflakeDataSourceParamDTO = new SnowflakeDatasourceParamDTO(); + snowflakeDataSourceParamDTO.setHost("localhost1,localhost2"); + snowflakeDataSourceParamDTO.setPort(5142); + snowflakeDataSourceParamDTO.setUserName("default"); + snowflakeDataSourceParamDTO.setDatabase("default"); + 
snowflakeDataSourceParamDTO.setOther(props); + try ( + MockedStatic mockedStaticPasswordUtils = Mockito.mockStatic(PasswordUtils.class); + MockedStatic mockedStaticCommonUtils = Mockito.mockStatic(CommonUtils.class)) { + mockedStaticPasswordUtils.when(() -> PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("test"); + SnowflakeConnectionParam connectionParams = (SnowflakeConnectionParam) snowflakeDataSourceProcessor + .createConnectionParams(snowflakeDataSourceParamDTO); + Assertions.assertNotNull(connectionParams); + Assertions.assertEquals("jdbc:snowflake://localhost1:5142,localhost2:5142", connectionParams.getAddress()); + } + } + + @Test + public void testCreateConnectionParams2() { + String connectionParam = + "{\"user\":\"default\",\"address\":\"jdbc:snowflake://localhost1:5142,localhost2:5142\"" + + ",\"jdbcUrl\":\"jdbc:snowflake://localhost1:5142,localhost2:5142/default\"}"; + SnowflakeConnectionParam connectionParams = (SnowflakeConnectionParam) snowflakeDataSourceProcessor + .createConnectionParams(connectionParam); + Assertions.assertNotNull(connectionParam); + Assertions.assertEquals("default", connectionParams.getUser()); + } + + @Test + public void testGetDatasourceDriver() { + Assertions.assertEquals(DataSourceConstants.NET_SNOWFLAKE_JDBC_DRIVER, + snowflakeDataSourceProcessor.getDatasourceDriver()); + } + + @Test + public void testGetJdbcUrl() { + SnowflakeConnectionParam connectionParam = new SnowflakeConnectionParam(); + connectionParam.setJdbcUrl("jdbc:snowflake://localhost1:5142,localhost2:5142/default"); + Assertions.assertEquals("jdbc:snowflake://localhost1:5142,localhost2:5142/default", + snowflakeDataSourceProcessor.getJdbcUrl(connectionParam)); + } + + @Test + public void testGetDbType() { + Assertions.assertEquals(DbType.SNOWFLAKE, snowflakeDataSourceProcessor.getDbType()); + } + + @Test + public void testGetValidationQuery() { + Assertions.assertEquals(DataSourceConstants.SNOWFLAKE_VALIDATION_QUERY, + snowflakeDataSourceProcessor.getValidationQuery()); + } + + @Test + public void testGetDatasourceUniqueId() { + SnowflakeConnectionParam connectionParam = new SnowflakeConnectionParam(); + connectionParam.setJdbcUrl("jdbc:snowflake://localhost:3306/default"); + connectionParam.setUser("root"); + connectionParam.setPassword("123456"); + try (MockedStatic mockedPasswordUtils = Mockito.mockStatic(PasswordUtils.class)) { + Mockito.when(PasswordUtils.encodePassword(Mockito.anyString())).thenReturn("123456"); + Assertions.assertEquals("snowflake@root@123456@jdbc:snowflake://localhost:3306/default", + snowflakeDataSourceProcessor.getDatasourceUniqueId(connectionParam, DbType.SNOWFLAKE)); + } + } + + @Test + public void testCreateDatasourceParamDTO() { + String connectionParam = + "{\"user\":\"default\",\"address\":\"jdbc:snowflake://localhost1:5142,localhost2:5142\"" + + ",\"jdbcUrl\":\"jdbc:snowflake://localhost1:5142,localhost2:5142/default\"}"; + SnowflakeDatasourceParamDTO snowflakeDatasourceParamDTO = + (SnowflakeDatasourceParamDTO) snowflakeDataSourceProcessor + .createDatasourceParamDTO(connectionParam); + Assertions.assertEquals("default", snowflakeDatasourceParamDTO.getUserName()); + } + + @Test + public void testDbType() { + Assertions.assertEquals(DbType.SNOWFLAKE.getCode(), 20); + Assertions.assertEquals(DbType.SNOWFLAKE.getDescp(), "snowflake"); + Assertions.assertEquals(DbType.of(20), DbType.SNOWFLAKE); + Assertions.assertEquals(DbType.ofName("SNOWFLAKE"), DbType.SNOWFLAKE); + } + + @Test + public void testBuildString() { + 
SnowflakeDatasourceParamDTO snowflakeDatasourceParamDT = new SnowflakeDatasourceParamDTO(); + snowflakeDatasourceParamDT.setHost("localhost"); + snowflakeDatasourceParamDT.setDatabase("default"); + snowflakeDatasourceParamDT.setUserName("root"); + snowflakeDatasourceParamDT.setPort(3306); + snowflakeDatasourceParamDT.setPassword("123456"); + Assertions.assertNotNull(snowflakeDatasourceParamDT.toString()); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/provider/SnowflakeJDBCDataSourceProviderTest.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/provider/SnowflakeJDBCDataSourceProviderTest.java new file mode 100644 index 0000000000..69dc1c0f56 --- /dev/null +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-snowflake/src/test/java/org/apache/dolphinscheduler/plugin/datasource/snowflake/provider/SnowflakeJDBCDataSourceProviderTest.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.datasource.snowflake.provider; + +import org.apache.dolphinscheduler.plugin.datasource.api.provider.JDBCDataSourceProvider; +import org.apache.dolphinscheduler.plugin.datasource.snowflake.param.SnowflakeConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.MockedStatic; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.zaxxer.hikari.HikariDataSource; + +@ExtendWith(MockitoExtension.class) +public class SnowflakeJDBCDataSourceProviderTest { + + @Test + public void testCreateJdbcDataSource() { + try ( + MockedStatic mockedJDBCDataSourceProvider = + Mockito.mockStatic(JDBCDataSourceProvider.class)) { + HikariDataSource dataSource = Mockito.mock(HikariDataSource.class); + mockedJDBCDataSourceProvider + .when(() -> JDBCDataSourceProvider.createJdbcDataSource(Mockito.any(), Mockito.any())) + .thenReturn(dataSource); + Assertions.assertNotNull( + JDBCDataSourceProvider.createJdbcDataSource(new SnowflakeConnectionParam(), DbType.SNOWFLAKE)); + } + } + + @Test + public void testCreateOneSessionJdbcDataSource() { + try ( + MockedStatic mockedJDBCDataSourceProvider = + Mockito.mockStatic(JDBCDataSourceProvider.class)) { + HikariDataSource dataSource = Mockito.mock(HikariDataSource.class); + mockedJDBCDataSourceProvider + .when(() -> JDBCDataSourceProvider.createOneSessionJdbcDataSource(Mockito.any(), Mockito.any())) + .thenReturn(dataSource); + Assertions.assertNotNull( + JDBCDataSourceProvider.createOneSessionJdbcDataSource(new SnowflakeConnectionParam(), + DbType.SNOWFLAKE)); + } + } + +} diff --git a/dolphinscheduler-datasource-plugin/pom.xml b/dolphinscheduler-datasource-plugin/pom.xml index 4e81fea5c1..332bab306a 100644 --- a/dolphinscheduler-datasource-plugin/pom.xml +++ b/dolphinscheduler-datasource-plugin/pom.xml @@ -49,6 +49,7 @@ dolphinscheduler-datasource-dameng dolphinscheduler-datasource-ssh dolphinscheduler-datasource-databend + dolphinscheduler-datasource-snowflake diff --git a/dolphinscheduler-dist/release-docs/licenses/LICENSE-snowflake-jdbc.txt b/dolphinscheduler-dist/release-docs/licenses/LICENSE-snowflake-jdbc.txt new file mode 100644 index 0000000000..96cd5d8186 --- /dev/null +++ b/dolphinscheduler-dist/release-docs/licenses/LICENSE-snowflake-jdbc.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright (c) 2013-2018 Snowflake Computing, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java index 9e8dc067f0..4360b8cfd0 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java @@ -47,7 +47,8 @@ public enum DbType { OCEANBASE(16, "oceanbase"), SSH(17, "ssh"), KYUUBI(18, "kyuubi"), - DATABEND(19, "databend"); + DATABEND(19, "databend"), + SNOWFLAKE(20, "snowflake"); private static final Map DB_TYPE_MAP = Arrays.stream(DbType.values()).collect(toMap(DbType::getCode, Functions.identity())); diff --git a/dolphinscheduler-ui/src/locales/en_US/datasource.ts b/dolphinscheduler-ui/src/locales/en_US/datasource.ts index f001dc145a..d054714410 100644 --- a/dolphinscheduler-ui/src/locales/en_US/datasource.ts +++ b/dolphinscheduler-ui/src/locales/en_US/datasource.ts @@ -23,6 +23,8 @@ export default { search_input_tips: 'Please input the keywords', datasource_name: 'Datasource Name', datasource_name_tips: 'Please enter datasource name', + datawarehouse: 'Datawarehouse', + datawarehouse_tips: 'Please enter datawarehouse', datasource_user_name: 'Owner', datasource_type: 'Datasource Type', datasource_parameter: 'Datasource Parameter', diff --git a/dolphinscheduler-ui/src/locales/zh_CN/datasource.ts b/dolphinscheduler-ui/src/locales/zh_CN/datasource.ts index 702ad9d6dd..e3daf608fa 100644 --- a/dolphinscheduler-ui/src/locales/zh_CN/datasource.ts +++ b/dolphinscheduler-ui/src/locales/zh_CN/datasource.ts @@ -23,6 +23,8 @@ export default { search_input_tips: '请输入关键字', datasource_name: '源名称', datasource_name_tips: '请输入数据源名称', + datawarehouse: '数据仓库', + datawarehouse_tips: '请输入数据仓库', datasource_user_name: '所属用户', datasource_type: '源类型', datasource_parameter: '参数', diff --git a/dolphinscheduler-ui/src/service/modules/data-source/types.ts b/dolphinscheduler-ui/src/service/modules/data-source/types.ts index 3b4246369c..ab3dc1ed07 100644 --- a/dolphinscheduler-ui/src/service/modules/data-source/types.ts +++ b/dolphinscheduler-ui/src/service/modules/data-source/types.ts @@ -34,6 +34,7 @@ type IDataBase = | 'OCEANBASE' | 'SSH' | 'DATABEND' + | 'SNOWFLAKE' type IDataBaseLabel = | 'MYSQL' @@ -80,6 +81,7 @@ interface IDataSource { dbUser?: string compatibleMode?: string publicKey?: string + datawarehouse?: string } interface ListReq { diff --git a/dolphinscheduler-ui/src/views/datasource/list/detail.tsx b/dolphinscheduler-ui/src/views/datasource/list/detail.tsx index 8b5d6d2cf5..6888dbfb5b 100644 --- a/dolphinscheduler-ui/src/views/datasource/list/detail.tsx +++ b/dolphinscheduler-ui/src/views/datasource/list/detail.tsx @@ -556,6 +556,21 @@ const DetailModal = defineComponent({ placeholder={t('datasource.database_name_tips')} /> + {detailForm.type === 'SNOWFLAKE' && ( + + + + )}