From 4ba6d667fc9a5d1fa195b91b9b1f655911274f67 Mon Sep 17 00:00:00 2001 From: zixi0825 <649790970@qq.com> Date: Thu, 27 Jan 2022 15:55:45 +0800 Subject: [PATCH] [Feature][DataQuality] Add Data quality Module #4283 (#6718) * add data quality module * add license * add package configuration in dist pom * fix license and jar import bug * replace apache/skywalking-eyes@9bd5feb SHA * refacotr jbdc-connector and writer * modify parameter name in HiveConnector * fix checkstyle error * fix checkstyle error in dolphinschesuler-dist * fix checkstyle error in dolphinschesuler-dist * fix checkstyle error in dolphinschesuler-dist * fix duplicate code bug * fix code style bug * fix code smells * add dq relevant enums and parameter * replace apache/skywalking-eyes@9bd5feb SHA * fix Constants bug * remove the unused class * add unit test * fix code style error * add unit test * refactor data quality common entity * fix code style error * add unit test * close e2e test * fix code smell bug * modify dataquality enum value to 14 in TaskType * add data qualtiy task * update * add getDatasourceOptions interface * fix checkstyle * close e2e test * add data quality task ui * update skywalking-eyes SHA * fix style * fix eslint error * fix eslint error * test e2e * add unit test and alter dataquality task result * fix checkstyle * fix process service test error * add unit test and fix code smells * fix checkstyle error * fix unit test error * fix checkstyle error * change execute sql type name * revert ui pom.xml * fix data quality task error * fix checkstyle error * fix dq task src_connector_type ui select bug * fix spark rw postgresql bug * change mysql driver scope * fix form-create json bug * fix code smell * fix DolphinException Bug * fix ui validate rule and Alert title * fix target connection param bug * fix threshold validate change * add rule input entry index * change statistic_comparison_check logic * remove check type change * add DateExpressionReplaceUtil * fix null point 
expetion * fix null point expetion * fix test error * add more sql driver * fix test error and remove DateExprReplaceUtil * add get datasource tables and columns * add get datasource tables and columns * remove hive-jdbc in pom.xml * fix code smells * update sql * change the pom.xml * optimize multi_table_accuracy ui * fix v-show error * fix code smells * update sql * [Feature][DataQuality] Add data quality task ui (#5054) * add data quality task ui * update skywalking-eyes SHA * fix style * fix eslint error * fix eslint error * test e2e * fix dq task src_connector_type ui select bug * fix threshold validate change * remove check type change * add get datasource tables and columns * optimize multi_table_accuracy ui * fix v-show error * fix code smells Co-authored-by: sunchaohe * [Feature][DataQuality] Add data quality module (#4830) * add data quality module * add license * add package configuration in dist pom * fix license and jar import bug * replace apache/skywalking-eyes@9bd5feb SHA * refacotr jbdc-connector and writer * modify parameter name in HiveConnector * fix checkstyle error * fix checkstyle error in dolphinschesuler-dist * fix checkstyle error in dolphinschesuler-dist * fix checkstyle error in dolphinschesuler-dist * fix duplicate code bug * fix code style bug * fix code smells * update * close e2e test * fix spark rw postgresql bug * change mysql driver scope * add more sql driver * remove hive-jdbc in pom.xml * change the pom.xml Co-authored-by: sunchaohe * [Feature][DataQuality] Add data quality task backend (#4883) * add dq relevant enums and parameter * replace apache/skywalking-eyes@9bd5feb SHA Co-authored-by: sunchaohe * refactor data_quality_module * add header license * data quality module refactor * fix unit test error * fix checkstyle error * fix unit test error * fix checkstyle error * fix unit test error * fix code smell * fix check style * fix unit test error * task statistics value add unique code * fix unit test error * fix checkstyle 
error * fix checkstyle * fix security hotspot * fix unit test error * fix security hotspot * fix check * add data quality task error handling * fix unit test error * add unit test * add unit test * optimize data quality result alert * fix unit test * fix sql script error * fix bug * update sql script * fix checkstyle * add license * fix checkstyle * fix checkstyle * fix unit test * add jacoco dependencies * fix unit test * fix unit test * add jacoco dependencies * add unit test * add unit test * add license * fix checkstyle * fix pom * fix checkstyle * fix checkstyle * merge dev * fix ui error * fix pom error * fix pom error * fix test error * fix test error * mssql-jdbc exclude azure-keyvault * fix test error * merge dev and add unit test * add notes * rollback the CollectionUtils * fix * update sql * fix * fix * fix query rule page error * change dq.jar path * fix sql error * fix ui error * fix(dq): jar path&task enum description * add notes on DataQualityApplication * fix dq result jump error * fix(ui): page condition * feat(ui): add show error output path * change version * remove all chinese word in sql * merge Co-authored-by: sunchaohe --- .../api/controller/DataQualityController.java | 192 +++ .../api/controller/DataSourceController.java | 28 + .../api/dto/RuleDefinition.java | 63 + .../dolphinscheduler/api/enums/Status.java | 9 +- .../api/service/DataSourceService.java | 15 + .../api/service/DqExecuteResultService.java | 35 + .../api/service/DqRuleService.java | 42 + .../service/impl/DataSourceServiceImpl.java | 197 ++- .../impl/DqExecuteResultServiceImpl.java | 101 ++ .../api/service/impl/DqRuleServiceImpl.java | 340 +++++ .../main/resources/i18n/messages.properties | 15 +- .../resources/i18n/messages_en_US.properties | 13 +- .../resources/i18n/messages_zh_CN.properties | 10 + .../controller/DataQualityControllerTest.java | 180 +++ .../exceptions/ApiExceptionHandlerTest.java | 6 +- .../service/DqExecuteResultServiceTest.java | 96 ++ 
.../api/service/DqRuleServiceTest.java | 237 ++++ .../dolphinscheduler/common/Constants.java | 22 + .../common/enums/TaskType.java | 5 +- .../common/task/dq/DataQualityParameters.java | 103 ++ .../common/utils/JSONUtils.java | 27 +- .../common/utils/TaskParametersUtils.java | 3 + .../src/main/resources/common.properties | 7 + .../common/task/DataQualityParameterTest.java | 132 ++ .../dao/entity/DqComparisonType.java | 151 +++ .../dao/entity/DqExecuteResult.java | 389 ++++++ .../entity/DqExecuteResultAlertContent.java | 257 ++++ .../dolphinscheduler/dao/entity/DqRule.java | 147 +++ .../dao/entity/DqRuleExecuteSql.java | 156 +++ .../dao/entity/DqRuleInputEntry.java | 300 +++++ .../dao/entity/DqTaskStatisticsValue.java | 222 ++++ .../dao/entity/TaskAlertContent.java | 156 +++ .../dao/mapper/DqComparisonTypeMapper.java | 29 + .../dao/mapper/DqExecuteResultMapper.java | 59 + .../dao/mapper/DqRuleExecuteSqlMapper.java | 39 + .../dao/mapper/DqRuleInputEntryMapper.java | 39 + .../dao/mapper/DqRuleMapper.java | 48 + .../mapper/DqTaskStatisticsValueMapper.java | 29 + .../dao/utils/DqRuleUtils.java | 57 + .../dao/mapper/DqComparisonTypeMapper.xml | 22 + .../dao/mapper/DqExecuteResultMapper.xml | 105 ++ .../dao/mapper/DqExecuteSqlMapper.xml | 27 + .../dao/mapper/DqRuleInputEntryMapper.xml | 43 + .../dao/mapper/DqRuleMapper.xml | 37 + .../mapper/DqTaskStatisticsValueMapper.xml | 22 + .../resources/sql/dolphinscheduler_h2.sql | 778 +++++++++++ .../resources/sql/dolphinscheduler_mysql.sql | 778 +++++++++++ .../sql/dolphinscheduler_postgresql.sql | 786 ++++++++++- .../mysql/dolphinscheduler_ddl.sql | 144 +- .../mysql/dolphinscheduler_dml.sql | 636 +++++++++ .../postgresql/dolphinscheduler_ddl.sql | 111 ++ .../postgresql/dolphinscheduler_dml.sql | 660 ++++++++++ dolphinscheduler-data-quality/pom.xml | 203 +++ .../data/quality/Constants.java | 62 + .../data/quality/DataQualityApplication.java | 72 + .../data/quality/config/BaseConfig.java | 66 + .../data/quality/config/Config.java | 
94 ++ .../config/DataQualityConfiguration.java | 132 ++ .../data/quality/config/EnvConfig.java | 34 + .../data/quality/config/IConfig.java | 29 + .../data/quality/config/ReaderConfig.java | 32 + .../quality/config/TransformerConfig.java | 32 + .../data/quality/config/ValidateResult.java | 46 + .../data/quality/config/WriterConfig.java | 32 + .../quality/context/DataQualityContext.java | 67 + .../data/quality/enums/ReaderType.java | 40 + .../data/quality/enums/TransformerType.java | 38 + .../data/quality/enums/WriterType.java | 40 + .../exception/ConfigRuntimeException.java | 40 + .../exception/DataQualityException.java | 57 + .../data/quality/execution/Execution.java | 35 + .../execution/SparkBatchExecution.java | 132 ++ .../execution/SparkRuntimeEnvironment.java | 72 + .../data/quality/flow/Component.java | 56 + .../data/quality/flow/batch/BatchReader.java | 37 + .../quality/flow/batch/BatchTransformer.java | 38 + .../data/quality/flow/batch/BatchWriter.java | 37 + .../quality/flow/batch/reader/HiveReader.java | 69 + .../quality/flow/batch/reader/JdbcReader.java | 95 ++ .../flow/batch/reader/ReaderFactory.java | 76 ++ .../batch/transformer/SqlTransformer.java | 62 + .../batch/transformer/TransformerFactory.java | 72 + .../quality/flow/batch/writer/JdbcWriter.java | 87 ++ .../flow/batch/writer/WriterFactory.java | 81 ++ .../batch/writer/file/BaseFileWriter.java | 131 ++ .../batch/writer/file/HdfsFileWriter.java | 47 + .../batch/writer/file/LocalFileWriter.java | 47 + .../data/quality/utils/ConfigUtils.java | 56 + .../data/quality/utils/JsonUtils.java | 71 + .../src/main/resources/log4j.properties | 22 + .../quality/SparkApplicationTestBase.java | 46 + .../ConfigurationParserTest.java | 61 + .../data/quality/flow/FlowTestBase.java | 45 + .../quality/flow/reader/JdbcReaderTest.java | 99 ++ .../flow/reader/ReaderFactoryTest.java | 70 + .../quality/flow/writer/JdbcWriterTest.java | 101 ++ .../flow/writer/WriterFactoryTest.java | 54 + 
.../data/quality/utils/ConfigUtilsTest.java | 46 + .../datasource/api/utils/CommonUtils.java | 12 +- .../datasource/api/utils/DataSourceUtils.java | 14 + dolphinscheduler-dist/pom.xml | 5 + .../builder/TaskExecutionContextBuilder.java | 12 + .../processor/queue/TaskResponseService.java | 11 +- .../master/runner/task/BaseTaskProcessor.java | 236 +++- .../utils/DataQualityResultOperator.java | 196 +++ .../TaskResponseProcessorTestConfig.java | 35 + dolphinscheduler-server/pom.xml | 4 + .../service/alert/ProcessAlertManager.java | 87 ++ .../service/process/ProcessService.java | 99 +- .../queue/entity/TaskExecutionContext.java | 15 + .../service/process/ProcessServiceTest.java | 127 +- .../dolphinscheduler/spi/enums/DbType.java | 3 + .../spi/params/base/FormType.java | 20 +- .../spi/params/base/ParamsOptions.java | 2 + .../spi/params/base/PluginParams.java | 14 +- .../spi/params/group/GroupParam.java | 98 ++ .../spi/params/group/GroupParamsProps.java | 55 + .../spi/params/input/InputParam.java | 37 +- .../spi/params/select/SelectParam.java | 44 +- .../spi/task/TaskConstants.java | 25 +- .../spi/task/dq/enums/CheckType.java | 71 + .../spi/task/dq/enums/ConnectorType.java | 67 + .../spi/task/dq/enums/DqFailureStrategy.java | 64 + .../spi/task/dq/enums/DqTaskState.java | 69 + .../spi/task/dq/enums/ExecuteSqlType.java | 69 + .../spi/task/dq/enums/InputType.java | 71 + .../spi/task/dq/enums/OperatorType.java | 75 ++ .../spi/task/dq/enums/OptionSourceType.java | 71 + .../spi/task/dq/enums/RuleType.java | 71 + .../spi/task/dq/enums/ValueType.java | 71 + .../spi/task/dq/model/JdbcInfo.java | 96 ++ .../task/dq/utils/DataQualityConstants.java | 102 ++ .../spi/task/dq/utils/JdbcUrlParser.java | 112 ++ .../spi/task/dq/utils/Md5Utils.java | 54 + .../DataQualityTaskExecutionContext.java | 291 ++++ .../spi/task/request/TaskRequest.java | 10 + .../spi/utils/CollectionUtils.java | 1 - .../dolphinscheduler/spi/utils/Constants.java | 27 + .../spi/utils/StringUtils.java | 123 ++ 
.../spi/params/PluginParamsTransferTest.java | 69 +- .../spi/task/dq/enums/CheckTypeTest.java | 46 + .../spi/task/dq/enums/ConnectorTypeTest.java | 42 + .../task/dq/enums/DqFailureStrategyTest.java | 43 + .../spi/task/dq/enums/DqTaskStateTest.java | 46 + .../spi/task/dq/enums/ExecuteSqlTypeTest.java | 46 + .../spi/task/dq/enums/InputTypeTest.java | 49 + .../spi/task/dq/enums/OperatorTypeTest.java | 55 + .../task/dq/enums/OptionSourceTypeTest.java | 49 + .../spi/task/dq/enums/RuleTypeTest.java | 49 + .../spi/task/dq/enums/ValueTypeTest.java | 54 + .../spi/task/dq/utils/JdbcUrlParserTest.java | 44 + .../spi/task/dq/utils/Md5UtilsTest.java | 30 + .../spi/utils/StringUtilsTest.java | 119 ++ .../dolphinscheduler-task-api/pom.xml | 14 + .../dolphinscheduler-task-dataquality/pom.xml | 49 + .../plugin/task/dq/DataQualityParameters.java | 102 ++ .../plugin/task/dq/DataQualityTask.java | 191 +++ .../task/dq/DataQualityTaskChannel.java | 35 + .../dq/DataQualityTaskChannelFactory.java | 45 + .../dq/exception/DataQualityException.java | 44 + .../plugin/task/dq/rule/RuleManager.java | 134 ++ .../task/dq/rule/entity/DqRuleExecuteSql.java | 143 ++ .../task/dq/rule/entity/DqRuleInputEntry.java | 276 ++++ .../task/dq/rule/parameter/BaseConfig.java | 59 + .../parameter/DataQualityConfiguration.java | 121 ++ .../task/dq/rule/parameter/EnvConfig.java | 34 + .../task/dq/rule/parser/IRuleParser.java | 33 + .../task/dq/rule/parser/MappingColumn.java | 60 + .../parser/MultiTableAccuracyRuleParser.java | 92 ++ .../MultiTableComparisonRuleParser.java | 58 + .../SingleTableCustomSqlRuleParser.java | 74 ++ .../dq/rule/parser/SingleTableRuleParser.java | 82 ++ .../plugin/task/dq/utils/RuleParserUtils.java | 563 ++++++++ .../task/dq/utils/spark/ProgramType.java | 31 + .../task/dq/utils/spark/SparkArgsUtils.java | 129 ++ .../task/dq/utils/spark/SparkConstants.java | 73 + .../task/dq/utils/spark/SparkParameters.java | 241 ++++ .../task/dq/DataQualityParameterTest.java | 131 ++ 
.../plugin/task/dq/DataQualityTaskTest.java | 1169 +++++++++++++++++ dolphinscheduler-task-plugin/pom.xml | 1 + .../pages/dag/_source/canvas/taskbar.scss | 6 + .../js/conf/home/pages/dag/_source/config.js | 6 +- .../js/conf/home/pages/dag/_source/dag.scss | 0 .../pages/dag/_source/formModel/formModel.vue | 9 + .../_source/formModel/tasks/dataquality.vue | 621 +++++++++ .../_source/images/task-icos/data_quality.png | Bin 0 -> 1086 bytes .../images/task-icos/data_quality_hover.png | Bin 0 -> 1054 bytes .../pages/dag/img/toolbar_DATA_QUALITY.png | Bin 0 -> 960 bytes .../home/pages/dataquality/_source/common.js | 125 ++ .../dataquality/_source/conditions/result.vue | 138 ++ .../dataquality/_source/conditions/rule.vue | 63 + .../js/conf/home/pages/dataquality/index.vue | 31 + .../dataquality/pages/result/_source/list.vue | 133 ++ .../pages/dataquality/pages/result/index.vue | 182 +++ .../dataquality/pages/rule/_source/list.vue | 100 ++ .../pages/dataquality/pages/rule/index.vue | 176 +++ .../src/js/conf/home/router/index.js | 2 + .../js/conf/home/router/module/dataquality.js | 52 + .../src/js/conf/home/store/dag/actions.js | 54 + .../js/conf/home/store/dataquality/actions.js | 47 + .../js/conf/home/store/dataquality/getters.js | 18 + .../js/conf/home/store/dataquality/index.js | 29 + .../conf/home/store/dataquality/mutations.js | 19 + .../js/conf/home/store/dataquality/state.js | 18 + .../src/js/conf/home/store/index.js | 4 +- .../src/js/module/components/nav/nav.vue | 11 + .../components/secondaryMenu/_source/menu.js | 20 + .../src/js/module/i18n/locale/en_US.js | 80 +- .../src/js/module/i18n/locale/zh_CN.js | 80 +- dolphinscheduler-worker/pom.xml | 5 + .../TaskCallbackServiceTestConfig.java | 42 + pom.xml | 13 + 212 files changed, 20472 insertions(+), 69 deletions(-) create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataQualityController.java create mode 100644 
dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/RuleDefinition.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqExecuteResultService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqRuleService.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java create mode 100644 dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataQualityControllerTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqExecuteResultServiceTest.java create mode 100644 dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqRuleServiceTest.java create mode 100644 dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dq/DataQualityParameters.java create mode 100644 dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/DataQualityParameterTest.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqComparisonType.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResultAlertContent.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRule.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleExecuteSql.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleInputEntry.java create mode 100644 
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskAlertContent.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleExecuteSqlMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.java create mode 100644 dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DqRuleUtils.java create mode 100644 dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteSqlMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.xml create mode 100644 dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.xml create mode 100644 dolphinscheduler-data-quality/pom.xml create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/Constants.java create mode 100644 
dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/DataQualityApplication.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/BaseConfig.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/Config.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/DataQualityConfiguration.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/EnvConfig.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/IConfig.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ReaderConfig.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/TransformerConfig.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ValidateResult.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/WriterConfig.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/context/DataQualityContext.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/ReaderType.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/TransformerType.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/WriterType.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/ConfigRuntimeException.java create mode 100644 
dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/DataQualityException.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/Execution.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkBatchExecution.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkRuntimeEnvironment.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/Component.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchReader.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchTransformer.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchWriter.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/HiveReader.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/ReaderFactory.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/SqlTransformer.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/TransformerFactory.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java create mode 100644 
dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/WriterFactory.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/BaseFileWriter.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/HdfsFileWriter.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/LocalFileWriter.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtils.java create mode 100644 dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java create mode 100644 dolphinscheduler-data-quality/src/main/resources/log4j.properties create mode 100644 dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/SparkApplicationTestBase.java create mode 100644 dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/configuration/ConfigurationParserTest.java create mode 100644 dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/FlowTestBase.java create mode 100644 dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java create mode 100644 dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/ReaderFactoryTest.java create mode 100644 dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/JdbcWriterTest.java create mode 100644 dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/WriterFactoryTest.java create mode 100644 dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtilsTest.java create 
mode 100644 dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/utils/DataQualityResultOperator.java create mode 100644 dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessorTestConfig.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/group/GroupParam.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/group/GroupParamsProps.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/CheckType.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ConnectorType.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqFailureStrategy.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqTaskState.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ExecuteSqlType.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/InputType.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/OperatorType.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/OptionSourceType.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/RuleType.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ValueType.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/model/JdbcInfo.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/DataQualityConstants.java create mode 100644 
dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/JdbcUrlParser.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/Md5Utils.java create mode 100644 dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/DataQualityTaskExecutionContext.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/CheckTypeTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ConnectorTypeTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqFailureStrategyTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqTaskStateTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ExecuteSqlTypeTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/InputTypeTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/OperatorTypeTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/OptionSourceTypeTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/RuleTypeTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ValueTypeTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/utils/JdbcUrlParserTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/utils/Md5UtilsTest.java create mode 100644 dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/utils/StringUtilsTest.java create mode 100644 
dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameters.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/exception/DataQualityException.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/RuleManager.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleExecuteSql.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleInputEntry.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/BaseConfig.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/DataQualityConfiguration.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/EnvConfig.java create mode 100644 
dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/IRuleParser.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MappingColumn.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableAccuracyRuleParser.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableComparisonRuleParser.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/SingleTableCustomSqlRuleParser.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/SingleTableRuleParser.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/RuleParserUtils.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/ProgramType.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/SparkArgsUtils.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/SparkConstants.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/SparkParameters.java create mode 100644 
dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java create mode 100644 dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java mode change 100644 => 100755 dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/tasks/dataquality.vue create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/images/task-icos/data_quality.png create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/images/task-icos/data_quality_hover.png create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_DATA_QUALITY.png create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/common.js create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/conditions/result.vue create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/conditions/rule.vue create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dataquality/index.vue create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/result/_source/list.vue create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/result/index.vue create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/rule/_source/list.vue create mode 100644 dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/rule/index.vue create mode 100644 dolphinscheduler-ui/src/js/conf/home/router/module/dataquality.js create mode 100644 dolphinscheduler-ui/src/js/conf/home/store/dataquality/actions.js create mode 100644 dolphinscheduler-ui/src/js/conf/home/store/dataquality/getters.js create mode 100644 dolphinscheduler-ui/src/js/conf/home/store/dataquality/index.js create mode 100644 
dolphinscheduler-ui/src/js/conf/home/store/dataquality/mutations.js create mode 100644 dolphinscheduler-ui/src/js/conf/home/store/dataquality/state.js mode change 100644 => 100755 dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataQualityController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataQualityController.java new file mode 100644 index 0000000000..312d02fe23 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataQualityController.java @@ -0,0 +1,192 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.controller; + +import static org.apache.dolphinscheduler.api.enums.Status.GET_DATASOURCE_OPTIONS_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GET_RULE_FORM_CREATE_JSON_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RULE_LIST_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RULE_LIST_PAGING_ERROR; + +import org.apache.dolphinscheduler.api.exceptions.ApiException; +import org.apache.dolphinscheduler.api.service.DqExecuteResultService; +import org.apache.dolphinscheduler.api.service.DqRuleService; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.utils.ParameterUtils; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Map; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestAttribute; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.bind.annotation.RestController; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import springfox.documentation.annotations.ApiIgnore; + +/** + * data quality controller + */ +@Api(tags = "DATA_QUALITY_SERVICE") +@RestController +@RequestMapping("/data-quality") +public class DataQualityController extends BaseController { + + @Autowired + private DqRuleService dqRuleService; + + @Autowired + private DqExecuteResultService 
dqExecuteResultService; + + /** + * get rule from-create json + * @param ruleId ruleId + * @return from-create json + */ + @ApiOperation(value = "getRuleFormCreateJson", notes = "GET_RULE_FORM_CREATE_JSON_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "ruleId", value = "RULE_ID", dataType = "Int", example = "1") + }) + @GetMapping(value = "/getRuleFormCreateJson") + @ResponseStatus(HttpStatus.OK) + @ApiException(GET_RULE_FORM_CREATE_JSON_ERROR) + public Result getRuleFormCreateJsonById(@RequestParam(value = "ruleId") int ruleId) { + Map result = dqRuleService.getRuleFormCreateJsonById(ruleId); + return returnDataList(result); + } + + /** + * query rule list paging + * + * @param loginUser login user + * @param searchVal search value + * @param pageNo page number + * @param pageSize page size + * @return rule page + */ + @ApiOperation(value = "queryRuleListPaging", notes = "QUERY_RULE_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), + @ApiImplicitParam(name = "ruleType", value = "RULE_TYPE", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "startDate", value = "START_DATE", type = "String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", type = "String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "10") + }) + @GetMapping(value = "/rule/page") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_RULE_LIST_PAGING_ERROR) + public Result queryRuleListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam(value = "ruleType", required = false) Integer ruleType, + @RequestParam(value = "startDate", required = false) String startTime, + @RequestParam(value = "endDate", required = false) String endTime, + 
@RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + Result result = checkPageParams(pageNo, pageSize); + if (!result.checkResult()) { + return result; + } + searchVal = ParameterUtils.handleEscapes(searchVal); + + return dqRuleService.queryRuleListPaging(loginUser, searchVal, ruleType, startTime, endTime, pageNo, pageSize); + } + + /** + * query all rule list + * @return rule list + */ + @ApiOperation(value = "queryRuleList", notes = "QUERY_RULE_LIST_NOTES") + @GetMapping(value = "/ruleList") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_RULE_LIST_ERROR) + public Result queryRuleList() { + Map result = dqRuleService.queryAllRuleList(); + return returnDataList(result); + } + + /** + * query task execute result list paging + * + * @param loginUser loginUser + * @param searchVal searchVal + * @param ruleType ruleType + * @param state state + * @param startTime startTime + * @param endTime endTime + * @param pageNo pageNo + * @param pageSize pageSize + * @return + */ + @ApiOperation(value = "queryExecuteResultListPaging", notes = "QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"), + @ApiImplicitParam(name = "ruleType", value = "RULE_TYPE", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "state", value = "STATE", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "startDate", value = "START_DATE", type = "String"), + @ApiImplicitParam(name = "endDate", value = "END_DATE", type = "String"), + @ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"), + @ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "10") + }) + @GetMapping(value = "/result/page") + @ResponseStatus(HttpStatus.OK) + @ApiException(QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR) + public Result queryExecuteResultListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User 
loginUser, + @RequestParam(value = "searchVal", required = false) String searchVal, + @RequestParam(value = "ruleType", required = false) Integer ruleType, + @RequestParam(value = "state", required = false) Integer state, + @RequestParam(value = "startDate", required = false) String startTime, + @RequestParam(value = "endDate", required = false) String endTime, + @RequestParam("pageNo") Integer pageNo, + @RequestParam("pageSize") Integer pageSize) { + + Result result = checkPageParams(pageNo, pageSize); + if (!result.checkResult()) { + return result; + } + searchVal = ParameterUtils.handleEscapes(searchVal); + + return dqExecuteResultService.queryResultListPaging(loginUser, searchVal, state, ruleType, startTime, endTime, pageNo, pageSize); + } + + /** + * get datasource options by id + * @param datasourceId datasourceId + * @return result + */ + @ApiOperation(value = "getDatasourceOptionsById", notes = "GET_DATASOURCE_OPTIONS_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "datasourceId", value = "DATA_SOURCE_ID", dataType = "Int", example = "1") + }) + @GetMapping(value = "/getDatasourceOptionsById") + @ResponseStatus(HttpStatus.OK) + @ApiException(GET_DATASOURCE_OPTIONS_ERROR) + public Result getDatasourceOptionsById(@RequestParam(value = "datasourceId") int datasourceId) { + Map result = dqRuleService.getDatasourceOptionsById(datasourceId); + return returnDataList(result); + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java index 8286779581..ab9d1c29c0 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/DataSourceController.java @@ -22,6 +22,8 @@ import static org.apache.dolphinscheduler.api.enums.Status.CONNECTION_TEST_FAILU 
import static org.apache.dolphinscheduler.api.enums.Status.CONNECT_DATASOURCE_FAILURE; import static org.apache.dolphinscheduler.api.enums.Status.CREATE_DATASOURCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.DELETE_DATA_SOURCE_FAILURE; +import static org.apache.dolphinscheduler.api.enums.Status.GET_DATASOURCE_TABLES_ERROR; +import static org.apache.dolphinscheduler.api.enums.Status.GET_DATASOURCE_TABLE_COLUMNS_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.KERBEROS_STARTUP_STATE; import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATASOURCE_ERROR; import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_DATASOURCE; @@ -335,4 +337,30 @@ public class DataSourceController extends BaseController { // if upload resource is HDFS and kerberos startup is true , else false return success(Status.SUCCESS.getMsg(), CommonUtils.getKerberosStartupState()); } + + @ApiOperation(value = "tables", notes = "GET_DATASOURCE_TABLES_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "datasourceId", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "1") + }) + @GetMapping(value = "/tables") + @ResponseStatus(HttpStatus.OK) + @ApiException(GET_DATASOURCE_TABLES_ERROR) + public Result getTables(@RequestParam("datasourceId") Integer datasourceId) { + Map result = dataSourceService.getTables(datasourceId); + return returnDataList(result); + } + + @ApiOperation(value = "tableColumns", notes = "GET_DATASOURCE_TABLE_COLUMNS_NOTES") + @ApiImplicitParams({ + @ApiImplicitParam(name = "datasourceId", value = "DATA_SOURCE_ID", required = true, dataType = "Int", example = "1"), + @ApiImplicitParam(name = "tableName", value = "TABLE_NAME", required = true, dataType = "String", example = "test") + }) + @GetMapping(value = "/tableColumns") + @ResponseStatus(HttpStatus.OK) + @ApiException(GET_DATASOURCE_TABLE_COLUMNS_ERROR) + public Result getTableColumns(@RequestParam("datasourceId") Integer datasourceId, + 
@RequestParam("tableName") String tableName) { + Map result = dataSourceService.getTableColumns(datasourceId,tableName); + return returnDataList(result); + } } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/RuleDefinition.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/RuleDefinition.java new file mode 100644 index 0000000000..d702e88b48 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/RuleDefinition.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.dto; + +import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; +import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; + +import java.util.List; + +/** + * RuleDefinition + */ +public class RuleDefinition { + + /** + * rule input entry list + */ + private List ruleInputEntryList; + + /** + * rule execute sql list + */ + private List executeSqlList; + + public RuleDefinition() { + } + + public RuleDefinition(List ruleInputEntryList,List executeSqlList) { + this.ruleInputEntryList = ruleInputEntryList; + this.executeSqlList = executeSqlList; + } + + public List getRuleInputEntryList() { + return ruleInputEntryList; + } + + public void setRuleInputEntryList(List ruleInputEntryList) { + this.ruleInputEntryList = ruleInputEntryList; + } + + public List getExecuteSqlList() { + return executeSqlList; + } + + public void setExecuteSqlList(List executeSqlList) { + this.executeSqlList = executeSqlList; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java index 12531e7984..5bb6a2fd05 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java @@ -350,7 +350,14 @@ public enum Status { QUERY_ENVIRONMENT_BY_CODE_ERROR(1200009, "not found environment [{0}] ", "查询环境编码[{0}]不存在"), QUERY_ENVIRONMENT_ERROR(1200010, "login user query environment error", "分页查询环境列表错误"), VERIFY_ENVIRONMENT_ERROR(1200011, "verify environment error", "验证环境信息错误"), - + GET_RULE_FORM_CREATE_JSON_ERROR(1200012, "get rule form create json error", "获取规则 FROM-CREATE-JSON 错误"), + QUERY_RULE_LIST_PAGING_ERROR(1200013, "query rule list paging error", "获取规则分页列表错误"), + QUERY_RULE_LIST_ERROR(1200014, "query rule list error", "获取规则列表错误"), + QUERY_RULE_INPUT_ENTRY_LIST_ERROR(1200015, "query 
rule list error", "获取规则列表错误"), + QUERY_EXECUTE_RESULT_LIST_PAGING_ERROR(1200016, "query execute result list paging error", "获取数据质量任务结果分页错误"), + GET_DATASOURCE_OPTIONS_ERROR(1200017, "get datasource options error", "获取数据源Options错误"), + GET_DATASOURCE_TABLES_ERROR(1200018, "get datasource tables error", "获取数据源表列表错误"), + GET_DATASOURCE_TABLE_COLUMNS_ERROR(1200019, "get datasource table columns error", "获取数据源表列名错误"), TASK_GROUP_NAME_EXSIT(130001,"this task group name is repeated in a project","该任务组名称在一个项目中已经使用"), TASK_GROUP_SIZE_ERROR(130002,"task group size error","任务组大小应该为大于1的整数"), TASK_GROUP_STATUS_ERROR(130003,"task group status error","任务组已经被关闭"), diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java index d240847155..eef169b64e 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java @@ -128,4 +128,19 @@ public interface DataSourceService { * @return authorized result code */ Map authedDatasource(User loginUser, Integer userId); + + /** + * get tables + * @param datasourceId + * @return + */ + Map getTables(Integer datasourceId); + + /** + * get table columns + * @param datasourceId + * @param tableName + * @return + */ + Map getTableColumns(Integer datasourceId,String tableName); } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqExecuteResultService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqExecuteResultService.java new file mode 100644 index 0000000000..d8e3468bbb --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqExecuteResultService.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or 
more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; + +/** + * DqExecuteResultService + */ +public interface DqExecuteResultService { + + Result queryResultListPaging(User loginUser, + String searchVal, + Integer state, + Integer ruleType, + String startTime, + String endTime, + Integer pageNo, Integer pageSize); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqRuleService.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqRuleService.java new file mode 100644 index 0000000000..d0350556ac --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DqRuleService.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service; + +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.dao.entity.User; + +import java.util.Map; + +/** + * DqsRuleService + */ +public interface DqRuleService { + + Map getRuleFormCreateJsonById(int id); + + Map queryAllRuleList(); + + Result queryRuleListPaging(User loginUser, + String searchVal, + Integer ruleType, + String startTime, + String endTime, + Integer pageNo, Integer pageSize); + + Map getDatasourceOptionsById(int datasourceId); +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java index 80a9b3118a..a57f135297 100644 --- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java @@ -33,10 +33,15 @@ import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils; import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; +import org.apache.dolphinscheduler.spi.utils.StringUtils; -import org.apache.commons.lang.StringUtils; +import org.apache.commons.collections4.CollectionUtils; import 
java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.Date; @@ -71,6 +76,12 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource @Autowired private DataSourceUserMapper datasourceUserMapper; + private static final String TABLE = "TABLE"; + private static final String VIEW = "VIEW"; + private static final String[] TABLE_TYPES = new String[]{TABLE, VIEW}; + private static final String TABLE_NAME = "TABLE_NAME"; + private static final String COLUMN_NAME = "COLUMN_NAME"; + /** * create data source * @@ -436,4 +447,188 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource return result; } + @Override + public Map getTables(Integer datasourceId) { + Map result = new HashMap<>(); + + DataSource dataSource = dataSourceMapper.selectById(datasourceId); + + List tableList = null; + BaseConnectionParam connectionParam = + (BaseConnectionParam) DataSourceUtils.buildConnectionParams( + dataSource.getType(), + dataSource.getConnectionParams()); + + if (null == connectionParam) { + putMsg(result, Status.DATASOURCE_CONNECT_FAILED); + return result; + } + + Connection connection = + DataSourceUtils.getConnection(dataSource.getType(), connectionParam); + ResultSet tables = null; + + try { + + if (null == connection) { + putMsg(result, Status.DATASOURCE_CONNECT_FAILED); + return result; + } + + DatabaseMetaData metaData = connection.getMetaData(); + String schema = null; + try { + schema = metaData.getConnection().getSchema(); + } catch (SQLException e) { + logger.error("cant not get the schema : {}", e.getMessage(), e); + } + + tables = metaData.getTables( + connectionParam.getDatabase(), + getDbSchemaPattern(dataSource.getType(),schema,connectionParam), + "%", TABLE_TYPES); + if (null == tables) { + putMsg(result, Status.GET_DATASOURCE_TABLES_ERROR); + return result; + } + + tableList = 
new ArrayList<>(); + while (tables.next()) { + String name = tables.getString(TABLE_NAME); + tableList.add(name); + } + + } catch (Exception e) { + logger.error(e.toString(), e); + putMsg(result, Status.GET_DATASOURCE_TABLES_ERROR); + return result; + } finally { + closeResult(tables); + releaseConnection(connection); + } + + List options = getParamsOptions(tableList); + + result.put(Constants.DATA_LIST, options); + putMsg(result, Status.SUCCESS); + return result; + } + + @Override + public Map getTableColumns(Integer datasourceId,String tableName) { + Map result = new HashMap<>(); + + DataSource dataSource = dataSourceMapper.selectById(datasourceId); + BaseConnectionParam connectionParam = + (BaseConnectionParam) DataSourceUtils.buildConnectionParams( + dataSource.getType(), + dataSource.getConnectionParams()); + + if (null == connectionParam) { + putMsg(result, Status.DATASOURCE_CONNECT_FAILED); + return result; + } + + Connection connection = + DataSourceUtils.getConnection(dataSource.getType(), connectionParam); + List columnList = new ArrayList<>(); + ResultSet rs = null; + + try { + + String database = connectionParam.getDatabase(); + if (null == connection) { + return result; + } + + DatabaseMetaData metaData = connection.getMetaData(); + + if (dataSource.getType() == DbType.ORACLE) { + database = null; + } + rs = metaData.getColumns(database, null, tableName, "%"); + if (rs == null) { + return result; + } + while (rs.next()) { + columnList.add(rs.getString(COLUMN_NAME)); + } + } catch (Exception e) { + logger.error(e.toString(), e); + } finally { + closeResult(rs); + releaseConnection(connection); + } + + List options = getParamsOptions(columnList); + + result.put(Constants.DATA_LIST, options); + putMsg(result, Status.SUCCESS); + return result; + } + + private List getParamsOptions(List columnList) { + List options = null; + if (CollectionUtils.isNotEmpty(columnList)) { + options = new ArrayList<>(); + + for (String column : columnList) { + ParamsOptions 
childrenOption = + new ParamsOptions(column, column, false); + options.add(childrenOption); + } + } + return options; + } + + private String getDbSchemaPattern(DbType dbType,String schema,BaseConnectionParam connectionParam) { + if (dbType == null) { + return null; + } + String schemaPattern = null; + switch (dbType) { + case HIVE: + schemaPattern = connectionParam.getDatabase(); + break; + case ORACLE: + schemaPattern = connectionParam.getUser(); + if (null != schemaPattern) { + schemaPattern = schemaPattern.toUpperCase(); + } + break; + case SQLSERVER: + schemaPattern = "dbo"; + break; + case CLICKHOUSE: + case PRESTO: + if (!StringUtils.isEmpty(schema)) { + schemaPattern = schema; + } + break; + default: + break; + } + return schemaPattern; + } + + private static void releaseConnection(Connection connection) { + if (null != connection) { + try { + connection.close(); + } catch (Exception e) { + logger.error("Connection release error", e); + } + } + } + + private static void closeResult(ResultSet rs) { + if (rs != null) { + try { + rs.close(); + } catch (Exception e) { + logger.error("ResultSet close error", e); + } + } + } + } diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java new file mode 100644 index 0000000000..0b3178a3c6 --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqExecuteResultServiceImpl.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service.impl; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.DqExecuteResultService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import java.util.Date; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +/** + * DqExecuteResultServiceImpl + */ +@Service +public class DqExecuteResultServiceImpl extends BaseServiceImpl implements DqExecuteResultService { + + @Autowired + private DqExecuteResultMapper dqExecuteResultMapper; + + @Override + public Result queryResultListPaging(User loginUser, + String searchVal, + Integer state, + Integer ruleType, + String startTime, + String endTime, + Integer pageNo, + Integer pageSize) { + + Result result = new Result(); + int[] statusArray = null; + // filter by state + if (state != null) { + statusArray = new int[]{state}; + } + + Date start = null; + Date end = null; + try { + if (StringUtils.isNotEmpty(startTime)) { + start = 
DateUtils.getScheduleDate(startTime); + } + if (StringUtils.isNotEmpty(endTime)) { + end = DateUtils.getScheduleDate(endTime); + } + } catch (Exception e) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startTime,endTime"); + return result; + } + + Page page = new Page<>(pageNo, pageSize); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + + if (ruleType == null) { + ruleType = -1; + } + + IPage dqsResultPage = + dqExecuteResultMapper.queryResultListPaging( + page, + searchVal, + loginUser.getId(), + statusArray, + ruleType, + start, + end); + + pageInfo.setTotal((int) dqsResultPage.getTotal()); + pageInfo.setTotalList(dqsResultPage.getRecords()); + result.setData(pageInfo); + putMsg(result, Status.SUCCESS); + return result; + } +} diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java new file mode 100644 index 0000000000..5cc2f81c5f --- /dev/null +++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DqRuleServiceImpl.java @@ -0,0 +1,340 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service.impl; + +import static org.apache.dolphinscheduler.common.Constants.DATA_LIST; +import static org.apache.dolphinscheduler.spi.utils.Constants.CHANGE; +import static org.apache.dolphinscheduler.spi.utils.Constants.SMALL; + +import org.apache.dolphinscheduler.api.dto.RuleDefinition; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.DqRuleService; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.DqComparisonType; +import org.apache.dolphinscheduler.dao.entity.DqRule; +import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; +import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; +import org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper; +import org.apache.dolphinscheduler.dao.utils.DqRuleUtils; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.params.base.FormType; +import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; +import org.apache.dolphinscheduler.spi.params.base.PluginParams; +import org.apache.dolphinscheduler.spi.params.base.PropsType; +import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.group.GroupParam; +import org.apache.dolphinscheduler.spi.params.group.GroupParamsProps; +import 
org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.input.InputParamProps; +import org.apache.dolphinscheduler.spi.params.select.SelectParam; +import org.apache.dolphinscheduler.spi.task.dq.enums.OptionSourceType; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import org.apache.commons.collections4.CollectionUtils; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * DqRuleServiceImpl + */ +@Service +public class DqRuleServiceImpl extends BaseServiceImpl implements DqRuleService { + + private final Logger logger = LoggerFactory.getLogger(DqRuleServiceImpl.class); + + @Autowired + private DqRuleMapper dqRuleMapper; + + @Autowired + private DqRuleInputEntryMapper dqRuleInputEntryMapper; + + @Autowired + private DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper; + + @Autowired + private DataSourceMapper dataSourceMapper; + + @Autowired + private DqComparisonTypeMapper dqComparisonTypeMapper; + + @Override + public Map getRuleFormCreateJsonById(int id) { + + Map result = new HashMap<>(); + + List ruleInputEntryList = dqRuleInputEntryMapper.getRuleInputEntryList(id); + + if (ruleInputEntryList == null || ruleInputEntryList.isEmpty()) { + putMsg(result, Status.QUERY_RULE_INPUT_ENTRY_LIST_ERROR); + } else { + result.put(DATA_LIST, 
getRuleFormCreateJson(DqRuleUtils.transformInputEntry(ruleInputEntryList))); + putMsg(result, Status.SUCCESS); + } + + return result; + } + + @Override + public Map queryAllRuleList() { + Map result = new HashMap<>(); + + List ruleList = + dqRuleMapper.selectList(new QueryWrapper<>()); + + result.put(DATA_LIST, ruleList); + putMsg(result, Status.SUCCESS); + + return result; + } + + @Override + public Map getDatasourceOptionsById(int datasourceId) { + Map result = new HashMap<>(); + + List dataSourceList = dataSourceMapper.listAllDataSourceByType(datasourceId); + List options = null; + if (CollectionUtils.isNotEmpty(dataSourceList)) { + options = new ArrayList<>(); + + for (DataSource dataSource: dataSourceList) { + ParamsOptions childrenOption = + new ParamsOptions(dataSource.getName(),dataSource.getId(),false); + options.add(childrenOption); + } + } + + result.put(DATA_LIST, options); + putMsg(result, Status.SUCCESS); + + return result; + } + + @Override + public Result queryRuleListPaging(User loginUser, + String searchVal, + Integer ruleType, + String startTime, + String endTime, + Integer pageNo, + Integer pageSize) { + Result result = new Result(); + + Date start = null; + Date end = null; + try { + if (StringUtils.isNotEmpty(startTime)) { + start = DateUtils.getScheduleDate(startTime); + } + if (StringUtils.isNotEmpty(endTime)) { + end = DateUtils.getScheduleDate(endTime); + } + } catch (Exception e) { + putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startTime,endTime"); + return result; + } + + Page page = new Page<>(pageNo, pageSize); + PageInfo pageInfo = new PageInfo<>(pageNo, pageSize); + + if (ruleType == null) { + ruleType = -1; + } + + IPage dqRulePage = + dqRuleMapper.queryRuleListPaging( + page, + searchVal, + ruleType, + start, + end); + if (dqRulePage != null) { + List dataList = dqRulePage.getRecords(); + dataList.forEach(dqRule -> { + List ruleInputEntryList = + 
DqRuleUtils.transformInputEntry(dqRuleInputEntryMapper.getRuleInputEntryList(dqRule.getId())); + List ruleExecuteSqlList = dqRuleExecuteSqlMapper.getExecuteSqlList(dqRule.getId()); + + RuleDefinition ruleDefinition = new RuleDefinition(ruleInputEntryList,ruleExecuteSqlList); + dqRule.setRuleJson(JSONUtils.toJsonString(ruleDefinition)); + }); + + pageInfo.setTotal((int) dqRulePage.getTotal()); + pageInfo.setTotalList(dataList); + } + + result.setData(pageInfo); + putMsg(result, Status.SUCCESS); + return result; + } + + private String getRuleFormCreateJson(List ruleInputEntryList) { + List params = new ArrayList<>(); + + for (DqRuleInputEntry inputEntry : ruleInputEntryList) { + if (Boolean.TRUE.equals(inputEntry.getShow())) { + switch (FormType.of(inputEntry.getType())) { + case INPUT: + params.add(getInputParam(inputEntry)); + break; + case SELECT: + params.add(getSelectParam(inputEntry)); + break; + case TEXTAREA: + params.add(getTextareaParam(inputEntry)); + break; + case GROUP: + params.add(getGroupParam(inputEntry)); + break; + default: + break; + } + } + } + + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + String result = null; + + try { + result = mapper.writeValueAsString(params); + } catch (JsonProcessingException e) { + logger.error("json parse error : {}", e.getMessage(), e); + } + + return result; + } + + private InputParam getTextareaParam(DqRuleInputEntry inputEntry) { + + InputParamProps paramProps = + new InputParamProps(); + paramProps.setDisabled(!inputEntry.getCanEdit()); + paramProps.setSize(SMALL); + paramProps.setType(PropsType.TEXTAREA.getPropsType()); + paramProps.setRows(1); + + return InputParam + .newBuilder(inputEntry.getField(),inputEntry.getTitle()) + .addValidate(Validate.newBuilder() + .setRequired(inputEntry.getValidate()) + .build()) + .setProps(paramProps) + .setValue(inputEntry.getValue()) + .setPlaceholder(inputEntry.getPlaceholder()) + 
.setEmit(Boolean.TRUE.equals(inputEntry.getEmit()) ? Collections.singletonList(CHANGE) : null) + .build(); + } + + private SelectParam getSelectParam(DqRuleInputEntry inputEntry) { + List options = null; + + switch (OptionSourceType.of(inputEntry.getOptionSourceType())) { + case DEFAULT: + String optionStr = inputEntry.getOptions(); + if (StringUtils.isNotEmpty(optionStr)) { + options = JSONUtils.toList(optionStr, ParamsOptions.class); + } + break; + case DATASOURCE_TYPE: + options = new ArrayList<>(); + ParamsOptions paramsOptions = null; + for (DbType dbtype: DbType.values()) { + paramsOptions = new ParamsOptions(dbtype.name(),dbtype.getCode(),false); + options.add(paramsOptions); + } + break; + case COMPARISON_TYPE: + options = new ArrayList<>(); + ParamsOptions comparisonOptions = null; + List list = dqComparisonTypeMapper.selectList(new QueryWrapper().orderByAsc("id")); + + for (DqComparisonType type: list) { + comparisonOptions = new ParamsOptions(type.getType(), type.getId(),false); + options.add(comparisonOptions); + } + break; + default: + break; + } + + return SelectParam + .newBuilder(inputEntry.getField(),inputEntry.getTitle()) + .setOptions(options) + .setValue(inputEntry.getValue()) + .setSize(SMALL) + .setPlaceHolder(inputEntry.getPlaceholder()) + .setEmit(Boolean.TRUE.equals(inputEntry.getEmit()) ? Collections.singletonList(CHANGE) : null) + .build(); + } + + private InputParam getInputParam(DqRuleInputEntry inputEntry) { + InputParamProps paramProps = + new InputParamProps(); + paramProps.setDisabled(!inputEntry.getCanEdit()); + paramProps.setSize(SMALL); + paramProps.setRows(2); + + return InputParam + .newBuilder(inputEntry.getField(),inputEntry.getTitle()) + .addValidate(Validate.newBuilder() + .setRequired(inputEntry.getValidate()) + .build()) + .setProps(paramProps) + .setValue(inputEntry.getValue()) + .setPlaceholder(inputEntry.getPlaceholder()) + .setEmit(Boolean.TRUE.equals(inputEntry.getEmit()) ? 
Collections.singletonList(CHANGE) : null) + .build(); + } + + private GroupParam getGroupParam(DqRuleInputEntry inputEntry) { + return GroupParam + .newBuilder(inputEntry.getField(),inputEntry.getTitle()) + .addValidate(Validate.newBuilder() + .setRequired(inputEntry.getValidate()) + .build()) + .setProps(new GroupParamsProps().setRules(JSONUtils.toList(inputEntry.getOptions(),PluginParams.class)).setFontSize(20)) + .setEmit(Boolean.TRUE.equals(inputEntry.getEmit()) ? Collections.singletonList(CHANGE) : null) + .build(); + } +} diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages.properties b/dolphinscheduler-api/src/main/resources/i18n/messages.properties index 9bc6ce7ded..5534809557 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages.properties @@ -57,7 +57,7 @@ COUNT_TASK_STATE_NOTES=count task state COUNT_PROCESS_INSTANCE_NOTES=count process instance state COUNT_PROCESS_DEFINITION_BY_USER_NOTES=count process definition by user COUNT_COMMAND_STATE_NOTES=count command state -COUNT_QUEUE_STATE_NOTES=count the running status of the task in the queue\ +COUNT_QUEUE_STATE_NOTES=count the running status of the task in the queue ACCESS_TOKEN_TAG=access token related operation MONITOR_TAG=monitor related operation @@ -196,7 +196,7 @@ PAGE_NO=page no PROCESS_INSTANCE_ID=process instance id PROCESS_INSTANCE_JSON=process instance info(json format) SCHEDULE_TIME=schedule time -SYNC_DEFINE=update the information of the process instance to the process definition\ +SYNC_DEFINE=update the information of the process instance to the process definition RECOVERY_PROCESS_INSTANCE_FLAG=whether to recovery process instance SEARCH_VAL=search val @@ -282,6 +282,17 @@ DELETE_PROCESS_DEFINITION_VERSION_NOTES=delete process definition version QUERY_PROCESS_DEFINITION_VERSIONS_NOTES=query process definition versions SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version 
VERSION=version +GET_RULE_FORM_CREATE_JSON_NOTES=get rule form-create json +QUERY_RULE_LIST_PAGING_NOTES=query rule list paging +QUERY_RULE_LIST_NOTES=query rule list +QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=query execute result list paging +RULE_ID=rule id +RULE_TYPE=rule type +STATE=state +GET_DATASOURCE_OPTIONS_NOTES=get datasource options +GET_DATASOURCE_TABLES_NOTES=get datasource table +GET_DATASOURCE_TABLE_COLUMNS_NOTES=get datasource table columns +TABLE_NAME=table name AUDIT_LOG_TAG=audit log related operation MODULE_TYPE=module type OPERATION_TYPE=operation type diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties index 645525169f..5c568ae1ab 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_en_US.properties @@ -18,8 +18,7 @@ QUERY_SCHEDULE_LIST_NOTES=query schedule list EXECUTE_PROCESS_TAG=execute process related operation PROCESS_INSTANCE_EXECUTOR_TAG=process instance executor related operation RUN_PROCESS_INSTANCE_NOTES=run process instance -BATCH_RUN_PROCESS_INSTANCE_NOTES=batch run process instance(If any processDefinitionCode cannot be found, the failure\ - \ information is returned and the status is set to failed. The successful task will run normally and will not stop) +BATCH_RUN_PROCESS_INSTANCE_NOTES=batch run process instance(If any processDefinitionCode cannot be found, the failure information is returned and the status is set to failed. 
The successful task will run normally and will not stop) START_NODE_LIST=start node list(node name) TASK_DEPEND_TYPE=task depend type COMMAND_TYPE=command type @@ -340,6 +339,16 @@ SWITCH_PROCESS_DEFINITION_VERSION_NOTES=switch process definition version VERSION=version TASK_GROUP_QUEUEID=task group queue id TASK_GROUP_QUEUE_PRIORITY=task group queue priority +GET_RULE_FORM_CREATE_JSON_NOTES=get rule form-create json +QUERY_RULE_LIST_PAGING_NOTES=query rule list paging +QUERY_RULE_LIST_NOTES=query rule list +QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=query execute result list paging +RULE_ID=rule id +RULE_TYPE=rule type +GET_DATASOURCE_OPTIONS_NOTES=get datasource options +GET_DATASOURCE_TABLES_NOTES=get datasource table +GET_DATASOURCE_TABLE_COLUMNS_NOTES=get datasource table columns +TABLE_NAME=table name QUERY_AUDIT_LOG=query audit log AUDIT_LOG_TAG=audit log related operation MODULE_TYPE=module type diff --git a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties index b031434427..033621c021 100644 --- a/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties +++ b/dolphinscheduler-api/src/main/resources/i18n/messages_zh_CN.properties @@ -336,6 +336,16 @@ SWITCH_PROCESS_DEFINITION_VERSION_NOTES=切换流程版本 VERSION=版本号 TASK_GROUP_QUEUEID=任务组队列id TASK_GROUP_QUEUE_PRIORITY=任务队列优先级 +GET_RULE_FORM_CREATE_JSON_NOTES=获取规则form-create json +QUERY_RULE_LIST_PAGING_NOTES=查询规则分页列表 +QUERY_RULE_LIST_NOTES=查询规则列表 +QUERY_EXECUTE_RESULT_LIST_PAGING_NOTES=查询数据质量任务结果分页列表 +RULE_ID=规则ID +RULE_TYPE=规则类型 +GET_DATASOURCE_OPTIONS_NOTES=获取数据源OPTIONS +GET_DATASOURCE_TABLES_NOTES=获取数据源表列表 +GET_DATASOURCE_TABLE_COLUMNS_NOTES=获取数据源表列名 +TABLE_NAME=表名 QUERY_AUDIT_LOG=查询审计日志 AUDIT_LOG_TAG=审计日志执行相关操作 MODULE_TYPE=模块类型 diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataQualityControllerTest.java 
b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataQualityControllerTest.java new file mode 100644 index 0000000000..541a95c8df --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/DataQualityControllerTest.java @@ -0,0 +1,180 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.controller; + +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.DqExecuteResultServiceImpl; +import org.apache.dolphinscheduler.api.service.impl.DqRuleServiceImpl; +import org.apache.dolphinscheduler.api.utils.PageInfo; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.dao.entity.DqRule; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.spi.task.dq.enums.RuleType; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnitRunner; + +/** + * process definition controller test + */ +@RunWith(MockitoJUnitRunner.Silent.class) +public class DataQualityControllerTest { + + @InjectMocks + private DataQualityController dataQualityController; + + @Mock + private DqRuleServiceImpl dqRuleService; + + @Mock + private DqExecuteResultServiceImpl dqExecuteResultService; + + protected User user; + + @Before + public void before() { + User loginUser = new User(); + loginUser.setId(1); + loginUser.setUserType(UserType.GENERAL_USER); + loginUser.setUserName("admin"); + + user = loginUser; + } + + @Test + public void testGetRuleFormCreateJsonById() throws Exception { + + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, 1); + + Mockito.when(dqRuleService.getRuleFormCreateJsonById(1)).thenReturn(result); + + Result response = 
dataQualityController.getRuleFormCreateJsonById(1); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); + } + + private void putMsg(Map result, Status status, Object... statusParams) { + result.put(Constants.STATUS, status); + if (statusParams != null && statusParams.length > 0) { + result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.put(Constants.MSG, status.getMsg()); + } + } + + public void putMsg(Result result, Status status, Object... statusParams) { + result.setCode(status.getCode()); + if (statusParams != null && statusParams.length > 0) { + result.setMsg(MessageFormat.format(status.getMsg(), statusParams)); + } else { + result.setMsg(status.getMsg()); + } + } + + private List getRuleList() { + List list = new ArrayList<>(); + DqRule rule = new DqRule(); + rule.setId(1); + rule.setName("空值检测"); + rule.setType(RuleType.SINGLE_TABLE.getCode()); + rule.setUserId(1); + rule.setUserName("admin"); + rule.setCreateTime(new Date()); + rule.setUpdateTime(new Date()); + + list.add(rule); + + return list; + } + + @Test + public void testQueryRuleListPaging() throws Exception { + + String searchVal = ""; + int ruleType = 0; + String start = "2020-01-01 00:00:00"; + String end = "2020-01-02 00:00:00"; + + PageInfo pageInfo = new PageInfo<>(1,10); + pageInfo.setTotal(10); + pageInfo.setTotalList(getRuleList()); + + Result result = new Result(); + result.setData(pageInfo); + putMsg(result, Status.SUCCESS); + + when(dqRuleService.queryRuleListPaging( + user, searchVal, ruleType, start, end,1, 10)).thenReturn(result); + + Result response = dataQualityController.queryRuleListPaging(user, searchVal, ruleType,start,end,1,10); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); + } + + @Test + public void testQueryRuleList() throws Exception { + + Map result = new HashMap<>(); + putMsg(result, Status.SUCCESS); + result.put(Constants.DATA_LIST, getRuleList()); + + 
when(dqRuleService.queryAllRuleList()).thenReturn(result); + + Result response = dataQualityController.queryRuleList(); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); + } + + @Test + public void testQueryResultListPaging() throws Exception { + + String searchVal = ""; + int ruleType = 0; + String start = "2020-01-01 00:00:00"; + String end = "2020-01-02 00:00:00"; + + PageInfo pageInfo = new PageInfo<>(1,10); + pageInfo.setTotal(10); + + Result result = new Result(); + result.setData(pageInfo); + putMsg(result, Status.SUCCESS); + + when(dqExecuteResultService.queryResultListPaging( + user, searchVal, 0,ruleType, start, end,1, 10)).thenReturn(result); + + Result response = dataQualityController.queryExecuteResultListPaging(user, searchVal, ruleType,0,start,end,1,10); + Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue()); + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandlerTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandlerTest.java index b5ebd5a156..4b4fd613d4 100644 --- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandlerTest.java +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/exceptions/ApiExceptionHandlerTest.java @@ -14,6 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.apache.dolphinscheduler.api.exceptions; import org.apache.dolphinscheduler.api.controller.AccessTokenController; @@ -21,12 +22,13 @@ import org.apache.dolphinscheduler.api.controller.ProcessDefinitionController; import org.apache.dolphinscheduler.api.enums.Status; import org.apache.dolphinscheduler.api.utils.Result; import org.apache.dolphinscheduler.dao.entity.User; + +import java.lang.reflect.Method; + import org.junit.Assert; import org.junit.Test; import org.springframework.web.method.HandlerMethod; -import java.lang.reflect.Method; - public class ApiExceptionHandlerTest { @Test diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqExecuteResultServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqExecuteResultServiceTest.java new file mode 100644 index 0000000000..b300a707cd --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqExecuteResultServiceTest.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.api.service; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.DqExecuteResultServiceImpl; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper; +import org.apache.dolphinscheduler.spi.task.dq.enums.DqTaskState; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.test.context.SpringBootTest; + +import com.baomidou.mybatisplus.core.metadata.IPage; +import com.baomidou.mybatisplus.extension.plugins.pagination.Page; + +@RunWith(MockitoJUnitRunner.Silent.class) +@SpringBootTest(classes = ApiApplicationServer.class) +public class DqExecuteResultServiceTest { + private static final Logger logger = LoggerFactory.getLogger(DqExecuteResultServiceTest.class); + + @InjectMocks + private DqExecuteResultServiceImpl dqExecuteResultService; + + @Mock + DqExecuteResultMapper dqExecuteResultMapper; + + @Test + public void testQueryResultListPaging() { + + String searchVal = ""; + int ruleType = 0; + Date start = DateUtils.getScheduleDate("2020-01-01 00:00:00"); + Date end = DateUtils.getScheduleDate("2020-01-02 00:00:00"); + + User loginUser = new User(); + loginUser.setId(1); + 
loginUser.setUserType(UserType.ADMIN_USER); + + Page page = new Page<>(1, 10); + page.setTotal(1); + page.setRecords(getExecuteResultList()); + when(dqExecuteResultMapper.queryResultListPaging( + any(IPage.class), eq(""), eq(loginUser.getId()), any(),eq(ruleType), eq(start), eq(end))).thenReturn(page); + + Result result = dqExecuteResultService.queryResultListPaging( + loginUser,searchVal,1,0,"2020-01-01 00:00:00","2020-01-02 00:00:00",1,10); + Assert.assertEquals(Integer.valueOf(Status.SUCCESS.getCode()),result.getCode()); + } + + public List getExecuteResultList() { + + List list = new ArrayList<>(); + DqExecuteResult dqExecuteResult = new DqExecuteResult(); + dqExecuteResult.setId(1); + dqExecuteResult.setState(DqTaskState.FAILURE.getCode()); + list.add(dqExecuteResult); + + return list; + } +} diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqRuleServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqRuleServiceTest.java new file mode 100644 index 0000000000..069edca0c1 --- /dev/null +++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DqRuleServiceTest.java @@ -0,0 +1,237 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.api.service; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.when; + +import org.apache.dolphinscheduler.api.ApiApplicationServer; +import org.apache.dolphinscheduler.api.enums.Status; +import org.apache.dolphinscheduler.api.service.impl.DqRuleServiceImpl; +import org.apache.dolphinscheduler.api.utils.Result; +import org.apache.dolphinscheduler.common.Constants; +import org.apache.dolphinscheduler.common.enums.UserType; +import org.apache.dolphinscheduler.common.utils.DateUtils; +import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.DqRule; +import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; +import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; +import org.apache.dolphinscheduler.dao.entity.User; +import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.params.base.FormType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType; +import org.apache.dolphinscheduler.spi.task.dq.enums.InputType; +import org.apache.dolphinscheduler.spi.task.dq.enums.OptionSourceType; +import org.apache.dolphinscheduler.spi.task.dq.enums.RuleType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ValueType; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import 
org.mockito.junit.MockitoJUnitRunner;

import org.springframework.boot.test.context.SpringBootTest;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

/**
 * Unit tests for {@code DqRuleServiceImpl}: form-create json generation,
 * full rule listing, datasource option lookup and paged rule queries,
 * all driven through mocked mappers.
 */
@RunWith(MockitoJUnitRunner.Silent.class)
@SpringBootTest(classes = ApiApplicationServer.class)
public class DqRuleServiceTest {

    @InjectMocks
    private DqRuleServiceImpl dqRuleService;

    @Mock
    DqRuleMapper dqRuleMapper;

    @Mock
    DqRuleInputEntryMapper dqRuleInputEntryMapper;

    @Mock
    DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper;

    @Mock
    DataSourceMapper dataSourceMapper;

    @Test
    public void testGetRuleFormCreateJsonById() {
        // Expected form-create json rendered from the three input entries
        // returned by getRuleInputEntryList() below.
        String json = "[{\"field\":\"src_connector_type\",\"name\":\"源数据类型\",\"props\":{\"placeholder\":"
                + "\"Please select the source connector type\",\"size\":\"small\"},\"type\":\"select\",\"title\":"
                + "\"源数据类型\",\"value\":\"JDBC\",\"emit\":[\"change\"],\"options\":[{\"label\":\"HIVE\",\"value\":"
                + "\"HIVE\",\"disabled\":false},{\"label\":\"JDBC\",\"value\":\"JDBC\",\"disabled\":false}]},{\"props\":"
                + "{\"disabled\":false,\"rows\":2,\"placeholder\":\"Please enter statistics name, the alias in "
                + "statistics execute sql\",\"size\":\"small\"},\"field\":\"statistics_name\",\"name\":"
                + "\"统计值名\",\"type\":\"input\",\"title\":\"统计值名\",\"validate\":[{\"required\":true,\"type\":"
                + "\"string\",\"trigger\":\"blur\"}]},{\"props\":{\"disabled\":false,\"type\":\"textarea\",\"rows\":"
                + "1,\"placeholder\":\"Please enter the statistics execute sql\",\"size\":\"small\"},\"field\":"
                + "\"statistics_execute_sql\",\"name\":\"统计值计算SQL\",\"type\":\"input\",\"title\":"
                + "\"统计值计算SQL\",\"validate\":[{\"required\":true,\"type\":\"string\",\"trigger\":\"blur\"}]}]";
        when(dqRuleInputEntryMapper.getRuleInputEntryList(1)).thenReturn(getRuleInputEntryList());
        Map<String, Object> result = dqRuleService.getRuleFormCreateJsonById(1);
        Assert.assertEquals(json, result.get(Constants.DATA_LIST));
    }

    @Test
    public void testQueryAllRuleList() {
        when(dqRuleMapper.selectList(new QueryWrapper<>())).thenReturn(getRuleList());
        Map<String, Object> result = dqRuleService.queryAllRuleList();
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    @Test
    public void testGetDatasourceOptionsById() {
        when(dataSourceMapper.listAllDataSourceByType(DbType.MYSQL.getCode())).thenReturn(dataSourceList());
        // BUGFIX: the original called queryAllRuleList() here, so the method
        // named by this test was never exercised and the dataSourceMapper
        // stub above was unused. Call the method under test instead.
        Map<String, Object> result = dqRuleService.getDatasourceOptionsById(DbType.MYSQL.getCode());
        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }

    @Test
    public void testQueryRuleListPaging() {

        String searchVal = "";
        int ruleType = 0;
        Date start = DateUtils.getScheduleDate("2020-01-01 00:00:00");
        Date end = DateUtils.getScheduleDate("2020-01-02 00:00:00");

        User loginUser = new User();
        loginUser.setId(1);
        loginUser.setUserType(UserType.ADMIN_USER);

        Page<DqRule> page = new Page<>(1, 10);
        page.setTotal(1);
        page.setRecords(getRuleList());

        when(dqRuleMapper.queryRuleListPaging(
                any(IPage.class), eq(""), eq(ruleType), eq(start), eq(end))).thenReturn(page);

        when(dqRuleInputEntryMapper.getRuleInputEntryList(1)).thenReturn(getRuleInputEntryList());
        when(dqRuleExecuteSqlMapper.getExecuteSqlList(1)).thenReturn(getRuleExecuteSqlList());

        Result result = dqRuleService.queryRuleListPaging(
                loginUser, searchVal, 0, "2020-01-01 00:00:00", "2020-01-02 00:00:00", 1, 10);
        Assert.assertEquals(Integer.valueOf(Status.SUCCESS.getCode()), result.getCode());
    }

    /** Single MYSQL datasource fixture for the option-lookup test. */
    private List<DataSource> dataSourceList() {
        List<DataSource> dataSourceList = new ArrayList<>();
        DataSource dataSource = new DataSource();
        dataSource.setId(1);
        dataSource.setName("dolphinscheduler");
        dataSource.setType(DbType.MYSQL);
        dataSource.setUserId(1);
        dataSource.setUserName("admin");
        dataSource.setConnectionParams("");
        dataSource.setCreateTime(new Date());
        dataSource.setUpdateTime(new Date());
        dataSourceList.add(dataSource);

        return dataSourceList;
    }

    /** Single null-value-check rule (id 1) used by the list/paging tests. */
    private List<DqRule> getRuleList() {
        List<DqRule> list = new ArrayList<>();
        DqRule rule = new DqRule();
        rule.setId(1);
        rule.setName("空值检测");
        rule.setType(RuleType.SINGLE_TABLE.getCode());
        rule.setUserId(1);
        rule.setUserName("admin");
        rule.setCreateTime(new Date());
        rule.setUpdateTime(new Date());

        list.add(rule);

        return list;
    }

    /**
     * Three rule input entries (connector-type select, statistics-name input,
     * statistics-sql textarea) whose serialized form is asserted in
     * {@link #testGetRuleFormCreateJsonById()}.
     */
    private List<DqRuleInputEntry> getRuleInputEntryList() {
        List<DqRuleInputEntry> list = new ArrayList<>();

        DqRuleInputEntry srcConnectorType = new DqRuleInputEntry();
        srcConnectorType.setTitle("源数据类型");
        srcConnectorType.setField("src_connector_type");
        srcConnectorType.setType(FormType.SELECT.getFormType());
        srcConnectorType.setCanEdit(true);
        srcConnectorType.setShow(true);
        srcConnectorType.setValue("JDBC");
        srcConnectorType.setPlaceholder("Please select the source connector type");
        srcConnectorType.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
        srcConnectorType.setOptions("[{\"label\":\"HIVE\",\"value\":\"HIVE\"},{\"label\":\"JDBC\",\"value\":\"JDBC\"}]");
        srcConnectorType.setInputType(InputType.DEFAULT.getCode());
        srcConnectorType.setValueType(ValueType.NUMBER.getCode());
        srcConnectorType.setEmit(true);
        srcConnectorType.setValidate(true);

        DqRuleInputEntry statisticsName = new DqRuleInputEntry();
        statisticsName.setTitle("统计值名");
        statisticsName.setField("statistics_name");
        statisticsName.setType(FormType.INPUT.getFormType());
        statisticsName.setCanEdit(true);
        statisticsName.setShow(true);
        statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql");
        statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
        statisticsName.setInputType(InputType.DEFAULT.getCode());
        statisticsName.setValueType(ValueType.STRING.getCode());
        statisticsName.setEmit(false);
        statisticsName.setValidate(true);

        DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry();
        statisticsExecuteSql.setTitle("统计值计算SQL");
        statisticsExecuteSql.setField("statistics_execute_sql");
        statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType());
        statisticsExecuteSql.setCanEdit(true);
        statisticsExecuteSql.setShow(true);
        statisticsExecuteSql.setPlaceholder("Please enter the statistics execute sql");
        statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode());
        statisticsExecuteSql.setValueType(ValueType.LIKE_SQL.getCode());
        statisticsExecuteSql.setEmit(false);
        statisticsExecuteSql.setValidate(true);

        list.add(srcConnectorType);
        list.add(statisticsName);
        list.add(statisticsExecuteSql);

        return list;
    }

    /** One comparison execute-sql definition attached to rule 1. */
    private List<DqRuleExecuteSql> getRuleExecuteSqlList() {
        List<DqRuleExecuteSql> list = new ArrayList<>();

        DqRuleExecuteSql executeSqlDefinition = new DqRuleExecuteSql();
        executeSqlDefinition.setIndex(0);
        executeSqlDefinition.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})");
        executeSqlDefinition.setTableAlias("total_count");
        executeSqlDefinition.setType(ExecuteSqlType.COMPARISON.getCode());
        list.add(executeSqlDefinition);

        return list;
    }
}
+ */ + public static final String QUESTION = "?"; + + /** + * SPACE " " + */ + public static final String SPACE = " "; + /** * SINGLE_SLASH / */ @@ -174,6 +184,11 @@ public final class Constants { */ public static final String EQUAL_SIGN = "="; + /** + * AT SIGN + */ + public static final String AT_SIGN = "@"; + /** * date format of yyyy-MM-dd HH:mm:ss */ @@ -710,6 +725,8 @@ public final class Constants { public static final String ADDRESS = "address"; public static final String DATABASE = "database"; public static final String OTHER = "other"; + public static final String USER = "user"; + public static final String JDBC_URL = "jdbcUrl"; /** * session timeout @@ -785,6 +802,11 @@ public final class Constants { public static final int DRY_RUN_FLAG_NO = 0; public static final int DRY_RUN_FLAG_YES = 1; + /** + * data.quality.error.output.path + */ + public static final String DATA_QUALITY_ERROR_OUTPUT_PATH = "data-quality.error.output.path"; + public static final String CACHE_KEY_VALUE_ALL = "'all'"; } diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java index bbf00f6895..13231bedbb 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskType.java @@ -38,7 +38,9 @@ public enum TaskType { * 11 CONDITIONS * 12 SQOOP * 13 SEATUNNEL + * 14 SWITCH * 15 PIGEON + * 16 DATA_QUALITY */ SHELL(0, "SHELL"), SQL(1, "SQL"), @@ -55,7 +57,8 @@ public enum TaskType { SQOOP(12, "SQOOP"), SEATUNNEL(13, "SEATUNNEL"), SWITCH(14, "SWITCH"), - PIGEON(15, "PIGEON"); + PIGEON(15, "PIGEON"), + DATA_QUALITY(16, "DATA_QUALITY"); TaskType(int code, String desc) { this.code = code; diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dq/DataQualityParameters.java 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dq/DataQualityParameters.java new file mode 100644 index 0000000000..db4baddeee --- /dev/null +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/dq/DataQualityParameters.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.task.dq; + +import org.apache.dolphinscheduler.common.process.ResourceInfo; +import org.apache.dolphinscheduler.common.task.AbstractParameters; +import org.apache.dolphinscheduler.common.task.spark.SparkParameters; + +import org.apache.commons.collections.MapUtils; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * DataQualityParameters + */ +public class DataQualityParameters extends AbstractParameters { + + private static final Logger logger = LoggerFactory.getLogger(DataQualityParameters.class); + + /** + * rule id + */ + private int ruleId; + /** + * rule input entry value map + */ + private Map ruleInputParameter; + /** + * spark parameters + */ + private SparkParameters sparkParameters; + + public int getRuleId() { + return ruleId; + } + + public void setRuleId(int ruleId) { + this.ruleId = ruleId; + } + + public Map getRuleInputParameter() { + return ruleInputParameter; + } + + public void setRuleInputParameter(Map ruleInputParameter) { + this.ruleInputParameter = ruleInputParameter; + } + + /** + * In this function ,we need more detailed check every parameter, + * if the parameter is non-conformant will return false + * @return boolean result + */ + @Override + public boolean checkParameters() { + + if (ruleId == 0) { + logger.error("rule id is null"); + return false; + } + + if (MapUtils.isEmpty(ruleInputParameter)) { + logger.error("rule input parameter is empty"); + return false; + } + + return sparkParameters != null; + } + + @Override + public List getResourceFilesList() { + return new ArrayList<>(); + } + + public SparkParameters getSparkParameters() { + return sparkParameters; + } + + public void setSparkParameters(SparkParameters sparkParameters) { + this.sparkParameters = sparkParameters; + } + +} diff --git 
a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java index 7bcdf43d3c..d2a3e3051b 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java +++ b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/JSONUtils.java @@ -24,7 +24,7 @@ import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKN import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL; import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS; -import org.apache.commons.lang.StringUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import java.io.IOException; import java.util.ArrayList; @@ -223,6 +223,31 @@ public class JSONUtils { return parseObject(json, new TypeReference>() {}); } + /** + * json to map + * + * @param json json + * @param classK classK + * @param classV classV + * @param K + * @param V + * @return to map + */ + public static Map toMap(String json, Class classK, Class classV) { + if (StringUtils.isEmpty(json)) { + return Collections.emptyMap(); + } + + try { + return objectMapper.readValue(json, new TypeReference>() { + }); + } catch (Exception e) { + logger.error("json to map exception!", e); + } + + return Collections.emptyMap(); + } + /** * from the key-value generated json to get the str value no matter the real type of value * @param json the json str diff --git a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java index 792f6a5577..2f5777d31e 100644 --- a/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java +++ 
b/dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TaskParametersUtils.java @@ -21,6 +21,7 @@ import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters; import org.apache.dolphinscheduler.common.task.datax.DataxParameters; import org.apache.dolphinscheduler.common.task.dependent.DependentParameters; +import org.apache.dolphinscheduler.common.task.dq.DataQualityParameters; import org.apache.dolphinscheduler.common.task.flink.FlinkParameters; import org.apache.dolphinscheduler.common.task.http.HttpParameters; import org.apache.dolphinscheduler.common.task.mr.MapReduceParameters; @@ -84,6 +85,8 @@ public class TaskParametersUtils { return JSONUtils.parseObject(parameter, ConditionsParameters.class); case "SQOOP": return JSONUtils.parseObject(parameter, SqoopParameters.class); + case "DATA_QUALITY": + return JSONUtils.parseObject(parameter, DataQualityParameters.class); case "SWITCH": return JSONUtils.parseObject(parameter, SwitchParameters.class); case "PIGEON": diff --git a/dolphinscheduler-common/src/main/resources/common.properties b/dolphinscheduler-common/src/main/resources/common.properties index 0085ec7e8c..427b3bd116 100644 --- a/dolphinscheduler-common/src/main/resources/common.properties +++ b/dolphinscheduler-common/src/main/resources/common.properties @@ -75,6 +75,13 @@ datasource.encryption.enable=false # datasource encryption salt datasource.encryption.salt=!@#$%^&* +# data quality option +data-quality.jar.name=/worker-server/libs/dolphinscheduler-data-quality-2.0.4-SNAPSHOT.jar + +#data-quality.error.output.path=/tmp/data-quality-error-data + +# Network IP gets priority, default inner outer + # Whether hive SQL is executed in the same session support.hive.oneSession=false diff --git a/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/DataQualityParameterTest.java 
b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/DataQualityParameterTest.java new file mode 100644 index 0000000000..f402209add --- /dev/null +++ b/dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/task/DataQualityParameterTest.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.common.task; + +import org.apache.dolphinscheduler.common.task.dq.DataQualityParameters; +import org.apache.dolphinscheduler.common.task.spark.SparkParameters; +import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; +import org.apache.dolphinscheduler.spi.params.base.PluginParams; +import org.apache.dolphinscheduler.spi.params.base.TriggerType; +import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.input.InputParamProps; +import org.apache.dolphinscheduler.spi.params.select.SelectParam; +import org.apache.dolphinscheduler.spi.params.select.SelectParamProps; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * DataQualityParameterTest + */ +public class DataQualityParameterTest { + + private DataQualityParameters dataQualityParameters = null; + + @Before + public void before() { + dataQualityParameters = new DataQualityParameters(); + dataQualityParameters.setRuleId(1); + dataQualityParameters.setSparkParameters(new SparkParameters()); + } + + @Test + public void testCheckParameterNormal() { + + Map inputParameterValue = new HashMap<>(); + inputParameterValue.put("src_connector_type","JDBC"); + inputParameterValue.put("src_datasource_id","1"); + inputParameterValue.put("src_table","test1"); + inputParameterValue.put("src_filter","date=2012-10-05"); + inputParameterValue.put("src_field","id"); + + inputParameterValue.put("rule_type","1"); + inputParameterValue.put("process_definition_id","1"); + inputParameterValue.put("task_instance_id","1"); + inputParameterValue.put("check_type","1"); + 
inputParameterValue.put("threshold","1000"); + inputParameterValue.put("create_time","2012-10-05"); + inputParameterValue.put("update_time","2012-10-05"); + + dataQualityParameters.setRuleInputParameter(inputParameterValue); + + Assert.assertTrue(dataQualityParameters.checkParameters()); + } + + @Test + public void testRuleInputParameter() { + String formCreateJson = "[{\"field\":\"src_connector_type\",\"name\":\"源数据类型\"," + + "\"props\":{\"disabled\":false,\"multiple\":false,\"size\":\"small\"}," + + "\"type\":\"select\",\"title\":\"源数据类型\",\"value\":\"JDBC\"," + + "\"options\":[{\"label\":\"HIVE\",\"value\":\"HIVE\",\"disabled\":false}," + + "{\"label\":\"JDBC\",\"value\":\"JDBC\",\"disabled\":false}]}," + + "{\"props\":{\"disabled\":false,\"rows\":0,\"placeholder\":\"Please enter source table name\"," + + "\"size\":\"small\"},\"field\":\"src_table\",\"name\":\"源数据表\"," + + "\"type\":\"input\",\"title\":\"源数据表\",\"validate\":[{\"required\":true,\"type\":\"string\"," + + "\"trigger\":\"blur\"}]}]"; + + List pluginParamsList = new ArrayList<>(); + SelectParamProps selectParamProps = new SelectParamProps(); + selectParamProps.setMultiple(false); + selectParamProps.setDisabled(false); + selectParamProps.setSize("small"); + + SelectParam srcConnectorType = SelectParam.newBuilder("src_connector_type","源数据类型") + .setProps(selectParamProps) + .addOptions(new ParamsOptions("HIVE","HIVE",false)) + .addOptions(new ParamsOptions("JDBC","JDBC",false)) + .setValue("JDBC") + .build(); + + InputParamProps inputParamProps = new InputParamProps(); + inputParamProps.setPlaceholder("Please enter source table name"); + inputParamProps.setDisabled(false); + inputParamProps.setSize("small"); + inputParamProps.setRows(0); + + InputParam srcTable = InputParam.newBuilder("src_table","源数据表") + .setProps(inputParamProps) + .addValidate(Validate.newBuilder().setType("string").setRequired(true).setTrigger(TriggerType.BLUR.getTriggerType()).build()) + .build(); + + 
pluginParamsList.add(srcConnectorType); + pluginParamsList.add(srcTable); + + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + String result = null; + + try { + result = mapper.writeValueAsString(pluginParamsList); + } catch (JsonProcessingException e) { + Assert.fail(); + } + + Assert.assertEquals(formCreateJson,result); + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqComparisonType.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqComparisonType.java new file mode 100644 index 0000000000..396d3e3085 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqComparisonType.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import com.fasterxml.jackson.annotation.JsonFormat; + +@TableName("t_ds_dq_comparison_type") +public class DqComparisonType implements Serializable { + /** + * primary key + */ + @TableId(value = "id", type = IdType.AUTO) + private int id; + /** + * type + */ + @TableField(value = "type") + private String type; + /** + * execute sql + */ + @TableField(value = "execute_sql") + private String executeSql; + /** + * output table + */ + @TableField(value = "output_table") + private String outputTable; + /** + * comparison name + */ + @TableField(value = "name") + private String name; + /** + * is inner source + */ + @TableField(value = "is_inner_source") + private Boolean isInnerSource; + /** + * create_time + */ + @TableField(value = "create_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + /** + * update_time + */ + @TableField(value = "update_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getExecuteSql() { + return executeSql; + } + + public void setExecuteSql(String executeSql) { + this.executeSql = executeSql; + } + + public String getOutputTable() { + return outputTable; + } + + public void setOutputTable(String outputTable) { + this.outputTable = outputTable; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Boolean getInnerSource() { + 
return isInnerSource; + } + + public void setInnerSource(Boolean innerSource) { + isInnerSource = innerSource; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "DqComparisonType{" + + "id=" + id + + ", type='" + type + '\'' + + ", executeSql='" + executeSql + '\'' + + ", outputTable='" + outputTable + '\'' + + ", name='" + name + '\'' + + ", isInnerSource='" + isInnerSource + '\'' + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java new file mode 100644 index 0000000000..4ee694701e --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResult.java @@ -0,0 +1,389 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import com.fasterxml.jackson.annotation.JsonFormat; + +@TableName("t_ds_dq_execute_result") +public class DqExecuteResult implements Serializable { + /** + * primary key + */ + @TableId(value = "id", type = IdType.AUTO) + private int id; + /** + * process defined id + */ + @TableField(value = "process_definition_id") + private long processDefinitionId; + /** + * process definition name + */ + @TableField(exist = false) + private String processDefinitionName; + /** + * process definition code + */ + @TableField(exist = false) + private long processDefinitionCode; + /** + * process instance id + */ + @TableField(value = "process_instance_id") + private long processInstanceId; + /** + * process instance name + */ + @TableField(exist = false) + private String processInstanceName; + /** + * project code + */ + @TableField(exist = false) + private long projectCode; + /** + * task instance id + */ + @TableField(value = "task_instance_id") + private long taskInstanceId; + /** + * task name + */ + @TableField(exist = false) + private String taskName; + /** + * rule type + */ + @TableField(value = "rule_type") + private int ruleType; + /** + * rule name + */ + @TableField(value = "rule_name") + private String ruleName; + /** + * statistics value + */ + @TableField(value = "statistics_value") + private double statisticsValue; + /** + * comparison value + */ + @TableField(value = "comparison_value") + private double comparisonValue; + /** + * comparison type + */ + @TableField(value = "comparison_type") + private int comparisonType; + /** + * comparison type name + */ + @TableField(exist = false) + private String comparisonTypeName; + /** + * check 
type + */ + @TableField(value = "check_type") + private int checkType; + /** + * threshold + */ + @TableField(value = "threshold") + private double threshold; + /** + * operator + */ + @TableField(value = "operator") + private int operator; + /** + * failure strategy + */ + @TableField(value = "failure_strategy") + private int failureStrategy; + /** + * user id + */ + @TableField(value = "user_id") + private int userId; + /** + * user name + */ + @TableField(exist = false) + private String userName; + /** + * state + */ + @TableField(value = "state") + private int state; + /** + * error output path + */ + @TableField(value = "error_output_path") + private String errorOutputPath; + /** + * create_time + */ + @TableField(value = "create_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + /** + * update_time + */ + @TableField(value = "update_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public long getProcessDefinitionId() { + return processDefinitionId; + } + + public void setProcessDefinitionId(long processDefinitionId) { + this.processDefinitionId = processDefinitionId; + } + + public long getTaskInstanceId() { + return taskInstanceId; + } + + public void setTaskInstanceId(long taskInstanceId) { + this.taskInstanceId = taskInstanceId; + } + + public long getProcessInstanceId() { + return processInstanceId; + } + + public void setProcessInstanceId(long processInstanceId) { + this.processInstanceId = processInstanceId; + } + + public String getProcessInstanceName() { + return processInstanceName; + } + + public void setProcessInstanceName(String processInstanceName) { + this.processInstanceName = processInstanceName; + } + + public long getProjectCode() { + return projectCode; + } + + public void setProjectCode(long projectCode) { + this.projectCode = projectCode; + 
} + + public String getRuleName() { + return ruleName; + } + + public void setRuleName(String ruleName) { + this.ruleName = ruleName; + } + + public double getStatisticsValue() { + return statisticsValue; + } + + public void setStatisticsValue(double statisticsValue) { + this.statisticsValue = statisticsValue; + } + + public double getComparisonValue() { + return comparisonValue; + } + + public void setComparisonValue(double comparisonValue) { + this.comparisonValue = comparisonValue; + } + + public double getThreshold() { + return threshold; + } + + public void setThreshold(double threshold) { + this.threshold = threshold; + } + + public int getOperator() { + return operator; + } + + public void setOperator(int operator) { + this.operator = operator; + } + + public int getFailureStrategy() { + return failureStrategy; + } + + public void setFailureStrategy(int failureStrategy) { + this.failureStrategy = failureStrategy; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public int getRuleType() { + return ruleType; + } + + public void setRuleType(int ruleType) { + this.ruleType = ruleType; + } + + public int getCheckType() { + return checkType; + } + + public void setCheckType(int checkType) { + this.checkType = checkType; + } + + public int getState() { + return state; + } + + public void setState(int state) { + this.state = state; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getProcessDefinitionName() { + return processDefinitionName; + } + + public void setProcessDefinitionName(String 
processDefinitionName) { + this.processDefinitionName = processDefinitionName; + } + + public long getProcessDefinitionCode() { + return processDefinitionCode; + } + + public void setProcessDefinitionCode(long processDefinitionCode) { + this.processDefinitionCode = processDefinitionCode; + } + + public String getTaskName() { + return taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public int getComparisonType() { + return comparisonType; + } + + public void setComparisonType(int comparisonType) { + this.comparisonType = comparisonType; + } + + public String getComparisonTypeName() { + return comparisonTypeName; + } + + public void setComparisonTypeName(String comparisonTypeName) { + this.comparisonTypeName = comparisonTypeName; + } + + public String getErrorOutputPath() { + return errorOutputPath; + } + + public void setErrorOutputPath(String errorOutputPath) { + this.errorOutputPath = errorOutputPath; + } + + @Override + public String toString() { + return "DqExecuteResult{" + + "id=" + id + + ", processDefinitionId=" + processDefinitionId + + ", processDefinitionName='" + processDefinitionName + '\'' + + ", processDefinitionCode='" + processDefinitionCode + '\'' + + ", processInstanceId=" + processInstanceId + + ", processInstanceName='" + processInstanceName + '\'' + + ", projectCode='" + projectCode + '\'' + + ", taskInstanceId=" + taskInstanceId + + ", taskName='" + taskName + '\'' + + ", ruleType=" + ruleType + + ", ruleName='" + ruleName + '\'' + + ", statisticsValue=" + statisticsValue + + ", comparisonValue=" + comparisonValue + + ", comparisonType=" + comparisonType + + ", comparisonTypeName=" + comparisonTypeName + + ", checkType=" + checkType + + ", threshold=" + threshold + + ", operator=" + operator + + ", failureStrategy=" + failureStrategy + + ", userId=" + userId + + ", userName='" + userName + '\'' + + ", state=" + state + + ", errorOutputPath=" + errorOutputPath + + ", createTime=" + createTime + + 
", updateTime=" + updateTime + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResultAlertContent.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResultAlertContent.java new file mode 100644 index 0000000000..cd7f24b081 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqExecuteResultAlertContent.java @@ -0,0 +1,257 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.entity; + +import java.io.Serializable; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; + +@JsonInclude(Include.NON_NULL) +public class DqExecuteResultAlertContent implements Serializable { + + /** + * process_defined_id + */ + @JsonProperty(value = "processDefinitionId") + private long processDefinitionId; + /** + * process define name + */ + @JsonProperty("processDefinitionName") + private String processDefinitionName; + /** + * process_instance_id + */ + @JsonProperty(value = "processInstanceId") + private long processInstanceId; + /** + * process instance name + */ + @JsonProperty("processInstanceName") + private String processInstanceName; + /** + * task_instance_id + */ + @JsonProperty(value = "taskInstanceId") + private long taskInstanceId; + /** + * task name + */ + @JsonProperty("taskName") + private String taskName; + /** + * rule_type + */ + @JsonProperty(value = "ruleType") + private int ruleType; + /** + * rule_name + */ + @JsonProperty(value = "ruleName") + private String ruleName; + /** + * statistics_value + */ + @JsonProperty(value = "statisticsValue") + private double statisticsValue; + /** + * comparison_value + */ + @JsonProperty(value = "comparisonValue") + private double comparisonValue; + /** + * check_type + */ + @JsonProperty(value = "checkType") + private int checkType; + /** + * threshold + */ + @JsonProperty(value = "threshold") + private double threshold; + /** + * operator + */ + @JsonProperty(value = "operator") + private int operator; + /** + * failure_strategy + */ + @JsonProperty(value = "failureStrategy") + private int failureStrategy; + /** + * user id + */ + @JsonProperty(value = "userId") + private int userId; + /** + * user_name + */ + @JsonProperty("userName") + private String userName; + /** + * state + */ + @JsonProperty(value = "state") + private int 
state; + + @JsonProperty(value = "errorDataPath") + private String errorDataPath; + + public DqExecuteResultAlertContent(Builder builder) { + this.processDefinitionId = builder.processDefinitionId; + this.processDefinitionName = builder.processDefinitionName; + this.processInstanceId = builder.processInstanceId; + this.processInstanceName = builder.processInstanceName; + this.taskInstanceId = builder.taskInstanceId; + this.taskName = builder.taskName; + this.ruleType = builder.ruleType; + this.ruleName = builder.ruleName; + this.statisticsValue = builder.statisticsValue; + this.comparisonValue = builder.comparisonValue; + this.checkType = builder.checkType; + this.threshold = builder.threshold; + this.operator = builder.operator; + this.failureStrategy = builder.failureStrategy; + this.userId = builder.userId; + this.userName = builder.userName; + this.state = builder.state; + this.errorDataPath = builder.errorDataPath; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public static class Builder { + private long processDefinitionId; + private String processDefinitionName; + private long processInstanceId; + private String processInstanceName; + private long taskInstanceId; + private String taskName; + private int ruleType; + private String ruleName; + private double statisticsValue; + private double comparisonValue; + private int checkType; + private double threshold; + private int operator; + private int failureStrategy; + private int userId; + private String userName; + private int state; + private String errorDataPath; + + public Builder processDefinitionId(long processDefinitionId) { + this.processDefinitionId = processDefinitionId; + return this; + } + + public Builder processDefinitionName(String processDefinitionName) { + this.processDefinitionName = processDefinitionName; + return this; + } + + public Builder processInstanceId(long processInstanceId) { + this.processInstanceId = processInstanceId; + return this; + } + + public 
Builder processInstanceName(String processInstanceName) { + this.processInstanceName = processInstanceName; + return this; + } + + public Builder taskInstanceId(long taskInstanceId) { + this.taskInstanceId = taskInstanceId; + return this; + } + + public Builder taskName(String taskName) { + this.taskName = taskName; + return this; + } + + public Builder ruleType(int ruleType) { + this.ruleType = ruleType; + return this; + } + + public Builder ruleName(String ruleName) { + this.ruleName = ruleName; + return this; + } + + public Builder statisticsValue(double statisticsValue) { + this.statisticsValue = statisticsValue; + return this; + } + + public Builder comparisonValue(double comparisonValue) { + this.comparisonValue = comparisonValue; + return this; + } + + public Builder checkType(int checkType) { + this.checkType = checkType; + return this; + } + + public Builder threshold(double threshold) { + this.threshold = threshold; + return this; + } + + public Builder operator(int operator) { + this.operator = operator; + return this; + } + + public Builder failureStrategy(int failureStrategy) { + this.failureStrategy = failureStrategy; + return this; + } + + public Builder userId(int userId) { + this.userId = userId; + return this; + } + + public Builder userName(String userName) { + this.userName = userName; + return this; + } + + public Builder state(int state) { + this.state = state; + return this; + } + + public Builder errorDataPath(String errorDataPath) { + this.errorDataPath = errorDataPath; + return this; + } + + public DqExecuteResultAlertContent build() { + return new DqExecuteResultAlertContent(this); + } + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRule.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRule.java new file mode 100644 index 0000000000..bb87db257c --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRule.java @@ -0,0 
+1,147 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; + +@TableName("t_ds_dq_rule") +public class DqRule implements Serializable { + /** + * primary key + */ + @TableId(value = "id", type = IdType.AUTO) + private int id; + /** + * name + */ + @TableField(value = "name") + private String name; + /** + * type + */ + @TableField(value = "type") + private int type; + /** + * rule json + */ + @TableField(exist = false) + private String ruleJson; + /** + * user_id + */ + @TableField(value = "user_id") + private int userId; + /** + * user_name + */ + @TableField(exist = false) + private String userName; + /** + * create_time + */ + @TableField(value = "create_time") + private Date createTime; + /** + * update_time + */ + @TableField(value = "update_time") + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + 
return name; + } + + public void setName(String name) { + this.name = name; + } + + public int getType() { + return type; + } + + public void setType(int type) { + this.type = type; + } + + public String getRuleJson() { + return ruleJson; + } + + public void setRuleJson(String ruleJson) { + this.ruleJson = ruleJson; + } + + public int getUserId() { + return userId; + } + + public void setUserId(int userId) { + this.userId = userId; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + @Override + public String toString() { + return "DqRule{" + + "id=" + id + + ", name='" + name + '\'' + + ", type=" + type + + ", userId=" + userId + + ", userName='" + userName + '\'' + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleExecuteSql.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleExecuteSql.java new file mode 100644 index 0000000000..4ce1d4adcb --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleExecuteSql.java @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import com.fasterxml.jackson.annotation.JsonFormat; + +/** + * RuleExecuteSql + */ +@TableName("t_ds_dq_rule_execute_sql") +public class DqRuleExecuteSql implements Serializable { + /** + * primary key + */ + @TableId(value = "id", type = IdType.AUTO) + private int id; + /** + * index,ensure the execution order of sql + */ + @TableField(value = "index") + private int index; + /** + * SQL Statement + */ + @TableField(value = "sql") + private String sql; + /** + * table alias name + */ + @TableField(value = "table_alias") + private String tableAlias; + /** + * execute sql type: middle,statistics,comparison + */ + @TableField(value = "type") + private int type = ExecuteSqlType.MIDDLE.getCode(); + /** + * is error output sql + */ + @TableField(value = "is_error_output_sql") + private boolean isErrorOutputSql; + /** + * create_time + */ + @TableField(value = "create_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + /** + * update_time + */ + @TableField(value = "update_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; + + public int getId() { + return 
id; + } + + public void setId(int id) { + this.id = id; + } + + public int getIndex() { + return index; + } + + public void setIndex(int index) { + this.index = index; + } + + public String getSql() { + return sql; + } + + public void setSql(String sql) { + this.sql = sql; + } + + public String getTableAlias() { + return tableAlias; + } + + public void setTableAlias(String tableAlias) { + this.tableAlias = tableAlias; + } + + public int getType() { + return type; + } + + public void setType(int type) { + this.type = type; + } + + public boolean isErrorOutputSql() { + return isErrorOutputSql; + } + + public void setErrorOutputSql(boolean errorOutputSql) { + isErrorOutputSql = errorOutputSql; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "DqRuleExecuteSql{" + + "id=" + id + + ", index=" + index + + ", sql='" + sql + '\'' + + ", tableAlias='" + tableAlias + '\'' + + ", type=" + type + + ", isErrorOutputSql=" + isErrorOutputSql + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleInputEntry.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleInputEntry.java new file mode 100644 index 0000000000..d27e6fa734 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqRuleInputEntry.java @@ -0,0 +1,300 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.spi.task.dq.enums.InputType; +import org.apache.dolphinscheduler.spi.task.dq.enums.OptionSourceType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ValueType; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import com.fasterxml.jackson.annotation.JsonFormat; + +/** + * RuleInputEntry + */ +@TableName("t_ds_dq_rule_input_entry") +public class DqRuleInputEntry implements Serializable { + /** + * primary key + */ + @TableId(value = "id", type = IdType.AUTO) + private int id; + /** + * form field name + */ + @TableField(value = "field") + private String field; + /** + * form type + */ + @TableField(value = "type") + private String type; + /** + * form title + */ + @TableField(value = "title") + private String title; + /** + * default value,can be null + */ + @TableField(value = "value") + private String value; + /** + * default options,can be null + * [{label:"",value:""}] + */ + @TableField(value = "options") + private String options; + /** + * ${field} + */ + @TableField(value = "placeholder") + private String placeholder; + /** + * the source type of options,use 
default options or other + */ + @TableField(value = "option_source_type") + private int optionSourceType = OptionSourceType.DEFAULT.getCode(); + /** + * input entry type: string,array,number .etc + */ + @TableField(value = "value_type") + private int valueType = ValueType.NUMBER.getCode(); + /** + * input entry type: default,statistics,comparison + */ + @TableField(value = "input_type") + private int inputType = InputType.DEFAULT.getCode(); + /** + * whether to display on the front end + */ + @TableField(value = "is_show") + private Boolean isShow; + /** + * whether to edit on the front end + */ + @TableField(value = "can_edit") + private Boolean canEdit; + /** + * is emit event + */ + @TableField(value = "is_emit") + private Boolean isEmit; + /** + * is validate + */ + @TableField(value = "is_validate") + private Boolean isValidate; + /** + * values map + */ + @TableField(exist = false) + private String valuesMap; + + /** + * index, the display order of the input entry + */ + @TableField(exist = false) + private Integer index; + /** + * create_time + */ + @TableField(value = "create_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + /** + * update_time + */ + @TableField(value = "update_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getField() { + return field; + } + + public void setField(String field) { + this.field = field; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getOptions() { + return options; + } + + public void setOptions(String options) { + 
this.options = options; + } + + public String getPlaceholder() { + return placeholder; + } + + public void setPlaceholder(String placeholder) { + this.placeholder = placeholder; + } + + public int getOptionSourceType() { + return optionSourceType; + } + + public void setOptionSourceType(int optionSourceType) { + this.optionSourceType = optionSourceType; + } + + public int getValueType() { + return valueType; + } + + public void setValueType(int valueType) { + this.valueType = valueType; + } + + public int getInputType() { + return inputType; + } + + public void setInputType(int inputType) { + this.inputType = inputType; + } + + public Boolean getShow() { + return isShow; + } + + public void setShow(Boolean show) { + isShow = show; + } + + public Boolean getCanEdit() { + return canEdit; + } + + public void setCanEdit(Boolean canEdit) { + this.canEdit = canEdit; + } + + public Boolean getEmit() { + return isEmit; + } + + public void setEmit(Boolean emit) { + isEmit = emit; + } + + public Boolean getValidate() { + return isValidate; + } + + public void setValidate(Boolean validate) { + isValidate = validate; + } + + public String getValuesMap() { + return valuesMap; + } + + public void setValuesMap(String valuesMap) { + this.valuesMap = valuesMap; + } + + public Integer getIndex() { + return index; + } + + public void setIndex(Integer index) { + this.index = index; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "DqRuleInputEntry{" + + "id=" + id + + ", field='" + field + '\'' + + ", type=" + type + + ", title='" + title + '\'' + + ", value='" + value + '\'' + + ", options='" + options + '\'' + + ", placeholder='" + placeholder + '\'' + + ", optionSourceType=" + 
optionSourceType + + ", valueType=" + valueType + + ", inputType=" + inputType + + ", isShow=" + isShow + + ", canEdit=" + canEdit + + ", isEmit=" + isEmit + + ", isValidate=" + isValidate + + ", valuesMap='" + valuesMap + '\'' + + ", index=" + index + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} \ No newline at end of file diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java new file mode 100644 index 0000000000..924aec860c --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/DqTaskStatisticsValue.java @@ -0,0 +1,222 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.entity; + +import java.io.Serializable; +import java.util.Date; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import com.fasterxml.jackson.annotation.JsonFormat; + +@TableName("t_ds_dq_task_statistics_value") +public class DqTaskStatisticsValue implements Serializable { + /** + * primary key + */ + @TableId(value = "id", type = IdType.AUTO) + private int id; + /** + * process defined id + */ + @TableField(value = "process_definition_id") + private long processDefinitionId; + /** + * process definition name + */ + @TableField(exist = false) + private String processDefinitionName; + /** + * task instance id + */ + @TableField(value = "task_instance_id") + private long taskInstanceId; + /** + * task name + */ + @TableField(exist = false) + private String taskName; + /** + * rule id + */ + @TableField(value = "rule_id") + private long ruleId; + /** + * rule type + */ + @TableField(exist = false) + private int ruleType; + /** + * rule name + */ + @TableField(exist = false) + private String ruleName; + /** + * statistics value + */ + @TableField(value = "statistics_value") + private double statisticsValue; + /** + * statistics name + */ + @TableField(value = "statistics_name") + private String statisticsName; + /** + * data time + */ + @TableField(value = "data_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date dataTime; + /** + * create time + */ + @TableField(value = "create_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + /** + * update time + */ + @TableField(value = "update_time") + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = 
id; + } + + public long getProcessDefinitionId() { + return processDefinitionId; + } + + public void setProcessDefinitionId(long processDefinitionId) { + this.processDefinitionId = processDefinitionId; + } + + public String getProcessDefinitionName() { + return processDefinitionName; + } + + public void setProcessDefinitionName(String processDefinitionName) { + this.processDefinitionName = processDefinitionName; + } + + public long getTaskInstanceId() { + return taskInstanceId; + } + + public void setTaskInstanceId(long taskInstanceId) { + this.taskInstanceId = taskInstanceId; + } + + public String getTaskName() { + return taskName; + } + + public void setTaskName(String taskName) { + this.taskName = taskName; + } + + public long getRuleId() { + return ruleId; + } + + public void setRuleId(long ruleId) { + this.ruleId = ruleId; + } + + public int getRuleType() { + return ruleType; + } + + public void setRuleType(int ruleType) { + this.ruleType = ruleType; + } + + public String getRuleName() { + return ruleName; + } + + public void setRuleName(String ruleName) { + this.ruleName = ruleName; + } + + public double getStatisticsValue() { + return statisticsValue; + } + + public void setStatisticsValue(double statisticsValue) { + this.statisticsValue = statisticsValue; + } + + public String getStatisticsName() { + return statisticsName; + } + + public void setStatisticsName(String statisticsName) { + this.statisticsName = statisticsName; + } + + public Date getDataTime() { + return dataTime; + } + + public void setDataTime(Date dataTime) { + this.dataTime = dataTime; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "DqTaskStatisticsValue{" + + "id=" + id + + ", 
processDefinitionId=" + processDefinitionId + + ", processDefinitionName='" + processDefinitionName + '\'' + + ", taskInstanceId=" + taskInstanceId + + ", taskName='" + taskName + '\'' + + ", ruleId=" + ruleId + + ", ruleType=" + ruleType + + ", ruleName='" + ruleName + '\'' + + ", statisticsValue=" + statisticsValue + + ", statisticsName='" + statisticsName + '\'' + + ", dataTime=" + dataTime + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskAlertContent.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskAlertContent.java new file mode 100644 index 0000000000..ab37bf2564 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskAlertContent.java @@ -0,0 +1,156 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.entity; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; + +import java.io.Serializable; +import java.util.Date; + +import com.fasterxml.jackson.annotation.JsonFormat; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; + +@JsonInclude(Include.NON_NULL) +public class TaskAlertContent implements Serializable { + @JsonProperty("taskInstanceId") + private int taskInstanceId; + @JsonProperty("taskName") + private String taskName; + @JsonProperty("taskType") + private String taskType; + @JsonProperty("processDefinitionId") + private int processDefinitionId; + @JsonProperty("processDefinitionName") + private String processDefinitionName; + @JsonProperty("processInstanceId") + private int processInstanceId; + @JsonProperty("processInstanceName") + private String processInstanceName; + @JsonProperty("state") + private ExecutionStatus state; + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + @JsonProperty("startTime") + private Date startTime; + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + @JsonProperty("endTime") + private Date endTime; + @JsonProperty("host") + private String host; + @JsonProperty("logPath") + private String logPath; + + private TaskAlertContent(Builder builder) { + this.taskInstanceId = builder.taskInstanceId; + this.taskName = builder.taskName; + this.taskType = builder.taskType; + this.processDefinitionId = builder.processDefinitionId; + this.processDefinitionName = builder.processDefinitionName; + this.processInstanceId = builder.processInstanceId; + this.processInstanceName = builder.processInstanceName; + this.state = builder.state; + this.startTime = builder.startTime; + this.endTime = builder.endTime; + this.host = builder.host; + this.logPath = builder.logPath; + } + + public static Builder newBuilder() { + return new 
Builder(); + } + + public static class Builder { + private int taskInstanceId; + private String taskName; + private String taskType; + private int processDefinitionId; + private String processDefinitionName; + private int processInstanceId; + private String processInstanceName; + private ExecutionStatus state; + private Date startTime; + private Date endTime; + private String host; + private String logPath; + + public Builder taskInstanceId(int taskInstanceId) { + this.taskInstanceId = taskInstanceId; + return this; + } + + public Builder taskName(String taskName) { + this.taskName = taskName; + return this; + } + + public Builder taskType(String taskType) { + this.taskType = taskType; + return this; + } + + public Builder processDefinitionId(int processDefinitionId) { + this.processDefinitionId = processDefinitionId; + return this; + } + + public Builder processDefinitionName(String processDefinitionName) { + this.processDefinitionName = processDefinitionName; + return this; + } + + public Builder processInstanceId(int processInstanceId) { + this.processInstanceId = processInstanceId; + return this; + } + + public Builder processInstanceName(String processInstanceName) { + this.processInstanceName = processInstanceName; + return this; + } + + public Builder state(ExecutionStatus state) { + this.state = state; + return this; + } + + public Builder startTime(Date startTime) { + this.startTime = startTime; + return this; + } + + public Builder endTime(Date endTime) { + this.endTime = endTime; + return this; + } + + public Builder host(String host) { + this.host = host; + return this; + } + + public Builder logPath(String logPath) { + this.logPath = logPath; + return this; + } + + public TaskAlertContent build() { + return new TaskAlertContent(this); + } + } +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.java 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.java new file mode 100644 index 0000000000..946a86e4f2 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DqComparisonType; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * DqComparisonTypeMapper + */ +public interface DqComparisonTypeMapper extends BaseMapper { + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.java new file mode 100644 index 0000000000..39fc26e57a --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; + +import org.apache.ibatis.annotations.Param; + +import java.util.Date; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; + +/** + * DqExecuteResultMapper + */ +public interface DqExecuteResultMapper extends BaseMapper { + + /** + * data quality task execute result page + * + * @param page page + * @param searchVal searchVal + * @param userId userId + * @param statusArray states + * @param ruleType ruleType + * @param startTime startTime + * @return endTime endTime + */ + IPage queryResultListPaging(IPage page, + @Param("searchVal") String searchVal, + @Param("userId") int userId, + @Param("states") int[] statusArray, + @Param("ruleType") int ruleType, + @Param("startTime") Date startTime, + @Param("endTime") Date endTime); + + /** + * get execute result by id + * @param taskInstanceId taskInstanceId + * @return DqExecuteResult + */ + DqExecuteResult getExecuteResultById(@Param("taskInstanceId") int taskInstanceId); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleExecuteSqlMapper.java 
b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleExecuteSqlMapper.java new file mode 100644 index 0000000000..7179f2e503 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleExecuteSqlMapper.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * DqRuleExecuteSqlMapper + */ +public interface DqRuleExecuteSqlMapper extends BaseMapper { + + /** + * get execute sql list by rule id + * + * @param ruleId Integer + */ + List getExecuteSqlList(@Param("ruleId") Integer ruleId); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.java new file mode 100644 index 0000000000..7d8c032bb5 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * DqRuleInputEntryMapper + */ +public interface DqRuleInputEntryMapper extends BaseMapper { + + /** + * get rule input entry list by rule id + * + * @param ruleId Integer + */ + List getRuleInputEntryList(@Param("ruleId") Integer ruleId); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.java new file mode 100644 index 0000000000..d5ce113925 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DqRule; + +import org.apache.ibatis.annotations.Param; + +import java.util.Date; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; +import com.baomidou.mybatisplus.core.metadata.IPage; + +/** + * DqRuleMapper + */ +public interface DqRuleMapper extends BaseMapper { + + /** + * data quality rule page + * + * @param page page + * @param searchVal searchVal + * @param ruleType ruleType + * @param startTime startTime + * @return endTime endTime + */ + IPage queryRuleListPaging(IPage page, + @Param("searchVal") String searchVal, + @Param("ruleType") int ruleType, + @Param("startTime") Date startTime, + @Param("endTime") Date endTime); +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.java new file mode 100644 index 0000000000..3c7f35f019 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.mapper; + +import org.apache.dolphinscheduler.dao.entity.DqTaskStatisticsValue; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; + +/** + * DqTaskStatisticsValueMapper + */ +public interface DqTaskStatisticsValueMapper extends BaseMapper { + +} diff --git a/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DqRuleUtils.java b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DqRuleUtils.java new file mode 100644 index 0000000000..4f6b1a43f9 --- /dev/null +++ b/dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/utils/DqRuleUtils.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.dao.utils; + +import org.apache.dolphinscheduler.common.utils.JSONUtils; +import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; + +import java.util.List; +import java.util.Map; + +/** + * DqRuleUtils + */ +public class DqRuleUtils { + + private DqRuleUtils() { + throw new IllegalStateException("Utility class"); + } + + public static List transformInputEntry(List ruleInputEntryList) { + for (DqRuleInputEntry dqRuleInputEntry : ruleInputEntryList) { + Map valuesMap = JSONUtils.toMap(dqRuleInputEntry.getValuesMap(),String.class,Object.class); + if (valuesMap != null) { + + if (valuesMap.get(dqRuleInputEntry.getField()) != null) { + String value = String.valueOf(valuesMap.get(dqRuleInputEntry.getField())); + dqRuleInputEntry.setValue(value); + } + + if (valuesMap.get("is_show") != null) { + dqRuleInputEntry.setShow(Boolean.parseBoolean(String.valueOf(valuesMap.get("is_show")))); + } + + if (valuesMap.get("can_edit") != null) { + dqRuleInputEntry.setCanEdit(Boolean.parseBoolean(String.valueOf(valuesMap.get("can_edit")))); + } + } + } + + return ruleInputEntryList; + } +} diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.xml new file mode 100644 index 0000000000..fc6e87a7f4 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqComparisonTypeMapper.xml @@ -0,0 +1,22 @@ + + + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml new file mode 100644 index 0000000000..4fa09a3c4f --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteResultMapper.xml @@ -0,0 +1,105 @@ + + + + + 
+ + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteSqlMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteSqlMapper.xml new file mode 100644 index 0000000000..6424a8df25 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqExecuteSqlMapper.xml @@ -0,0 +1,27 @@ + + + + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml new file mode 100644 index 0000000000..9da5e76532 --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleInputEntryMapper.xml @@ -0,0 +1,43 @@ + + + + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.xml new file mode 100644 index 0000000000..1a1380d82c --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqRuleMapper.xml @@ -0,0 +1,37 @@ + + + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.xml b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.xml new file mode 100644 index 0000000000..aa64be42ef --- /dev/null +++ b/dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DqTaskStatisticsValueMapper.xml @@ -0,0 +1,22 @@ + + + + + + + diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql index 3fa4d4b50e..45a452f643 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql +++ 
b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_h2.sql @@ -1012,6 +1012,784 @@ CREATE TABLE t_ds_alert_plugin_instance PRIMARY KEY (id) ); +-- +-- Table structure for table `t_ds_dq_comparison_type` +-- +DROP TABLE IF EXISTS `t_ds_dq_comparison_type`; +CREATE TABLE `t_ds_dq_comparison_type` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `type` varchar(100) NOT NULL, + `execute_sql` text DEFAULT NULL, + `output_table` varchar(100) DEFAULT NULL, + `name` varchar(100) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + `is_inner_source` tinyint(1) DEFAULT '0', + PRIMARY KEY (`id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8; + +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(1, 'FixValue', NULL, NULL, NULL, '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(2, 'DailyFluctuation', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true); +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(3, 'WeeklyFluctuation', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time = date_trunc(''MONTH'', ${data_time}) and data_time = date_add(date_trunc(''day'', ${data_time}),-7) and data_time = date_add(date_trunc(''day'', ${data_time}),-30) and data_time < 
date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true); +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); + +-- +-- Table structure for table `t_ds_dq_execute_result` +-- +DROP TABLE IF EXISTS `t_ds_dq_execute_result`; +CREATE TABLE `t_ds_dq_execute_result` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `process_definition_id` int(11) DEFAULT NULL, + `process_instance_id` int(11) DEFAULT NULL, + `task_instance_id` int(11) DEFAULT NULL, + `rule_type` int(11) DEFAULT NULL, + `rule_name` varchar(255) DEFAULT NULL, + `statistics_value` double DEFAULT NULL, + `comparison_value` double DEFAULT NULL, + `check_type` int(11) DEFAULT NULL, + `threshold` double DEFAULT NULL, + `operator` int(11) DEFAULT NULL, + `failure_strategy` int(11) DEFAULT NULL, + `state` int(11) DEFAULT NULL, + `user_id` int(11) DEFAULT NULL, + `comparison_type` int(11) DEFAULT NULL, + `error_output_path` text DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table structure for table t_ds_dq_rule +-- +DROP TABLE IF EXISTS `t_ds_dq_rule`; +CREATE TABLE `t_ds_dq_rule` ( + `id` int(11) NOT NULL 
AUTO_INCREMENT, + `name` varchar(100) DEFAULT NULL, + `type` int(11) DEFAULT NULL, + `user_id` int(11) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, 
`name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); + +-- +-- Table structure for table `t_ds_dq_rule_execute_sql` +-- +DROP TABLE IF EXISTS `t_ds_dq_rule_execute_sql`; +CREATE TABLE `t_ds_dq_rule_execute_sql` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `index` int(11) DEFAULT NULL, + `sql` text DEFAULT NULL, + `table_alias` varchar(255) DEFAULT NULL, + `type` int(11) DEFAULT NULL, + `is_error_output_sql` tinyint(1) DEFAULT '0', + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(5, 1, 'SELECT COUNT(*) 
AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(11, 1, 'SELECT * FROM 
${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, 1, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, 
`table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +-- +-- Table structure for table `t_ds_dq_rule_input_entry` +-- +DROP TABLE IF EXISTS `t_ds_dq_rule_input_entry`; +CREATE TABLE `t_ds_dq_rule_input_entry` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `field` varchar(255) DEFAULT NULL, + `type` varchar(255) DEFAULT NULL, + `title` varchar(255) DEFAULT NULL, + `value` varchar(255) DEFAULT NULL, + `options` text DEFAULT NULL, + `placeholder` varchar(255) DEFAULT NULL, + `option_source_type` int(11) DEFAULT NULL, + `value_type` int(11) DEFAULT NULL, + `input_type` int(11) DEFAULT NULL, + `is_show` tinyint(1) DEFAULT '1', + `can_edit` tinyint(1) DEFAULT '1', + `is_emit` tinyint(1) DEFAULT '0', + `is_validate` tinyint(1) DEFAULT '1', + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, 
'2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, 
`update_time`) +VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, 
`can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src 
field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, 
`input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 
1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, 
`option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +-- +-- Table structure for table `t_ds_dq_task_statistics_value` +-- +DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`; +CREATE TABLE `t_ds_dq_task_statistics_value` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `process_definition_id` int(11) DEFAULT NULL, + `task_instance_id` int(11) DEFAULT NULL, + `rule_id` int(11) NOT NULL, + `unique_code` varchar(255) NULL, + `statistics_name` varchar(255) NULL, + `statistics_value` double NULL, + `data_time` datetime DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table structure for table `t_ds_relation_rule_execute_sql` +-- +DROP TABLE IF EXISTS `t_ds_relation_rule_execute_sql`; +CREATE TABLE `t_ds_relation_rule_execute_sql` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `rule_id` int(11) DEFAULT NULL, + `execute_sql_id` int(11) DEFAULT NULL, + `create_time` datetime NULL, + `update_time` datetime NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, 
`execute_sql_id`, `create_time`, `update_time`) +VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
`t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +-- +-- Table structure for table `t_ds_relation_rule_input_entry` +-- +DROP TABLE IF EXISTS `t_ds_relation_rule_input_entry`; +CREATE TABLE `t_ds_relation_rule_input_entry` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `rule_id` int(11) DEFAULT NULL, + `rule_input_entry_id` int(11) DEFAULT NULL, + `values_map` text DEFAULT NULL, + `index` int(11) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, 
`rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(5, 1, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(9, 1, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT 
INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(22, 2, 10, NULL, 10, '2021-03-03 
11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(26, 3, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(32, 3, 14, NULL, 
8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(35, 3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, 
`create_time`, `update_time`) +VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
`t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(66, 5, 4, NULL, 4, '2021-03-03 
11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(74, 5, 10, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, 
`create_time`, `update_time`) +VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, 
`rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
`t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(127, 9, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(140, 10, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_input_entry`
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`)
+VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+
 --
 -- Table structure for table t_ds_environment
 --
diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql
index 0cbfca99c0..ca3724b989 100644
--- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql
+++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_mysql.sql
@@ -996,6 +996,784 @@ CREATE TABLE `t_ds_alert_plugin_instance` (
   PRIMARY KEY (`id`)
 ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
 
+--
+-- Table structure for table `t_ds_dq_comparison_type`
+--
+DROP TABLE IF EXISTS `t_ds_dq_comparison_type`;
+CREATE TABLE `t_ds_dq_comparison_type` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `type` varchar(100) NOT NULL,
+  `execute_sql` text DEFAULT NULL,
+  `output_table` varchar(100) DEFAULT NULL,
+  `name` varchar(100) DEFAULT NULL,
+  `create_time` datetime DEFAULT NULL,
+  `update_time` datetime DEFAULT NULL,
+  `is_inner_source` tinyint(1) DEFAULT '0',
+  PRIMARY KEY (`id`)
+)ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+INSERT INTO `t_ds_dq_comparison_type`
+(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
+VALUES(1, 'FixValue', NULL, NULL, NULL, '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
+INSERT INTO `t_ds_dq_comparison_type`
+(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
+VALUES(2, 'DailyFluctuation', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
+INSERT INTO `t_ds_dq_comparison_type`
+(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
+VALUES(3, 'WeeklyFluctuation', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'week_range', 'week_range.week_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
+INSERT INTO `t_ds_dq_comparison_type`
+(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
+VALUES(4, 'MonthlyFluctuation', 'select round(avg(statistics_value),2) as month_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''MONTH'', ${data_time}) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'month_range', 'month_range.month_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
+INSERT INTO `t_ds_dq_comparison_type`
+(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
+VALUES(5, 'Last7DayFluctuation', 'select round(avg(statistics_value),2) as last_7_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-7) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_seven_days', 'last_seven_days.last_7_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
+INSERT INTO `t_ds_dq_comparison_type`
+(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
+VALUES(6, 'Last30DayFluctuation', 'select round(avg(statistics_value),2) as last_30_avg from t_ds_dq_task_statistics_value where data_time >= date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true);
+INSERT INTO `t_ds_dq_comparison_type`
+(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
+VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
+INSERT INTO `t_ds_dq_comparison_type`
+(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`)
+VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false);
+
+--
+-- Table structure for table `t_ds_dq_execute_result`
+--
+DROP TABLE IF EXISTS `t_ds_dq_execute_result`;
+CREATE TABLE `t_ds_dq_execute_result` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `process_definition_id` int(11) DEFAULT NULL,
+  `process_instance_id` int(11) DEFAULT NULL,
+  `task_instance_id` int(11) DEFAULT NULL,
+  `rule_type` int(11) DEFAULT NULL,
+  `rule_name` varchar(255) DEFAULT NULL,
+  `statistics_value` double DEFAULT NULL,
+  `comparison_value` double DEFAULT NULL,
+  `check_type` int(11) DEFAULT NULL,
+  `threshold` double DEFAULT NULL,
+  `operator` int(11) DEFAULT NULL,
+  `failure_strategy` int(11) DEFAULT NULL,
+  `state` int(11) DEFAULT NULL,
+  `user_id` int(11) DEFAULT NULL,
+  `comparison_type` int(11) DEFAULT NULL,
+  `error_output_path` text DEFAULT NULL,
+  `create_time` datetime DEFAULT NULL,
+  `update_time` datetime DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+--
+-- Table structure for table t_ds_dq_rule
+--
+DROP TABLE IF EXISTS `t_ds_dq_rule`;
+CREATE TABLE `t_ds_dq_rule` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `name` varchar(100) DEFAULT NULL,
+  `type` int(11) DEFAULT NULL,
+  `user_id` int(11) DEFAULT NULL,
+  `create_time` datetime DEFAULT NULL,
+  `update_time` datetime DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+INSERT INTO `t_ds_dq_rule`
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`)
+VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000');
+
+--
+-- Table structure for table `t_ds_dq_rule_execute_sql`
+--
+DROP TABLE IF EXISTS `t_ds_dq_rule_execute_sql`;
+CREATE TABLE `t_ds_dq_rule_execute_sql` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `index` int(11) DEFAULT NULL,
+  `sql` text DEFAULT NULL,
+  `table_alias` varchar(255) DEFAULT NULL,
+  `type` int(11) DEFAULT NULL,
+  `is_error_output_sql` tinyint(1) DEFAULT '0',
+  `create_time` datetime DEFAULT NULL,
+  `update_time` datetime DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_execute_sql`
+(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`)
+VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+
+--
+-- Table structure for table `t_ds_dq_rule_input_entry`
+--
+DROP TABLE IF EXISTS `t_ds_dq_rule_input_entry`;
+CREATE TABLE `t_ds_dq_rule_input_entry` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `field` varchar(255) DEFAULT NULL,
+  `type` varchar(255) DEFAULT NULL,
+  `title` varchar(255) DEFAULT NULL,
+  `value` varchar(255) DEFAULT NULL,
+  `options` text DEFAULT NULL,
+  `placeholder` varchar(255) DEFAULT NULL,
+  `option_source_type` int(11) DEFAULT NULL,
+  `value_type` int(11) DEFAULT NULL,
+  `input_type` int(11) DEFAULT NULL,
+  `is_show` tinyint(1) DEFAULT '1',
+  `can_edit` tinyint(1) DEFAULT '1',
+  `is_emit` tinyint(1) DEFAULT '0',
+  `is_validate` tinyint(1) DEFAULT '1',
+  `create_time` datetime DEFAULT NULL,
+  `update_time` datetime DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_dq_rule_input_entry`
+(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`)
+VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+
+--
+-- Table structure for table `t_ds_dq_task_statistics_value`
+--
+DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`;
+CREATE TABLE `t_ds_dq_task_statistics_value` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `process_definition_id` int(11) DEFAULT NULL,
+  `task_instance_id` int(11) DEFAULT NULL,
+  `rule_id` int(11) NOT NULL,
+  `unique_code` varchar(255) NULL,
+  `statistics_name` varchar(255) NULL,
+  `statistics_value` double NULL,
+  `data_time` datetime DEFAULT NULL,
+  `create_time` datetime DEFAULT NULL,
+  `update_time` datetime DEFAULT NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+--
+-- Table structure for table `t_ds_relation_rule_execute_sql`
+--
+DROP TABLE IF EXISTS `t_ds_relation_rule_execute_sql`;
+CREATE TABLE `t_ds_relation_rule_execute_sql` (
+  `id` int(11) NOT NULL AUTO_INCREMENT,
+  `rule_id` int(11) DEFAULT NULL,
+  `execute_sql_id` int(11) DEFAULT NULL,
+  `create_time` datetime NULL,
+  `update_time` datetime NULL,
+  PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+INSERT INTO `t_ds_relation_rule_execute_sql`
+(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
+VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_execute_sql`
+(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
+VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_execute_sql`
+(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
+VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_execute_sql`
+(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
+VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_execute_sql`
+(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`)
+VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000');
+INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +-- +-- Table structure for table `t_ds_relation_rule_input_entry` +-- +DROP TABLE IF EXISTS `t_ds_relation_rule_input_entry`; 
+CREATE TABLE `t_ds_relation_rule_input_entry` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `rule_id` int(11) DEFAULT NULL, + `rule_input_entry_id` int(11) DEFAULT NULL, + `values_map` text DEFAULT NULL, + `index` int(11) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(5, 1, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
`t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(9, 1, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 
11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(22, 2, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(26, 3, 2, NULL, 2, 
'2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(32, 3, 14, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(35, 
3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, 
`rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
`t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(66, 5, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) 
+VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(74, 5, 10, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, 
`update_time`) +VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, 
`rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
`t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 
11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, 
`update_time`) +VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(127, 9, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, 
`rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(140, 10, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
`t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(149, 10, 19, NULL, 12, 
'2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0'); + -- ---------------------------- -- Table structure for t_ds_environment -- ---------------------------- diff --git a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql index ec132543a9..1e371d728e 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/dolphinscheduler_postgresql.sql @@ -983,14 +983,786 @@ CREATE TABLE t_ds_plugin_define ( DROP TABLE IF EXISTS t_ds_alert_plugin_instance; CREATE TABLE t_ds_alert_plugin_instance ( - id serial NOT NULL, - plugin_define_id int4 NOT NULL, - plugin_instance_params text NULL, - create_time timestamp NULL, - update_time timestamp NULL, - instance_name varchar(200) NULL, - CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id) + id serial NOT NULL, + plugin_define_id int4 NOT NULL, + plugin_instance_params text NULL, + create_time timestamp NULL, + update_time timestamp NULL, + instance_name varchar(200) NULL, + CONSTRAINT t_ds_alert_plugin_instance_pk PRIMARY KEY (id) +); + +-- +-- Table structure for table t_ds_dq_comparison_type +-- +DROP TABLE IF EXISTS t_ds_dq_comparison_type; +CREATE TABLE t_ds_dq_comparison_type ( + id serial NOT NULL, + "type" varchar NOT NULL, + execute_sql varchar NULL, + output_table varchar NULL, + "name" varchar NULL, + create_time timestamp NULL, + update_time timestamp NULL, + is_inner_source bool NULL, + CONSTRAINT t_ds_dq_comparison_type_pk PRIMARY KEY (id) +); +INSERT INTO t_ds_dq_comparison_type +(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) +VALUES(1, 'FixValue', NULL, NULL, NULL, 
'2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); +INSERT INTO t_ds_dq_comparison_type +(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) +VALUES(2, 'DailyFluctuation', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true); +INSERT INTO t_ds_dq_comparison_type +(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) +VALUES(3, 'WeeklyFluctuation', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time = date_trunc(''MONTH'', ${data_time}) and data_time = date_add(date_trunc(''day'', ${data_time}),-7) and data_time = date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true); +INSERT INTO t_ds_dq_comparison_type +(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) +VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); +INSERT INTO t_ds_dq_comparison_type +(id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) +VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); + +-- +-- Table 
structure for table t_ds_dq_execute_result +-- +DROP TABLE IF EXISTS t_ds_dq_execute_result; +CREATE TABLE t_ds_dq_execute_result ( + id serial NOT NULL, + process_definition_id int4 NULL, + process_instance_id int4 NULL, + task_instance_id int4 NULL, + rule_type int4 NULL, + rule_name varchar(255) DEFAULT NULL, + statistics_value float8 NULL, + comparison_value float8 NULL, + check_type int4 NULL, + threshold float8 NULL, + "operator" int4 NULL, + failure_strategy int4 NULL, + state int4 NULL, + user_id int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + comparison_type int4 NULL, + error_output_path text NULL, + CONSTRAINT t_ds_dq_execute_result_pk PRIMARY KEY (id) +); + +-- +-- Table structure for table t_ds_dq_rule +-- +DROP TABLE IF EXISTS t_ds_dq_rule; +CREATE TABLE t_ds_dq_rule ( + id serial NOT NULL, + "name" varchar(100) DEFAULT NULL, + "type" int4 NULL, + user_id int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_dq_rule_pk PRIMARY KEY (id) +); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, 
update_time) +VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO t_ds_dq_rule +(id, "name", "type", user_id, create_time, update_time) +VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); + +-- +-- Table structure for table t_ds_dq_rule_execute_sql +-- +DROP TABLE IF EXISTS t_ds_dq_rule_execute_sql; +CREATE TABLE t_ds_dq_rule_execute_sql ( + id serial NOT NULL, + "index" int4 NULL, + "sql" text NULL, + table_alias varchar(255) DEFAULT NULL, + "type" int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + is_error_output_sql bool NULL, + CONSTRAINT t_ds_dq_rule_execute_sql_pk PRIMARY KEY (id) +); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 
'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 
11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 
11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_execute_sql +(id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) +VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +-- +-- Table structure for table t_ds_dq_rule_input_entry +-- +DROP TABLE IF EXISTS t_ds_dq_rule_input_entry; +CREATE TABLE t_ds_dq_rule_input_entry ( + id serial NOT NULL, + field varchar(255) DEFAULT NULL, + "type" varchar(255) DEFAULT NULL, + title varchar(255) DEFAULT NULL, + value varchar(255) DEFAULT NULL, + "options" text DEFAULT NULL, + placeholder varchar(255) DEFAULT NULL, + option_source_type int4 NULL, + value_type int4 NULL, + input_type int4 NULL, + is_show int2 NULL DEFAULT '1'::smallint, + can_edit int2 NULL DEFAULT '1'::smallint, + is_emit int2 NULL DEFAULT '0'::smallint, + is_validate int2 NULL DEFAULT '0'::smallint, + create_time timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_dq_rule_input_entry_pk PRIMARY KEY (id) +); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", 
placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", 
placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, 
can_edit, is_emit, is_validate, create_time, update_time) +VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input 
operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 
11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', '[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', 
'[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) +VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_dq_rule_input_entry +(id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, 
update_time) +VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +-- +-- Table structure for table t_ds_dq_task_statistics_value +-- +DROP TABLE IF EXISTS t_ds_dq_task_statistics_value; +CREATE TABLE t_ds_dq_task_statistics_value ( + id serial NOT NULL, + process_definition_id int4 NOT NULL, + task_instance_id int4 NULL, + rule_id int4 NOT NULL, + unique_code varchar NOT NULL, + statistics_name varchar NULL, + statistics_value float8 NULL, + data_time timestamp(0) NULL, + create_time timestamp(0) NULL, + update_time timestamp(0) NULL, + CONSTRAINT t_ds_dq_task_statistics_value_pk PRIMARY KEY (id) +); + +-- +-- Table structure for table t_ds_relation_rule_execute_sql +-- +DROP TABLE IF EXISTS t_ds_relation_rule_execute_sql; +CREATE TABLE t_ds_relation_rule_execute_sql ( + id serial NOT NULL, + rule_id int4 NULL, + execute_sql_id int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_relation_rule_execute_sql_pk PRIMARY KEY (id) +); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_execute_sql +(id, rule_id, execute_sql_id, create_time, update_time) +VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +-- +-- Table structure for table t_ds_relation_rule_input_entry +-- +DROP TABLE IF EXISTS t_ds_relation_rule_input_entry; +CREATE TABLE t_ds_relation_rule_input_entry ( + id serial NOT NULL, + rule_id int4 NULL, + rule_input_entry_id int4 NULL, + values_map text 
NULL, + "index" int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_relation_rule_input_entry_pk PRIMARY KEY (id) ); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(5, 1, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(9, 1, 9, 
NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(22, 2, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(26, 3, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) 
+VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(32, 3, 14, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(35, 3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 
11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, 
rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, 
"index", create_time, update_time) +VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(66, 5, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(74, 5, 10, NULL, 12, 
'2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) 
+VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', 
'2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(127, 9, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, 
update_time) +VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(140, 10, 2, NULL, 2, '2021-03-03 
11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
t_ds_relation_rule_input_entry +(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) +VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); -- -- Table structure for table t_ds_environment diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/mysql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/mysql/dolphinscheduler_ddl.sql index 22ae472a19..e12b696090 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/mysql/dolphinscheduler_ddl.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/mysql/dolphinscheduler_ddl.sql @@ -25,4 +25,146 @@ ALTER TABLE `t_ds_task_definition_log` ADD INDEX `idx_code_version` (`code`,`ver alter table t_ds_task_definition_log add `task_group_id` int(11) DEFAULT NULL COMMENT 'task group id' AFTER `resource_ids`; alter table t_ds_task_definition_log add `task_group_priority` int(11) DEFAULT NULL COMMENT 'task group id' AFTER `task_group_id`; alter table t_ds_task_definition add `task_group_id` int(11) DEFAULT NULL COMMENT 'task group id' AFTER `resource_ids`; -alter table t_ds_task_definition add `task_group_priority` int(11) DEFAULT '0' COMMENT 'task group id' AFTER `task_group_id`; \ No newline at end of file +alter table t_ds_task_definition add `task_group_priority` int(11) DEFAULT '0' COMMENT 'task group id' AFTER `task_group_id`; + +-- +-- Table structure for table `t_ds_dq_comparison_type` +-- +DROP TABLE IF EXISTS `t_ds_dq_comparison_type`; +CREATE TABLE `t_ds_dq_comparison_type` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `type` varchar(100) NOT NULL, + `execute_sql` text DEFAULT NULL, + `output_table` varchar(100) DEFAULT NULL, + `name` varchar(100) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + `is_inner_source` tinyint(1) DEFAULT '0', + PRIMARY KEY (`id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table 
structure for table t_ds_dq_execute_result +-- +DROP TABLE IF EXISTS `t_ds_dq_execute_result`; +CREATE TABLE `t_ds_dq_execute_result` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `process_definition_id` int(11) DEFAULT NULL, + `process_instance_id` int(11) DEFAULT NULL, + `task_instance_id` int(11) DEFAULT NULL, + `rule_type` int(11) DEFAULT NULL, + `rule_name` varchar(255) DEFAULT NULL, + `statistics_value` double DEFAULT NULL, + `comparison_value` double DEFAULT NULL, + `check_type` int(11) DEFAULT NULL, + `threshold` double DEFAULT NULL, + `operator` int(11) DEFAULT NULL, + `failure_strategy` int(11) DEFAULT NULL, + `state` int(11) DEFAULT NULL, + `user_id` int(11) DEFAULT NULL, + `comparison_type` int(11) DEFAULT NULL, + `error_output_path` text DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table structure for table t_ds_dq_rule +-- +DROP TABLE IF EXISTS `t_ds_dq_rule`; +CREATE TABLE `t_ds_dq_rule` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(100) DEFAULT NULL, + `type` int(11) DEFAULT NULL, + `user_id` int(11) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table structure for table t_ds_dq_rule_execute_sql +-- +DROP TABLE IF EXISTS `t_ds_dq_rule_execute_sql`; +CREATE TABLE `t_ds_dq_rule_execute_sql` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `index` int(11) DEFAULT NULL, + `sql` text DEFAULT NULL, + `table_alias` varchar(255) DEFAULT NULL, + `type` int(11) DEFAULT NULL, + `is_error_output_sql` tinyint(1) DEFAULT '0', + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table structure for table t_ds_dq_rule_input_entry +-- +DROP TABLE IF EXISTS `t_ds_dq_rule_input_entry`; +CREATE TABLE `t_ds_dq_rule_input_entry` ( + `id` 
int(11) NOT NULL AUTO_INCREMENT, + `field` varchar(255) DEFAULT NULL, + `type` varchar(255) DEFAULT NULL, + `title` varchar(255) DEFAULT NULL, + `value` varchar(255) DEFAULT NULL, + `options` text DEFAULT NULL, + `placeholder` varchar(255) DEFAULT NULL, + `option_source_type` int(11) DEFAULT NULL, + `value_type` int(11) DEFAULT NULL, + `input_type` int(11) DEFAULT NULL, + `is_show` tinyint(1) DEFAULT '1', + `can_edit` tinyint(1) DEFAULT '1', + `is_emit` tinyint(1) DEFAULT '0', + `is_validate` tinyint(1) DEFAULT '1', + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table structure for table t_ds_dq_task_statistics_value +-- +DROP TABLE IF EXISTS `t_ds_dq_task_statistics_value`; +CREATE TABLE `t_ds_dq_task_statistics_value` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `process_definition_id` int(11) DEFAULT NULL, + `task_instance_id` int(11) DEFAULT NULL, + `rule_id` int(11) NOT NULL, + `unique_code` varchar(255) NULL, + `statistics_name` varchar(255) NULL, + `statistics_value` double NULL, + `data_time` datetime DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table structure for table t_ds_relation_rule_execute_sql +-- +DROP TABLE IF EXISTS `t_ds_relation_rule_execute_sql`; +CREATE TABLE `t_ds_relation_rule_execute_sql` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `rule_id` int(11) DEFAULT NULL, + `execute_sql_id` int(11) DEFAULT NULL, + `create_time` datetime NULL, + `update_time` datetime NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; + +-- +-- Table structure for table t_ds_relation_rule_input_entry +-- +DROP TABLE IF EXISTS `t_ds_relation_rule_input_entry`; +CREATE TABLE `t_ds_relation_rule_input_entry` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `rule_id` int(11) DEFAULT NULL, + `rule_input_entry_id` int(11) DEFAULT NULL, + 
`values_map` text DEFAULT NULL, + `index` int(11) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `update_time` datetime DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/mysql/dolphinscheduler_dml.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/mysql/dolphinscheduler_dml.sql index 4a14f326b9..d6c145d0a9 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/mysql/dolphinscheduler_dml.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/mysql/dolphinscheduler_dml.sql @@ -14,3 +14,639 @@ * See the License for the specific language governing permissions and * limitations under the License. */ + +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(1, 'FixValue', NULL, NULL, NULL, '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(2, 'DailyFluctuation', 'select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'day_range', 'day_range.day_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true); +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(3, 'WeeklyFluctuation', 'select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time = date_trunc(''MONTH'', ${data_time}) and data_time = date_add(date_trunc(''day'', 
${data_time}),-7) and data_time = date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''', 'last_thirty_days', 'last_thirty_days.last_30_avg', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', true); +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(7, 'SrcTableTotalRows', 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); +INSERT INTO `t_ds_dq_comparison_type` +(`id`, `type`, `execute_sql`, `output_table`, `name`, `create_time`, `update_time`, `is_inner_source`) +VALUES(8, 'TargetTableTotalRows', 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 'total_count.total', '2021-06-30 00:00:00.000', '2021-06-30 00:00:00.000', false); + +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(1, '$t(null_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(2, '$t(custom_sql)', 1, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(3, '$t(multi_table_accuracy)', 2, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(4, '$t(multi_table_value_comparison)', 3, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(5, '$t(field_length_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` 
+(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(6, '$t(uniqueness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(7, '$t(regexp_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(8, '$t(timeliness_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(9, '$t(enumeration_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); +INSERT INTO `t_ds_dq_rule` +(`id`, `name`, `type`, `user_id`, `create_time`, `update_time`) +VALUES(10, '$t(table_count_check)', 0, 1, '2020-01-12 00:00:00.000', '2020-01-12 00:00:00.000'); + +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(1, 1, 'SELECT COUNT(*) AS nulls FROM null_items', 'null_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(2, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(3, 1, 'SELECT COUNT(*) AS miss from miss_items', 'miss_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(4, 1, 'SELECT COUNT(*) AS valids FROM invalid_length_items', 
'invalid_length_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(5, 1, 'SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})', 'total_count', 2, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(6, 1, 'SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1', 'duplicate_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(7, 1, 'SELECT COUNT(*) AS duplicates FROM duplicate_items', 'duplicate_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(8, 1, 'SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON ${on_clause} WHERE ${where_clause}', 'miss_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(9, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ', 'regexp_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(10, 1, 'SELECT COUNT(*) AS regexps FROM regexp_items', 
'regexp_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(11, 1, 'SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ', 'timeliness_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(12, 1, 'SELECT COUNT(*) AS timeliness FROM timeliness_items', 'timeliness_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(13, 1, 'SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND (${src_filter}) ', 'enum_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(14, 1, 'SELECT COUNT(*) AS enums FROM enum_items', 'enum_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(15, 1, 'SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})', 'table_count', 1, false, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) 
+VALUES(16, 1, 'SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})', 'null_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_execute_sql` +(`id`, `index`, `sql`, `table_alias`, `type`, `is_error_output_sql`, `create_time`, `update_time`) +VALUES(17, 1, 'SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})', 'invalid_length_items', 0, true, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(1, 'src_connector_type', 'select', '$t(src_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'please select source connector type', 2, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(2, 'src_datasource_id', 'select', '$t(src_datasource_id)', '', NULL, 'please select source datasource id', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(3, 'src_table', 'select', '$t(src_table)', NULL, NULL, 'Please enter source table name', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, 
`option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(4, 'src_filter', 'input', '$t(src_filter)', NULL, NULL, 'Please enter filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(5, 'src_field', 'select', '$t(src_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(6, 'statistics_name', 'input', '$t(statistics_name)', NULL, NULL, 'Please enter statistics name, the alias in statistics execute sql', 0, 0, 1, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(7, 'check_type', 'select', '$t(check_type)', '0', '[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]', 'please select check type', 0, 0, 3, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, 
`create_time`, `update_time`) +VALUES(8, 'operator', 'select', '$t(operator)', '0', '[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]', 'please select operator', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(9, 'threshold', 'input', '$t(threshold)', NULL, NULL, 'Please enter threshold, number is needed', 0, 2, 3, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(10, 'failure_strategy', 'select', '$t(failure_strategy)', '0', '[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]', 'please select failure strategy', 0, 0, 3, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(11, 'target_connector_type', 'select', '$t(target_connector_type)', '', '[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]', 'Please select target connector type', 2, 0, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) 
+VALUES(12, 'target_datasource_id', 'select', '$t(target_datasource_id)', '', NULL, 'Please select target datasource', 1, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(13, 'target_table', 'select', '$t(target_table)', NULL, NULL, 'Please enter target table', 0, 0, 0, 1, 1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(14, 'target_filter', 'input', '$t(target_filter)', NULL, NULL, 'Please enter target filter expression', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(15, 'mapping_columns', 'group', '$t(mapping_columns)', NULL, '[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]', 'please enter mapping columns', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, 
`placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(16, 'statistics_execute_sql', 'textarea', '$t(statistics_execute_sql)', NULL, NULL, 'Please enter statistics execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(17, 'comparison_name', 'input', '$t(comparison_name)', NULL, NULL, 'Please enter comparison name, the alias in comparison execute sql', 0, 0, 0, 0, 0, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(18, 'comparison_execute_sql', 'textarea', '$t(comparison_execute_sql)', NULL, NULL, 'Please enter comparison execute sql', 0, 3, 0, 1, 1, 0, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(19, 'comparison_type', 'select', '$t(comparison_type)', '', NULL, 'Please enter comparison title', 3, 0, 2, 1, 0, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(20, 'writer_connector_type', 'select', '$t(writer_connector_type)', '', 
'[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]', 'please select writer connector type', 0, 2, 0, 1, 1, 1, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(21, 'writer_datasource_id', 'select', '$t(writer_datasource_id)', '', NULL, 'please select writer datasource id', 1, 2, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(22, 'target_field', 'select', '$t(target_field)', NULL, NULL, 'Please enter column, only single column is supported', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(23, 'field_length', 'input', '$t(field_length)', NULL, NULL, 'Please enter length limit', 0, 3, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(24, 'logic_operator', 'select', '$t(logic_operator)', '=', '[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]', 'please select logic operator', 0, 0, 3, 1, 1, 0, 0, 
'2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(25, 'regexp_pattern', 'input', '$t(regexp_pattern)', NULL, NULL, 'Please enter regexp pattern', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(26, 'deadline', 'input', '$t(deadline)', NULL, NULL, 'Please enter deadline', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(27, 'datetime_format', 'input', '$t(datetime_format)', NULL, NULL, 'Please enter datetime format', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(28, 'enum_list', 'input', '$t(enum_list)', NULL, NULL, 'Please enter enumeration', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_dq_rule_input_entry` +(`id`, `field`, `type`, `title`, `value`, `options`, `placeholder`, `option_source_type`, `value_type`, `input_type`, `is_show`, `can_edit`, `is_emit`, `is_validate`, `create_time`, `update_time`) +VALUES(29, 'begin_time', 'input', '$t(begin_time)', NULL, 
NULL, 'Please enter begin time', 0, 0, 0, 1, 1, 0, 0, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0'); + +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(1, 1, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(3, 5, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(2, 3, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(4, 3, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(5, 6, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(6, 6, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(7, 7, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(8, 7, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(9, 8, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(10, 8, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO 
`t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(11, 9, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(12, 9, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(13, 10, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(14, 1, 16, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_execute_sql` +(`id`, `rule_id`, `execute_sql_id`, `create_time`, `update_time`) +VALUES(15, 5, 17, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); + +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(1, 1, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(2, 1, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(3, 1, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(4, 1, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(5, 1, 5, NULL, 5, '2021-03-03 
11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(6, 1, 6, '{"statistics_name":"null_count.nulls"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(7, 1, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(8, 1, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(9, 1, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(10, 1, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(11, 1, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(12, 1, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(13, 2, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, 
`update_time`) +VALUES(14, 2, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(15, 2, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(16, 2, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(17, 2, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(18, 2, 4, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(19, 2, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(20, 2, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(21, 2, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(22, 2, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, 
`rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(24, 2, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(25, 3, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(26, 3, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(27, 3, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(28, 3, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(29, 3, 11, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(30, 3, 12, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(31, 3, 13, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(32, 3, 14, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` 
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(33, 3, 15, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(34, 3, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(35, 3, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(36, 3, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(37, 3, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(38, 3, 17, '{"comparison_name":"total_count.total"}', 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(39, 3, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(40, 4, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(41, 4, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 
11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(42, 4, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(43, 4, 6, '{"is_show":"true","can_edit":"true"}', 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(44, 4, 16, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(45, 4, 11, NULL, 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(46, 4, 12, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(47, 4, 13, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(48, 4, 17, '{"is_show":"true","can_edit":"true"}', 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(49, 4, 18, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, 
`create_time`, `update_time`) +VALUES(50, 4, 7, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(51, 4, 8, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(52, 4, 9, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(53, 4, 10, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(62, 3, 6, '{"statistics_name":"miss_count.miss"}', 18, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(63, 5, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(64, 5, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(65, 5, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(66, 5, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, 
`rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(67, 5, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(68, 5, 6, '{"statistics_name":"invalid_length_count.valids"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(69, 5, 24, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(70, 5, 23, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(71, 5, 7, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(72, 5, 8, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(73, 5, 9, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(74, 5, 10, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(75, 5, 17, '', 13, '2021-03-03 11:31:24.000', '2021-03-03 
11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(76, 5, 19, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(79, 6, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(80, 6, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(81, 6, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(82, 6, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(83, 6, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(84, 6, 6, '{"statistics_name":"duplicate_count.duplicates"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(85, 6, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) 
+VALUES(86, 6, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(87, 6, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(88, 6, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(89, 6, 17, '', 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(90, 6, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(93, 7, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(94, 7, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(95, 7, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(96, 7, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, 
`update_time`) +VALUES(97, 7, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(98, 7, 6, '{"statistics_name":"regexp_count.regexps"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(99, 7, 25, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(100, 7, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(101, 7, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(102, 7, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(103, 7, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(104, 7, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(105, 7, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, 
`rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(108, 8, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(109, 8, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(110, 8, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(111, 8, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(112, 8, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(113, 8, 6, '{"statistics_name":"timeliness_count.timeliness"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(114, 8, 26, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(115, 8, 27, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(116, 8, 7, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 
11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(117, 8, 8, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(118, 8, 9, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(119, 8, 10, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(120, 8, 17, NULL, 14, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(121, 8, 19, NULL, 15, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(124, 9, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(125, 9, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(126, 9, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(127, 9, 4, NULL, 4, '2021-03-03 
11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(128, 9, 5, NULL, 5, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(129, 9, 6, '{"statistics_name":"enum_count.enums"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(130, 9, 28, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(131, 9, 7, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(132, 9, 8, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(133, 9, 9, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(134, 9, 10, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(135, 9, 17, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, 
`create_time`, `update_time`) +VALUES(136, 9, 19, NULL, 13, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(139, 10, 1, NULL, 1, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(140, 10, 2, NULL, 2, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(141, 10, 3, NULL, 3, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(142, 10, 4, NULL, 4, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(143, 10, 6, '{"statistics_name":"table_count.total"}', 6, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(144, 10, 7, NULL, 7, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(145, 10, 8, NULL, 8, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(146, 10, 9, NULL, 9, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` 
+(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(147, 10, 10, NULL, 10, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(148, 10, 17, NULL, 11, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO `t_ds_relation_rule_input_entry` +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(149, 10, 19, NULL, 12, '2021-03-03 11:31:24.000', '2021-03-03 11:31:24.000'); +INSERT INTO t_ds_relation_rule_input_entry +(`id`, `rule_id`, `rule_input_entry_id`, `values_map`, `index`, `create_time`, `update_time`) +VALUES(150, 8, 29, NULL, 7, '2021-03-03 11:31:24.0', '2021-03-03 11:31:24.0'); diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_ddl.sql b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_ddl.sql index b76908de39..3e35d60566 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_ddl.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_ddl.sql @@ -38,12 +38,123 @@ EXECUTE 'CREATE INDEX IF NOT EXISTS process_task_relation_log_idx_project_code_p EXECUTE 'DROP INDEX IF EXISTS "idx_task_definition_log_code_version"'; EXECUTE 'CREATE INDEX IF NOT EXISTS idx_task_definition_log_code_version ON ' || quote_ident(v_schema) ||'.t_ds_task_definition_log USING Btree("code","version")'; +EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_comparison_type" ( + id serial NOT NULL, + "type" varchar NOT NULL, + execute_sql varchar NULL, + output_table varchar NULL, + "name" varchar NULL, + create_time timestamp NULL, + update_time timestamp NULL, + is_inner_source bool NULL, + CONSTRAINT 
t_ds_dq_comparison_type_pk PRIMARY KEY (id) + )'; + +EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_execute_result" ( + id serial NOT NULL, + process_definition_id int4 NULL, + process_instance_id int4 NULL, + task_instance_id int4 NULL, + rule_type int4 NULL, + rule_name varchar(255) DEFAULT NULL, + statistics_value float8 NULL, + comparison_value float8 NULL, + check_type int4 NULL, + threshold float8 NULL, + "operator" int4 NULL, + failure_strategy int4 NULL, + state int4 NULL, + user_id int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + comparison_type int4 NULL, + error_output_path text NULL, + CONSTRAINT t_ds_dq_execute_result_pk PRIMARY KEY (id) +)'; + +EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_rule" ( + id serial NOT NULL, + "name" varchar(100) DEFAULT NULL, + "type" int4 NULL, + user_id int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_dq_rule_pk PRIMARY KEY (id) +)'; + +EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_rule_execute_sql" ( + id serial NOT NULL, + "index" int4 NULL, + "sql" text NULL, + table_alias varchar(255) DEFAULT NULL, + "type" int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + is_error_output_sql bool NULL, + CONSTRAINT t_ds_dq_rule_execute_sql_pk PRIMARY KEY (id) +)'; + +EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_rule_input_entry" ( + id serial NOT NULL, + field varchar(255) DEFAULT NULL, + "type" varchar(255) DEFAULT NULL, + title varchar(255) DEFAULT NULL, + value varchar(255) DEFAULT NULL, + "options" text DEFAULT NULL, + placeholder varchar(255) DEFAULT NULL, + option_source_type int4 NULL, + value_type int4 NULL, + input_type int4 NULL, + is_show int2 NULL DEFAULT "1"::smallint, + can_edit int2 NULL DEFAULT "1"::smallint, + is_emit int2 NULL DEFAULT "0"::smallint, + is_validate int2 NULL DEFAULT "0"::smallint, + create_time 
timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_dq_rule_input_entry_pk PRIMARY KEY (id) +)'; + +EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_dq_task_statistics_value" ( + id serial NOT NULL, + process_definition_id int4 NOT NULL, + task_instance_id int4 NULL, + rule_id int4 NOT NULL, + unique_code varchar NOT NULL, + statistics_name varchar NULL, + statistics_value float8 NULL, + data_time timestamp(0) NULL, + create_time timestamp(0) NULL, + update_time timestamp(0) NULL, + CONSTRAINT t_ds_dq_task_statistics_value_pk PRIMARY KEY (id) +)'; + +EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_relation_rule_execute_sql" ( + id serial NOT NULL, + rule_id int4 NULL, + execute_sql_id int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_relation_rule_execute_sql_pk PRIMARY KEY (id) +)'; + +EXECUTE 'CREATE TABLE IF NOT EXISTS' || quote_ident(v_schema) ||'."t_ds_relation_rule_input_entry" ( + id serial NOT NULL, + rule_id int4 NULL, + rule_input_entry_id int4 NULL, + values_map text NULL, + "index" int4 NULL, + create_time timestamp NULL, + update_time timestamp NULL, + CONSTRAINT t_ds_relation_rule_input_entry_pk PRIMARY KEY (id) +)'; + EXECUTE 'DROP INDEX IF EXISTS "idx_task_definition_log_project_code"'; EXECUTE 'CREATE INDEX IF NOT EXISTS idx_task_definition_log_project_code ON ' || quote_ident(v_schema) ||'.t_ds_task_definition_log USING Btree("project_code")'; EXECUTE 'DROP INDEX IF EXISTS "idx_task_instance_code_version"'; EXECUTE 'CREATE INDEX IF NOT EXISTS idx_task_instance_code_version ON' || quote_ident(v_schema) ||'.t_ds_task_instance USING Btree("task_code","task_definition_version")'; + return 'Success!'; exception when others then ---Raise EXCEPTION '(%)',SQLERRM; diff --git a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_dml.sql 
b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_dml.sql index 4a14f326b9..7f7080ae10 100644 --- a/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_dml.sql +++ b/dolphinscheduler-dao/src/main/resources/sql/upgrade/2.1.0_schema/postgresql/dolphinscheduler_dml.sql @@ -14,3 +14,663 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +delimiter d// +CREATE OR REPLACE FUNCTION public.dolphin_insert_dq_initial_data( + ) + RETURNS character varying + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE +AS $BODY$ +DECLARE +v_schema varchar; +BEGIN + ---get schema name + v_schema =current_schema(); + +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_comparison_type + (id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) + VALUES(1, "FixValue", NULL, NULL, NULL, "2021-06-30 00:00:00.000", "2021-06-30 00:00:00.000", false)'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_comparison_type + (id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) + VALUES(2, "DailyFluctuation", "select round(avg(statistics_value),2) as day_avg from t_ds_dq_task_statistics_value where data_time >=date_trunc(''DAY'', ${data_time}) and data_time < date_add(date_trunc(''day'', ${data_time}),1) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''", "day_range", "day_range.day_avg", "2021-06-30 00:00:00.000", "2021-06-30 00:00:00.000", true)'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_comparison_type + (id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) + VALUES(3, "WeeklyFluctuation", "select round(avg(statistics_value),2) as week_avg from t_ds_dq_task_statistics_value where data_time >= date_trunc(''WEEK'', ${data_time}) and data_time = date_trunc(''MONTH'', ${data_time}) and 
data_time = date_add(date_trunc(''day'', ${data_time}),-7) and data_time = date_add(date_trunc(''day'', ${data_time}),-30) and data_time < date_trunc(''day'', ${data_time}) and unique_code = ${unique_code} and statistics_name = ''${statistics_name}''", "last_thirty_days", "last_thirty_days.last_30_avg", "2021-06-30 00:00:00.000", "2021-06-30 00:00:00.000", true)'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_comparison_type + (id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) + VALUES(7, "SrcTableTotalRows", "SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})@, "total_count", "total_count.total", "2021-06-30 00:00:00.000", "2021-06-30 00:00:00.000", false)'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_comparison_type + (id, "type", execute_sql, output_table, "name", create_time, update_time, is_inner_source) + VALUES(8, "TargetTableTotalRows", "SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})@, "total_count", "total_count.total", "2021-06-30 00:00:00.000", "2021-06-30 00:00:00.000", false)'; + +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(1, "$t(null_check)", 0, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(2, "$t(custom_sql)", 1, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(3, "$t(multi_table_accuracy)", 2, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(4, "$t(multi_table_value_comparison)", 3, 1, "2020-01-12 00:00:00.000", "2020-01-12 
00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(5, "$t(field_length_check)", 0, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(6, "$t(uniqueness_check)", 0, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(7, "$t(regexp_check)", 0, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(8, "$t(timeliness_check)", 0, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(9, "$t(enumeration_check)", 0, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule + (id, "name", "type", user_id, create_time, update_time) + VALUES(10, "$t(table_count_check)", 0, 1, "2020-01-12 00:00:00.000", "2020-01-12 00:00:00.000")'; + +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(1, 1, "SELECT COUNT(*) AS nulls FROM null_items", "null_count", 1, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(2, 1, "SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})@, "total_count", 2, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 
'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(3, 1, "SELECT COUNT(*) AS miss from miss_items", "miss_count", 1, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(4, 1, "SELECT COUNT(*) AS valids FROM invalid_length_items", "invalid_length_count", 1, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(5, 1, "SELECT COUNT(*) AS total FROM ${target_table} WHERE (${target_filter})@, "total_count", 2, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(6, 1, "SELECT ${src_field} FROM ${src_table} group by ${src_field} having count(*) > 1", "duplicate_items", 0, true, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(7, 1, "SELECT COUNT(*) AS duplicates FROM duplicate_items", "duplicate_count", 1, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(8, 1, "SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) ${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) ${target_table} ON 
${on_clause} WHERE ${where_clause}", "miss_items", 0, true, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(9, 1, "SELECT * FROM ${src_table} WHERE (${src_field} not regexp ''${regexp_pattern}'') AND (${src_filter}) ", "regexp_items", 0, true, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(10, 1, "SELECT COUNT(*) AS regexps FROM regexp_items", "regexp_count", 1, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(11, 1, "SELECT * FROM ${src_table} WHERE (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${deadline}'', ''${datetime_format}'') <= 0) AND (to_unix_timestamp(${src_field}, ''${datetime_format}'')-to_unix_timestamp(''${begin_time}'', ''${datetime_format}'') >= 0) AND (${src_filter}) ", "timeliness_items", 0, true, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(12, 1, "SELECT COUNT(*) AS timeliness FROM timeliness_items", "timeliness_count", 1, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(13, 1, "SELECT * FROM ${src_table} where (${src_field} not in ( ${enum_list} ) or ${src_field} is null) AND 
(${src_filter}) ", "enum_items", 0, true, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(14, 1, "SELECT COUNT(*) AS enums FROM enum_items", "enum_count", 1, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(15, 1, "SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})", "table_count", 1, false, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(16, 1, "SELECT * FROM ${src_table} WHERE (${src_field} is null or ${src_field} = '''') AND (${src_filter})", "null_items", 0, true, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_execute_sql + (id, "index", "sql", table_alias, "type", is_error_output_sql, create_time, update_time) + VALUES(17, 1, "SELECT * FROM ${src_table} WHERE (length(${src_field}) ${logic_operator} ${field_length}) AND (${src_filter})", "invalid_length_items", 0, true, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; + +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(1, "src_connector_type", "select", "$t(src_connector_type)", '', "[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]", "please select source connector type", 2, 2, 0, 1, 1, 1, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; 
+EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(2, "src_datasource_id", "select", "$t(src_datasource_id)", '', NULL, "please select source datasource id", 1, 2, 0, 1, 1, 1, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(3, "src_table", "select", "$t(src_table)", NULL, NULL, "Please enter source table name", 0, 0, 0, 1, 1, 1, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(4, "src_filter", "input", "$t(src_filter)", NULL, NULL, "Please enter filter expression", 0, 3, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(5, "src_field", "select", "$t(src_field)", NULL, NULL, "Please enter column, only single column is supported", 0, 0, 0, 1, 1, 0, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(6, 
"statistics_name", "input", "$t(statistics_name)", NULL, NULL, "Please enter statistics name, the alias in statistics execute sql", 0, 0, 1, 0, 0, 0, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(7, "check_type", "select", "$t(check_type)", "0", "[{"label":"Expected - Actual","value":"0"},{"label":"Actual - Expected","value":"1"},{"label":"Actual / Expected","value":"2"},{"label":"(Expected - Actual) / Expected","value":"3"}]", "please select check type", 0, 0, 3, 1, 1, 1, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(8, "operator", "select", "$t(operator)", "0", "[{"label":"=","value":"0"},{"label":"<","value":"1"},{"label":"<=","value":"2"},{"label":">","value":"3"},{"label":">=","value":"4"},{"label":"!=","value":"5"}]", "please select operator", 0, 0, 3, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(9, "threshold", "input", "$t(threshold)", NULL, NULL, "Please enter threshold, number is needed", 0, 2, 3, 1, 1, 0, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, 
can_edit, is_emit, is_validate, create_time, update_time) + VALUES(10, "failure_strategy", "select", "$t(failure_strategy)", "0", "[{"label":"Alert","value":"0"},{"label":"Block","value":"1"}]", "please select failure strategy", 0, 0, 3, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(11, "target_connector_type", "select", "$t(target_connector_type)", '', "[{"label":"HIVE","value":"HIVE"},{"label":"JDBC","value":"JDBC"}]", "Please select target connector type", 2, 0, 0, 1, 1, 1, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(12, "target_datasource_id", "select", "$t(target_datasource_id)", '', NULL, "Please select target datasource", 1, 2, 0, 1, 1, 1, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(13, "target_table", "select", "$t(target_table)", NULL, NULL, "Please enter target table", 0, 0, 0, 1, 1, 1, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(14, "target_filter", "input", "$t(target_filter)", 
NULL, NULL, "Please enter target filter expression", 0, 3, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(15, "mapping_columns", "group", "$t(mapping_columns)", NULL, "[{"field":"src_field","props":{"placeholder":"Please input src field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"src_field"},{"field":"operator","props":{"placeholder":"Please input operator","rows":0,"disabled":false,"size":"small"},"type":"input","title":"operator"},{"field":"target_field","props":{"placeholder":"Please input target field","rows":0,"disabled":false,"size":"small"},"type":"input","title":"target_field"}]", "please enter mapping columns", 0, 0, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(16, "statistics_execute_sql", "textarea", "$t(statistics_execute_sql)", NULL, NULL, "Please enter statistics execute sql", 0, 3, 0, 1, 1, 0, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(17, "comparison_name", "input", "$t(comparison_name)", NULL, NULL, "Please enter comparison name, the alias in comparison execute sql", 0, 0, 0, 0, 0, 0, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) 
||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(18, "comparison_execute_sql", "textarea", "$t(comparison_execute_sql)", NULL, NULL, "Please enter comparison execute sql", 0, 3, 0, 1, 1, 0, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(19, "comparison_type", "select", "$t(comparison_type)", '', NULL, "Please enter comparison title", 3, 0, 2, 1, 0, 1, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(20, "writer_connector_type", "select", "$t(writer_connector_type)", '', "[{"label":"MYSQL","value":"0"},{"label":"POSTGRESQL","value":"1"}]", "please select writer connector type", 0, 2, 0, 1, 1, 1, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(21, "writer_datasource_id", "select", "$t(writer_datasource_id)", '', NULL, "please select writer datasource id", 1, 2, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, 
can_edit, is_emit, is_validate, create_time, update_time) + VALUES(22, "target_field", "select", "$t(target_field)", NULL, NULL, "Please enter column, only single column is supported", 0, 0, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(23, "field_length", "input", "$t(field_length)", NULL, NULL, "Please enter length limit", 0, 3, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(24, "logic_operator", "select", "$t(logic_operator)", "=", "[{"label":"=","value":"="},{"label":"<","value":"<"},{"label":"<=","value":"<="},{"label":">","value":">"},{"label":">=","value":">="},{"label":"<>","value":"<>"}]", "please select logic operator", 0, 0, 3, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(25, "regexp_pattern", "input", "$t(regexp_pattern)", NULL, NULL, "Please enter regexp pattern", 0, 0, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(26, "deadline", "input", 
"$t(deadline)", NULL, NULL, "Please enter deadline", 0, 0, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(27, "datetime_format", "input", "$t(datetime_format)", NULL, NULL, "Please enter datetime format", 0, 0, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(28, "enum_list", "input", "$t(enum_list)", NULL, NULL, "Please enter enumeration", 0, 0, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_dq_rule_input_entry + (id, field, "type", title, value, "options", placeholder, option_source_type, value_type, input_type, is_show, can_edit, is_emit, is_validate, create_time, update_time) + VALUES(29, "begin_time", "input", "$t(begin_time)", NULL, NULL, "Please enter begin time", 0, 0, 0, 1, 1, 0, 0, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; + +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(1, 1, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(3, 5, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(2, 3, 3, 
"2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(4, 3, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(5, 6, 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(6, 6, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(7, 7, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(8, 7, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(9, 8, 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(10, 8, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(11, 9, 13, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(12, 9, 14, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; 
+EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(13, 10, 15, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(14, 1, 16, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_execute_sql + (id, rule_id, execute_sql_id, create_time, update_time) + VALUES(15, 5, 17, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; + +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(1, 1, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(2, 1, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(3, 1, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(4, 1, 4, NULL, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(5, 1, 5, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, 
rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(6, 1, 6, "{"statistics_name":"null_count.nulls"}", 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(7, 1, 7, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(8, 1, 8, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(9, 1, 9, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(10, 1, 10, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(11, 1, 17, '', 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(12, 1, 19, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(13, 2, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || 
quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(14, 2, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(15, 2, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(16, 2, 6, "{"is_show":"true","can_edit":"true"}", 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(17, 2, 16, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(18, 2, 4, NULL, 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(19, 2, 7, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(20, 2, 8, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(21, 2, 9, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 
11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(22, 2, 10, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(24, 2, 19, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(25, 3, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(26, 3, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(27, 3, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(28, 3, 4, NULL, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(29, 3, 11, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(30, 3, 12, NULL, 6, "2021-03-03 
11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(31, 3, 13, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(32, 3, 14, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(33, 3, 15, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(34, 3, 7, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(35, 3, 8, NULL, 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(36, 3, 9, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(37, 3, 10, NULL, 13, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(38, 3, 17, 
"{"comparison_name":"total_count.total"}", 14, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(39, 3, 19, NULL, 15, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(40, 4, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(41, 4, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(42, 4, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(43, 4, 6, "{"is_show":"true","can_edit":"true"}", 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(44, 4, 16, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(45, 4, 11, NULL, 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, 
rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(46, 4, 12, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(47, 4, 13, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(48, 4, 17, "{"is_show":"true","can_edit":"true"}", 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(49, 4, 18, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(50, 4, 7, NULL, 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(51, 4, 8, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(52, 4, 9, NULL, 13, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(53, 4, 10, NULL, 14, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || 
quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(62, 3, 6, "{"statistics_name":"miss_count.miss"}", 18, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(63, 5, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(64, 5, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(65, 5, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(66, 5, 4, NULL, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(67, 5, 5, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(68, 5, 6, "{"statistics_name":"invalid_length_count.valids"}", 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(69, 5, 
24, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(70, 5, 23, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(71, 5, 7, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(72, 5, 8, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(73, 5, 9, NULL, 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(74, 5, 10, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(75, 5, 17, '', 13, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(76, 5, 19, NULL, 14, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) 
+ VALUES(79, 6, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(80, 6, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(81, 6, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(82, 6, 4, NULL, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(83, 6, 5, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(84, 6, 6, "{"statistics_name":"duplicate_count.duplicates"}", 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(85, 6, 7, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(86, 6, 8, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, 
rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(87, 6, 9, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(88, 6, 10, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(89, 6, 17, '', 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(90, 6, 19, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(93, 7, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(94, 7, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(95, 7, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(96, 7, 4, NULL, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + 
(id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(97, 7, 5, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(98, 7, 6, "{"statistics_name":"regexp_count.regexps"}", 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(99, 7, 25, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(100, 7, 7, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(101, 7, 8, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(102, 7, 9, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(103, 7, 10, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(104, 7, 17, NULL, 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || 
quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(105, 7, 19, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(108, 8, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(109, 8, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(110, 8, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(111, 8, 4, NULL, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(112, 8, 5, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(113, 8, 6, "{"statistics_name":"timeliness_count.timeliness"}", 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(114, 8, 26, NULL, 8, "2021-03-03 
11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(115, 8, 27, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(116, 8, 7, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(117, 8, 8, NULL, 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(118, 8, 9, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(119, 8, 10, NULL, 13, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(120, 8, 17, NULL, 14, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(121, 8, 19, NULL, 15, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(124, 
9, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(125, 9, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(126, 9, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(127, 9, 4, NULL, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(128, 9, 5, NULL, 5, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(129, 9, 6, "{"statistics_name":"enum_count.enums"}", 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(130, 9, 28, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(131, 9, 7, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, 
values_map, "index", create_time, update_time) + VALUES(132, 9, 8, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(133, 9, 9, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(134, 9, 10, NULL, 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(135, 9, 17, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(136, 9, 19, NULL, 13, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(139, 10, 1, NULL, 1, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(140, 10, 2, NULL, 2, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(141, 10, 3, NULL, 3, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, 
rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(142, 10, 4, NULL, 4, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(143, 10, 6, "{"statistics_name":"table_count.total"}", 6, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(144, 10, 7, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(145, 10, 8, NULL, 8, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(146, 10, 9, NULL, 9, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(147, 10, 10, NULL, 10, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(148, 10, 17, NULL, 11, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' || quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(149, 10, 19, NULL, 12, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; +EXECUTE 'INSERT INTO' 
|| quote_ident(v_schema) ||'.t_ds_relation_rule_input_entry + (id, rule_id, rule_input_entry_id, values_map, "index", create_time, update_time) + VALUES(150, 8, 29, NULL, 7, "2021-03-03 11:31:24.000", "2021-03-03 11:31:24.000")'; + +return 'Success!'; +exception when others then + ---Raise EXCEPTION '(%)',SQLERRM; + return SQLERRM; +END; +$BODY$; + +select dolphin_insert_dq_initial_data(); + +d// \ No newline at end of file diff --git a/dolphinscheduler-data-quality/pom.xml b/dolphinscheduler-data-quality/pom.xml new file mode 100644 index 0000000000..2dedfa25d5 --- /dev/null +++ b/dolphinscheduler-data-quality/pom.xml @@ -0,0 +1,203 @@ + + + + + dolphinscheduler + org.apache.dolphinscheduler + 2.0.4-SNAPSHOT + + 4.0.0 + dolphinscheduler-data-quality + dolphinscheduler-data-quality + + jar + + + 2.11 + 2.4.0 + 2.9.0 + provided + + + + + org.apache.spark + spark-core_${scala.binary.version} + ${spark.version} + ${scope} + + + jackson-module-scala_2.11 + com.fasterxml.jackson.module + + + + + + org.apache.spark + spark-sql_${scala.binary.version} + ${spark.version} + ${scope} + + + jackson-core + com.fasterxml.jackson.core + + + + + + org.apache.spark + spark-hive_${scala.binary.version} + ${spark.version} + + + commons-httpclient + commons-httpclient + + + org.apache.httpcomponents + httpclient + + + jackson-core-asl + org.codehaus.jackson + + + jackson-mapper-asl + org.codehaus.jackson + + + ${scope} + + + + com.h2database + h2 + test + + + + mysql + mysql-connector-java + + + + org.postgresql + postgresql + + + + ru.yandex.clickhouse + clickhouse-jdbc + + + jackson-core + com.fasterxml.jackson.core + + + + + + com.microsoft.sqlserver + mssql-jdbc + + + azure-keyvault + com.microsoft.azure + + + + + + com.facebook.presto + presto-jdbc + + + + com.google.guava + guava + + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + ${scope} + + + jackson-core + com.fasterxml.jackson.core + + + + + + com.fasterxml.jackson.module + 
jackson-module-scala_2.11 + ${jackson.version} + ${scope} + + + jackson-core + com.fasterxml.jackson.core + + + + + + org.codehaus.janino + janino + 3.0.8 + ${scope} + + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.2 + + false + + jar-with-dependencies + + + + org.apache.dolphinscheduler.data.quality.DataQualityApplication + + + + + + make-assembly + package + + assembly + + + + + + + \ No newline at end of file diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/Constants.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/Constants.java new file mode 100644 index 0000000000..87d1994407 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/Constants.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Constants — shared string keys used by the data quality Spark application
 * when reading configuration maps and when talking to Spark/JDBC data sources.
 */
public final class Constants {

    private Constants() {
        // utility holder: instantiation is a programming error
        throw new IllegalStateException("Construct Constants");
    }

    // --- connection parameters ---
    public static final String DATABASE = "database";

    public static final String TABLE = "table";

    public static final String URL = "url";

    public static final String USER = "user";

    public static final String PASSWORD = "password";

    public static final String DRIVER = "driver";

    // --- generic values ---
    public static final String EMPTY = "";

    public static final String SQL = "sql";

    public static final String DOTS = ".";

    // --- intermediate table naming ---
    public static final String INPUT_TABLE = "input_table";

    public static final String OUTPUT_TABLE = "output_table";

    public static final String TMP_TABLE = "tmp_table";

    // --- Spark JDBC datasource options ---
    public static final String DB_TABLE = "dbtable";

    public static final String JDBC = "jdbc";

    public static final String SAVE_MODE = "save_mode";

    public static final String APPEND = "append";

    // --- Spark runtime configuration ---
    public static final String SPARK_APP_NAME = "spark.app.name";
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality; + +import static org.apache.dolphinscheduler.data.quality.Constants.SPARK_APP_NAME; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration; +import org.apache.dolphinscheduler.data.quality.config.EnvConfig; +import org.apache.dolphinscheduler.data.quality.context.DataQualityContext; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.utils.JsonUtils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Strings; + +/** + * DataQualityApplication is spark application. + * It mainly includes three components: reader, transformer and writer. 
+ * These three components realize the functions of connecting data, executing intermediate SQL + * and writing execution results and error data to the specified storage engine + */ +public class DataQualityApplication { + + private static final Logger logger = LoggerFactory.getLogger(DataQualityApplication.class); + + public static void main(String[] args) throws Exception { + + if (args.length < 1) { + logger.error("Can not find DataQualityConfiguration"); + System.exit(-1); + } + + String dataQualityParameter = args[0]; + + DataQualityConfiguration dataQualityConfiguration = JsonUtils.fromJson(dataQualityParameter,DataQualityConfiguration.class); + if (dataQualityConfiguration == null) { + logger.info("DataQualityConfiguration is null"); + System.exit(-1); + } else { + dataQualityConfiguration.validate(); + } + + EnvConfig envConfig = dataQualityConfiguration.getEnvConfig(); + Config config = new Config(envConfig.getConfig()); + config.put("type",envConfig.getType()); + if (Strings.isNullOrEmpty(config.getString(SPARK_APP_NAME))) { + config.put(SPARK_APP_NAME,dataQualityConfiguration.getName()); + } + + SparkRuntimeEnvironment sparkRuntimeEnvironment = new SparkRuntimeEnvironment(config); + DataQualityContext dataQualityContext = new DataQualityContext(sparkRuntimeEnvironment,dataQualityConfiguration); + dataQualityContext.execute(); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/BaseConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/BaseConfig.java new file mode 100644 index 0000000000..647016abf6 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/BaseConfig.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.config; + +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; + +/** + * BaseConfig + */ +public class BaseConfig implements IConfig { + + @JsonProperty("type") + private String type; + + @JsonProperty("config") + private Map config; + + public BaseConfig() { + } + + public BaseConfig(String type, Map config) { + this.type = type; + this.config = config; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Map getConfig() { + return config; + } + + public void setConfig(Map config) { + this.config = config; + } + + @Override + public void validate() { + Preconditions.checkArgument(!Strings.isNullOrEmpty(type), "type should not be empty"); + Preconditions.checkArgument(config != null, "config should not be empty"); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/Config.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/Config.java new file mode 100644 index 0000000000..fbf7e70f84 --- /dev/null +++ 
/**
 * Config is a thin wrapper around a String-keyed configuration map that
 * provides typed accessors for common value types.
 */
public class Config {

    // backing store; never null (the map constructor falls back to this empty map)
    private Map<String, Object> configuration = new HashMap<>();

    public Config() {

    }

    public Config(Map<String, Object> configuration) {
        // tolerate a null map so callers do not need a null-check
        if (configuration != null) {
            this.configuration = configuration;
        }
    }

    /**
     * @return the value rendered as a String, or null when the key is absent
     */
    public String getString(String key) {
        Object value = configuration.get(key);
        return value == null ? null : String.valueOf(value);
    }

    /**
     * @return the stored value cast to a List of String, or null when absent.
     *         NOTE(review): unchecked cast — assumes the stored value really is a List&lt;String&gt;
     */
    @SuppressWarnings("unchecked")
    public List<String> getStringList(String key) {
        return (List<String>) configuration.get(key);
    }

    // the numeric/boolean accessors below go through String.valueOf, so a missing key
    // yields the string "null" and the numeric ones throw NumberFormatException
    public Integer getInt(String key) {
        return Integer.valueOf(String.valueOf(configuration.get(key)));
    }

    public Boolean getBoolean(String key) {
        return Boolean.valueOf(String.valueOf(configuration.get(key)));
    }

    public Double getDouble(String key) {
        return Double.valueOf(String.valueOf(configuration.get(key)));
    }

    public Long getLong(String key) {
        return Long.valueOf(String.valueOf(configuration.get(key)));
    }

    /**
     * @return true when the key is present with a non-null value
     */
    public Boolean has(String key) {
        return configuration.get(key) != null;
    }

    public Set<Entry<String, Object>> entrySet() {
        return configuration.entrySet();
    }

    public boolean isEmpty() {
        return configuration.isEmpty();
    }

    public boolean isNotEmpty() {
        return !configuration.isEmpty();
    }

    public void put(String key, Object value) {
        this.configuration.put(key, value);
    }

    /**
     * Merges the given map into this configuration; existing keys win.
     */
    public void merge(Map<String, Object> configuration) {
        configuration.forEach(this.configuration::putIfAbsent);
    }

    /**
     * @return the live backing map (mutations are visible to this Config)
     */
    public Map<String, Object> configurationMap() {
        return this.configuration;
    }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.config; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.base.Preconditions; +import com.google.common.base.Strings; + +/** + * DataQualityConfiguration + */ +public class DataQualityConfiguration implements IConfig { + + @JsonProperty("name") + private String name; + + @JsonProperty("env") + private EnvConfig envConfig; + + @JsonProperty("readers") + private List readerConfigs; + + @JsonProperty("transformers") + private List transformerConfigs; + + @JsonProperty("writers") + private List writerConfigs; + + public DataQualityConfiguration() {} + + public DataQualityConfiguration(String name, + EnvConfig envConfig, + List readerConfigs, + List writerConfigs, + List transformerConfigs) { + this.name = name; + this.envConfig = envConfig; + this.readerConfigs = readerConfigs; + this.writerConfigs = writerConfigs; + this.transformerConfigs = transformerConfigs; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public EnvConfig getEnvConfig() { + return envConfig; + } + + public void setEnvConfig(EnvConfig envConfig) { + this.envConfig = envConfig; + } + + public List getReaderConfigs() { + return readerConfigs; + } + + public void setReaderConfigs(List readerConfigs) { + this.readerConfigs = readerConfigs; + } + + public List getTransformerConfigs() { + return transformerConfigs; + } + + public void setTransformerConfigs(List transformerConfigs) { + this.transformerConfigs 
= transformerConfigs; + } + + public List getWriterConfigs() { + return writerConfigs; + } + + public void setWriterConfigs(List writerConfigs) { + this.writerConfigs = writerConfigs; + } + + @Override + public void validate() { + Preconditions.checkArgument(!Strings.isNullOrEmpty(name), "name should not be empty"); + + Preconditions.checkArgument(envConfig != null, "env config should not be empty"); + + Preconditions.checkArgument(readerConfigs != null, "reader config should not be empty"); + for (ReaderConfig readerConfig : readerConfigs) { + readerConfig.validate(); + } + + Preconditions.checkArgument(transformerConfigs != null, "transform config should not be empty"); + for (TransformerConfig transformParameter : transformerConfigs) { + transformParameter.validate(); + } + + Preconditions.checkArgument(writerConfigs != null, "writer config should not be empty"); + for (WriterConfig writerConfig :writerConfigs) { + writerConfig.validate(); + } + } + + @Override + public String toString() { + return "DataQualityConfiguration{" + + "name='" + name + '\'' + + ", envConfig=" + envConfig + + ", readerConfigs=" + readerConfigs + + ", transformerConfigs=" + transformerConfigs + + ", writerConfigs=" + writerConfigs + + '}'; + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/EnvConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/EnvConfig.java new file mode 100644 index 0000000000..91d6854992 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/EnvConfig.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.config; + +import java.util.Map; + +/** + * EnvConfig + */ +public class EnvConfig extends BaseConfig { + + public EnvConfig() { + } + + public EnvConfig(String type, Map config) { + super(type,config); + } + +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/IConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/IConfig.java new file mode 100644 index 0000000000..fede660f8a --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/IConfig.java @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.config; + +/** + * IConfig + */ +public interface IConfig { + + /** + * check the parameter + */ + void validate(); +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ReaderConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ReaderConfig.java new file mode 100644 index 0000000000..72aa556865 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ReaderConfig.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.config; + +import java.util.Map; + +/** + * ReaderConfig + */ +public class ReaderConfig extends BaseConfig { + + public ReaderConfig() {} + + public ReaderConfig(String type, Map config) { + super(type, config); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/TransformerConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/TransformerConfig.java new file mode 100644 index 0000000000..59d24c6b3b --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/TransformerConfig.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.config; + +import java.util.Map; + +/** + * TransformerConfig + */ +public class TransformerConfig extends BaseConfig { + + public TransformerConfig() {} + + public TransformerConfig(String type, Map config) { + super(type, config); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ValidateResult.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ValidateResult.java new file mode 100644 index 0000000000..d74534e803 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/ValidateResult.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.config; + +public class ValidateResult { + + private boolean success; + + private String msg; + + public ValidateResult(boolean success, String msg) { + this.success = success; + this.msg = msg; + } + + public boolean isSuccess() { + return success; + } + + public void setSuccess(boolean success) { + this.success = success; + } + + public String getMsg() { + return msg; + } + + public void setMsg(String msg) { + this.msg = msg; + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/WriterConfig.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/WriterConfig.java new file mode 100644 index 0000000000..75c5049311 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/config/WriterConfig.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.config; + +import java.util.Map; + +/** + * WriterConfig + */ +public class WriterConfig extends BaseConfig { + + public WriterConfig() {} + + public WriterConfig(String type, Map config) { + super(type, config); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/context/DataQualityContext.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/context/DataQualityContext.java new file mode 100644 index 0000000000..494bbad364 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/context/DataQualityContext.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.context; + +import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration; +import org.apache.dolphinscheduler.data.quality.exception.DataQualityException; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter; +import org.apache.dolphinscheduler.data.quality.flow.batch.reader.ReaderFactory; +import org.apache.dolphinscheduler.data.quality.flow.batch.transformer.TransformerFactory; +import org.apache.dolphinscheduler.data.quality.flow.batch.writer.WriterFactory; + +import java.util.List; + +/** + * DataQualityContext + */ +public class DataQualityContext { + + private SparkRuntimeEnvironment sparkRuntimeEnvironment; + + private DataQualityConfiguration dataQualityConfiguration; + + public DataQualityContext() { + } + + public DataQualityContext(SparkRuntimeEnvironment sparkRuntimeEnvironment, + DataQualityConfiguration dataQualityConfiguration) { + this.sparkRuntimeEnvironment = sparkRuntimeEnvironment; + this.dataQualityConfiguration = dataQualityConfiguration; + } + + public void execute() throws DataQualityException { + List readers = ReaderFactory + .getInstance() + .getReaders(this.sparkRuntimeEnvironment,dataQualityConfiguration.getReaderConfigs()); + List transformers = TransformerFactory + .getInstance() + .getTransformer(this.sparkRuntimeEnvironment,dataQualityConfiguration.getTransformerConfigs()); + List writers = WriterFactory + .getInstance() + .getWriters(this.sparkRuntimeEnvironment,dataQualityConfiguration.getWriterConfigs()); + + if (sparkRuntimeEnvironment.isBatch()) { + sparkRuntimeEnvironment.getBatchExecution().execute(readers,transformers,writers); + } else { + throw new DataQualityException("stream mode is not supported 
now"); + } + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/ReaderType.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/ReaderType.java new file mode 100644 index 0000000000..b541367d96 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/ReaderType.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.enums; + +/** + * ReaderType + */ +public enum ReaderType { + /** + * JDBC + * HIVE + */ + JDBC, + HIVE; + + public static ReaderType getType(String name) { + for (ReaderType type: ReaderType.values()) { + if (type.name().equalsIgnoreCase(name)) { + return type; + } + } + + return null; + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/TransformerType.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/TransformerType.java new file mode 100644 index 0000000000..b98adc4163 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/TransformerType.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.enums; + +/** + * TransformerType + */ +public enum TransformerType { + /** + * JDBC + */ + SQL; + + public static TransformerType getType(String name) { + for (TransformerType type: TransformerType.values()) { + if (type.name().equalsIgnoreCase(name)) { + return type; + } + } + + return null; + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/WriterType.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/WriterType.java new file mode 100644 index 0000000000..369de959be --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/enums/WriterType.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.enums; + +/** + * WriterType + */ +public enum WriterType { + /** + * JDBC + */ + JDBC, + LOCAL_FILE, + HDFS_FILE; + + public static WriterType getType(String name) { + for (WriterType type: WriterType.values()) { + if (type.name().equalsIgnoreCase(name)) { + return type; + } + } + + return null; + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/ConfigRuntimeException.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/ConfigRuntimeException.java new file mode 100644 index 0000000000..057090138b --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/ConfigRuntimeException.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.exception; + +/** + * ConfigRuntimeException + */ +public class ConfigRuntimeException extends RuntimeException { + + public ConfigRuntimeException() { + super(); + } + + public ConfigRuntimeException(String message) { + super(message); + } + + public ConfigRuntimeException(String message, Throwable cause) { + super(message, cause); + } + + public ConfigRuntimeException(Throwable cause) { + super(cause); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/DataQualityException.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/DataQualityException.java new file mode 100644 index 0000000000..34df8ad6cf --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/exception/DataQualityException.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.exception; + +/** + * data quality exception + */ +public class DataQualityException extends Exception { + + public DataQualityException() { + super(); + } + + /** + * Construct a new runtime exception with the detail message + * + * @param message detail message + */ + public DataQualityException(String message) { + super(message); + } + + /** + * Construct a new runtime exception with the detail message and cause + * + * @param message the detail message + * @param cause the cause + * @since 1.4 + */ + public DataQualityException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Construct a new runtime exception with throwable + * + * @param cause the cause + */ + public DataQualityException(Throwable cause) { + super(cause); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/Execution.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/Execution.java new file mode 100644 index 0000000000..1644b2adde --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/Execution.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.execution; + +import org.apache.dolphinscheduler.data.quality.flow.Component; + +import java.util.List; + +/** + * Execution + */ +public interface Execution { + /** + * execute + * @param readers readers + * @param transformers transformers + * @param writers writers + */ + void execute(List readers, List transformers, List writers); +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkBatchExecution.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkBatchExecution.java new file mode 100644 index 0000000000..b805253dc3 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkBatchExecution.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.execution; + +import static org.apache.dolphinscheduler.data.quality.Constants.INPUT_TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.OUTPUT_TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.TMP_TABLE; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.exception.ConfigRuntimeException; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +import java.util.List; + +/** + * SparkBatchExecution is responsible for executing readers、transformers and writers + */ +public class SparkBatchExecution implements Execution { + + private final SparkRuntimeEnvironment environment; + + public SparkBatchExecution(SparkRuntimeEnvironment environment) throws ConfigRuntimeException { + this.environment = environment; + } + + @Override + public void execute(List readers, List transformers, List writers) { + readers.forEach(reader -> registerInputTempView(reader, environment)); + + if (!readers.isEmpty()) { + Dataset ds = readers.get(0).read(environment); + for (BatchTransformer tf:transformers) { + ds = executeTransformer(environment, tf, ds); + registerTransformTempView(tf, ds); + } + + for (BatchWriter sink: writers) { + executeWriter(environment, sink, ds); + } + } + + environment.sparkSession().stop(); + } + + private void registerTempView(String tableName, Dataset ds) { + if (ds != null) { + ds.createOrReplaceTempView(tableName); + } else { + throw new ConfigRuntimeException("dataset is null, can not createOrReplaceTempView"); + } + } + + private void registerInputTempView(BatchReader reader, SparkRuntimeEnvironment environment) { + Config conf = reader.getConfig(); + if 
(Boolean.TRUE.equals(conf.has(OUTPUT_TABLE))) { + String tableName = conf.getString(OUTPUT_TABLE); + registerTempView(tableName, reader.read(environment)); + } else { + throw new ConfigRuntimeException( + "[" + reader.getClass().getName() + "] must be registered as dataset, please set \"output_table\" config"); + } + } + + private Dataset executeTransformer(SparkRuntimeEnvironment environment, BatchTransformer transformer, Dataset dataset) { + Config config = transformer.getConfig(); + Dataset inputDataset; + Dataset outputDataset = null; + if (Boolean.TRUE.equals(config.has(INPUT_TABLE))) { + String[] tableNames = config.getString(INPUT_TABLE).split(","); + + for (String sourceTableName: tableNames) { + inputDataset = environment.sparkSession().read().table(sourceTableName); + + if (outputDataset == null) { + outputDataset = inputDataset; + } else { + outputDataset = outputDataset.union(inputDataset); + } + } + } else { + outputDataset = dataset; + } + + if (Boolean.TRUE.equals(config.has(TMP_TABLE))) { + if (outputDataset == null) { + outputDataset = dataset; + } + String tableName = config.getString(TMP_TABLE); + registerTempView(tableName, outputDataset); + } + + return transformer.transform(outputDataset, environment); + } + + private void registerTransformTempView(BatchTransformer transformer, Dataset ds) { + Config config = transformer.getConfig(); + if (Boolean.TRUE.equals(config.has(OUTPUT_TABLE))) { + String tableName = config.getString(OUTPUT_TABLE); + registerTempView(tableName, ds); + } + } + + private void executeWriter(SparkRuntimeEnvironment environment, BatchWriter writer, Dataset ds) { + Config config = writer.getConfig(); + Dataset inputDataSet = ds; + if (Boolean.TRUE.equals(config.has(INPUT_TABLE))) { + String sourceTableName = config.getString(INPUT_TABLE); + inputDataSet = environment.sparkSession().read().table(sourceTableName); + } + writer.write(inputDataSet, environment); + } +} diff --git 
a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkRuntimeEnvironment.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkRuntimeEnvironment.java new file mode 100644 index 0000000000..82cf941aed --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/execution/SparkRuntimeEnvironment.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.execution; + +import org.apache.dolphinscheduler.data.quality.config.Config; + +import org.apache.spark.SparkConf; +import org.apache.spark.sql.SparkSession; + +/** + * The SparkRuntimeEnvironment is responsible for creating SparkSession and SparkExecution + */ +public class SparkRuntimeEnvironment { + + private static final String TYPE = "type"; + private static final String BATCH = "batch"; + + private SparkSession sparkSession; + + private Config config = new Config(); + + public SparkRuntimeEnvironment(Config config) { + if (config != null) { + this.config = config; + } + + this.prepare(); + } + + public Config getConfig() { + return this.config; + } + + public void prepare() { + sparkSession = SparkSession.builder().config(createSparkConf()).getOrCreate(); + } + + private SparkConf createSparkConf() { + SparkConf conf = new SparkConf(); + this.config.entrySet() + .forEach(entry -> conf.set(entry.getKey(), String.valueOf(entry.getValue()))); + conf.set("spark.sql.crossJoin.enabled","true"); + return conf; + } + + public SparkSession sparkSession() { + return sparkSession; + } + + public boolean isBatch() { + return BATCH.equalsIgnoreCase(config.getString(TYPE)); + } + + public SparkBatchExecution getBatchExecution() { + return new SparkBatchExecution(this); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/Component.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/Component.java new file mode 100644 index 0000000000..ee5cae205a --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/Component.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ValidateResult; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +/** + * Component + */ +public interface Component { + + Config getConfig(); + + ValidateResult validateConfig(); + + default ValidateResult validate(List requiredOptions) { + List nonExistsOptions = new ArrayList<>(); + requiredOptions.forEach(x -> { + if (Boolean.FALSE.equals(getConfig().has(x))) { + nonExistsOptions.add(x); + } + }); + + if (!nonExistsOptions.isEmpty()) { + return new ValidateResult( + false, + nonExistsOptions.stream().map(option -> + "[" + option + "]").collect(Collectors.joining(",")) + " is not exist"); + } else { + return new ValidateResult(true, ""); + } + } + + void prepare(SparkRuntimeEnvironment prepareEnv); +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchReader.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchReader.java new file mode 100644 index 0000000000..d785910798 --- /dev/null +++ 
b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchReader.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.batch; + +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.Component; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +/** + * BatchReader + */ +public interface BatchReader extends Component { + + /** + * read data from source return dataset + * @param env env + * @return Dataset + */ + Dataset read(SparkRuntimeEnvironment env); +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchTransformer.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchTransformer.java new file mode 100644 index 0000000000..e6a04839b0 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchTransformer.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.batch; + +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.Component; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +/** + * BatchTransformer + */ +public interface BatchTransformer extends Component { + + /** + * transform the dataset + * @param data data + * @param env env + * @return Dataset + */ + Dataset transform(Dataset data, SparkRuntimeEnvironment env); +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchWriter.java new file mode 100644 index 0000000000..c7a3efc708 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/BatchWriter.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.batch; + +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.Component; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +/** + * BatchWriter + */ +public interface BatchWriter extends Component { + + /** + * write data to target storage + * @param data data + * @param environment environment + */ + void write(Dataset data, SparkRuntimeEnvironment environment); +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/HiveReader.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/HiveReader.java new file mode 100644 index 0000000000..cd76edc364 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/HiveReader.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.reader; + +import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE; +import static org.apache.dolphinscheduler.data.quality.Constants.SQL; +import static org.apache.dolphinscheduler.data.quality.Constants.TABLE; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ValidateResult; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +import java.util.Arrays; + +import com.google.common.base.Strings; + +/** + * HiveReader + */ +public class HiveReader implements BatchReader { + + private final Config config; + + public HiveReader(Config config) { + this.config = config; + } + + @Override + public Config getConfig() { + return config; + } + + @Override + public ValidateResult validateConfig() { + return validate(Arrays.asList(DATABASE, TABLE)); + } + + @Override + public void prepare(SparkRuntimeEnvironment prepareEnv) { + if (Strings.isNullOrEmpty(config.getString(SQL))) { + config.put(SQL,"select * from " + config.getString(DATABASE) + "." 
+ config.getString(TABLE)); + } + } + + @Override + public Dataset read(SparkRuntimeEnvironment env) { + return env.sparkSession().sql(config.getString(SQL)); + } + +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java new file mode 100644 index 0000000000..0484c15fa2 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/JdbcReader.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.reader; + +import static org.apache.dolphinscheduler.data.quality.Constants.DB_TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.DOTS; +import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER; +import static org.apache.dolphinscheduler.data.quality.Constants.JDBC; +import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD; +import static org.apache.dolphinscheduler.data.quality.Constants.TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.URL; +import static org.apache.dolphinscheduler.data.quality.Constants.USER; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ValidateResult; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader; +import org.apache.dolphinscheduler.data.quality.utils.ConfigUtils; + +import org.apache.spark.sql.DataFrameReader; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; +import org.apache.spark.sql.SparkSession; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +/** + * JdbcReader + */ +public class JdbcReader implements BatchReader { + + private final Config config; + + public JdbcReader(Config config) { + this.config = config; + } + + @Override + public Config getConfig() { + return config; + } + + @Override + public ValidateResult validateConfig() { + return validate(Arrays.asList(URL, TABLE, USER, PASSWORD)); + } + + @Override + public void prepare(SparkRuntimeEnvironment prepareEnv) { + // Do nothing + } + + @Override + public Dataset read(SparkRuntimeEnvironment env) { + return jdbcReader(env.sparkSession()).load(); + } + + private DataFrameReader jdbcReader(SparkSession sparkSession) { + + DataFrameReader reader = sparkSession.read() + .format(JDBC) + 
.option(URL, config.getString(URL)) + .option(DB_TABLE, config.getString(TABLE)) + .option(USER, config.getString(USER)) + .option(PASSWORD, config.getString(PASSWORD)) + .option(DRIVER, config.getString(DRIVER)); + + Config jdbcConfig = ConfigUtils.extractSubConfig(config, JDBC + DOTS, false); + + if (!config.isEmpty()) { + Map optionMap = new HashMap<>(16); + jdbcConfig.entrySet().forEach(x -> optionMap.put(x.getKey(),String.valueOf(x.getValue()))); + reader.options(optionMap); + } + + return reader; + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/ReaderFactory.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/ReaderFactory.java new file mode 100644 index 0000000000..9d9eb3d8e7 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/reader/ReaderFactory.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.reader; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ReaderConfig; +import org.apache.dolphinscheduler.data.quality.enums.ReaderType; +import org.apache.dolphinscheduler.data.quality.exception.DataQualityException; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader; + +import java.util.ArrayList; +import java.util.List; + +/** + * ReaderFactory + */ +public class ReaderFactory { + + private static class Singleton { + static ReaderFactory instance = new ReaderFactory(); + } + + public static ReaderFactory getInstance() { + return Singleton.instance; + } + + public List getReaders(SparkRuntimeEnvironment sparkRuntimeEnvironment, List readerConfigs) throws DataQualityException { + + List readerList = new ArrayList<>(); + + for (ReaderConfig readerConfig : readerConfigs) { + BatchReader reader = getReader(readerConfig); + if (reader != null) { + reader.validateConfig(); + reader.prepare(sparkRuntimeEnvironment); + readerList.add(reader); + } + } + + return readerList; + } + + private BatchReader getReader(ReaderConfig readerConfig) throws DataQualityException { + ReaderType readerType = ReaderType.getType(readerConfig.getType()); + Config config = new Config(readerConfig.getConfig()); + if (readerType != null) { + switch (readerType) { + case JDBC: + return new JdbcReader(config); + case HIVE: + return new HiveReader(config); + default: + throw new DataQualityException("reader type " + readerType + " is not supported!"); + } + } + + return null; + } + +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/SqlTransformer.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/SqlTransformer.java new 
file mode 100644 index 0000000000..756a7bc07f --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/SqlTransformer.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.transformer; + +import static org.apache.dolphinscheduler.data.quality.Constants.SQL; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ValidateResult; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +import java.util.Collections; + +/** + * SqlTransformer + */ +public class SqlTransformer implements BatchTransformer { + + private final Config config; + + public SqlTransformer(Config config) { + this.config = config; + } + + @Override + public Config getConfig() { + return config; + } + + @Override + public ValidateResult validateConfig() { + return validate(Collections.singletonList(SQL)); + } + + @Override + public void prepare(SparkRuntimeEnvironment prepareEnv) { + // Do nothing + } + + @Override + public Dataset transform(Dataset data, SparkRuntimeEnvironment env) { + return env.sparkSession().sql(config.getString(SQL)); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/TransformerFactory.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/TransformerFactory.java new file mode 100644 index 0000000000..cb5e45eaed --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/transformer/TransformerFactory.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.transformer; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.TransformerConfig; +import org.apache.dolphinscheduler.data.quality.enums.TransformerType; +import org.apache.dolphinscheduler.data.quality.exception.DataQualityException; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchTransformer; + +import java.util.ArrayList; +import java.util.List; + +/** + * TransformerFactory + */ +public class TransformerFactory { + + private static class Singleton { + static TransformerFactory instance = new TransformerFactory(); + } + + public static TransformerFactory getInstance() { + return Singleton.instance; + } + + public List getTransformer(SparkRuntimeEnvironment sparkRuntimeEnvironment, List transformerConfigs) throws DataQualityException { + + List transformers = new ArrayList<>(); + + for (TransformerConfig transformerConfig:transformerConfigs) { + BatchTransformer transformer = getTransformer(transformerConfig); + if (transformer != null) { + transformer.validateConfig(); + transformer.prepare(sparkRuntimeEnvironment); + transformers.add(transformer); + } + } + + return transformers; + } + + private BatchTransformer getTransformer(TransformerConfig 
transformerConfig) throws DataQualityException { + TransformerType transformerType = TransformerType.getType(transformerConfig.getType()); + Config config = new Config(transformerConfig.getConfig()); + if (transformerType != null) { + if (transformerType == TransformerType.SQL) { + return new SqlTransformer(config); + } + throw new DataQualityException("transformer type " + transformerType + " is not supported!"); + } + + return null; + } + +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java new file mode 100644 index 0000000000..f1c098965c --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/JdbcWriter.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.writer; + +import static org.apache.dolphinscheduler.data.quality.Constants.APPEND; +import static org.apache.dolphinscheduler.data.quality.Constants.DB_TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER; +import static org.apache.dolphinscheduler.data.quality.Constants.JDBC; +import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD; +import static org.apache.dolphinscheduler.data.quality.Constants.SAVE_MODE; +import static org.apache.dolphinscheduler.data.quality.Constants.SQL; +import static org.apache.dolphinscheduler.data.quality.Constants.TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.URL; +import static org.apache.dolphinscheduler.data.quality.Constants.USER; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ValidateResult; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +import java.util.Arrays; + +import com.google.common.base.Strings; + +/** + * JdbcWriter + */ +public class JdbcWriter implements BatchWriter { + + private final Config config; + + public JdbcWriter(Config config) { + this.config = config; + } + + @Override + public Config getConfig() { + return config; + } + + @Override + public ValidateResult validateConfig() { + return validate(Arrays.asList(URL, TABLE, USER, PASSWORD)); + } + + @Override + public void prepare(SparkRuntimeEnvironment prepareEnv) { + if (Strings.isNullOrEmpty(config.getString(SAVE_MODE))) { + config.put(SAVE_MODE,APPEND); + } + } + + @Override + public void write(Dataset data, SparkRuntimeEnvironment env) { + if (!Strings.isNullOrEmpty(config.getString(SQL))) { + data = env.sparkSession().sql(config.getString(SQL)); + } + + 
data.write() + .format(JDBC) + .option(DRIVER,config.getString(DRIVER)) + .option(URL,config.getString(URL)) + .option(DB_TABLE, config.getString(TABLE)) + .option(USER, config.getString(USER)) + .option(PASSWORD, config.getString(PASSWORD)) + .mode(config.getString(SAVE_MODE)) + .save(); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/WriterFactory.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/WriterFactory.java new file mode 100644 index 0000000000..f907748a4d --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/WriterFactory.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.writer; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.WriterConfig; +import org.apache.dolphinscheduler.data.quality.enums.WriterType; +import org.apache.dolphinscheduler.data.quality.exception.DataQualityException; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter; +import org.apache.dolphinscheduler.data.quality.flow.batch.writer.file.HdfsFileWriter; +import org.apache.dolphinscheduler.data.quality.flow.batch.writer.file.LocalFileWriter; + +import java.util.ArrayList; +import java.util.List; + +/** + * WriterFactory + */ +public class WriterFactory { + + private static class Singleton { + static WriterFactory instance = new WriterFactory(); + } + + public static WriterFactory getInstance() { + return Singleton.instance; + } + + public List getWriters(SparkRuntimeEnvironment sparkRuntimeEnvironment, List writerConfigs) throws DataQualityException { + + List writerList = new ArrayList<>(); + + for (WriterConfig writerConfig:writerConfigs) { + BatchWriter writer = getWriter(writerConfig); + if (writer != null) { + writer.validateConfig(); + writer.prepare(sparkRuntimeEnvironment); + writerList.add(writer); + } + } + + return writerList; + } + + private BatchWriter getWriter(WriterConfig writerConfig) throws DataQualityException { + + WriterType writerType = WriterType.getType(writerConfig.getType()); + Config config = new Config(writerConfig.getConfig()); + if (writerType != null) { + switch (writerType) { + case JDBC: + return new JdbcWriter(config); + case LOCAL_FILE: + return new LocalFileWriter(config); + case HDFS_FILE: + return new HdfsFileWriter(config); + default: + throw new DataQualityException("writer type " + writerType + " is not supported!"); + } + } + + return null; + } + +} diff --git 
a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/BaseFileWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/BaseFileWriter.java new file mode 100644 index 0000000000..fbe311fe2a --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/BaseFileWriter.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.writer.file; + +import static org.apache.dolphinscheduler.data.quality.Constants.SAVE_MODE; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ValidateResult; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter; +import org.apache.dolphinscheduler.data.quality.utils.ConfigUtils; + +import org.apache.commons.collections.CollectionUtils; +import org.apache.spark.sql.DataFrameWriter; +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import com.google.common.base.Strings; + +/** + * BaseFileWriter + */ +public abstract class BaseFileWriter implements BatchWriter { + + public static final String PARTITION_BY = "partition_by"; + public static final String SERIALIZER = "serializer"; + public static final String PATH = "path"; + + private final Config config; + + protected BaseFileWriter(Config config) { + this.config = config; + } + + @Override + public Config getConfig() { + return config; + } + + @Override + public void prepare(SparkRuntimeEnvironment prepareEnv) { + Map defaultConfig = new HashMap<>(); + + defaultConfig.put(PARTITION_BY, Collections.emptyList()); + defaultConfig.put(SAVE_MODE,"error"); + defaultConfig.put(SERIALIZER,"csv"); + + config.merge(defaultConfig); + } + + protected ValidateResult checkConfigImpl(List allowedUri) { + + if (Boolean.TRUE.equals(config.has(PATH)) && !Strings.isNullOrEmpty(config.getString(PATH))) { + String dir = config.getString(PATH); + if (dir.startsWith("/") || uriInAllowedSchema(dir, allowedUri)) { + return new ValidateResult(true, ""); + } else { + return new ValidateResult(false, "invalid path URI, please set the following allowed schemas: " + 
String.join(",", allowedUri)); + } + } else { + return new ValidateResult(false, "please specify [path] as non-empty string"); + } + } + + protected boolean uriInAllowedSchema(String uri, List allowedUri) { + return allowedUri.stream().map(uri::startsWith).reduce(true, (a, b) -> a && b); + } + + protected String buildPathWithDefaultSchema(String uri, String defaultUriSchema) { + return uri.startsWith("/") ? defaultUriSchema + uri : uri; + } + + protected void outputImpl(Dataset df, String defaultUriSchema) { + + DataFrameWriter writer = df.write().mode(config.getString(SAVE_MODE)); + + if (CollectionUtils.isNotEmpty(config.getStringList(PARTITION_BY))) { + List partitionKeys = config.getStringList(PARTITION_BY); + writer.partitionBy(partitionKeys.toArray(new String[]{})); + } + + Config fileConfig = ConfigUtils.extractSubConfig(config, "options.", false); + if (fileConfig.isNotEmpty()) { + Map optionMap = new HashMap<>(16); + fileConfig.entrySet().forEach(x -> optionMap.put(x.getKey(),String.valueOf(x.getValue()))); + writer.options(optionMap); + } + + String path = buildPathWithDefaultSchema(config.getString(PATH), defaultUriSchema); + + switch (config.getString(SERIALIZER)) { + case "csv": + writer.csv(path); + break; + case "json": + writer.json(path); + break; + case "parquet": + writer.parquet(path); + break; + case "text": + writer.text(path); + break; + case "orc": + writer.orc(path); + break; + default: + break; + } + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/HdfsFileWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/HdfsFileWriter.java new file mode 100644 index 0000000000..7aba1de244 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/HdfsFileWriter.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.writer.file; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ValidateResult; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +import java.util.Collections; + +/** + * HdfsFileWriter + */ +public class HdfsFileWriter extends BaseFileWriter { + + public HdfsFileWriter(Config config) { + super(config); + } + + @Override + public void write(Dataset data, SparkRuntimeEnvironment environment) { + outputImpl(data,"hdfs://"); + } + + @Override + public ValidateResult validateConfig() { + return checkConfigImpl(Collections.singletonList("hdfs://")); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/LocalFileWriter.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/LocalFileWriter.java new file mode 100644 index 0000000000..a35db3cfbe --- /dev/null +++ 
b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/flow/batch/writer/file/LocalFileWriter.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.batch.writer.file; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.config.ValidateResult; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; + +import org.apache.spark.sql.Dataset; +import org.apache.spark.sql.Row; + +import java.util.Collections; + +/** + * LocalFileWriter + */ +public class LocalFileWriter extends BaseFileWriter { + + public LocalFileWriter(Config config) { + super(config); + } + + @Override + public void write(Dataset data, SparkRuntimeEnvironment environment) { + outputImpl(data,"file://"); + } + + @Override + public ValidateResult validateConfig() { + return checkConfigImpl(Collections.singletonList("file://")); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtils.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtils.java 
new file mode 100644 index 0000000000..877dcef2e9 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtils.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.utils; + +import org.apache.dolphinscheduler.data.quality.config.Config; + +import java.util.LinkedHashMap; +import java.util.Map; + +public class ConfigUtils { + + private ConfigUtils() { + throw new IllegalStateException("Construct ConfigUtils"); + } + + /** + * Extract sub config with fixed prefix + * + * @param source config source + * @param prefix config prefix + * @param keepPrefix true if keep prefix + */ + public static Config extractSubConfig(Config source, String prefix, boolean keepPrefix) { + Map values = new LinkedHashMap<>(); + + for (Map.Entry entry : source.entrySet()) { + final String key = entry.getKey(); + final String value = String.valueOf(entry.getValue()); + + if (key.startsWith(prefix)) { + if (keepPrefix) { + values.put(key, value); + } else { + values.put(key.substring(prefix.length()), value); + } + } + } + + return new Config(values); + } +} diff --git a/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java new file mode 100644 index 0000000000..1f006bd919 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/java/org/apache/dolphinscheduler/data/quality/utils/JsonUtils.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.utils; + +import static com.fasterxml.jackson.databind.DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT; +import static com.fasterxml.jackson.databind.DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT; +import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES; +import static com.fasterxml.jackson.databind.DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL; +import static com.fasterxml.jackson.databind.MapperFeature.REQUIRE_SETTERS_FOR_GETTERS; +import static com.fasterxml.jackson.databind.SerializationFeature.FAIL_ON_EMPTY_BEANS; + +import java.util.TimeZone; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Strings; + +/** + * JsonUtil + */ +public class JsonUtils { + + private static final Logger logger = LoggerFactory.getLogger(JsonUtils.class); + + /** + * can use static singleton, inject: just make sure to reuse! 
+ */ + private static final ObjectMapper MAPPER = new ObjectMapper() + .configure(FAIL_ON_UNKNOWN_PROPERTIES, false) + .configure(ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT, true) + .configure(ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,true) + .configure(READ_UNKNOWN_ENUM_VALUES_AS_NULL, true) + .configure(REQUIRE_SETTERS_FOR_GETTERS, true) + .configure(FAIL_ON_EMPTY_BEANS,false) + .setTimeZone(TimeZone.getDefault()); + + private JsonUtils() { + throw new UnsupportedOperationException("Construct JSONUtils"); + } + + public static T fromJson(String json, Class clazz) { + if (Strings.isNullOrEmpty(json)) { + return null; + } + + try { + return MAPPER.readValue(json, clazz); + } catch (Exception e) { + logger.error("parse object exception!", e); + } + + return null; + } +} diff --git a/dolphinscheduler-data-quality/src/main/resources/log4j.properties b/dolphinscheduler-data-quality/src/main/resources/log4j.properties new file mode 100644 index 0000000000..a05b60ebe9 --- /dev/null +++ b/dolphinscheduler-data-quality/src/main/resources/log4j.properties @@ -0,0 +1,22 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +log4j.rootLogger=INFO, stdout +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.Target=System.out +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%c] - %m%n \ No newline at end of file diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/SparkApplicationTestBase.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/SparkApplicationTestBase.java new file mode 100644 index 0000000000..e6574a9c6c --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/SparkApplicationTestBase.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.execution.SparkRuntimeEnvironment; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Before; + +/** + * SparkApplicationTestBase + */ +public class SparkApplicationTestBase { + + protected SparkRuntimeEnvironment sparkRuntimeEnvironment; + + @Before + public void init() { + Map config = new HashMap<>(); + config.put("spark.app.name","data quality test"); + config.put("spark.sql.crossJoin.enabled","true"); + config.put("spark.driver.bindAddress","127.0.0.1"); + config.put("spark.ui.port",13000); + config.put("spark.master","local[4]"); + + sparkRuntimeEnvironment = new SparkRuntimeEnvironment(new Config(config)); + } +} diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/configuration/ConfigurationParserTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/configuration/ConfigurationParserTest.java new file mode 100644 index 0000000000..ccc2745232 --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/configuration/ConfigurationParserTest.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.configuration; + +import org.apache.dolphinscheduler.data.quality.config.DataQualityConfiguration; +import org.apache.dolphinscheduler.data.quality.utils.JsonUtils; + +import org.junit.Assert; +import org.junit.Test; + +/** + * ConfigurationParserTest + */ +public class ConfigurationParserTest { + + @Test + public void testConfigurationValidate() { + Assert.assertEquals(1,verifyConfigurationValidate()); + } + + private int verifyConfigurationValidate() { + int flag = 1; + try { + String parameterStr = "{\"name\":\"data quality test\",\"env\":{\"type\":\"batch\",\"config\":null}," + + "\"readers\":[{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"Test@123!\"," + + "\"driver\":\"com.mysql.jdbc.Driver\",\"user\":\"test\",\"output_table\":\"test1\",\"table\":\"test1\"," + + "\"url\":\"jdbc:mysql://172.16.100.199:3306/test\"} }],\"transformers\":[{\"type\":\"sql\",\"config\":" + + "{\"index\":1,\"output_table\":\"miss_count\",\"sql\":\"SELECT COUNT(*) AS miss FROM test1 WHERE (c1 is null or c1 = '') \"} }," + + "{\"type\":\"sql\",\"config\":{\"index\":2,\"output_table\":\"total_count\",\"sql\":\"SELECT COUNT(*) AS total FROM test1 \"} }]," + + "\"writers\":[{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\",\"password\":\"test\"," + + "\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":\"t_ds_dq_execute_result\"," + + "\"url\":\"jdbc:postgresql://172.16.100.199:5432/dolphinscheduler?stringtype=unspecified\"," + + "\"sql\":\"SELECT 0 as rule_type,'data 
quality test' as rule_name,7 as process_definition_id,80 as process_instance_id," + + "80 as task_instance_id,miss_count.miss AS statistics_value, total_count.total AS comparison_value,2 as check_type,10 as" + + " threshold, 3 as operator, 0 as failure_strategy, '2021-06-29 10:18:59' as create_time,'2021-06-29 10:18:59' as update_time " + + "from miss_count FULL JOIN total_count\"} }]}"; + + DataQualityConfiguration dataQualityConfiguration = JsonUtils.fromJson(parameterStr,DataQualityConfiguration.class); + dataQualityConfiguration.validate(); + } catch (Exception e) { + flag = 0; + e.printStackTrace(); + } + return flag; + } +} diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/FlowTestBase.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/FlowTestBase.java new file mode 100644 index 0000000000..823e9d55cd --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/FlowTestBase.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.flow; + +import org.apache.dolphinscheduler.data.quality.SparkApplicationTestBase; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.util.Properties; + +/** + * FlowTestBase + */ +public class FlowTestBase extends SparkApplicationTestBase { + + protected String url = "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1"; + + protected String driver = "org.h2.Driver"; + + protected Connection getConnection() throws Exception { + Properties properties = new Properties(); + properties.setProperty("user", "test"); + properties.setProperty("password", "123456"); + properties.setProperty("rowId", "false"); + DriverManager.registerDriver(new org.h2.Driver()); + Class.forName(driver, false, this.getClass().getClassLoader()); + return DriverManager.getConnection(url, properties); + } + +} diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java new file mode 100644 index 0000000000..1b307ef334 --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/JdbcReaderTest.java @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.reader; + +import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE; +import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER; +import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD; +import static org.apache.dolphinscheduler.data.quality.Constants.TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.URL; +import static org.apache.dolphinscheduler.data.quality.Constants.USER; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.flow.FlowTestBase; +import org.apache.dolphinscheduler.data.quality.flow.batch.reader.JdbcReader; + +import java.sql.Connection; +import java.util.HashMap; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** + * JdbcConnectorTest + */ +public class JdbcReaderTest extends FlowTestBase { + + @Before + public void before() { + super.init(); + createConnectorTable(); + } + + @Test + public void testJdbcConnectorExecute() { + JdbcReader jdbcReader = new JdbcReader(buildReaderConfig()); + Assert.assertNotNull(jdbcReader.read(sparkRuntimeEnvironment)); + } + + private Config buildReaderConfig() { + Map config = new HashMap<>(); + config.put(DATABASE,"test"); + config.put(TABLE,"test.test1"); + config.put(URL,url); + config.put(USER,"test"); + config.put(PASSWORD,"123456"); + config.put(DRIVER,driver); + return new Config(config); + } + + private void createConnectorTable() { + try { + 
Connection connection = getConnection(); + connection.prepareStatement("create schema if not exists test").executeUpdate(); + + connection.prepareStatement("drop table if exists test.test1").executeUpdate(); + connection + .prepareStatement( + "CREATE TABLE test.test1 (\n" + + " `id` int(11) NOT NULL AUTO_INCREMENT,\n" + + " `company` varchar(255) DEFAULT NULL,\n" + + " `date` varchar(255) DEFAULT NULL,\n" + + " `c1` varchar(255) DEFAULT NULL,\n" + + " `c2` varchar(255) DEFAULT NULL,\n" + + " `c3` varchar(255) DEFAULT NULL,\n" + + " `c4` int(11) DEFAULT NULL,\n" + + " PRIMARY KEY (`id`)\n" + + ")") + .executeUpdate(); + connection.prepareStatement("INSERT INTO test.test1 (company,`date`,c1,c2,c3,c4) VALUES\n" + + "\t ('1','2019-03-01','11','12','13',1),\n" + + "\t ('2','2019-06-01','21','22','23',1),\n" + + "\t ('3','2019-09-01','31','32','33',1),\n" + + "\t ('4','2019-12-01','41','42','43',1),\n" + + "\t ('5','2013','42','43','54',1),\n" + + "\t ('6','2020','42','43','54',1);").executeUpdate(); + connection.commit(); + } catch (Exception e) { + e.printStackTrace(); + } + } + +} diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/ReaderFactoryTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/ReaderFactoryTest.java new file mode 100644 index 0000000000..b428da9460 --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/reader/ReaderFactoryTest.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.reader; + +import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE; +import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER; +import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD; +import static org.apache.dolphinscheduler.data.quality.Constants.TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.URL; +import static org.apache.dolphinscheduler.data.quality.Constants.USER; + +import org.apache.dolphinscheduler.data.quality.config.ReaderConfig; +import org.apache.dolphinscheduler.data.quality.exception.DataQualityException; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchReader; +import org.apache.dolphinscheduler.data.quality.flow.batch.reader.ReaderFactory; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Test; + +/** + * ConnectorFactoryTest + */ +public class ReaderFactoryTest { + + @Test + public void testConnectorGenerate() throws DataQualityException { + + List readerConfigs = new ArrayList<>(); + ReaderConfig readerConfig = new ReaderConfig(); + readerConfig.setType("JDBC"); + Map config = new HashMap<>(); + config.put(DATABASE,"test"); + config.put(TABLE,"test1"); + config.put(URL,"jdbc:mysql://localhost:3306/test"); + config.put(USER,"test"); + config.put(PASSWORD,"123456"); + config.put(DRIVER,"com.mysql.jdbc.Driver"); + readerConfig.setConfig(config); + readerConfigs.add(readerConfig); + + 
int flag = 0; + + List readers = ReaderFactory.getInstance().getReaders(null,readerConfigs); + if (readers != null && readers.size() >= 1) { + flag = 1; + } + + Assert.assertEquals(1,flag); + } +} diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/JdbcWriterTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/JdbcWriterTest.java new file mode 100644 index 0000000000..ae888583cc --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/JdbcWriterTest.java @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.data.quality.flow.writer; + +import static org.apache.dolphinscheduler.data.quality.Constants.DATABASE; +import static org.apache.dolphinscheduler.data.quality.Constants.DRIVER; +import static org.apache.dolphinscheduler.data.quality.Constants.PASSWORD; +import static org.apache.dolphinscheduler.data.quality.Constants.TABLE; +import static org.apache.dolphinscheduler.data.quality.Constants.URL; +import static org.apache.dolphinscheduler.data.quality.Constants.USER; + +import org.apache.dolphinscheduler.data.quality.config.Config; +import org.apache.dolphinscheduler.data.quality.flow.FlowTestBase; +import org.apache.dolphinscheduler.data.quality.flow.batch.reader.JdbcReader; +import org.apache.dolphinscheduler.data.quality.flow.batch.writer.JdbcWriter; + +import java.sql.Connection; +import java.util.HashMap; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; + +/** + * JdbcWriterTest + */ +public class JdbcWriterTest extends FlowTestBase { + + @Before + public void before() { + super.init(); + createWriterTable(); + } + + @Test + public void testJdbcWriterExecute() { + JdbcReader jdbcConnector = new JdbcReader(buildJdbcReaderConfig()); + JdbcWriter jdbcWriter = new JdbcWriter(buildJdbcConfig()); + jdbcWriter.write(jdbcConnector.read(sparkRuntimeEnvironment),sparkRuntimeEnvironment); + } + + private Config buildJdbcConfig() { + Map config = new HashMap<>(); + config.put(DATABASE,"test"); + config.put(TABLE,"test.test2"); + config.put(URL,url); + config.put(USER,"test"); + config.put(PASSWORD,"123456"); + config.put(DRIVER,driver); + config.put("save_mode","append"); + return new Config(config); + } + + private Config buildJdbcReaderConfig() { + Config config = buildJdbcConfig(); + config.put("sql","SELECT '1' as company,'1' as date,'2' as c1,'2' as c2,'2' as c3, 2 as c4"); + return config; + } + + private void createWriterTable() { + try { + Connection connection = getConnection(); + 
connection.prepareStatement("create schema if not exists test").executeUpdate(); + + connection.prepareStatement("drop table if exists test.test2").executeUpdate(); + connection + .prepareStatement( + "CREATE TABLE test.test2 (\n" + + " `id` int(11) NOT NULL AUTO_INCREMENT,\n" + + " `company` varchar(255) DEFAULT NULL,\n" + + " `date` varchar(255) DEFAULT NULL,\n" + + " `c1` varchar(255) DEFAULT NULL,\n" + + " `c2` varchar(255) DEFAULT NULL,\n" + + " `c3` varchar(255) DEFAULT NULL,\n" + + " `c4` int(11) DEFAULT NULL,\n" + + " PRIMARY KEY (`id`)\n" + + ")") + .executeUpdate(); + connection.prepareStatement("set schema test").executeUpdate(); + connection.commit(); + } catch (Exception e) { + e.printStackTrace(); + } + } + +} diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/WriterFactoryTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/WriterFactoryTest.java new file mode 100644 index 0000000000..7257703038 --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/flow/writer/WriterFactoryTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.flow.writer; + +import org.apache.dolphinscheduler.data.quality.config.WriterConfig; +import org.apache.dolphinscheduler.data.quality.exception.DataQualityException; +import org.apache.dolphinscheduler.data.quality.flow.batch.BatchWriter; +import org.apache.dolphinscheduler.data.quality.flow.batch.writer.WriterFactory; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.Assert; +import org.junit.Test; + +/** + * WriterFactoryTest + */ +public class WriterFactoryTest { + + @Test + public void testWriterGenerate() throws DataQualityException { + + List writerConfigs = new ArrayList<>(); + WriterConfig writerConfig = new WriterConfig(); + writerConfig.setType("JDBC"); + writerConfig.setConfig(null); + writerConfigs.add(writerConfig); + + int flag = 0; + + List writers = WriterFactory.getInstance().getWriters(null,writerConfigs); + if (writers != null && writers.size() >= 1) { + flag = 1; + } + + Assert.assertEquals(1,flag); + } +} diff --git a/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtilsTest.java b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtilsTest.java new file mode 100644 index 0000000000..8d24d22d71 --- /dev/null +++ b/dolphinscheduler-data-quality/src/test/java/org/apache/dolphinscheduler/data/quality/utils/ConfigUtilsTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.data.quality.utils; + +import org.apache.dolphinscheduler.data.quality.config.Config; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Test; + +public class ConfigUtilsTest { + + @Test + public void testExtractSubConfig() { + // Setup + Map configMap = new HashMap<>(); + configMap.put("aaa.www","1"); + configMap.put("bbb.www","1"); + + final Config source = new Config(configMap); + + // Run the test + final Config result = ConfigUtils.extractSubConfig(source, "aaa", false); + int expect = 1; + int actual = result.entrySet().size(); + + Assert.assertEquals(expect,actual); + } +} diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java index 349638c2aa..00004281b7 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/CommonUtils.java @@ -17,6 +17,7 @@ package org.apache.dolphinscheduler.plugin.datasource.api.utils; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.DATA_QUALITY_JAR_NAME; import static 
org.apache.dolphinscheduler.spi.task.TaskConstants.HADOOP_SECURITY_AUTHENTICATION; import static org.apache.dolphinscheduler.spi.task.TaskConstants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE; import static org.apache.dolphinscheduler.spi.task.TaskConstants.JAVA_SECURITY_KRB5_CONF; @@ -106,6 +107,16 @@ public class CommonUtils { return false; } + public static String getDataQualityJarName() { + String dqsJarName = PropertyUtils.getString(DATA_QUALITY_JAR_NAME); + + if (org.apache.commons.lang.StringUtils.isEmpty(dqsJarName)) { + return "dolphinscheduler-data-quality.jar"; + } + + return dqsJarName; + } + /** * hdfs udf dir * @@ -138,5 +149,4 @@ public class CommonUtils { return resourceUploadPath; } } - } diff --git a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DataSourceUtils.java b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DataSourceUtils.java index 0affd1be9c..cf41bc2965 100644 --- a/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DataSourceUtils.java +++ b/dolphinscheduler-datasource-plugin/dolphinscheduler-datasource-api/src/main/java/org/apache/dolphinscheduler/plugin/datasource/api/utils/DataSourceUtils.java @@ -31,6 +31,8 @@ import org.apache.dolphinscheduler.plugin.datasource.api.datasource.sqlserver.SQ import org.apache.dolphinscheduler.spi.datasource.ConnectionParam; import org.apache.dolphinscheduler.spi.enums.DbType; +import java.sql.Connection; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -82,6 +84,18 @@ public class DataSourceUtils { return getDatasourceProcessor(dbType).getJdbcUrl(baseConnectionParam); } + public static Connection getConnection(DbType dbType, ConnectionParam connectionParam) { + try { + return 
getDatasourceProcessor(dbType).getConnection(connectionParam); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public static String getDatasourceDriver(DbType dbType) { + return getDatasourceProcessor(dbType).getDatasourceDriver(); + } + public static BaseDataSourceParamDTO buildDatasourceParamDTO(DbType dbType, String connectionParams) { return getDatasourceProcessor(dbType).createDatasourceParamDTO(connectionParams); } diff --git a/dolphinscheduler-dist/pom.xml b/dolphinscheduler-dist/pom.xml index cb2e5f274b..61362a32fc 100644 --- a/dolphinscheduler-dist/pom.xml +++ b/dolphinscheduler-dist/pom.xml @@ -51,6 +51,11 @@ dolphinscheduler-alert-server + + org.apache.dolphinscheduler + dolphinscheduler-data-quality + + org.apache.dolphinscheduler dolphinscheduler-ui diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java index 807c5218ca..538617e4d8 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java @@ -26,6 +26,7 @@ import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.DataxTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.ProcedureTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.SQLTaskExecutionContext; @@ -152,6 +153,17 @@ public class TaskExecutionContextBuilder { return this; } 
+ /** + * build DataQualityTask related info + * + * @param dataQualityTaskExecutionContext dataQualityTaskExecutionContext + * @return TaskExecutionContextBuilder + */ + public TaskExecutionContextBuilder buildDataQualityTaskRelatedInfo(DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { + taskExecutionContext.setDataQualityTaskExecutionContext(dataQualityTaskExecutionContext); + return this; + } + /** * create * diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java index 8a5755cc08..651496691c 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java @@ -27,6 +27,7 @@ import org.apache.dolphinscheduler.remote.command.DBTaskAckCommand; import org.apache.dolphinscheduler.remote.command.DBTaskResponseCommand; import org.apache.dolphinscheduler.server.master.cache.ProcessInstanceExecCacheManager; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThread; +import org.apache.dolphinscheduler.server.utils.DataQualityResultOperator; import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteThreadPool; import org.apache.dolphinscheduler.service.process.ProcessService; @@ -67,6 +68,12 @@ public class TaskResponseService { @Autowired private ProcessService processService; + /** + * data quality result operator + */ + @Autowired + private DataQualityResultOperator dataQualityResultOperator; + /** * task response worker */ @@ -211,6 +218,8 @@ public class TaskResponseService { Channel channel = taskResponseEvent.getChannel(); try { if (taskInstance != null) { + dataQualityResultOperator.operateDqExecuteResult(taskResponseEvent, 
taskInstance); + processService.changeTaskState(taskInstance, taskResponseEvent.getState(), taskResponseEvent.getEndTime(), taskResponseEvent.getProcessId(), @@ -227,4 +236,4 @@ public class TaskResponseService { channel.writeAndFlush(taskResponseCommand.convert2Command()); } } -} +} \ No newline at end of file diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java index deb0166955..60ab1b8878 100644 --- a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/runner/task/BaseTaskProcessor.java @@ -17,6 +17,22 @@ package org.apache.dolphinscheduler.server.master.runner.task; +import static org.apache.dolphinscheduler.common.Constants.ADDRESS; +import static org.apache.dolphinscheduler.common.Constants.DATABASE; +import static org.apache.dolphinscheduler.common.Constants.JDBC_URL; +import static org.apache.dolphinscheduler.common.Constants.OTHER; +import static org.apache.dolphinscheduler.common.Constants.PASSWORD; +import static org.apache.dolphinscheduler.common.Constants.SINGLE_SLASH; +import static org.apache.dolphinscheduler.common.Constants.USER; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.COMPARISON_NAME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.COMPARISON_TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.COMPARISON_TYPE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.SRC_CONNECTOR_TYPE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.SRC_DATASOURCE_ID; +import static 
org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TARGET_CONNECTOR_TYPE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TARGET_DATASOURCE_ID; + +import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.ExecutionStatus; import org.apache.dolphinscheduler.common.enums.SqoopJobType; import org.apache.dolphinscheduler.common.enums.TaskType; @@ -24,6 +40,7 @@ import org.apache.dolphinscheduler.common.enums.UdfType; import org.apache.dolphinscheduler.common.process.ResourceInfo; import org.apache.dolphinscheduler.common.task.AbstractParameters; import org.apache.dolphinscheduler.common.task.datax.DataxParameters; +import org.apache.dolphinscheduler.common.task.dq.DataQualityParameters; import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters; import org.apache.dolphinscheduler.common.task.sql.SqlParameters; import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters; @@ -32,8 +49,13 @@ import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParamete import org.apache.dolphinscheduler.common.utils.HadoopUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.common.utils.LoggerUtils; +import org.apache.dolphinscheduler.common.utils.PropertyUtils; import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.DqComparisonType; +import org.apache.dolphinscheduler.dao.entity.DqRule; +import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; +import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.Resource; import org.apache.dolphinscheduler.dao.entity.TaskInstance; @@ -44,20 +66,28 @@ import org.apache.dolphinscheduler.server.master.config.MasterConfig; import 
org.apache.dolphinscheduler.service.bean.SpringApplicationContext; import org.apache.dolphinscheduler.service.process.ProcessService; import org.apache.dolphinscheduler.service.queue.entity.TaskExecutionContext; +import org.apache.dolphinscheduler.spi.enums.DbType; import org.apache.dolphinscheduler.spi.enums.ResourceType; import org.apache.dolphinscheduler.spi.task.TaskConstants; +import org.apache.dolphinscheduler.spi.task.dq.enums.ConnectorType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType; +import org.apache.dolphinscheduler.spi.task.dq.model.JdbcInfo; +import org.apache.dolphinscheduler.spi.task.dq.utils.JdbcUrlParser; +import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.DataxTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.ProcedureTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.SQLTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.SqoopTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.UdfFuncRequest; +import org.apache.dolphinscheduler.spi.utils.StringUtils; import org.apache.commons.collections.CollectionUtils; -import org.apache.commons.lang.StringUtils; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Properties; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -67,6 +97,7 @@ import org.slf4j.LoggerFactory; import com.google.common.base.Enums; import com.google.common.base.Strings; +import com.zaxxer.hikari.HikariDataSource; public abstract class BaseTaskProcessor implements ITaskProcessor { @@ -106,6 +137,9 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { this.commitInterval = masterConfig.getTaskCommitInterval(); } + protected javax.sql.DataSource defaultDataSource = + 
SpringApplicationContext.getBean(javax.sql.DataSource.class); + /** * pause task, common tasks donot need this. */ @@ -236,8 +270,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { taskInstance.getStartTime(), taskInstance.getHost(), null, - null - ); + null); return null; } // set queue for process instance, user-specified queue takes precedence over tenant queue @@ -250,6 +283,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext(); ProcedureTaskExecutionContext procedureTaskExecutionContext = new ProcedureTaskExecutionContext(); SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext(); + DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext(); // SQL task if (TaskType.SQL.getDesc().equalsIgnoreCase(taskInstance.getTaskType())) { @@ -270,6 +304,10 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { setSqoopTaskRelation(sqoopTaskExecutionContext, taskInstance); } + if (TaskType.DATA_QUALITY.getDesc().equalsIgnoreCase(taskInstance.getTaskType())) { + setDataQualityTaskRelation(dataQualityTaskExecutionContext,taskInstance,tenant.getTenantCode()); + } + return TaskExecutionContextBuilder.get() .buildTaskInstanceRelatedInfo(taskInstance) .buildTaskDefinitionRelatedInfo(taskInstance.getTaskDefine()) @@ -279,6 +317,7 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { .buildDataxTaskRelatedInfo(dataxTaskExecutionContext) .buildProcedureTaskRelatedInfo(procedureTaskExecutionContext) .buildSqoopTaskRelatedInfo(sqoopTaskExecutionContext) + .buildDataQualityTaskRelatedInfo(dataQualityTaskExecutionContext) .create(); } @@ -351,6 +390,197 @@ public abstract class BaseTaskProcessor implements ITaskProcessor { } } + /** + * set data quality task relation + * + * @param dataQualityTaskExecutionContext dataQualityTaskExecutionContext + * @param 
taskInstance taskInstance + */ + private void setDataQualityTaskRelation(DataQualityTaskExecutionContext dataQualityTaskExecutionContext, TaskInstance taskInstance, String tenantCode) { + DataQualityParameters dataQualityParameters = + JSONUtils.parseObject(taskInstance.getTaskParams(), DataQualityParameters.class); + if (dataQualityParameters == null) { + return; + } + + Map config = dataQualityParameters.getRuleInputParameter(); + + int ruleId = dataQualityParameters.getRuleId(); + DqRule dqRule = processService.getDqRule(ruleId); + if (dqRule == null) { + logger.error("can not get DqRule by id {}",ruleId); + return; + } + + dataQualityTaskExecutionContext.setRuleId(ruleId); + dataQualityTaskExecutionContext.setRuleType(dqRule.getType()); + dataQualityTaskExecutionContext.setRuleName(dqRule.getName()); + + List ruleInputEntryList = processService.getRuleInputEntry(ruleId); + if (CollectionUtils.isEmpty(ruleInputEntryList)) { + logger.error("{} rule input entry list is empty ",ruleId); + return; + } + List executeSqlList = processService.getDqExecuteSql(ruleId); + setComparisonParams(dataQualityTaskExecutionContext, config, ruleInputEntryList, executeSqlList); + dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(ruleInputEntryList)); + dataQualityTaskExecutionContext.setExecuteSqlList(JSONUtils.toJsonString(executeSqlList)); + + // set the path used to store data quality task check error data + dataQualityTaskExecutionContext.setHdfsPath( + PropertyUtils.getString(Constants.FS_DEFAULTFS) + + PropertyUtils.getString( + Constants.DATA_QUALITY_ERROR_OUTPUT_PATH, + "/user/" + tenantCode + "/data_quality_error_data")); + + setSourceConfig(dataQualityTaskExecutionContext, config); + setTargetConfig(dataQualityTaskExecutionContext, config); + setWriterConfig(dataQualityTaskExecutionContext); + setStatisticsValueWriterConfig(dataQualityTaskExecutionContext); + } + + /** + * It is used to get comparison params, the param contains + * comparison 
name、comparison table and execute sql. + * When the type is fixed_value, params will be null. + * @param dataQualityTaskExecutionContext + * @param config + * @param ruleInputEntryList + * @param executeSqlList + */ + private void setComparisonParams(DataQualityTaskExecutionContext dataQualityTaskExecutionContext, + Map config, + List ruleInputEntryList, + List executeSqlList) { + if (config.get(COMPARISON_TYPE) != null) { + int comparisonTypeId = Integer.parseInt(config.get(COMPARISON_TYPE)); + // comparison type id 1 is fixed value ,do not need set param + if (comparisonTypeId > 1) { + DqComparisonType type = processService.getComparisonTypeById(comparisonTypeId); + if (type != null) { + DqRuleInputEntry comparisonName = new DqRuleInputEntry(); + comparisonName.setField(COMPARISON_NAME); + comparisonName.setValue(type.getName()); + ruleInputEntryList.add(comparisonName); + + DqRuleInputEntry comparisonTable = new DqRuleInputEntry(); + comparisonTable.setField(COMPARISON_TABLE); + comparisonTable.setValue(type.getOutputTable()); + ruleInputEntryList.add(comparisonTable); + + if (executeSqlList == null) { + executeSqlList = new ArrayList<>(); + } + + DqRuleExecuteSql dqRuleExecuteSql = new DqRuleExecuteSql(); + dqRuleExecuteSql.setType(ExecuteSqlType.MIDDLE.getCode()); + dqRuleExecuteSql.setIndex(1); + dqRuleExecuteSql.setSql(type.getExecuteSql()); + dqRuleExecuteSql.setTableAlias(type.getOutputTable()); + executeSqlList.add(0,dqRuleExecuteSql); + + if (Boolean.TRUE.equals(type.getInnerSource())) { + dataQualityTaskExecutionContext.setComparisonNeedStatisticsValueTable(true); + } + } + } else if (comparisonTypeId == 1) { + dataQualityTaskExecutionContext.setCompareWithFixedValue(true); + } + } + } + + /** + * The default datasource is used to get the dolphinscheduler datasource info, + * and the info will be used in StatisticsValueConfig and WriterConfig + * @return DataSource + */ + public DataSource getDefaultDataSource() { + DataSource dataSource = new 
DataSource(); + + HikariDataSource hikariDataSource = (HikariDataSource)defaultDataSource; + dataSource.setUserName(hikariDataSource.getUsername()); + JdbcInfo jdbcInfo = JdbcUrlParser.getJdbcInfo(hikariDataSource.getJdbcUrl()); + if (jdbcInfo != null) { + Properties properties = new Properties(); + properties.setProperty(USER,hikariDataSource.getUsername()); + properties.setProperty(PASSWORD,hikariDataSource.getPassword()); + properties.setProperty(DATABASE, jdbcInfo.getDatabase()); + properties.setProperty(ADDRESS,jdbcInfo.getAddress()); + properties.setProperty(OTHER,jdbcInfo.getParams()); + properties.setProperty(JDBC_URL,jdbcInfo.getAddress() + SINGLE_SLASH + jdbcInfo.getDatabase()); + dataSource.setType(DbType.of(JdbcUrlParser.getDbType(jdbcInfo.getDriverName()).getCode())); + dataSource.setConnectionParams(JSONUtils.toJsonString(properties)); + } + + return dataSource; + } + + /** + * The StatisticsValueWriterConfig will be used in DataQualityApplication that + * writes the statistics value into dolphin scheduler datasource + * @param dataQualityTaskExecutionContext + */ + private void setStatisticsValueWriterConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { + DataSource dataSource = getDefaultDataSource(); + ConnectorType writerConnectorType = ConnectorType.of(dataSource.getType().isHive() ? 
1 : 0); + dataQualityTaskExecutionContext.setStatisticsValueConnectorType(writerConnectorType.getDescription()); + dataQualityTaskExecutionContext.setStatisticsValueType(dataSource.getType().getCode()); + dataQualityTaskExecutionContext.setStatisticsValueWriterConnectionParams(dataSource.getConnectionParams()); + dataQualityTaskExecutionContext.setStatisticsValueTable("t_ds_dq_task_statistics_value"); + } + + /** + * The WriterConfig will be used in DataQualityApplication that + * writes the data quality check result into dolphin scheduler datasource + * @param dataQualityTaskExecutionContext + */ + private void setWriterConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { + DataSource dataSource = getDefaultDataSource(); + ConnectorType writerConnectorType = ConnectorType.of(dataSource.getType().isHive() ? 1 : 0); + dataQualityTaskExecutionContext.setWriterConnectorType(writerConnectorType.getDescription()); + dataQualityTaskExecutionContext.setWriterType(dataSource.getType().getCode()); + dataQualityTaskExecutionContext.setWriterConnectionParams(dataSource.getConnectionParams()); + dataQualityTaskExecutionContext.setWriterTable("t_ds_dq_execute_result"); + } + + /** + * The TargetConfig will be used in DataQualityApplication that + * get the data which be used to compare to src value + * @param dataQualityTaskExecutionContext + * @param config + */ + private void setTargetConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext, Map config) { + if (StringUtils.isNotEmpty(config.get(TARGET_DATASOURCE_ID))) { + DataSource dataSource = processService.findDataSourceById(Integer.parseInt(config.get(TARGET_DATASOURCE_ID))); + if (dataSource != null) { + ConnectorType targetConnectorType = ConnectorType.of( + DbType.of(Integer.parseInt(config.get(TARGET_CONNECTOR_TYPE))).isHive() ? 
1 : 0); + dataQualityTaskExecutionContext.setTargetConnectorType(targetConnectorType.getDescription()); + dataQualityTaskExecutionContext.setTargetType(dataSource.getType().getCode()); + dataQualityTaskExecutionContext.setTargetConnectionParams(dataSource.getConnectionParams()); + } + } + } + + /** + * The SourceConfig will be used in DataQualityApplication that + * get the data which be used to get the statistics value + * @param dataQualityTaskExecutionContext + * @param config + */ + private void setSourceConfig(DataQualityTaskExecutionContext dataQualityTaskExecutionContext, Map config) { + if (StringUtils.isNotEmpty(config.get(SRC_DATASOURCE_ID))) { + DataSource dataSource = processService.findDataSourceById(Integer.parseInt(config.get(SRC_DATASOURCE_ID))); + if (dataSource != null) { + ConnectorType srcConnectorType = ConnectorType.of( + DbType.of(Integer.parseInt(config.get(SRC_CONNECTOR_TYPE))).isHive() ? 1 : 0); + dataQualityTaskExecutionContext.setSourceConnectorType(srcConnectorType.getDescription()); + dataQualityTaskExecutionContext.setSourceType(dataSource.getType().getCode()); + dataQualityTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams()); + } + } + } + /** * set SQL task relation * diff --git a/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/utils/DataQualityResultOperator.java b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/utils/DataQualityResultOperator.java new file mode 100644 index 0000000000..71429691fa --- /dev/null +++ b/dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/utils/DataQualityResultOperator.java @@ -0,0 +1,196 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.utils; + +import org.apache.dolphinscheduler.common.enums.ExecutionStatus; +import org.apache.dolphinscheduler.common.enums.TaskType; +import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; +import org.apache.dolphinscheduler.dao.entity.ProcessInstance; +import org.apache.dolphinscheduler.dao.entity.TaskInstance; + +import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseEvent; +import org.apache.dolphinscheduler.service.alert.ProcessAlertManager; +import org.apache.dolphinscheduler.service.process.ProcessService; +import org.apache.dolphinscheduler.spi.task.dq.enums.CheckType; +import org.apache.dolphinscheduler.spi.task.dq.enums.DqFailureStrategy; +import org.apache.dolphinscheduler.spi.task.dq.enums.DqTaskState; +import org.apache.dolphinscheduler.spi.task.dq.enums.OperatorType; + +import java.math.BigDecimal; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +/** + * DataQualityResultOperator + */ +@Component +public class DataQualityResultOperator { + + private final Logger logger = LoggerFactory.getLogger(DataQualityResultOperator.class); + + @Autowired + private ProcessService processService; + + @Autowired + private ProcessAlertManager alertManager; + + /** + * When the task 
type is data quality, it will get the statistics value、comparison value、 + * threshold、check type、operator and failure strategy,use the formula that + * {check type} {operator} {threshold} to get dqc result . If result is failure, it will alert or block + * @param taskResponseEvent + * @param taskInstance + */ + public void operateDqExecuteResult(TaskResponseEvent taskResponseEvent, TaskInstance taskInstance) { + if (TaskType.DATA_QUALITY == TaskType.valueOf(taskInstance.getTaskType())) { + + ProcessInstance processInstance = + processService.findProcessInstanceDetailById( + Integer.parseInt(String.valueOf(taskInstance.getProcessInstanceId()))); + + // when the task is failure or cancel, will delete the execute result and statistics value + if (taskResponseEvent.getState().typeIsFailure() + || taskResponseEvent.getState().typeIsCancel()) { + processService.deleteDqExecuteResultByTaskInstanceId(taskInstance.getId()); + processService.deleteTaskStatisticsValueByTaskInstanceId(taskInstance.getId()); + sendDqTaskErrorAlert(taskInstance,processInstance); + return; + } + + processService.updateDqExecuteResultUserId(taskInstance.getId()); + DqExecuteResult dqExecuteResult = + processService.getDqExecuteResultByTaskInstanceId(taskInstance.getId()); + if (dqExecuteResult != null) { + //check the result ,if result is failure do some operator by failure strategy + checkDqExecuteResult(taskResponseEvent, dqExecuteResult, processInstance); + } + } + } + + /** + * get the data quality check result + * and if the result is failure that will alert or block + * @param taskResponseEvent + * @param dqExecuteResult + * @param processInstance + */ + private void checkDqExecuteResult(TaskResponseEvent taskResponseEvent, + DqExecuteResult dqExecuteResult, + ProcessInstance processInstance) { + if (isFailure(dqExecuteResult)) { + DqFailureStrategy dqFailureStrategy = DqFailureStrategy.of(dqExecuteResult.getFailureStrategy()); + if (dqFailureStrategy != null) { + 
dqExecuteResult.setState(DqTaskState.FAILURE.getCode()); + sendDqTaskResultAlert(dqExecuteResult,processInstance); + switch (dqFailureStrategy) { + case ALERT: + logger.info("task is failure, continue and alert"); + break; + case BLOCK: + taskResponseEvent.setState(ExecutionStatus.FAILURE); + logger.info("task is failure, end and alert"); + break; + default: + break; + } + } + } else { + dqExecuteResult.setState(DqTaskState.SUCCESS.getCode()); + } + + processService.updateDqExecuteResultState(dqExecuteResult); + } + + /** + * It is used to judge whether the result of the data quality task is failed + * @param dqExecuteResult + * @return + */ + private boolean isFailure(DqExecuteResult dqExecuteResult) { + CheckType checkType = CheckType.of(dqExecuteResult.getCheckType()); + + double statisticsValue = dqExecuteResult.getStatisticsValue(); + double comparisonValue = dqExecuteResult.getComparisonValue(); + double threshold = dqExecuteResult.getThreshold(); + + OperatorType operatorType = OperatorType.of(dqExecuteResult.getOperator()); + + boolean isFailure = false; + if (operatorType != null) { + double srcValue = 0; + switch (checkType) { + case COMPARISON_MINUS_STATISTICS: + srcValue = comparisonValue - statisticsValue; + isFailure = getCompareResult(operatorType,srcValue,threshold); + break; + case STATISTICS_MINUS_COMPARISON: + srcValue = statisticsValue - comparisonValue; + isFailure = getCompareResult(operatorType,srcValue,threshold); + break; + case STATISTICS_COMPARISON_PERCENTAGE: + if (comparisonValue > 0) { + srcValue = statisticsValue / comparisonValue * 100; + } + isFailure = getCompareResult(operatorType,srcValue,threshold); + break; + case STATISTICS_COMPARISON_DIFFERENCE_COMPARISON_PERCENTAGE: + if (comparisonValue > 0) { + srcValue = Math.abs(comparisonValue - statisticsValue) / comparisonValue * 100; + } + isFailure = getCompareResult(operatorType,srcValue,threshold); + break; + default: + break; + } + } + + return isFailure; + } + + private void 
sendDqTaskResultAlert(DqExecuteResult dqExecuteResult, ProcessInstance processInstance) { + alertManager.sendDataQualityTaskExecuteResultAlert(dqExecuteResult,processInstance); + } + + private void sendDqTaskErrorAlert(TaskInstance taskInstance, ProcessInstance processInstance) { + alertManager.sendTaskErrorAlert(taskInstance,processInstance); + } + + private boolean getCompareResult(OperatorType operatorType, double srcValue, double targetValue) { + BigDecimal src = BigDecimal.valueOf(srcValue); + BigDecimal target = BigDecimal.valueOf(targetValue); + switch (operatorType) { + case EQ: + return src.compareTo(target) == 0; + case LT: + return src.compareTo(target) <= -1; + case LE: + return src.compareTo(target) == 0 || src.compareTo(target) <= -1; + case GT: + return src.compareTo(target) >= 1; + case GE: + return src.compareTo(target) == 0 || src.compareTo(target) >= 1; + case NE: + return src.compareTo(target) != 0; + default: + return true; + } + } +} diff --git a/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessorTestConfig.java b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessorTestConfig.java new file mode 100644 index 0000000000..c1d96e3e21 --- /dev/null +++ b/dolphinscheduler-master/src/test/java/org/apache/dolphinscheduler/server/master/processor/TaskResponseProcessorTestConfig.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.server.master.processor; + +import org.apache.dolphinscheduler.server.utils.DataQualityResultOperator; +import org.mockito.Mockito; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * dependency config + */ +@Configuration +public class TaskResponseProcessorTestConfig { + + @Bean + public DataQualityResultOperator dataQualityResultOperator() { + return Mockito.mock(DataQualityResultOperator.class); + } +} \ No newline at end of file diff --git a/dolphinscheduler-server/pom.xml b/dolphinscheduler-server/pom.xml index a446230f09..3b549de420 100644 --- a/dolphinscheduler-server/pom.xml +++ b/dolphinscheduler-server/pom.xml @@ -38,6 +38,10 @@ org.apache.dolphinscheduler dolphinscheduler-service + + org.apache.dolphinscheduler + dolphinscheduler-task-dataquality + org.mockito diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java index fbf351d410..d5ef11e8aa 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/alert/ProcessAlertManager.java @@ -23,12 +23,16 @@ import org.apache.dolphinscheduler.common.enums.WarningType; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.AlertDao; 
import org.apache.dolphinscheduler.dao.entity.Alert; +import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; +import org.apache.dolphinscheduler.dao.entity.DqExecuteResultAlertContent; import org.apache.dolphinscheduler.dao.entity.ProcessAlertContent; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; import org.apache.dolphinscheduler.dao.entity.ProjectUser; +import org.apache.dolphinscheduler.dao.entity.TaskAlertContent; import org.apache.dolphinscheduler.dao.entity.TaskDefinition; import org.apache.dolphinscheduler.dao.entity.TaskInstance; +import org.apache.dolphinscheduler.spi.task.dq.enums.DqTaskState; import java.util.ArrayList; import java.util.Date; @@ -270,6 +274,89 @@ public class ProcessAlertManager { alertDao.sendProcessTimeoutAlert(processInstance, processDefinition); } + /** + * send data quality task alert + */ + public void sendDataQualityTaskExecuteResultAlert(DqExecuteResult result, ProcessInstance processInstance) { + Alert alert = new Alert(); + String state = DqTaskState.of(result.getState()).getDescription(); + alert.setTitle("DataQualityResult [" + result.getTaskName() + "] " + state); + String content = getDataQualityAlterContent(result); + alert.setContent(content); + alert.setAlertGroupId(processInstance.getWarningGroupId()); + alert.setCreateTime(new Date()); + alertDao.addAlert(alert); + logger.info("add alert to db , alert: {}", alert); + } + + /** + * send data quality task error alert + */ + public void sendTaskErrorAlert(TaskInstance taskInstance,ProcessInstance processInstance) { + Alert alert = new Alert(); + alert.setTitle("Task [" + taskInstance.getName() + "] Failure Warning"); + String content = getTaskAlterContent(taskInstance); + alert.setContent(content); + alert.setAlertGroupId(processInstance.getWarningGroupId()); + alert.setCreateTime(new Date()); + alertDao.addAlert(alert); + logger.info("add alert to db , alert: {}", alert); + } + + 
/** + * getDataQualityAlterContent + * @param result DqExecuteResult + * @return String String + */ + public String getDataQualityAlterContent(DqExecuteResult result) { + + DqExecuteResultAlertContent content = DqExecuteResultAlertContent.newBuilder() + .processDefinitionId(result.getProcessDefinitionId()) + .processDefinitionName(result.getProcessDefinitionName()) + .processInstanceId(result.getProcessInstanceId()) + .processInstanceName(result.getProcessInstanceName()) + .taskInstanceId(result.getTaskInstanceId()) + .taskName(result.getTaskName()) + .ruleType(result.getRuleType()) + .ruleName(result.getRuleName()) + .statisticsValue(result.getStatisticsValue()) + .comparisonValue(result.getComparisonValue()) + .checkType(result.getCheckType()) + .threshold(result.getThreshold()) + .operator(result.getOperator()) + .failureStrategy(result.getFailureStrategy()) + .userId(result.getUserId()) + .userName(result.getUserName()) + .state(result.getState()) + .errorDataPath(result.getErrorOutputPath()) + .build(); + + return JSONUtils.toJsonString(content); + } + + /** + * getTaskAlterContent + * @param taskInstance TaskInstance + * @return String String + */ + public String getTaskAlterContent(TaskInstance taskInstance) { + + TaskAlertContent content = TaskAlertContent.newBuilder() + .processInstanceName(taskInstance.getProcessInstanceName()) + .processInstanceId(taskInstance.getProcessInstanceId()) + .taskInstanceId(taskInstance.getId()) + .taskName(taskInstance.getName()) + .taskType(taskInstance.getTaskType()) + .state(taskInstance.getState()) + .startTime(taskInstance.getStartTime()) + .endTime(taskInstance.getEndTime()) + .host(taskInstance.getHost()) + .logPath(taskInstance.getLogPath()) + .build(); + + return JSONUtils.toJsonString(content); + } + public void sendTaskTimeoutAlert(ProcessInstance processInstance, TaskInstance taskInstance, TaskDefinition taskDefinition) { alertDao.sendTaskTimeoutAlert(processInstance, taskInstance, taskDefinition); } diff --git 
a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java index 6d49860513..b8cecb6b99 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java @@ -27,6 +27,7 @@ import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_CODE; import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID; import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TASK_INSTANCE_ID; import org.apache.dolphinscheduler.common.Constants; import org.apache.dolphinscheduler.common.enums.AuthorizationType; @@ -59,6 +60,12 @@ import org.apache.dolphinscheduler.common.utils.TaskParametersUtils; import org.apache.dolphinscheduler.dao.entity.Command; import org.apache.dolphinscheduler.dao.entity.DagData; import org.apache.dolphinscheduler.dao.entity.DataSource; +import org.apache.dolphinscheduler.dao.entity.DqComparisonType; +import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; +import org.apache.dolphinscheduler.dao.entity.DqRule; +import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; +import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; +import org.apache.dolphinscheduler.dao.entity.DqTaskStatisticsValue; import org.apache.dolphinscheduler.dao.entity.Environment; import org.apache.dolphinscheduler.dao.entity.ErrorCommand; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; @@ -81,6 +88,12 @@ import org.apache.dolphinscheduler.dao.entity.UdfFunc; import 
org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.CommandMapper; import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper; +import org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper; +import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper; +import org.apache.dolphinscheduler.dao.mapper.DqTaskStatisticsValueMapper; import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper; import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper; @@ -102,6 +115,7 @@ import org.apache.dolphinscheduler.dao.mapper.TenantMapper; import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.dao.utils.DagHelper; +import org.apache.dolphinscheduler.dao.utils.DqRuleUtils; import org.apache.dolphinscheduler.remote.command.StateEventChangeCommand; import org.apache.dolphinscheduler.remote.command.TaskEventChangeCommand; import org.apache.dolphinscheduler.remote.processor.StateEventCallbackService; @@ -111,6 +125,7 @@ import org.apache.dolphinscheduler.service.exceptions.ServiceException; import org.apache.dolphinscheduler.service.log.LogClientService; import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; import org.apache.dolphinscheduler.spi.enums.ResourceType; +import org.apache.dolphinscheduler.spi.task.dq.enums.DqTaskState; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; @@ -134,7 +149,7 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import 
org.springframework.transaction.annotation.Transactional; - +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.Lists; @@ -198,6 +213,24 @@ public class ProcessService { @Autowired private ProjectMapper projectMapper; + @Autowired + private DqExecuteResultMapper dqExecuteResultMapper; + + @Autowired + private DqRuleMapper dqRuleMapper; + + @Autowired + private DqRuleInputEntryMapper dqRuleInputEntryMapper; + + @Autowired + private DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper; + + @Autowired + private DqComparisonTypeMapper dqComparisonTypeMapper; + + @Autowired + private DqTaskStatisticsValueMapper dqTaskStatisticsValueMapper; + @Autowired private TaskDefinitionMapper taskDefinitionMapper; @@ -1750,7 +1783,10 @@ public class ProcessService { * @param executePath executePath * @param logPath logPath */ - public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host, + public void changeTaskState(TaskInstance taskInstance, + ExecutionStatus state, + Date startTime, + String host, String executePath, String logPath) { taskInstance.setState(state); @@ -2567,6 +2603,65 @@ public class ProcessService { return processTaskMap; } + public DqExecuteResult getDqExecuteResultByTaskInstanceId(int taskInstanceId) { + return dqExecuteResultMapper.getExecuteResultById(taskInstanceId); + } + + public int updateDqExecuteResultUserId(int taskInstanceId) { + DqExecuteResult dqExecuteResult = + dqExecuteResultMapper.selectOne(new QueryWrapper().eq(TASK_INSTANCE_ID,taskInstanceId)); + if (dqExecuteResult == null) { + return -1; + } + + ProcessInstance processInstance = processInstanceMapper.selectById(dqExecuteResult.getProcessInstanceId()); + if (processInstance == null) { + return -1; + } + + ProcessDefinition processDefinition = 
processDefineMapper.queryByCode(processInstance.getProcessDefinitionCode()); + if (processDefinition == null) { + return -1; + } + + dqExecuteResult.setProcessDefinitionId(processDefinition.getId()); + dqExecuteResult.setUserId(processDefinition.getUserId()); + dqExecuteResult.setState(DqTaskState.DEFAULT.getCode()); + return dqExecuteResultMapper.updateById(dqExecuteResult); + } + + public int updateDqExecuteResultState(DqExecuteResult dqExecuteResult) { + return dqExecuteResultMapper.updateById(dqExecuteResult); + } + + public int deleteDqExecuteResultByTaskInstanceId(int taskInstanceId) { + return dqExecuteResultMapper.delete( + new QueryWrapper() + .eq(TASK_INSTANCE_ID,taskInstanceId)); + } + + public int deleteTaskStatisticsValueByTaskInstanceId(int taskInstanceId) { + return dqTaskStatisticsValueMapper.delete( + new QueryWrapper() + .eq(TASK_INSTANCE_ID,taskInstanceId)); + } + + public DqRule getDqRule(int ruleId) { + return dqRuleMapper.selectById(ruleId); + } + + public List getRuleInputEntry(int ruleId) { + return DqRuleUtils.transformInputEntry(dqRuleInputEntryMapper.getRuleInputEntryList(ruleId)); + } + + public List getDqExecuteSql(int ruleId) { + return dqRuleExecuteSqlMapper.getExecuteSqlList(ruleId); + } + + public DqComparisonType getComparisonTypeById(int id) { + return dqComparisonTypeMapper.selectById(id); + } + /** * the first time (when submit the task ) get the resource of the task group * @param taskId task id diff --git a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/entity/TaskExecutionContext.java b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/entity/TaskExecutionContext.java index 609566ab52..1fede5f22a 100644 --- a/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/entity/TaskExecutionContext.java +++ b/dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/queue/entity/TaskExecutionContext.java @@ -23,6 +23,7 @@ 
import org.apache.dolphinscheduler.common.process.Property; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.remote.command.Command; import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand; +import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.DataxTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.ProcedureTaskExecutionContext; import org.apache.dolphinscheduler.spi.task.request.SQLTaskExecutionContext; @@ -227,6 +228,11 @@ public class TaskExecutionContext implements Serializable { */ private SqoopTaskExecutionContext sqoopTaskExecutionContext; + /** + * data quality TaskExecutionContext + */ + private DataQualityTaskExecutionContext dataQualityTaskExecutionContext; + /** * taskInstance varPool */ @@ -557,6 +563,14 @@ public class TaskExecutionContext implements Serializable { this.sqoopTaskExecutionContext = sqoopTaskExecutionContext; } + public DataQualityTaskExecutionContext getDataQualityTaskExecutionContext() { + return dataQualityTaskExecutionContext; + } + + public void setDataQualityTaskExecutionContext(DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { + this.dataQualityTaskExecutionContext = dataQualityTaskExecutionContext; + } + public int getDryRun() { return dryRun; } @@ -606,6 +620,7 @@ public class TaskExecutionContext implements Serializable { + ", dependenceTaskExecutionContext=" + dependenceTaskExecutionContext + ", sqoopTaskExecutionContext=" + sqoopTaskExecutionContext + ", procedureTaskExecutionContext=" + procedureTaskExecutionContext + + ", dataQualityTaskExecutionContext=" + dataQualityTaskExecutionContext + '}'; } diff --git a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java index 
c7aab24a59..fce25851a1 100644 --- a/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java +++ b/dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java @@ -39,6 +39,10 @@ import org.apache.dolphinscheduler.common.task.spark.SparkParameters; import org.apache.dolphinscheduler.common.utils.DateUtils; import org.apache.dolphinscheduler.common.utils.JSONUtils; import org.apache.dolphinscheduler.dao.entity.Command; +import org.apache.dolphinscheduler.dao.entity.DqExecuteResult; +import org.apache.dolphinscheduler.dao.entity.DqRule; +import org.apache.dolphinscheduler.dao.entity.DqRuleExecuteSql; +import org.apache.dolphinscheduler.dao.entity.DqRuleInputEntry; import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionLog; import org.apache.dolphinscheduler.dao.entity.ProcessInstance; @@ -52,6 +56,11 @@ import org.apache.dolphinscheduler.dao.entity.TaskGroupQueue; import org.apache.dolphinscheduler.dao.entity.TaskInstance; import org.apache.dolphinscheduler.dao.entity.User; import org.apache.dolphinscheduler.dao.mapper.CommandMapper; +import org.apache.dolphinscheduler.dao.mapper.DqComparisonTypeMapper; +import org.apache.dolphinscheduler.dao.mapper.DqExecuteResultMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleExecuteSqlMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleInputEntryMapper; +import org.apache.dolphinscheduler.dao.mapper.DqRuleMapper; import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionLogMapper; import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; @@ -67,6 +76,12 @@ import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; import org.apache.dolphinscheduler.dao.mapper.UserMapper; import org.apache.dolphinscheduler.service.exceptions.ServiceException; import 
org.apache.dolphinscheduler.service.quartz.cron.CronUtilsTest; +import org.apache.dolphinscheduler.spi.params.base.FormType; +import org.apache.dolphinscheduler.spi.task.dq.enums.DqTaskState; +import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType; +import org.apache.dolphinscheduler.spi.task.dq.enums.InputType; +import org.apache.dolphinscheduler.spi.task.dq.enums.OptionSourceType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ValueType; import java.util.ArrayList; import java.util.Arrays; @@ -135,6 +150,21 @@ public class ProcessServiceTest { @Mock private TaskGroupQueueMapper taskGroupQueueMapper; + @Mock + private DqExecuteResultMapper dqExecuteResultMapper; + + @Mock + private DqRuleMapper dqRuleMapper; + + @Mock + private DqRuleInputEntryMapper dqRuleInputEntryMapper; + + @Mock + private DqRuleExecuteSqlMapper dqRuleExecuteSqlMapper; + + @Mock + private DqComparisonTypeMapper dqComparisonTypeMapper; + @Test public void testCreateSubCommand() { ProcessInstance parentInstance = new ProcessInstance(); @@ -540,6 +570,102 @@ public class ProcessServiceTest { } @Test + public void getDqRule() { + Mockito.when(dqRuleMapper.selectById(1)).thenReturn(new DqRule()); + Assert.assertNotNull(processService.getDqRule(1)); + } + + @Test + public void getRuleInputEntry() { + Mockito.when(dqRuleInputEntryMapper.getRuleInputEntryList(1)).thenReturn(getRuleInputEntryList()); + Assert.assertNotNull(processService.getRuleInputEntry(1)); + } + + @Test + public void getDqExecuteSql() { + Mockito.when(dqRuleExecuteSqlMapper.getExecuteSqlList(1)).thenReturn(getRuleExecuteSqlList()); + Assert.assertNotNull(processService.getDqExecuteSql(1)); + } + + private List getRuleInputEntryList() { + List list = new ArrayList<>(); + + DqRuleInputEntry srcConnectorType = new DqRuleInputEntry(); + srcConnectorType.setTitle("源数据类型"); + srcConnectorType.setField("src_connector_type"); + srcConnectorType.setType(FormType.SELECT.getFormType()); + srcConnectorType.setCanEdit(true); 
+ srcConnectorType.setShow(true); + srcConnectorType.setValue("JDBC"); + srcConnectorType.setPlaceholder("Please select the source connector type"); + srcConnectorType.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcConnectorType.setOptions("[{\"label\":\"HIVE\",\"value\":\"HIVE\"},{\"label\":\"JDBC\",\"value\":\"JDBC\"}]"); + srcConnectorType.setInputType(InputType.DEFAULT.getCode()); + srcConnectorType.setValueType(ValueType.NUMBER.getCode()); + srcConnectorType.setEmit(true); + + DqRuleInputEntry statisticsName = new DqRuleInputEntry(); + statisticsName.setTitle("统计值名"); + statisticsName.setField("statistics_name"); + statisticsName.setType(FormType.INPUT.getFormType()); + statisticsName.setCanEdit(true); + statisticsName.setShow(true); + statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql"); + statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + statisticsName.setInputType(InputType.DEFAULT.getCode()); + statisticsName.setValueType(ValueType.STRING.getCode()); + statisticsName.setEmit(false); + + DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry(); + statisticsExecuteSql.setTitle("统计值计算SQL"); + statisticsExecuteSql.setField("statistics_execute_sql"); + statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType()); + statisticsExecuteSql.setCanEdit(true); + statisticsExecuteSql.setShow(true); + statisticsExecuteSql.setPlaceholder("Please enter the statistics execute sql"); + statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + statisticsExecuteSql.setValueType(ValueType.LIKE_SQL.getCode()); + statisticsExecuteSql.setEmit(false); + + list.add(srcConnectorType); + list.add(statisticsName); + list.add(statisticsExecuteSql); + + return list; + } + + private List getRuleExecuteSqlList() { + List list = new ArrayList<>(); + + DqRuleExecuteSql executeSqlDefinition = new DqRuleExecuteSql(); + executeSqlDefinition.setIndex(0); + 
executeSqlDefinition.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})"); + executeSqlDefinition.setTableAlias("total_count"); + executeSqlDefinition.setType(ExecuteSqlType.COMPARISON.getCode()); + list.add(executeSqlDefinition); + + return list; + } + + public DqExecuteResult getExecuteResult() { + DqExecuteResult dqExecuteResult = new DqExecuteResult(); + dqExecuteResult.setId(1); + dqExecuteResult.setState(DqTaskState.FAILURE.getCode()); + + return dqExecuteResult; + } + + public List getExecuteResultList() { + + List list = new ArrayList<>(); + DqExecuteResult dqExecuteResult = new DqExecuteResult(); + dqExecuteResult.setId(1); + dqExecuteResult.setState(DqTaskState.FAILURE.getCode()); + list.add(dqExecuteResult); + + return list; + } + public void testSaveTaskDefine() { User operator = new User(); operator.setId(-1); @@ -821,5 +947,4 @@ public class ProcessServiceTest { taskGroupQueue.setCreateTime(date); return taskGroupQueue; } - } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java index 3809c52269..2055249d8a 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/enums/DbType.java @@ -64,4 +64,7 @@ public enum DbType { return null; } + public boolean isHive() { + return this == DbType.HIVE; + } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/FormType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/FormType.java index ad692505d6..a3bde26264 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/FormType.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/FormType.java @@ -17,7 +17,13 @@ package 
org.apache.dolphinscheduler.spi.params.base; +import static java.util.stream.Collectors.toMap; + +import java.util.Arrays; +import java.util.Map; + import com.fasterxml.jackson.annotation.JsonValue; +import com.google.common.base.Functions; public enum FormType { @@ -35,7 +41,9 @@ public enum FormType { CASCADER("cascader"), UPLOAD("upload"), ELTRANSFER("el-transfer"), - TREE("tree"); + TREE("tree"), + TEXTAREA("textarea"), + GROUP("group"); private String formType; @@ -47,4 +55,14 @@ public enum FormType { public String getFormType() { return this.formType; } + + private static final Map FORM_TYPE_MAP = + Arrays.stream(FormType.values()).collect(toMap(FormType::getFormType, Functions.identity())); + + public static FormType of(String type) { + if (FORM_TYPE_MAP.containsKey(type)) { + return FORM_TYPE_MAP.get(type); + } + return null; + } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/ParamsOptions.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/ParamsOptions.java index 6b3fc25eb1..91d493637c 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/ParamsOptions.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/ParamsOptions.java @@ -34,6 +34,8 @@ public class ParamsOptions { */ private boolean disabled; + public ParamsOptions() {} + public ParamsOptions(String label, Object value, boolean disabled) { this.label = label; this.value = value; diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/PluginParams.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/PluginParams.java index 042446a7c4..1b65133c94 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/PluginParams.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/base/PluginParams.java @@ -17,6 +17,7 @@ package 
org.apache.dolphinscheduler.spi.params.base; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_EMIT; import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_FIELD; import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_NAME; import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_PROPS; @@ -78,6 +79,9 @@ public class PluginParams { @JsonProperty(STRING_PLUGIN_PARAM_VALIDATE) protected List validateList; + @JsonProperty(STRING_PLUGIN_PARAM_EMIT) + protected List emit; + /** * whether to hide, the default value is false */ @@ -109,7 +113,7 @@ public class PluginParams { this.info = builder.info; this.display = builder.display; this.hidden = builder.hidden; - + this.emit = builder.emit; } @JsonPOJOBuilder(buildMethodName = "build", withPrefix = "set") @@ -132,6 +136,8 @@ public class PluginParams { protected List validateList; + protected List emit; + protected Boolean hidden; protected Boolean display; @@ -157,6 +163,7 @@ public class PluginParams { @JsonProperty("value") Object value, @JsonProperty("name") String fieldName, @JsonProperty("validate") List validateList, + @JsonProperty("emit") List emit, @JsonProperty("info") String info, @JsonProperty("hidden") Boolean hidden, @JsonProperty("display") Boolean display @@ -171,6 +178,7 @@ public class PluginParams { this.value = value; this.validateList = validateList; this.fieldName = fieldName; + this.emit = emit; this.info = info; this.hidden = hidden; this.display = display; @@ -209,6 +217,10 @@ public class PluginParams { this.value = value; } + public List getEmit() { + return emit; + } + } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/group/GroupParam.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/group/GroupParam.java new file mode 100644 index 0000000000..3268b6a4ee --- /dev/null +++ 
b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/group/GroupParam.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.params.group; + +import org.apache.dolphinscheduler.spi.params.base.FormType; +import org.apache.dolphinscheduler.spi.params.base.PluginParams; +import org.apache.dolphinscheduler.spi.params.base.Validate; + +import java.util.ArrayList; +import java.util.List; + +/** + * Text param + */ +public class GroupParam extends PluginParams { + + private GroupParam(Builder builder) { + super(builder); + } + + public static Builder newBuilder(String field, String title) { + return new Builder(field, title); + } + + public static class Builder extends PluginParams.Builder { + + public Builder(String field, String title) { + super(field, FormType.GROUP, title); + } + + public Builder addValidate(Validate validate) { + if (this.validateList == null) { + this.validateList = new ArrayList<>(); + } + this.validateList.add(validate); + return this; + } + + public Builder setField(String field) { + this.name = field; + return this; + } + + public Builder setTitle(String title) { + this.title = title; + return this; + } + + public 
Builder setValue(Object value) { + this.value = value; + return this; + } + + public Builder setValidateList(List validateList) { + this.validateList = validateList; + return this; + } + + public Builder setEmit(List emit) { + this.emit = emit; + return this; + } + + public Builder setProps(GroupParamsProps props) { + this.props = props; + return this; + } + + public Builder setRules(List rules) { + if (this.props == null) { + this.setProps(new GroupParamsProps()); + } + + ((GroupParamsProps)this.props).setRules(rules); + return this; + } + + @Override + public GroupParam build() { + return new GroupParam(this); + } + } +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/group/GroupParamsProps.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/group/GroupParamsProps.java new file mode 100644 index 0000000000..9f42f454ea --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/group/GroupParamsProps.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.params.group; + +import org.apache.dolphinscheduler.spi.params.base.ParamsProps; +import org.apache.dolphinscheduler.spi.params.base.PluginParams; + +import java.util.List; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * the props field in form-create`s json rule + */ +public class GroupParamsProps extends ParamsProps { + + private List rules; + + private int fontSize; + + @JsonProperty("rules") + public List getRules() { + return rules; + } + + public GroupParamsProps setRules(List rules) { + this.rules = rules; + return this; + } + + @JsonProperty("fontSize") + public int getFontSize() { + return fontSize; + } + + public GroupParamsProps setFontSize(int fontSize) { + this.fontSize = fontSize; + return this; + } +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/input/InputParam.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/input/InputParam.java index a771f9796e..2a16bd933e 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/input/InputParam.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/input/InputParam.java @@ -30,11 +30,12 @@ import java.util.List; */ public class InputParam extends PluginParams { + private final InputParamProps props; + private InputParam(Builder builder) { super(builder); + this.props = builder.props; } - - private InputParamProps props; public static Builder newBuilder(String name, String title) { return new Builder(name, title); @@ -105,6 +106,38 @@ public class InputParam extends PluginParams { return this; } + public Builder setEmit(List emit) { + this.emit = emit; + return this; + } + + public Builder setSize(String size) { + if (this.props == null) { + this.setProps(new InputParamProps()); + } + + this.props.setSize(size); + return this; + } + + public Builder setType(String type) { + if (this.props == null) { + 
this.setProps(new InputParamProps()); + } + + this.props.setType(type); + return this; + } + + public Builder setRows(int rows) { + if (this.props == null) { + this.setProps(new InputParamProps()); + } + + this.props.setRows(rows); + return this; + } + @Override public InputParam build() { return new InputParam(this); diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/select/SelectParam.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/select/SelectParam.java index 028874aa61..e0a731b1e6 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/select/SelectParam.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/params/select/SelectParam.java @@ -18,6 +18,7 @@ package org.apache.dolphinscheduler.spi.params.select; import static org.apache.dolphinscheduler.spi.params.base.FormType.SELECT; +import static org.apache.dolphinscheduler.spi.utils.Constants.STRING_PLUGIN_PARAM_OPTIONS; import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; import org.apache.dolphinscheduler.spi.params.base.PluginParams; @@ -26,17 +27,19 @@ import org.apache.dolphinscheduler.spi.params.base.Validate; import java.util.LinkedList; import java.util.List; +import com.fasterxml.jackson.annotation.JsonProperty; + /** * front-end select component */ public class SelectParam extends PluginParams { - private List options; - - private SelectParamProps props; + @JsonProperty(STRING_PLUGIN_PARAM_OPTIONS) + private final List options; private SelectParam(Builder builder) { super(builder); + this.options = builder.options; } public static Builder newBuilder(String name, String title) { @@ -51,8 +54,6 @@ public class SelectParam extends PluginParams { private List options; - private SelectParamProps props; - public Builder setOptions(List options) { this.options = options; return this; @@ -114,14 +115,37 @@ public class SelectParam extends PluginParams { this.display = 
display; return this; } + + public Builder setEmit(List emit) { + this.emit = emit; + return this; + } + + public Builder setPlaceHolder(String placeholder) { + if (this.props == null) { + this.setProps(new SelectParamProps()); + } + + this.props.setPlaceholder(placeholder); + return this; + } + + public Builder setSize(String size) { + if (this.props == null) { + this.setProps(new SelectParamProps()); + } + + this.props.setSize(size); + return this; + } + + @Override + public SelectParam build() { + return new SelectParam(this); + } } public List getOptions() { return options; } - - @Override - public SelectParamProps getProps() { - return props; - } } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java index 2b410e3f40..501b4ab574 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/TaskConstants.java @@ -36,6 +36,11 @@ public class TaskConstants { public static final int EXIT_CODE_KILL = 137; public static final String PID = "pid"; + /** + * QUESTION ? 
+ */ + public static final String QUESTION = "?"; + /** * comma , */ @@ -88,6 +93,10 @@ public class TaskConstants { * AT SIGN */ public static final String AT_SIGN = "@"; + /** + * UNDERLINE + */ + public static final String UNDERLINE = "_"; /** * sleep time @@ -340,10 +349,24 @@ public class TaskConstants { */ public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state"; + /** + * Task Logger Thread's name + */ + public static final String TASK_LOGGER_THREAD_NAME = "TaskLogInfo"; + /** * hdfs/s3 configuration * resource.upload.path */ public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path"; -} \ No newline at end of file + /** + * data.quality.jar.name + */ + public static final String DATA_QUALITY_JAR_NAME = "data-quality.jar.name"; + + /** + * data.quality.error.output.path + */ + public static final String DATA_QUALITY_ERROR_OUTPUT_PATH = "data-quality.error.output.path"; +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/CheckType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/CheckType.java new file mode 100644 index 0000000000..6217c873e7 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/CheckType.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * quality task result check type + */ +public enum CheckType { + /** + * 0-comparison_minus_statistics + * 1-statistics_minus_comparison + * 2-statistics_comparison_percentage + * 3-statistics_comparison_difference_comparison_percentage + */ + COMPARISON_MINUS_STATISTICS(0,"comparison_minus_statistics"), + STATISTICS_MINUS_COMPARISON(1,"statistics_minus_comparison"), + STATISTICS_COMPARISON_PERCENTAGE(2,"statistics_comparison_percentage"), + STATISTICS_COMPARISON_DIFFERENCE_COMPARISON_PERCENTAGE(3,"statistics_comparison_difference_comparison_percentage"); + + CheckType(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (CheckType type : CheckType.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static CheckType of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ConnectorType.java 
b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ConnectorType.java new file mode 100644 index 0000000000..8d28a5d107 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ConnectorType.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * connector type + */ +public enum ConnectorType { + /** + * 0-jdbc + * 1-hive + */ + JDBC(0,"JDBC"), + HIVE(1,"HIVE"); + + ConnectorType(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (ConnectorType type : ConnectorType.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static ConnectorType of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqFailureStrategy.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqFailureStrategy.java new file mode 100644 index 0000000000..97bf00525b --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqFailureStrategy.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +/** + * failure policy when dqs task node failed. + */ +public enum DqFailureStrategy { + /** + * 0-alert and continue when dqc tasks failed. + * 1-alert and block when dqc tasks failed. + **/ + ALERT(0, "alert"), + BLOCK(1, "block"); + + DqFailureStrategy(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (DqFailureStrategy type : DqFailureStrategy.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static DqFailureStrategy of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqTaskState.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqTaskState.java new file mode 100644 index 0000000000..ee1c04bd28 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqTaskState.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * data quality task state + */ +public enum DqTaskState { + /** + * 0-default + * 1-success + * 2-failure + */ + DEFAULT(0,"default"), + SUCCESS(1,"success"), + FAILURE(2,"failure"); + + DqTaskState(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (DqTaskState type : DqTaskState.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static DqTaskState of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ExecuteSqlType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ExecuteSqlType.java new file mode 100644 index 
0000000000..5dfc2719cb --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ExecuteSqlType.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * execute sql type + */ +public enum ExecuteSqlType { + /** + * 0-middle + * 1-statistics + * 2-comparison + */ + MIDDLE(0,"middle"), + STATISTICS(1,"statistics"), + COMPARISON(2,"comparison"); + + ExecuteSqlType(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (ExecuteSqlType type : ExecuteSqlType.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static ExecuteSqlType of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + 
status); + } +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/InputType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/InputType.java new file mode 100644 index 0000000000..9ab8761f32 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/InputType.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * frontend form input entry type + */ +public enum InputType { + /** + * 0-default + * 1-statistics + * 2-comparison + * 3-check + */ + DEFAULT(0,"default"), + STATISTICS(1,"statistics"), + COMPARISON(2,"comparison"), + CHECK(3,"check"); + + InputType(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (InputType type : InputType.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static InputType of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/OperatorType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/OperatorType.java new file mode 100644 index 0000000000..4edbd6062f --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/OperatorType.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * operator type + */ +public enum OperatorType { + /** + * 0-equal + * 1-little than + * 2-little and equal + * 3-great than + * 4-great and equal + * 5-not equal + */ + EQ(0,"equal"), + LT(1,"little than"), + LE(2,"little and equal"), + GT(3,"great than"), + GE(4,"great and equal"), + NE(5,"not equal"); + + OperatorType(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (OperatorType type : OperatorType.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static OperatorType of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/OptionSourceType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/OptionSourceType.java new file mode 100644 index 0000000000..8533c7f230 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/OptionSourceType.java @@ -0,0 
+1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * form options source type + */ +public enum OptionSourceType { + /** + * 0-default + * 1-datasource_id + * 2-datasource_type + * 3-comparison_type + */ + DEFAULT(0,"default"), + DATASOURCE_ID(1,"datasource_id"), + DATASOURCE_TYPE(2,"datasource_type"), + COMPARISON_TYPE(3,"comparison_type"); + + OptionSourceType(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (OptionSourceType type : OptionSourceType.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static OptionSourceType of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} \ No newline at end of file diff 
--git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/RuleType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/RuleType.java new file mode 100644 index 0000000000..866beebb48 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/RuleType.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * rule type + */ +public enum RuleType { + /** + * 0-single_table + * 1-single_table_custom_sql + * 2-multi_table_accuracy + * 3-multi_table_comparison + */ + SINGLE_TABLE(0,"single_table"), + SINGLE_TABLE_CUSTOM_SQL(1,"single_table_custom_sql"), + MULTI_TABLE_ACCURACY(2,"multi_table_accuracy"), + MULTI_TABLE_COMPARISON(3,"multi_table_comparison"); + + RuleType(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (RuleType type : RuleType.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static RuleType of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ValueType.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ValueType.java new file mode 100644 index 0000000000..1e7bd3037a --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/enums/ValueType.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * rule input entry value type + */ +public enum ValueType { + /** + * 0-string + * 1-list + * 2-number + * 3-sql + */ + STRING(0,"string"), + LIST(1,"list"), + NUMBER(2,"number"), + LIKE_SQL(3,"sql"); + + ValueType(int code, String description) { + this.code = code; + this.description = description; + } + + private final int code; + private final String description; + + @JsonValue + public int getCode() { + return code; + } + + public String getDescription() { + return description; + } + + private static final Map VALUES_MAP = new HashMap<>(); + + static { + for (ValueType type : ValueType.values()) { + VALUES_MAP.put(type.code,type); + } + } + + public static ValueType of(Integer status) { + if (VALUES_MAP.containsKey(status)) { + return VALUES_MAP.get(status); + } + throw new IllegalArgumentException("invalid code : " + status); + } +} \ No newline at end of file diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/model/JdbcInfo.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/model/JdbcInfo.java new file mode 100644 index 0000000000..1ac65c1fbd --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/model/JdbcInfo.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
/**
 * Value object holding the pieces of a parsed JDBC URL: host, port, driver
 * name, database, trailing query params, and the reassembled base address.
 */
public class JdbcInfo {

    private String host;
    private String port;
    private String driverName;
    private String database;
    private String params;
    private String address;

    public String getHost() {
        return host;
    }

    public String getPort() {
        return port;
    }

    public String getDriverName() {
        return driverName;
    }

    public String getDatabase() {
        return database;
    }

    public String getParams() {
        return params;
    }

    public String getAddress() {
        return address;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public void setPort(String port) {
        this.port = port;
    }

    public void setDriverName(String driverName) {
        this.driverName = driverName;
    }

    public void setDatabase(String database) {
        this.database = database;
    }

    public void setParams(String params) {
        this.params = params;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    @Override
    public String toString() {
        return "JdbcInfo{"
                + "host='" + host + '\''
                + ", port='" + port + '\''
                + ", driverName='" + driverName + '\''
                + ", database='" + database + '\''
                + ", params='" + params + '\''
                + ", address='" + address + '\''
                + '}';
    }
}
/**
 * Parameter-name and keyword constants shared by the data quality task:
 * rule input-entry keys, connection keys, and database type names.
 */
public class DataQualityConstants {

    private DataQualityConstants() {
        throw new IllegalStateException("Utility class");
    }

    // ---- source reader input entries ----
    public static final String SRC_CONNECTOR_TYPE = "src_connector_type";
    public static final String SRC_DATASOURCE_ID = "src_datasource_id";
    public static final String SRC_TABLE = "src_table";
    public static final String SRC_FILTER = "src_filter";
    public static final String SRC_FIELD = "src_field";

    // ---- target reader input entries ----
    public static final String TARGET_CONNECTOR_TYPE = "target_connector_type";
    public static final String TARGET_DATASOURCE_ID = "target_datasource_id";
    public static final String TARGET_TABLE = "target_table";
    public static final String TARGET_FILTER = "target_filter";
    public static final String TARGET_FIELD = "target_field";

    // ---- statistics / comparison input entries ----
    public static final String STATISTICS_NAME = "statistics_name";
    public static final String STATISTICS_EXECUTE_SQL = "statistics_execute_sql";
    public static final String COMPARISON_NAME = "comparison_name";
    public static final String COMPARISON_TYPE = "comparison_type";
    public static final String COMPARISON_VALUE = "comparison_value";
    public static final String COMPARISON_EXECUTE_SQL = "comparison_execute_sql";
    public static final String MAPPING_COLUMNS = "mapping_columns";
    public static final String ON_CLAUSE = "on_clause";
    public static final String WHERE_CLAUSE = "where_clause";
    public static final String CHECK_TYPE = "check_type";
    public static final String THRESHOLD = "threshold";
    public static final String OPERATOR = "operator";
    public static final String FAILURE_STRATEGY = "failure_strategy";
    public static final String STATISTICS_TABLE = "statistics_table";
    public static final String COMPARISON_TABLE = "comparison_table";
    public static final String AND = " AND ";

    // ---- writer input entries ----
    public static final String WRITER_CONNECTOR_TYPE = "writer_connector_type";
    public static final String WRITER_DATASOURCE_ID = "writer_datasource_id";
    public static final String UNIQUE_CODE = "unique_code";
    public static final String DATA_TIME = "data_time";
    public static final String REGEXP_PATTERN = "regexp_pattern";
    public static final String ERROR_OUTPUT_PATH = "error_output_path";
    public static final String INDEX = "index";
    public static final String PATH = "path";
    public static final String HDFS_FILE = "hdfs_file";
    public static final String BATCH = "batch";

    // ---- rule metadata keys ----
    public static final String RULE_ID = "rule_id";
    public static final String RULE_TYPE = "rule_type";
    public static final String RULE_NAME = "rule_name";
    public static final String CREATE_TIME = "create_time";
    public static final String UPDATE_TIME = "update_time";
    public static final String PROCESS_DEFINITION_ID = "process_definition_id";
    public static final String PROCESS_INSTANCE_ID = "process_instance_id";
    public static final String TASK_INSTANCE_ID = "task_instance_id";

    // ---- connection parameter keys ----
    public static final String ADDRESS = "address";
    public static final String DATABASE = "database";
    public static final String JDBC_URL = "jdbcUrl";
    public static final String PRINCIPAL = "principal";
    public static final String OTHER = "other";
    public static final String ORACLE_DB_CONNECT_TYPE = "connectType";

    // ---- reader/writer config keys ----
    public static final String TABLE = "table";
    public static final String URL = "url";
    public static final String DRIVER = "driver";
    public static final String SQL = "sql";
    public static final String INPUT_TABLE = "input_table";
    public static final String OUTPUT_TABLE = "output_table";
    public static final String TMP_TABLE = "tmp_table";

    public static final String USER = "user";
    public static final String PASSWORD = "password";

    /**
     * database type
     */
    public static final String MYSQL = "MYSQL";
    public static final String POSTGRESQL = "POSTGRESQL";

}
a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/JdbcUrlParser.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/JdbcUrlParser.java new file mode 100644 index 0000000000..a1d246b2f7 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/JdbcUrlParser.java @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.task.dq.utils; + +import static org.apache.dolphinscheduler.spi.task.TaskConstants.COLON; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.DOUBLE_SLASH; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.QUESTION; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.SEMICOLON; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.SINGLE_SLASH; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.MYSQL; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.POSTGRESQL; + +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.task.dq.model.JdbcInfo; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +/** + * JdbcUrlParser + */ +public class JdbcUrlParser { + + private JdbcUrlParser() { + throw new IllegalStateException("Utility class"); + } + + public static DbType getDbType(String datasourceType) { + switch (datasourceType.toUpperCase()) { + case MYSQL: + return DbType.MYSQL; + case POSTGRESQL: + return DbType.POSTGRESQL; + default: + return null; + } + } + + public static JdbcInfo getJdbcInfo(String jdbcUrl) { + + JdbcInfo jdbcInfo = new JdbcInfo(); + + int pos; + int pos1; + int pos2; + String tempUri; + + if (jdbcUrl == null || !jdbcUrl.startsWith("jdbc:") || (pos1 = jdbcUrl.indexOf(COLON, 5)) == -1) { + return null; + } + + String driverName = jdbcUrl.substring(5, pos1); + String params = ""; + String host = ""; + String database = ""; + String port = ""; + if (((pos2 = jdbcUrl.indexOf(SEMICOLON, pos1)) == -1) && ((pos2 = jdbcUrl.indexOf(QUESTION, pos1)) == -1)) { + tempUri = jdbcUrl.substring(pos1 + 1); + } else { + tempUri = jdbcUrl.substring(pos1 + 1, pos2); + params = jdbcUrl.substring(pos2 + 1); + } + + if (tempUri.startsWith(DOUBLE_SLASH)) { + if ((pos = tempUri.indexOf(SINGLE_SLASH, 2)) != -1) { + host = tempUri.substring(2, pos); + database = 
tempUri.substring(pos + 1); + + if ((pos = host.indexOf(COLON)) != -1) { + port = host.substring(pos + 1); + host = host.substring(0, pos); + } + } + } else { + database = tempUri; + } + + if (StringUtils.isEmpty(database)) { + return null; + } + + if (database.contains(QUESTION)) { + database = database.substring(0, database.indexOf(QUESTION)); + } + + if (database.contains(SEMICOLON)) { + database = database.substring(0, database.indexOf(SEMICOLON)); + } + + jdbcInfo.setDriverName(driverName); + jdbcInfo.setHost(host); + jdbcInfo.setPort(port); + jdbcInfo.setDatabase(database); + jdbcInfo.setParams(params); + jdbcInfo.setAddress("jdbc:" + driverName + "://" + host + COLON + port); + + return jdbcInfo; + } +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/Md5Utils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/Md5Utils.java new file mode 100644 index 0000000000..e4778ed66d --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/dq/utils/Md5Utils.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.task.dq.utils; + +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.util.Base64; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Md5Utils + */ +public class Md5Utils { + + private static final Logger logger = LoggerFactory.getLogger(Md5Utils.class); + + private Md5Utils() { + throw new IllegalStateException("Utility class"); + } + + public static String getMd5(String src, boolean isUpper) { + String md5 = ""; + try { + MessageDigest md = MessageDigest.getInstance("SHA-256"); + Base64.Encoder encoder = Base64.getEncoder(); + md5 = encoder.encodeToString(md.digest(src.getBytes(StandardCharsets.UTF_8))); + } catch (Exception e) { + logger.error("get md5 error: {}", e.getMessage()); + } + + if (isUpper) { + md5 = md5.toUpperCase(); + } + + return md5; + } +} diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/DataQualityTaskExecutionContext.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/DataQualityTaskExecutionContext.java new file mode 100644 index 0000000000..4ffd62d969 --- /dev/null +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/task/request/DataQualityTaskExecutionContext.java @@ -0,0 +1,291 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/**
 * DataQualityTaskExecutionContext
 *
 * <p>Carries everything a worker needs to execute one data quality task: the
 * rule definition (id/name/type, serialized input entries and SQL list), the
 * source and target reader connections, the result-writer connection, and the
 * statistics-value store. Connection params and the entry/SQL lists are kept
 * as serialized strings and decoded by the consumer.
 */
public class DataQualityTaskExecutionContext implements Serializable {

    // Explicit UID so serialized instances stay compatible across recompiles;
    // the original relied on the JVM-computed default.
    private static final long serialVersionUID = 1L;

    /** rule id */
    private int ruleId;
    /** rule name */
    private String ruleName;
    /** rule type */
    private int ruleType;
    /** input entry list (serialized) */
    private String ruleInputEntryList;
    /** execute sql list (serialized) */
    private String executeSqlList;
    /** if comparison value is calculated from the statistics value table */
    private boolean comparisonNeedStatisticsValueTable = false;
    /** compare with a fixed value */
    private boolean compareWithFixedValue = false;
    /** error output path */
    private String hdfsPath;
    /** source connector type */
    private String sourceConnectorType;
    /** source type */
    private int sourceType;
    /** source connection params (serialized) */
    private String sourceConnectionParams;
    /** target connector type */
    private String targetConnectorType;
    /** target type */
    private int targetType;
    /** target connection params (serialized) */
    private String targetConnectionParams;
    /** writer connector type */
    private String writerConnectorType;
    /** writer type */
    private int writerType;
    /** writer table */
    private String writerTable;
    /** writer connection params (serialized) */
    private String writerConnectionParams;
    /** statistics value connector type */
    private String statisticsValueConnectorType;
    /** statistics value type */
    private int statisticsValueType;
    /** statistics value table */
    private String statisticsValueTable;
    /** statistics value writer connection params (serialized) */
    private String statisticsValueWriterConnectionParams;

    public int getRuleId() {
        return ruleId;
    }

    public void setRuleId(int ruleId) {
        this.ruleId = ruleId;
    }

    public String getSourceConnectorType() {
        return sourceConnectorType;
    }

    public void setSourceConnectorType(String sourceConnectorType) {
        this.sourceConnectorType = sourceConnectorType;
    }

    public int getSourceType() {
        return sourceType;
    }

    public void setSourceType(int sourceType) {
        this.sourceType = sourceType;
    }

    public String getSourceConnectionParams() {
        return sourceConnectionParams;
    }

    public void setSourceConnectionParams(String sourceConnectionParams) {
        this.sourceConnectionParams = sourceConnectionParams;
    }

    public String getTargetConnectorType() {
        return targetConnectorType;
    }

    public void setTargetConnectorType(String targetConnectorType) {
        this.targetConnectorType = targetConnectorType;
    }

    public int getTargetType() {
        return targetType;
    }

    public void setTargetType(int targetType) {
        this.targetType = targetType;
    }

    public String getTargetConnectionParams() {
        return targetConnectionParams;
    }

    public void setTargetConnectionParams(String targetConnectionParams) {
        this.targetConnectionParams = targetConnectionParams;
    }

    public int getWriterType() {
        return writerType;
    }

    public void setWriterType(int writerType) {
        this.writerType = writerType;
    }

    public String getWriterConnectionParams() {
        return writerConnectionParams;
    }

    public void setWriterConnectionParams(String writerConnectionParams) {
        this.writerConnectionParams = writerConnectionParams;
    }

    public String getWriterTable() {
        return writerTable;
    }

    public void setWriterTable(String writerTable) {
        this.writerTable = writerTable;
    }

    public String getWriterConnectorType() {
        return writerConnectorType;
    }

    public void setWriterConnectorType(String writerConnectorType) {
        this.writerConnectorType = writerConnectorType;
    }

    public String getStatisticsValueConnectorType() {
        return statisticsValueConnectorType;
    }

    public void setStatisticsValueConnectorType(String statisticsValueConnectorType) {
        this.statisticsValueConnectorType = statisticsValueConnectorType;
    }

    public int getStatisticsValueType() {
        return statisticsValueType;
    }

    public void setStatisticsValueType(int statisticsValueType) {
        this.statisticsValueType = statisticsValueType;
    }

    public String getStatisticsValueTable() {
        return statisticsValueTable;
    }

    public void setStatisticsValueTable(String statisticsValueTable) {
        this.statisticsValueTable = statisticsValueTable;
    }

    public String getStatisticsValueWriterConnectionParams() {
        return statisticsValueWriterConnectionParams;
    }

    public void setStatisticsValueWriterConnectionParams(String statisticsValueWriterConnectionParams) {
        this.statisticsValueWriterConnectionParams = statisticsValueWriterConnectionParams;
    }

    public String getRuleName() {
        return ruleName;
    }

    public void setRuleName(String ruleName) {
        this.ruleName = ruleName;
    }

    public int getRuleType() {
        return ruleType;
    }

    public void setRuleType(int ruleType) {
        this.ruleType = ruleType;
    }

    public String getRuleInputEntryList() {
        return ruleInputEntryList;
    }

    public void setRuleInputEntryList(String ruleInputEntryList) {
        this.ruleInputEntryList = ruleInputEntryList;
    }

    public String getExecuteSqlList() {
        return executeSqlList;
    }

    public void setExecuteSqlList(String executeSqlList) {
        this.executeSqlList = executeSqlList;
    }

    public boolean isComparisonNeedStatisticsValueTable() {
        return comparisonNeedStatisticsValueTable;
    }

    public void setComparisonNeedStatisticsValueTable(boolean comparisonNeedStatisticsValueTable) {
        this.comparisonNeedStatisticsValueTable = comparisonNeedStatisticsValueTable;
    }

    public boolean isCompareWithFixedValue() {
        return compareWithFixedValue;
    }

    public void setCompareWithFixedValue(boolean compareWithFixedValue) {
        this.compareWithFixedValue = compareWithFixedValue;
    }

    public String getHdfsPath() {
        return hdfsPath;
    }

    public void setHdfsPath(String hdfsPath) {
        this.hdfsPath = hdfsPath;
    }
}
a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/CollectionUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/CollectionUtils.java @@ -164,5 +164,4 @@ public class CollectionUtils { } return count; } - } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java index abe0672241..8effe9a256 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/Constants.java @@ -41,6 +41,8 @@ public class Constants { public static final String STRING_PLUGIN_PARAM_VALIDATE = "validate"; /** alert plugin param options string **/ public static final String STRING_PLUGIN_PARAM_OPTIONS = "options"; + /**plugin param emit string **/ + public static final String STRING_PLUGIN_PARAM_EMIT = "emit"; /** string true */ @@ -72,6 +74,30 @@ public class Constants { */ public static final String YYYYMMDDHHMMSSSSS = "yyyyMMddHHmmssSSS"; + /** + * double brackets left + */ + public static final String DOUBLE_BRACKETS_LEFT = "{{"; + + /** + * double brackets left + */ + public static final String DOUBLE_BRACKETS_RIGHT = "}}"; + + /** + * double brackets left + */ + public static final String DOUBLE_BRACKETS_LEFT_SPACE = "{ {"; + + /** + * double brackets left + */ + public static final String DOUBLE_BRACKETS_RIGHT_SPACE = "} }"; + + public static final String SMALL = "small"; + + public static final String CHANGE = "change"; + public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle"; public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive"; @@ -199,4 +225,5 @@ public class Constants { public static final String DATASOURCE_ENCRYPTION_SALT_DEFAULT = "!@#$%^&*"; public static final String DATASOURCE_ENCRYPTION_ENABLE = 
"datasource.encryption.enable"; public static final String DATASOURCE_ENCRYPTION_SALT = "datasource.encryption.salt"; + } diff --git a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java index 4c116e817a..0190e170d2 100644 --- a/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java +++ b/dolphinscheduler-spi/src/main/java/org/apache/dolphinscheduler/spi/utils/StringUtils.java @@ -17,8 +17,14 @@ package org.apache.dolphinscheduler.spi.utils; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.SINGLE_QUOTES; + +import java.io.IOException; +import java.io.StringWriter; +import java.io.Writer; import java.util.Collection; import java.util.Iterator; +import java.util.Locale; /** * java.lang.String utils class @@ -171,4 +177,121 @@ public class StringUtils { } } } + + public static String escapeJava(String str) { + return escapeJavaStyleString(str, false, false); + } + + private static String escapeJavaStyleString(String str, boolean escapeSingleQuotes, boolean escapeForwardSlash) { + if (str == null) { + return null; + } + try { + StringWriter writer = new StringWriter(str.length() * 2); + escapeJavaStyleString(writer, str, escapeSingleQuotes, escapeForwardSlash); + return writer.toString(); + } catch (IOException ioe) { + // this should never ever happen while writing to a StringWriter + throw new RuntimeException(ioe); + } + } + + private static void escapeJavaStyleString(Writer out, String str, boolean escapeSingleQuote, + boolean escapeForwardSlash) throws IOException { + if (out == null) { + throw new IllegalArgumentException("The Writer must not be null"); + } + if (str == null) { + return; + } + int sz; + sz = str.length(); + for (int i = 0; i < sz; i++) { + char ch = str.charAt(i); + + // handle unicode + if (ch > 0xfff) { + out.write("\\u" + hex(ch)); + } else if (ch > 0xff) { + 
out.write("\\u0" + hex(ch)); + } else if (ch > 0x7f) { + out.write("\\u00" + hex(ch)); + } else if (ch < 32) { + switch (ch) { + case '\b' : + out.write('\\'); + out.write('b'); + break; + case '\n' : + out.write('\\'); + out.write('n'); + break; + case '\t' : + out.write('\\'); + out.write('t'); + break; + case '\f' : + out.write('\\'); + out.write('f'); + break; + case '\r' : + out.write('\\'); + out.write('r'); + break; + default : + if (ch > 0xf) { + out.write("\\u00" + hex(ch)); + } else { + out.write("\\u000" + hex(ch)); + } + break; + } + } else { + switch (ch) { + case '\'' : + if (escapeSingleQuote) { + out.write('\\'); + } + out.write('\''); + break; + case '"' : + out.write('\\'); + out.write('"'); + break; + case '\\' : + out.write('\\'); + out.write('\\'); + break; + case '/' : + if (escapeForwardSlash) { + out.write('\\'); + } + out.write('/'); + break; + default : + out.write(ch); + break; + } + } + } + } + + private static String hex(char ch) { + return Integer.toHexString(ch).toUpperCase(Locale.ENGLISH); + } + + public static String wrapperSingleQuotes(String value) { + return SINGLE_QUOTES + value + SINGLE_QUOTES; + } + + public static String replaceDoubleBrackets(String mainParameter) { + mainParameter = mainParameter + .replace(Constants.DOUBLE_BRACKETS_LEFT, Constants.DOUBLE_BRACKETS_LEFT_SPACE) + .replace(Constants.DOUBLE_BRACKETS_RIGHT, Constants.DOUBLE_BRACKETS_RIGHT_SPACE); + if (mainParameter.contains(Constants.DOUBLE_BRACKETS_LEFT) || mainParameter.contains(Constants.DOUBLE_BRACKETS_RIGHT)) { + return replaceDoubleBrackets(mainParameter); + } else { + return mainParameter; + } + } } diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/params/PluginParamsTransferTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/params/PluginParamsTransferTest.java index 2a1183cb56..c9cbda319f 100644 --- 
a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/params/PluginParamsTransferTest.java +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/params/PluginParamsTransferTest.java @@ -132,29 +132,52 @@ public class PluginParamsTransferTest { String paramsJson = PluginParamsTransfer.transferParamsToJson(paramsList); - String paramsJsonAssert = "[{\"props\":null,\"field\":\"field1\",\"name\":\"field1\",\"type\":\"input\",\"title\":\"field1\",\"value\":null,\"validate\"" - + ":[{\"required\":true,\"message\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\":null,\"max\":null}]},{\"props\":null,\"field\":" - + "\"field2\",\"name\":\"field2\",\"type\":\"input\",\"title\":\"field2\",\"value\":null,\"validate\":null},{\"props\":null,\"field\":\"field3\"," - + "\"name\":\"field3\",\"type\":\"input\",\"title\":\"field3\",\"value\":null,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"string\"," - + "\"trigger\":\"blur\",\"min\":null,\"max\":null}]},{\"props\":null,\"field\":\"field4\",\"name\":\"field4\",\"type\":\"input\",\"title\":\"field" - + "4\",\"value\":null,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"number\",\"trigger\":\"blur\",\"min\":null,\"max\":null}]},{\"pro" - + "ps\":null,\"field\":\"field5\",\"name\":\"field5\",\"type\":\"input\",\"title\":\"field5\",\"value\":null,\"validate\":[{\"required\":true,\"mess" - + "age\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\":null,\"max\":null}]},{\"props\":null,\"field\":\"field6\",\"name\":\"field6\",\"typ" - + "e\":\"radio\",\"title\":\"field6\",\"value\":true,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"string\",\"trigger\":\"blur\",\"mi" - + "n\":null,\"max\":null}],\"options\":[{\"label\":\"YES\",\"value\":true,\"disabled\":false},{\"label\":\"NO\",\"value\":false,\"disabled\":false}" - + 
"]},{\"props\":null,\"field\":\"field7\",\"name\":\"field7\",\"type\":\"input\",\"title\":\"field7\",\"value\":null,\"validate\":null},{\"field\":\"f" - + "ield8\",\"name\":\"field8\",\"props\":{\"disabled\":null,\"placeholder\":\"if enable use authentication, you need input password\",\"size\":\"smal" - + "l\"},\"type\":\"input\",\"title\":\"field8\",\"value\":null,\"validate\":null},{\"props\":null,\"field\":\"field9\",\"name\":\"field9\",\"type\":\"ra" - + "dio\",\"title\":\"field9\",\"value\":false,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\":nul" - + "l,\"max\":null}],\"options\":[{\"label\":\"YES\",\"value\":true,\"disabled\":false},{\"label\":\"NO\",\"value\":false,\"disabled\":false}]},{\"pro" - + "ps\":null,\"field\":\"field10\",\"name\":\"field10\",\"type\":\"radio\",\"title\":\"field10\",\"value\":false,\"validate\":[{\"required\":true,\"mes" - + "sage\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\":null,\"max\":null}],\"options\":[{\"label\":\"YES\",\"value\":true,\"disabled\":fal" - + "se},{\"label\":\"NO\",\"value\":false,\"disabled\":false}]},{\"props\":null,\"field\":\"field11\",\"name\":\"field11\",\"type\":\"input\",\"titl" - + "e\":\"field11\",\"value\":\"*\",\"validate\":[{\"required\":true,\"message\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\":null,\"max\":nul" - + "l}]},{\"props\":null,\"field\":\"showType\",\"name\":\"showType\",\"type\":\"radio\",\"title\":\"showType\",\"value\":\"table\",\"validate\":[{\"re" - + "quired\":true,\"message\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\":null,\"max\":null}],\"options\":[{\"label\":\"table\",\"value\":\"tab" - + "le\",\"disabled\":false},{\"label\":\"text\",\"value\":\"text\",\"disabled\":false},{\"label\":\"attachment\",\"value\":\"attachment\",\"disabled\":f" - + "alse},{\"label\":\"tableattachment\",\"value\":\"tableattachment\",\"disabled\":false}]}]"; + String paramsJsonAssert = 
"[{\"props\":null,\"field\":\"field1\",\"name\":\"field1\"," + + "\"type\":\"input\",\"title\":\"field1\",\"value\":null,\"validate\":[{\"required\":true," + + "\"message\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\":null,\"max\":null}]," + + "\"emit\":null},{\"props\":null,\"field\":\"field2\",\"name\":\"field2\",\"type\":\"input\"," + + "\"title\":\"field2\",\"value\":null,\"validate\":null,\"emit\":null},{\"props\":null," + + "\"field\":\"field3\",\"name\":\"field3\",\"type\":\"input\",\"title\":\"field3\"," + + "\"value\":null,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"string\"," + + "\"trigger\":\"blur\",\"min\":null,\"max\":null}],\"emit\":null},{\"props\":null," + + "\"field\":\"field4\",\"name\":\"field4\",\"type\":\"input\",\"title\":\"field4\"," + + "\"value\":null,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"number\"," + + "\"trigger\":\"blur\",\"min\":null,\"max\":null}],\"emit\":null},{\"props\":null," + + "\"field\":\"field5\",\"name\":\"field5\",\"type\":\"input\",\"title\":\"field5\"," + + "\"value\":null,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"string\"," + + "\"trigger\":\"blur\",\"min\":null,\"max\":null}],\"emit\":null},{\"props\":null," + + "\"field\":\"field6\",\"name\":\"field6\",\"type\":\"radio\",\"title\":\"field6\"," + + "\"value\":true,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"string\"," + + "\"trigger\":\"blur\",\"min\":null,\"max\":null}],\"emit\":null,\"options\":[{\"label\":\"YES\"," + + "\"value\":true,\"disabled\":false},{\"label\":\"NO\",\"value\":false,\"disabled\":false}]}," + + "{\"props\":{\"disabled\":null,\"type\":null,\"maxlength\":null,\"minlength\":null," + + "\"clearable\":null,\"prefixIcon\":null,\"suffixIcon\":null,\"rows\":null,\"autosize\":null," + + "\"autocomplete\":null,\"name\":null,\"readonly\":null,\"max\":null,\"min\":null,\"step\":null," + + "\"resize\":null,\"autofocus\":null,\"form\":null,\"label\":null,\"tabindex\":null," + 
+ "\"validateEvent\":null,\"showPassword\":null,\"placeholder\":\"if enable use authentication, " + + "you need input user\",\"size\":\"small\"},\"field\":\"field7\",\"name\":\"field7\"," + + "\"type\":\"input\",\"title\":\"field7\",\"value\":null,\"validate\":null,\"emit\":null}," + + "{\"field\":\"field8\",\"name\":\"field8\",\"props\":{\"disabled\":null,\"placeholder\":" + + "\"if enable use authentication, you need input password\",\"size\":\"small\"},\"type\":" + + "\"input\",\"title\":\"field8\",\"value\":null,\"validate\":null,\"emit\":null},{\"props\":" + + "null,\"field\":\"field9\",\"name\":\"field9\",\"type\":\"radio\",\"title\":\"field9\"," + + "\"value\":false,\"validate\":[{\"required\":true,\"message\":null,\"type\":\"string\"," + + "\"trigger\":\"blur\",\"min\":null,\"max\":null}],\"emit\":null,\"options\":[{\"label\":" + + "\"YES\",\"value\":true,\"disabled\":false},{\"label\":\"NO\",\"value\":false,\"disabled\":" + + "false}]},{\"props\":null,\"field\":\"field10\",\"name\":\"field10\",\"type\":\"radio\"," + + "\"title\":\"field10\",\"value\":false,\"validate\":[{\"required\":true,\"message\":null," + + "\"type\":\"string\",\"trigger\":\"blur\",\"min\":null,\"max\":null}],\"emit\":null," + + "\"options\":[{\"label\":\"YES\",\"value\":true,\"disabled\":false},{\"label\":\"NO\"," + + "\"value\":false,\"disabled\":false}]},{\"props\":null,\"field\":\"field11\",\"name\":" + + "\"field11\",\"type\":\"input\",\"title\":\"field11\",\"value\":\"*\",\"validate\":" + + "[{\"required\":true,\"message\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\"" + + ":null,\"max\":null}],\"emit\":null},{\"props\":null,\"field\":\"showType\",\"name\":" + + "\"showType\",\"type\":\"radio\",\"title\":\"showType\",\"value\":\"table\",\"validate\"" + + ":[{\"required\":true,\"message\":null,\"type\":\"string\",\"trigger\":\"blur\",\"min\"" + + ":null,\"max\":null}],\"emit\":null,\"options\":[{\"label\":\"table\",\"value\":\"table\"" + + 
",\"disabled\":false},{\"label\":\"text\",\"value\":\"text\",\"disabled\":false},{\"label\"" + + ":\"attachment\",\"value\":\"attachment\",\"disabled\":false},{\"label\":\"tableattachment\"" + + ",\"value\":\"tableattachment\",\"disabled\":false}]}]"; Assert.assertEquals(paramsJsonAssert, paramsJson); } diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/CheckTypeTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/CheckTypeTest.java new file mode 100644 index 0000000000..8aa20c0944 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/CheckTypeTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class CheckTypeTest { + + @Test + public void testGetCode() { + assertEquals(0, CheckType.COMPARISON_MINUS_STATISTICS.getCode()); + assertEquals(1, CheckType.STATISTICS_MINUS_COMPARISON.getCode()); + assertEquals(2, CheckType.STATISTICS_COMPARISON_PERCENTAGE.getCode()); + assertEquals(3, CheckType.STATISTICS_COMPARISON_DIFFERENCE_COMPARISON_PERCENTAGE.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("comparison_minus_statistics", CheckType.COMPARISON_MINUS_STATISTICS.getDescription()); + assertEquals("statistics_minus_comparison", CheckType.STATISTICS_MINUS_COMPARISON.getDescription()); + assertEquals("statistics_comparison_percentage", CheckType.STATISTICS_COMPARISON_PERCENTAGE.getDescription()); + assertEquals("statistics_comparison_difference_comparison_percentage", CheckType.STATISTICS_COMPARISON_DIFFERENCE_COMPARISON_PERCENTAGE.getDescription()); + } + + @Test + public void testOf() { + assertEquals(CheckType.COMPARISON_MINUS_STATISTICS, CheckType.of(0)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ConnectorTypeTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ConnectorTypeTest.java new file mode 100644 index 0000000000..fa091578f9 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ConnectorTypeTest.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class ConnectorTypeTest { + + @Test + public void testGetCode() { + assertEquals(0, ConnectorType.JDBC.getCode()); + assertEquals(1, ConnectorType.HIVE.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("JDBC", ConnectorType.JDBC.getDescription()); + assertEquals("HIVE", ConnectorType.HIVE.getDescription()); + } + + @Test + public void testOf() { + assertEquals(ConnectorType.JDBC, ConnectorType.of(0)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqFailureStrategyTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqFailureStrategyTest.java new file mode 100644 index 0000000000..07878ce6ff --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqFailureStrategyTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class DqFailureStrategyTest { + + @Test + public void testGetCode() { + assertEquals(0, DqFailureStrategy.ALERT.getCode()); + assertEquals(1, DqFailureStrategy.BLOCK.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("alert", DqFailureStrategy.ALERT.getDescription()); + assertEquals("block", DqFailureStrategy.BLOCK.getDescription()); + } + + @Test + public void testOf() { + assertEquals(DqFailureStrategy.ALERT, DqFailureStrategy.of(0)); + assertEquals(DqFailureStrategy.BLOCK, DqFailureStrategy.of(1)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqTaskStateTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqTaskStateTest.java new file mode 100644 index 0000000000..a23b05cc53 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/DqTaskStateTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class DqTaskStateTest { + + @Test + public void testGetCode() { + assertEquals(0, DqTaskState.DEFAULT.getCode()); + assertEquals(1, DqTaskState.SUCCESS.getCode()); + assertEquals(2, DqTaskState.FAILURE.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("default", DqTaskState.DEFAULT.getDescription()); + assertEquals("success", DqTaskState.SUCCESS.getDescription()); + assertEquals("failure", DqTaskState.FAILURE.getDescription()); + } + + @Test + public void testOf() { + assertEquals(DqTaskState.DEFAULT, DqTaskState.of(0)); + assertEquals(DqTaskState.SUCCESS, DqTaskState.of(1)); + assertEquals(DqTaskState.FAILURE, DqTaskState.of(2)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ExecuteSqlTypeTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ExecuteSqlTypeTest.java new file mode 100644 index 0000000000..c4623fb889 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ExecuteSqlTypeTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class ExecuteSqlTypeTest { + + @Test + public void testGetCode() { + assertEquals(0, ExecuteSqlType.MIDDLE.getCode()); + assertEquals(1, ExecuteSqlType.STATISTICS.getCode()); + assertEquals(2, ExecuteSqlType.COMPARISON.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("middle", ExecuteSqlType.MIDDLE.getDescription()); + assertEquals("statistics", ExecuteSqlType.STATISTICS.getDescription()); + assertEquals("comparison", ExecuteSqlType.COMPARISON.getDescription()); + } + + @Test + public void testOf() { + assertEquals(ExecuteSqlType.MIDDLE, ExecuteSqlType.of(0)); + assertEquals(ExecuteSqlType.STATISTICS, ExecuteSqlType.of(1)); + assertEquals(ExecuteSqlType.COMPARISON, ExecuteSqlType.of(2)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/InputTypeTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/InputTypeTest.java new file mode 100644 index 0000000000..b59941e407 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/InputTypeTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class InputTypeTest { + + @Test + public void testGetCode() { + assertEquals(0, InputType.DEFAULT.getCode()); + assertEquals(1, InputType.STATISTICS.getCode()); + assertEquals(2, InputType.COMPARISON.getCode()); + assertEquals(3, InputType.CHECK.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("default", InputType.DEFAULT.getDescription()); + assertEquals("statistics", InputType.STATISTICS.getDescription()); + assertEquals("comparison", InputType.COMPARISON.getDescription()); + assertEquals("check", InputType.CHECK.getDescription()); + } + + @Test + public void testOf() { + assertEquals(InputType.DEFAULT, InputType.of(0)); + assertEquals(InputType.STATISTICS, InputType.of(1)); + assertEquals(InputType.COMPARISON, InputType.of(2)); + assertEquals(InputType.CHECK, InputType.of(3)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/OperatorTypeTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/OperatorTypeTest.java new file mode 100644 index 0000000000..83c53de686 --- /dev/null +++ 
b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/OperatorTypeTest.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class OperatorTypeTest { + + @Test + public void testGetCode() { + assertEquals(0, OperatorType.EQ.getCode()); + assertEquals(1, OperatorType.LT.getCode()); + assertEquals(2, OperatorType.LE.getCode()); + assertEquals(3, OperatorType.GT.getCode()); + assertEquals(4, OperatorType.GE.getCode()); + assertEquals(5, OperatorType.NE.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("equal", OperatorType.EQ.getDescription()); + assertEquals("little than", OperatorType.LT.getDescription()); + assertEquals("little and equal", OperatorType.LE.getDescription()); + assertEquals("great than", OperatorType.GT.getDescription()); + assertEquals("great and equal", OperatorType.GE.getDescription()); + assertEquals("not equal", OperatorType.NE.getDescription()); + } + + @Test + public void testOf() { + assertEquals(OperatorType.EQ, OperatorType.of(0)); + assertEquals(OperatorType.LT, 
OperatorType.of(1)); + assertEquals(OperatorType.LE, OperatorType.of(2)); + assertEquals(OperatorType.GT, OperatorType.of(3)); + assertEquals(OperatorType.GE, OperatorType.of(4)); + assertEquals(OperatorType.NE, OperatorType.of(5)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/OptionSourceTypeTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/OptionSourceTypeTest.java new file mode 100644 index 0000000000..6ee19b0edd --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/OptionSourceTypeTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class OptionSourceTypeTest { + + @Test + public void testGetCode() { + assertEquals(0, OptionSourceType.DEFAULT.getCode()); + assertEquals(1, OptionSourceType.DATASOURCE_ID.getCode()); + assertEquals(2, OptionSourceType.DATASOURCE_TYPE.getCode()); + assertEquals(3, OptionSourceType.COMPARISON_TYPE.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("default", OptionSourceType.DEFAULT.getDescription()); + assertEquals("datasource_id", OptionSourceType.DATASOURCE_ID.getDescription()); + assertEquals("datasource_type", OptionSourceType.DATASOURCE_TYPE.getDescription()); + assertEquals("comparison_type", OptionSourceType.COMPARISON_TYPE.getDescription()); + } + + @Test + public void testOf() { + assertEquals(OptionSourceType.DEFAULT, OptionSourceType.of(0)); + assertEquals(OptionSourceType.DATASOURCE_ID, OptionSourceType.of(1)); + assertEquals(OptionSourceType.DATASOURCE_TYPE, OptionSourceType.of(2)); + assertEquals(OptionSourceType.COMPARISON_TYPE, OptionSourceType.of(3)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/RuleTypeTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/RuleTypeTest.java new file mode 100644 index 0000000000..32f6580d73 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/RuleTypeTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class RuleTypeTest { + + @Test + public void testGetCode() { + assertEquals(0, RuleType.SINGLE_TABLE.getCode()); + assertEquals(1, RuleType.SINGLE_TABLE_CUSTOM_SQL.getCode()); + assertEquals(2, RuleType.MULTI_TABLE_ACCURACY.getCode()); + assertEquals(3, RuleType.MULTI_TABLE_COMPARISON.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("single_table", RuleType.SINGLE_TABLE.getDescription()); + assertEquals("single_table_custom_sql", RuleType.SINGLE_TABLE_CUSTOM_SQL.getDescription()); + assertEquals("multi_table_accuracy", RuleType.MULTI_TABLE_ACCURACY.getDescription()); + assertEquals("multi_table_comparison", RuleType.MULTI_TABLE_COMPARISON.getDescription()); + } + + @Test + public void testOf() { + assertEquals(RuleType.SINGLE_TABLE, RuleType.of(0)); + assertEquals(RuleType.SINGLE_TABLE_CUSTOM_SQL, RuleType.of(1)); + assertEquals(RuleType.MULTI_TABLE_ACCURACY, RuleType.of(2)); + assertEquals(RuleType.MULTI_TABLE_COMPARISON, RuleType.of(3)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ValueTypeTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ValueTypeTest.java new file mode 100644 index 0000000000..6022b680f5 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/enums/ValueTypeTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.enums; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class ValueTypeTest { + /** + * 0-string + * 1-list + * 2-number + * 3-sql + */ + @Test + public void testGetCode() { + assertEquals(0, ValueType.STRING.getCode()); + assertEquals(1, ValueType.LIST.getCode()); + assertEquals(2, ValueType.NUMBER.getCode()); + assertEquals(3, ValueType.LIKE_SQL.getCode()); + } + + @Test + public void testGetDescription() { + assertEquals("string", ValueType.STRING.getDescription()); + assertEquals("list", ValueType.LIST.getDescription()); + assertEquals("number", ValueType.NUMBER.getDescription()); + assertEquals("sql", ValueType.LIKE_SQL.getDescription()); + } + + @Test + public void testOf() { + assertEquals(ValueType.STRING, ValueType.of(0)); + assertEquals(ValueType.LIST, ValueType.of(1)); + assertEquals(ValueType.NUMBER, ValueType.of(2)); + assertEquals(ValueType.LIKE_SQL, ValueType.of(3)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/utils/JdbcUrlParserTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/utils/JdbcUrlParserTest.java new file mode 
100644 index 0000000000..85f2d7e3f0 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/utils/JdbcUrlParserTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.spi.task.dq.utils; + +import org.apache.dolphinscheduler.spi.task.dq.model.JdbcInfo; + +import org.junit.Assert; +import org.junit.Test; + +/** + * JdbcUrlParserTest + */ +public class JdbcUrlParserTest { + + @Test + public void testGetJdbcInfo() { + JdbcInfo jdbcInfo = + JdbcUrlParser.getJdbcInfo("jdbc:mysql://localhost:3306/dolphinscheduler?" 
+ + "useUnicode=true&characterEncoding=UTF-8"); + if (jdbcInfo != null) { + String jdbcInfoStr = jdbcInfo.toString(); + String expected = "JdbcInfo{host='localhost', port='3306', " + + "driverName='mysql', database='dolphinscheduler', " + + "params='useUnicode=true&characterEncoding=UTF-8', " + + "address='jdbc:mysql://localhost:3306'}"; + Assert.assertEquals(expected,jdbcInfoStr); + } + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/utils/Md5UtilsTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/utils/Md5UtilsTest.java new file mode 100644 index 0000000000..96bc7c6d16 --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/task/dq/utils/Md5UtilsTest.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.task.dq.utils; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class Md5UtilsTest { + + @Test + public void testGetMd5() { + assertEquals("jZae727K08KaOmKSgOaGzww/XVqGr/PKEgIMkjrcbJI=", Md5Utils.getMd5("123456", false)); + } +} diff --git a/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/utils/StringUtilsTest.java b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/utils/StringUtilsTest.java new file mode 100644 index 0000000000..0ba8787e2c --- /dev/null +++ b/dolphinscheduler-spi/src/test/java/org/apache/dolphinscheduler/spi/utils/StringUtilsTest.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.spi.utils; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.Test; + +public class StringUtilsTest { + + @Test + public void testIsEmpty() { + assertTrue(StringUtils.isEmpty("")); + } + + @Test + public void testIsEmpty2() { + assertFalse(StringUtils.isEmpty("123")); + } + + @Test + public void testIsNotEmpty() { + assertTrue(StringUtils.isNotEmpty("cs")); + } + + @Test + public void testIsNotEmpty2() { + assertFalse(StringUtils.isNotEmpty("")); + } + + @Test + public void testIsBlank() { + assertTrue(StringUtils.isBlank("")); + } + + @Test + public void testIsBlank2() { + assertFalse(StringUtils.isBlank("123")); + } + + @Test + public void testIsNotBlank() { + assertTrue(StringUtils.isNotBlank("cs")); + } + + @Test + public void testIsNotBlank2() { + assertFalse(StringUtils.isNotBlank("")); + } + + @Test + public void testTrim() { + assertEquals("result", StringUtils.trim(" result ")); + } + + @Test + public void testTrim2() { + assertEquals("", StringUtils.trim("")); + } + + @Test + public void testEqualsIgnoreCase() { + assertTrue(StringUtils.equalsIgnoreCase("Str1", "str1")); + } + + @Test + public void testEqualsIgnoreCase2() { + assertFalse(StringUtils.equalsIgnoreCase("str1", null)); + } + + @Test + public void testJoin1() { + // Setup + List collection = new ArrayList<>(); + collection.add("1"); + collection.add("2"); + + // Run the test + final String result = StringUtils.join(collection, "_"); + + // Verify the results + assertEquals("1_2", result); + } + + @Test + public void testEscapeJava() { + assertEquals("str", StringUtils.escapeJava("str")); + } + + @Test + public void testWrapperSingleQuotes() { + assertEquals("'a'", StringUtils.wrapperSingleQuotes("a")); + } + + @Test + public void testReplaceDoubleBrackets() { + assertEquals("{ {a} }", 
StringUtils.replaceDoubleBrackets("{{a}}")); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml index ab287c59c0..5eb651c37d 100644 --- a/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-api/pom.xml @@ -274,5 +274,19 @@ + + org.apache.hadoop + hadoop-common + + + slf4j-log4j12 + org.slf4j + + + com.sun.jersey + jersey-json + + + diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml new file mode 100644 index 0000000000..43767232dd --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/pom.xml @@ -0,0 +1,49 @@ + + + + + dolphinscheduler-task-plugin + org.apache.dolphinscheduler + 2.0.4-SNAPSHOT + + 4.0.0 + + dolphinscheduler-task-dataquality + jar + + + + org.apache.dolphinscheduler + dolphinscheduler-spi + provided + + + org.apache.dolphinscheduler + dolphinscheduler-task-api + ${project.version} + + + org.apache.dolphinscheduler + dolphinscheduler-datasource-api + ${project.version} + + + + \ No newline at end of file diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameters.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameters.java new file mode 100644 index 0000000000..2493c533f6 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameters.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq; + +import org.apache.dolphinscheduler.plugin.task.dq.utils.spark.SparkParameters; +import org.apache.dolphinscheduler.plugin.task.util.MapUtils; +import org.apache.dolphinscheduler.spi.task.AbstractParameters; +import org.apache.dolphinscheduler.spi.task.ResourceInfo; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * DataQualityParameters + */ +public class DataQualityParameters extends AbstractParameters { + + private static final Logger logger = LoggerFactory.getLogger(DataQualityParameters.class); + + /** + * rule id + */ + private int ruleId; + /** + * rule input entry value map + */ + private Map ruleInputParameter; + /** + * spark parameters + */ + private SparkParameters sparkParameters; + + public int getRuleId() { + return ruleId; + } + + public void setRuleId(int ruleId) { + this.ruleId = ruleId; + } + + public Map getRuleInputParameter() { + return ruleInputParameter; + } + + public void setRuleInputParameter(Map ruleInputParameter) { + this.ruleInputParameter = ruleInputParameter; + } + + /** + * In this function ,we need more detailed check every parameter, + * if the parameter is non-conformant will return false + * @return boolean result + */ + @Override + public boolean checkParameters() { + + if (ruleId == 0) { + 
logger.error("rule id is null"); + return false; + } + + if (MapUtils.isEmpty(ruleInputParameter)) { + logger.error("rule input parameter is empty"); + return false; + } + + return sparkParameters != null; + } + + @Override + public List getResourceFilesList() { + return new ArrayList<>(); + } + + public SparkParameters getSparkParameters() { + return sparkParameters; + } + + public void setSparkParameters(SparkParameters sparkParameters) { + this.sparkParameters = sparkParameters; + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java new file mode 100644 index 0000000000..cae09cdcdc --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTask.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.task.dq; + +import static org.apache.dolphinscheduler.spi.task.TaskConstants.SLASH; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.UNDERLINE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.CREATE_TIME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.DATA_TIME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.ERROR_OUTPUT_PATH; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.PROCESS_DEFINITION_ID; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.PROCESS_INSTANCE_ID; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.REGEXP_PATTERN; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.RULE_ID; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.RULE_NAME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.RULE_TYPE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TASK_INSTANCE_ID; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.UPDATE_TIME; +import static org.apache.dolphinscheduler.spi.utils.Constants.YYYY_MM_DD_HH_MM_SS; + +import org.apache.dolphinscheduler.plugin.datasource.api.utils.CommonUtils; +import org.apache.dolphinscheduler.plugin.task.api.AbstractYarnTask; +import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration; +import org.apache.dolphinscheduler.plugin.task.dq.utils.spark.SparkArgsUtils; +import org.apache.dolphinscheduler.spi.task.AbstractParameters; +import org.apache.dolphinscheduler.spi.task.Property; +import org.apache.dolphinscheduler.spi.task.ResourceInfo; +import org.apache.dolphinscheduler.spi.task.paramparser.ParamUtils; 
+import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils; +import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext; +import org.apache.dolphinscheduler.spi.task.request.TaskRequest; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import java.io.File; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * In DataQualityTask, the input parameters will be converted into DataQualityConfiguration, + * which will be converted into a string as the parameter of DataQualityApplication, + * and DataQualityApplication is spark application + */ +public class DataQualityTask extends AbstractYarnTask { + + /** + * spark2 command + */ + private static final String SPARK2_COMMAND = "${SPARK_HOME2}/bin/spark-submit"; + + private DataQualityParameters dataQualityParameters; + + private final TaskRequest dqTaskExecutionContext; + + public DataQualityTask(TaskRequest taskExecutionContext) { + super(taskExecutionContext); + this.dqTaskExecutionContext = taskExecutionContext; + } + + @Override + public void init() { + logger.info("data quality task params {}", dqTaskExecutionContext.getTaskParams()); + + dataQualityParameters = JSONUtils.parseObject(dqTaskExecutionContext.getTaskParams(), DataQualityParameters.class); + + if (null == dataQualityParameters) { + logger.error("data quality params is null"); + return; + } + + if (!dataQualityParameters.checkParameters()) { + throw new RuntimeException("data quality task params is not valid"); + } + + Map inputParameter = dataQualityParameters.getRuleInputParameter(); + for (Map.Entry entry: inputParameter.entrySet()) { + if (entry != null && entry.getValue() != null) { + entry.setValue(entry.getValue().trim()); + } + } + + DataQualityTaskExecutionContext dataQualityTaskExecutionContext + = 
dqTaskExecutionContext.getDataQualityTaskExecutionContext(); + + operateInputParameter(inputParameter, dataQualityTaskExecutionContext); + + RuleManager ruleManager = new RuleManager( + inputParameter, + dataQualityTaskExecutionContext); + + DataQualityConfiguration dataQualityConfiguration = + ruleManager.generateDataQualityParameter(); + + dataQualityParameters + .getSparkParameters() + .setMainArgs("\"" + + StringUtils.replaceDoubleBrackets(StringUtils.escapeJava(JSONUtils.toJsonString(dataQualityConfiguration))) + "\""); + + dataQualityParameters + .getSparkParameters() + .setQueue(dqTaskExecutionContext.getQueue()); + + setMainJarName(); + } + + private void operateInputParameter(Map inputParameter, DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { + DateTimeFormatter df = DateTimeFormatter.ofPattern(YYYY_MM_DD_HH_MM_SS); + LocalDateTime time = LocalDateTime.now(); + String now = df.format(time); + + inputParameter.put(RULE_ID, String.valueOf(dataQualityTaskExecutionContext.getRuleId())); + inputParameter.put(RULE_TYPE, String.valueOf(dataQualityTaskExecutionContext.getRuleType())); + inputParameter.put(RULE_NAME, StringUtils.wrapperSingleQuotes(dataQualityTaskExecutionContext.getRuleName())); + inputParameter.put(CREATE_TIME, StringUtils.wrapperSingleQuotes(now)); + inputParameter.put(UPDATE_TIME, StringUtils.wrapperSingleQuotes(now)); + inputParameter.put(PROCESS_DEFINITION_ID, String.valueOf(dqTaskExecutionContext.getProcessDefineId())); + inputParameter.put(PROCESS_INSTANCE_ID, String.valueOf(dqTaskExecutionContext.getProcessInstanceId())); + inputParameter.put(TASK_INSTANCE_ID, String.valueOf(dqTaskExecutionContext.getTaskInstanceId())); + + if (StringUtils.isEmpty(inputParameter.get(DATA_TIME))) { + inputParameter.put(DATA_TIME,StringUtils.wrapperSingleQuotes(now)); + } + + if (StringUtils.isNotEmpty(inputParameter.get(REGEXP_PATTERN))) { + 
inputParameter.put(REGEXP_PATTERN,StringUtils.escapeJava(StringUtils.escapeJava(inputParameter.get(REGEXP_PATTERN)))); + } + + if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getHdfsPath())) { + inputParameter.put(ERROR_OUTPUT_PATH, + dataQualityTaskExecutionContext.getHdfsPath() + + SLASH + dqTaskExecutionContext.getProcessDefineId() + + UNDERLINE + dqTaskExecutionContext.getProcessInstanceId() + + UNDERLINE + dqTaskExecutionContext.getTaskName()); + } else { + inputParameter.put(ERROR_OUTPUT_PATH,""); + } + } + + @Override + protected String buildCommand() { + List args = new ArrayList<>(); + + args.add(SPARK2_COMMAND); + + // other parameters + args.addAll(SparkArgsUtils.buildArgs(dataQualityParameters.getSparkParameters())); + + // replace placeholder + Map paramsMap = ParamUtils.convert(dqTaskExecutionContext,getParameters()); + + String command = null; + + if (null != paramsMap) { + command = ParameterUtils.convertParameterPlaceholders(String.join(" ", args), ParamUtils.convert(paramsMap)); + } + + logger.info("data quality task command: {}", command); + + return command; + } + + @Override + protected void setMainJarName() { + ResourceInfo mainJar = new ResourceInfo(); + mainJar.setRes(CommonUtils.getDataQualityJarName()); + dataQualityParameters.getSparkParameters().setMainJar(mainJar); + } + + @Override + public AbstractParameters getParameters() { + return dataQualityParameters; + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java new file mode 100644 index 0000000000..53247dc4a1 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannel.java @@ -0,0 +1,35 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq; + +import org.apache.dolphinscheduler.spi.task.AbstractTask; +import org.apache.dolphinscheduler.spi.task.TaskChannel; +import org.apache.dolphinscheduler.spi.task.request.TaskRequest; + +public class DataQualityTaskChannel implements TaskChannel { + + @Override + public void cancelApplication(boolean status) { + + } + + @Override + public AbstractTask createTask(TaskRequest taskRequest) { + return new DataQualityTask(taskRequest); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java new file mode 100644 index 0000000000..557d39f45e --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskChannelFactory.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq; + +import org.apache.dolphinscheduler.spi.params.base.PluginParams; +import org.apache.dolphinscheduler.spi.task.TaskChannel; +import org.apache.dolphinscheduler.spi.task.TaskChannelFactory; + +import java.util.List; + +import com.google.auto.service.AutoService; + +@AutoService(TaskChannelFactory.class) +public class DataQualityTaskChannelFactory implements TaskChannelFactory { + + @Override + public String getName() { + return "DATA_QUALITY"; + } + + @Override + public List getParams() { + return null; + } + + @Override + public TaskChannel create() { + return new DataQualityTaskChannel(); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/exception/DataQualityException.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/exception/DataQualityException.java new file mode 100644 index 0000000000..8f05e9cbc6 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/exception/DataQualityException.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one 
or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq.exception; + +/** + * data quality runtime exception + */ +public class DataQualityException extends RuntimeException { + + public DataQualityException() { + super(); + } + + public DataQualityException(String message) { + super(message); + } + + public DataQualityException(String message, Throwable cause) { + super(message, cause); + } + + public DataQualityException(Throwable cause) { + super(cause); + } + + protected DataQualityException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/RuleManager.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/RuleManager.java new file mode 100644 index 0000000000..36cc83bc8e --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/RuleManager.java @@ -0,0 +1,134 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under 
one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq.rule; + +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.COMPARISON_TYPE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.UNIQUE_CODE; + +import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException; +import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleInputEntry; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.IRuleParser; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.MultiTableAccuracyRuleParser; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.MultiTableComparisonRuleParser; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.SingleTableCustomSqlRuleParser; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.SingleTableRuleParser; +import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils; +import org.apache.dolphinscheduler.spi.enums.CommandType; +import org.apache.dolphinscheduler.spi.task.dq.enums.RuleType; +import org.apache.dolphinscheduler.spi.task.paramparser.BusinessTimeUtils; +import 
org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import java.util.Date; +import java.util.Map; + +/** + * RuleManager is responsible for parsing the input parameters to the DataQualityConfiguration + * And DataQualityConfiguration will be used in DataQualityApplication + */ +public class RuleManager { + + private final Map inputParameterValue; + private final DataQualityTaskExecutionContext dataQualityTaskExecutionContext; + + private static final String NONE_COMPARISON_TYPE = "0"; + private static final String BASE_SQL = + "select ${rule_type} as rule_type," + + "${rule_name} as rule_name," + + "${process_definition_id} as process_definition_id," + + "${process_instance_id} as process_instance_id," + + "${task_instance_id} as task_instance_id," + + "${statistics_name} AS statistics_value," + + "${comparison_name} AS comparison_value," + + "${comparison_type} AS comparison_type," + + "${check_type} as check_type," + + "${threshold} as threshold," + + "${operator} as operator," + + "${failure_strategy} as failure_strategy," + + "'${error_output_path}' as error_output_path," + + "${create_time} as create_time," + + "${update_time} as update_time "; + + public static final String DEFAULT_COMPARISON_WRITER_SQL = + BASE_SQL + "from ${statistics_table} full join ${comparison_table}"; + + public static final String MULTI_TABLE_COMPARISON_WRITER_SQL = + BASE_SQL + + "from ( ${statistics_execute_sql} ) tmp1 " + + "join ( ${comparison_execute_sql} ) tmp2"; + + public static final String SINGLE_TABLE_CUSTOM_SQL_WRITER_SQL = + BASE_SQL + + "from ( ${statistics_table} ) tmp1 " + + "join ${comparison_table}"; + public static final String TASK_STATISTICS_VALUE_WRITER_SQL = + "select " + + "${process_definition_id} as process_definition_id," + + "${task_instance_id} as task_instance_id," + + "${rule_id} as rule_id," + + "${unique_code} 
as unique_code," + + "'${statistics_name}'AS statistics_name," + + "${statistics_name} AS statistics_value," + + "${data_time} as data_time," + + "${create_time} as create_time," + + "${update_time} as update_time " + + "from ${statistics_table}"; + + public RuleManager(Map inputParameterValue, DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { + this.inputParameterValue = inputParameterValue; + this.dataQualityTaskExecutionContext = dataQualityTaskExecutionContext; + } + + /** + * @return DataQualityConfiguration + * @throws RuntimeException RuntimeException + */ + public DataQualityConfiguration generateDataQualityParameter() throws RuntimeException { + + Map inputParameterValueResult = + RuleParserUtils.getInputParameterMapFromEntryList( + JSONUtils.toList(dataQualityTaskExecutionContext.getRuleInputEntryList(), DqRuleInputEntry.class)); + inputParameterValueResult.putAll(inputParameterValue); + inputParameterValueResult.putAll(BusinessTimeUtils.getBusinessTime(CommandType.START_PROCESS, new Date())); + inputParameterValueResult.putIfAbsent(COMPARISON_TYPE, NONE_COMPARISON_TYPE); + inputParameterValueResult.put(UNIQUE_CODE, + StringUtils.wrapperSingleQuotes(RuleParserUtils.generateUniqueCode(inputParameterValueResult))); + + IRuleParser ruleParser = null; + switch (RuleType.of(dataQualityTaskExecutionContext.getRuleType())) { + case SINGLE_TABLE: + ruleParser = new SingleTableRuleParser(); + break; + case SINGLE_TABLE_CUSTOM_SQL: + ruleParser = new SingleTableCustomSqlRuleParser(); + break; + case MULTI_TABLE_ACCURACY: + ruleParser = new MultiTableAccuracyRuleParser(); + break; + case MULTI_TABLE_COMPARISON: + ruleParser = new MultiTableComparisonRuleParser(); + break; + default: + throw new DataQualityException("rule type is not support"); + } + + return ruleParser.parse(inputParameterValueResult, dataQualityTaskExecutionContext); + } +} \ No newline at end of file diff --git 
a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleExecuteSql.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleExecuteSql.java new file mode 100644 index 0000000000..a2bfbcf117 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleExecuteSql.java @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.task.dq.rule.entity; + +import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType; + +import java.io.Serializable; +import java.util.Date; + +import com.fasterxml.jackson.annotation.JsonFormat; + +/** + * RuleExecuteSql + */ +public class DqRuleExecuteSql implements Serializable { + /** + * primary key + */ + private int id; + /** + * index,ensure the execution order of sql + */ + private int index; + /** + * SQL Statement + */ + private String sql; + /** + * table alias name + */ + private String tableAlias; + /** + * input entry type: default,statistics,comparison,check + */ + private int type = ExecuteSqlType.MIDDLE.getCode(); + /** + * is error output sql + */ + private boolean isErrorOutputSql; + /** + * create_time + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + /** + * update_time + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getIndex() { + return index; + } + + public void setIndex(int index) { + this.index = index; + } + + public String getSql() { + return sql; + } + + public void setSql(String sql) { + this.sql = sql; + } + + public String getTableAlias() { + return tableAlias; + } + + public void setTableAlias(String tableAlias) { + this.tableAlias = tableAlias; + } + + public int getType() { + return type; + } + + public void setType(int type) { + this.type = type; + } + + public boolean isErrorOutputSql() { + return isErrorOutputSql; + } + + public void setErrorOutputSql(boolean errorOutputSql) { + isErrorOutputSql = errorOutputSql; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) 
{ + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "DqRuleExecuteSql{" + + "id=" + id + + ", index=" + index + + ", sql='" + sql + '\'' + + ", tableAlias='" + tableAlias + '\'' + + ", type=" + type + + ", isErrorOutputSql=" + isErrorOutputSql + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} \ No newline at end of file diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleInputEntry.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleInputEntry.java new file mode 100644 index 0000000000..6a9d853bc0 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/entity/DqRuleInputEntry.java @@ -0,0 +1,276 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.dolphinscheduler.plugin.task.dq.rule.entity; + +import org.apache.dolphinscheduler.spi.task.dq.enums.InputType; +import org.apache.dolphinscheduler.spi.task.dq.enums.OptionSourceType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ValueType; + +import java.io.Serializable; +import java.util.Date; + +import com.fasterxml.jackson.annotation.JsonFormat; + +/** + * RuleInputEntry + */ +public class DqRuleInputEntry implements Serializable { + /** + * primary key + */ + private int id; + /** + * form field name + */ + private String field; + /** + * form type + */ + private String type; + /** + * form title + */ + private String title; + /** + * default value,can be null + */ + private String value; + /** + * default options,can be null + * [{label:"",value:""}] + */ + private String options; + /** + * ${field} + */ + private String placeholder; + /** + * the source type of options,use default options or other + */ + private int optionSourceType = OptionSourceType.DEFAULT.getCode(); + /** + * input entry type: string,array,number .etc + */ + private int valueType = ValueType.NUMBER.getCode(); + /** + * input entry type: default,statistics,comparison + */ + private int inputType = InputType.DEFAULT.getCode(); + /** + * whether to display on the front end + */ + private Boolean isShow; + /** + * whether to edit on the front end + */ + private Boolean canEdit; + /** + * is emit event + */ + private Boolean isEmit; + /** + * is validate + */ + private Boolean isValidate; + /** + * values map + */ + private String valuesMap; + /** + * values map + */ + private Integer index; + /** + * create_time + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date createTime; + /** + * update_time + */ + @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8") + private Date updateTime; + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getField() { + 
return field; + } + + public void setField(String field) { + this.field = field; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public String getOptions() { + return options; + } + + public void setOptions(String options) { + this.options = options; + } + + public String getPlaceholder() { + return placeholder; + } + + public void setPlaceholder(String placeholder) { + this.placeholder = placeholder; + } + + public int getOptionSourceType() { + return optionSourceType; + } + + public void setOptionSourceType(int optionSourceType) { + this.optionSourceType = optionSourceType; + } + + public int getValueType() { + return valueType; + } + + public void setValueType(int valueType) { + this.valueType = valueType; + } + + public int getInputType() { + return inputType; + } + + public void setInputType(int inputType) { + this.inputType = inputType; + } + + public Boolean getShow() { + return isShow; + } + + public void setShow(Boolean show) { + isShow = show; + } + + public Boolean getCanEdit() { + return canEdit; + } + + public void setCanEdit(Boolean canEdit) { + this.canEdit = canEdit; + } + + public Boolean getEmit() { + return isEmit; + } + + public void setEmit(Boolean emit) { + isEmit = emit; + } + + public Boolean getValidate() { + return isValidate; + } + + public void setValidate(Boolean validate) { + isValidate = validate; + } + + public String getValuesMap() { + return valuesMap; + } + + public void setValuesMap(String valuesMap) { + this.valuesMap = valuesMap; + } + + public Integer getIndex() { + return index; + } + + public void setIndex(Integer index) { + this.index = index; + } + + public Date getCreateTime() { + return createTime; + } + + public void 
setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + return "DqRuleInputEntry{" + + "id=" + id + + ", field='" + field + '\'' + + ", type=" + type + + ", title='" + title + '\'' + + ", value='" + value + '\'' + + ", options='" + options + '\'' + + ", placeholder='" + placeholder + '\'' + + ", optionSourceType=" + optionSourceType + + ", valueType=" + valueType + + ", inputType=" + inputType + + ", isShow=" + isShow + + ", canEdit=" + canEdit + + ", isEmit=" + isEmit + + ", isValidate=" + isValidate + + ", valuesMap='" + valuesMap + '\'' + + ", index=" + index + + ", createTime=" + createTime + + ", updateTime=" + updateTime + + '}'; + } +} \ No newline at end of file diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/BaseConfig.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/BaseConfig.java new file mode 100644 index 0000000000..a775bde6d4 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parameter/BaseConfig.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.dq.rule.parameter;

import java.util.Map;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * BaseConfig is one configuration unit of a data quality task definition:
 * a connector/transformer/writer "type" name plus its free-form key/value
 * settings. It is serialized to JSON (see the {@code @JsonProperty} names)
 * and consumed as the reader/transformer/writer entries of
 * {@link DataQualityConfiguration}.
 */
public class BaseConfig {

    /** connector/transformer/writer type name, e.g. a datasource type */
    @JsonProperty("type")
    private String type;

    /**
     * free-form settings for this config unit; values are heterogeneous
     * (strings, numbers, ...), hence {@code Object}
     */
    @JsonProperty("config")
    private Map<String, Object> config;

    public BaseConfig() {
    }

    /**
     * @param type   connector/transformer/writer type name
     * @param config key/value settings for this config unit
     */
    public BaseConfig(String type, Map<String, Object> config) {
        this.type = type;
        this.config = config;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public Map<String, Object> getConfig() {
        return config;
    }

    public void setConfig(Map<String, Object> config) {
        this.config = config;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.dq.rule.parameter;

import java.util.List;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * DataQualityConfiguration is the root configuration produced by a rule
 * parser and handed to the data quality application.
 * The readers are responsible for connecting to the data sources,
 * the transformers transform the data coming from the readers,
 * and the writers write the results to the target datasource.
 */
public class DataQualityConfiguration {

    /** rule name, used as the job name of the data quality run */
    @JsonProperty("name")
    private String name;

    /** optional execution environment settings */
    @JsonProperty("env")
    private EnvConfig envConfig;

    @JsonProperty("readers")
    private List<BaseConfig> readerConfigs;

    @JsonProperty("transformers")
    private List<BaseConfig> transformerConfigs;

    @JsonProperty("writers")
    private List<BaseConfig> writerConfigs;

    public DataQualityConfiguration() {
    }

    /**
     * Builds a configuration without environment settings.
     *
     * @param name               rule name
     * @param readerConfigs      source reader configs
     * @param writerConfigs      result writer configs
     * @param transformerConfigs data transformation configs
     */
    public DataQualityConfiguration(String name,
                                    List<BaseConfig> readerConfigs,
                                    List<BaseConfig> writerConfigs,
                                    List<BaseConfig> transformerConfigs) {
        this.name = name;
        this.readerConfigs = readerConfigs;
        this.writerConfigs = writerConfigs;
        this.transformerConfigs = transformerConfigs;
    }

    /**
     * Builds a configuration including environment settings.
     * Delegates to the four-argument constructor to avoid duplicating the
     * field assignments.
     */
    public DataQualityConfiguration(String name,
                                    EnvConfig envConfig,
                                    List<BaseConfig> readerConfigs,
                                    List<BaseConfig> writerConfigs,
                                    List<BaseConfig> transformerConfigs) {
        this(name, readerConfigs, writerConfigs, transformerConfigs);
        this.envConfig = envConfig;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public EnvConfig getEnvConfig() {
        return envConfig;
    }

    public void setEnvConfig(EnvConfig envConfig) {
        this.envConfig = envConfig;
    }

    public List<BaseConfig> getReaderConfigs() {
        return readerConfigs;
    }

    public void setReaderConfigs(List<BaseConfig> readerConfigs) {
        this.readerConfigs = readerConfigs;
    }

    public List<BaseConfig> getTransformerConfigs() {
        return transformerConfigs;
    }

    public void setTransformerConfigs(List<BaseConfig> transformerConfigs) {
        this.transformerConfigs = transformerConfigs;
    }

    public List<BaseConfig> getWriterConfigs() {
        return writerConfigs;
    }

    public void setWriterConfigs(List<BaseConfig> writerConfigs) {
        this.writerConfigs = writerConfigs;
    }

    @Override
    public String toString() {
        return "DataQualityConfiguration{"
                + "name='" + name + '\''
                + ", envConfig=" + envConfig
                + ", readerConfigs=" + readerConfigs
                + ", transformerConfigs=" + transformerConfigs
                + ", writerConfigs=" + writerConfigs
                + '}';
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.dq.rule.parameter;

import java.util.Map;

/**
 * EnvConfig is the execution environment section ("env") of a
 * {@link DataQualityConfiguration}. It adds no fields of its own and reuses
 * the generic type/config pair defined by {@link BaseConfig}; the subclass
 * exists only to give the env entry a distinct JSON/Java type.
 */
public class EnvConfig extends BaseConfig {

    public EnvConfig() {
    }

    /**
     * @param type   environment type name
     * @param config environment key/value settings
     */
    public EnvConfig(String type, Map<String, Object> config) {
        super(type, config);
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;

import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext;

import java.util.Map;

/**
 * IRuleParser is a component that actually converts input parameters to
 * {@link DataQualityConfiguration}. Each rule type (single-table,
 * multi-table accuracy, multi-table comparison, custom sql) provides its own
 * implementation.
 */
public interface IRuleParser {

    /**
     * Builds the full data quality application configuration for one rule.
     *
     * @param inputParameterValue rule input parameter values keyed by
     *                            parameter name; implementations may add
     *                            derived entries (e.g. the statistics table
     *                            alias) while parsing
     * @param context             execution context carrying the rule
     *                            definition and datasource information
     * @return the configuration consumed by the data quality application
     * @throws DataQualityException when the rule definition or its
     *                              parameters cannot be turned into a valid
     *                              configuration
     */
    DataQualityConfiguration parse(Map inputParameterValue,
                                   DataQualityTaskExecutionContext context) throws DataQualityException;
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq.rule.parser; + +/** + * MappingColumn + */ +public class MappingColumn { + + private String srcField; + private String operator; + private String targetField; + + public MappingColumn() {} + + public MappingColumn(String srcField, String operator, String targetField) { + this.srcField = srcField; + this.operator = operator; + this.targetField = targetField; + } + + public String getSrcField() { + return srcField; + } + + public void setSrcField(String srcField) { + this.srcField = srcField; + } + + public String getOperator() { + return operator; + } + + public void setOperator(String operator) { + this.operator = operator; + } + + public String getTargetField() { + return targetField; + } + + public void setTargetField(String targetField) { + this.targetField = targetField; + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableAccuracyRuleParser.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableAccuracyRuleParser.java new file mode 100644 index 0000000000..950e3e1a01 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/rule/parser/MultiTableAccuracyRuleParser.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;

import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.MAPPING_COLUMNS;
import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.ON_CLAUSE;
import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.STATISTICS_TABLE;
import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.WHERE_CLAUSE;

import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType;
import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * MultiTableAccuracyRuleParser builds the {@link DataQualityConfiguration}
 * for the multi-table accuracy rule: it derives the join (on) and filter
 * (where) clauses from the user supplied mapping columns, replaces the
 * placeholders in the rule's execute sql list, and assembles the reader,
 * transformer and writer configs.
 */
public class MultiTableAccuracyRuleParser implements IRuleParser {

    @Override
    public DataQualityConfiguration parse(Map inputParameterValue,
                                          DataQualityTaskExecutionContext context) throws DataQualityException {
        List<DqRuleExecuteSql> dqRuleExecuteSqlList =
                JSONUtils.toList(context.getExecuteSqlList(), DqRuleExecuteSql.class);

        // The statistics sql produces the intermediate table whose alias is
        // referenced by the writer sql. Fail fast with a meaningful error
        // instead of an IndexOutOfBoundsException when a malformed rule
        // definition carries no statistics sql.
        List<DqRuleExecuteSql> statisticsSqlList =
                RuleParserUtils.getExecuteSqlListByType(dqRuleExecuteSqlList, ExecuteSqlType.STATISTICS);
        if (statisticsSqlList == null || statisticsSqlList.isEmpty()) {
            throw new DataQualityException("statistics execute sql is missing in the rule definition");
        }
        inputParameterValue.put(STATISTICS_TABLE, statisticsSqlList.get(0).getTableAlias());

        // transformer output tables are numbered starting after the readers
        int index = 1;

        List<BaseConfig> readerConfigList =
                RuleParserUtils.getReaderConfigList(inputParameterValue, context);
        RuleParserUtils.addStatisticsValueTableReaderConfig(readerConfigList, context);

        List<BaseConfig> transformerConfigList = new ArrayList<>();

        List<MappingColumn> mappingColumnList =
                RuleParserUtils.getMappingColumnList(inputParameterValue.get(MAPPING_COLUMNS));

        // derive the join condition from the mapping columns
        inputParameterValue.put(ON_CLAUSE, RuleParserUtils.getOnClause(mappingColumnList, inputParameterValue));
        // derive the null-match filter from the mapping columns
        inputParameterValue.put(WHERE_CLAUSE, RuleParserUtils.getWhereClause(mappingColumnList, inputParameterValue));

        index = RuleParserUtils.replaceExecuteSqlPlaceholder(
                dqRuleExecuteSqlList,
                index,
                inputParameterValue,
                transformerConfigList);

        String writerSql = RuleManager.DEFAULT_COMPARISON_WRITER_SQL;
        if (context.isCompareWithFixedValue()) {
            // a fixed comparison value needs no comparison table; drop the
            // join (literal replacement, so no regex escaping is needed)
            writerSql = writerSql.replace("full join ${comparison_table}", "");
        }

        List<BaseConfig> writerConfigList = RuleParserUtils.getAllWriterConfigList(inputParameterValue,
                context, index, transformerConfigList, writerSql, RuleManager.TASK_STATISTICS_VALUE_WRITER_SQL);

        return new DataQualityConfiguration(
                context.getRuleName(),
                RuleParserUtils.getEnvConfig(),
                readerConfigList,
                writerConfigList,
                transformerConfigList);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;

import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils;
import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * MultiTableComparisonRuleParser builds the {@link DataQualityConfiguration}
 * for the multi-table comparison rule. The rule needs no transformers: the
 * comparison is performed entirely by the writer sql, whose placeholders are
 * resolved from the input parameters.
 */
public class MultiTableComparisonRuleParser implements IRuleParser {

    @Override
    public DataQualityConfiguration parse(Map inputParameterValue,
                                          DataQualityTaskExecutionContext context) throws DataQualityException {

        // Build the readers first: getReaderConfigList rewrites table-name
        // entries in inputParameterValue, which the writer sql relies on.
        List<BaseConfig> readers = RuleParserUtils.getReaderConfigList(inputParameterValue, context);
        RuleParserUtils.addStatisticsValueTableReaderConfig(readers, context);

        // this rule performs no intermediate transformations
        List<BaseConfig> transformers = new ArrayList<>();

        String writerSql = ParameterUtils.convertParameterPlaceholders(
                RuleManager.MULTI_TABLE_COMPARISON_WRITER_SQL, inputParameterValue);
        List<BaseConfig> writers = RuleParserUtils.getWriterConfigList(writerSql, context);

        return new DataQualityConfiguration(
                context.getRuleName(),
                RuleParserUtils.getEnvConfig(),
                readers,
                writers,
                transformers);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;

import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;

import java.util.List;
import java.util.Map;

/**
 * SingleTableCustomSqlRuleParser builds the {@link DataQualityConfiguration}
 * for the custom-sql rule: the user's sql becomes a transformer, the rule's
 * execute sql list has its placeholders replaced, and the standard writers
 * persist the check result and the task statistics value.
 */
public class SingleTableCustomSqlRuleParser implements IRuleParser {

    @Override
    public DataQualityConfiguration parse(Map inputParameterValue,
                                          DataQualityTaskExecutionContext context) throws DataQualityException {
        List<DqRuleExecuteSql> dqRuleExecuteSqlList =
                JSONUtils.toList(context.getExecuteSqlList(), DqRuleExecuteSql.class);

        // transformer output tables are numbered starting after the readers
        int index = 1;

        List<BaseConfig> readerConfigList =
                RuleParserUtils.getReaderConfigList(inputParameterValue, context);
        RuleParserUtils.addStatisticsValueTableReaderConfig(readerConfigList, context);

        // the user's custom sql is the first transformer
        List<BaseConfig> transformerConfigList = RuleParserUtils
                .getSingleTableCustomSqlTransformerConfigList(index, inputParameterValue);

        // replace the placeholder in execute sql list
        index = RuleParserUtils.replaceExecuteSqlPlaceholder(
                dqRuleExecuteSqlList,
                index,
                inputParameterValue,
                transformerConfigList);

        String writerSql = RuleManager.SINGLE_TABLE_CUSTOM_SQL_WRITER_SQL;
        if (context.isCompareWithFixedValue()) {
            // a fixed comparison value needs no comparison table; drop the
            // join (literal replacement, so no regex escaping is needed)
            writerSql = writerSql.replace("join ${comparison_table}", "");
        }

        List<BaseConfig> writerConfigList = RuleParserUtils.getAllWriterConfigList(inputParameterValue,
                context, index, transformerConfigList, writerSql, RuleManager.TASK_STATISTICS_VALUE_WRITER_SQL);

        return new DataQualityConfiguration(
                context.getRuleName(),
                RuleParserUtils.getEnvConfig(),
                readerConfigList,
                writerConfigList,
                transformerConfigList);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.plugin.task.dq.rule.parser;

import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.STATISTICS_TABLE;

import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException;
import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager;
import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig;
import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.DataQualityConfiguration;
import org.apache.dolphinscheduler.plugin.task.dq.utils.RuleParserUtils;
import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType;
import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * SingleTableRuleParser builds the {@link DataQualityConfiguration} for the
 * single-table rules (null check, uniqueness, regexp, ...): it resolves the
 * statistics table alias, replaces the placeholders in the rule's execute
 * sql list, and assembles the reader, transformer and writer configs.
 */
public class SingleTableRuleParser implements IRuleParser {

    @Override
    public DataQualityConfiguration parse(Map inputParameterValue,
                                          DataQualityTaskExecutionContext context) throws DataQualityException {
        List<DqRuleExecuteSql> dqRuleExecuteSqlList =
                JSONUtils.toList(context.getExecuteSqlList(), DqRuleExecuteSql.class);

        // The statistics sql produces the intermediate table whose alias is
        // referenced by the writer sql. Fail fast with a meaningful error
        // instead of an IndexOutOfBoundsException when a malformed rule
        // definition carries no statistics sql.
        List<DqRuleExecuteSql> statisticsSqlList =
                RuleParserUtils.getExecuteSqlListByType(dqRuleExecuteSqlList, ExecuteSqlType.STATISTICS);
        if (statisticsSqlList == null || statisticsSqlList.isEmpty()) {
            throw new DataQualityException("statistics execute sql is missing in the rule definition");
        }
        inputParameterValue.put(STATISTICS_TABLE, statisticsSqlList.get(0).getTableAlias());

        // transformer output tables are numbered starting after the readers
        int index = 1;

        List<BaseConfig> readerConfigList =
                RuleParserUtils.getReaderConfigList(inputParameterValue, context);
        RuleParserUtils.addStatisticsValueTableReaderConfig(readerConfigList, context);

        List<BaseConfig> transformerConfigList = new ArrayList<>();

        // replace the placeholder in execute sql list
        index = RuleParserUtils.replaceExecuteSqlPlaceholder(
                dqRuleExecuteSqlList,
                index,
                inputParameterValue,
                transformerConfigList);

        String writerSql = RuleManager.DEFAULT_COMPARISON_WRITER_SQL;
        if (context.isCompareWithFixedValue()) {
            // a fixed comparison value needs no comparison table; drop the
            // join (literal replacement, so no regex escaping is needed)
            writerSql = writerSql.replace("full join ${comparison_table}", "");
        }

        List<BaseConfig> writerConfigList = RuleParserUtils.getAllWriterConfigList(inputParameterValue,
                context, index, transformerConfigList, writerSql, RuleManager.TASK_STATISTICS_VALUE_WRITER_SQL);

        return new DataQualityConfiguration(
                context.getRuleName(),
                RuleParserUtils.getEnvConfig(),
                readerConfigList,
                writerConfigList,
                transformerConfigList);
    }
}
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq.utils; + +import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_BUSINESS_DATE; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_CURRENT_DATE; +import static org.apache.dolphinscheduler.spi.task.TaskConstants.PARAMETER_DATETIME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.AND; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.BATCH; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.CHECK_TYPE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.COMPARISON_NAME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.COMPARISON_TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.COMPARISON_TYPE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.CREATE_TIME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.DATABASE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.DATA_TIME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.DRIVER; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.ERROR_OUTPUT_PATH; +import static 
org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.FAILURE_STRATEGY; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.HDFS_FILE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.INDEX; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.INPUT_TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.OPERATOR; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.OUTPUT_TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.PASSWORD; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.PATH; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.PROCESS_DEFINITION_ID; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.PROCESS_INSTANCE_ID; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.RULE_NAME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.RULE_TYPE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.SQL; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.SRC_FIELD; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.SRC_FILTER; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.SRC_TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.STATISTICS_EXECUTE_SQL; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.STATISTICS_TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TARGET_FIELD; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TARGET_FILTER; +import static 
org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TARGET_TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.TASK_INSTANCE_ID; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.THRESHOLD; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.UPDATE_TIME; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.URL; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.USER; + +import org.apache.dolphinscheduler.plugin.datasource.api.utils.DataSourceUtils; +import org.apache.dolphinscheduler.plugin.task.dq.exception.DataQualityException; +import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql; +import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleInputEntry; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.BaseConfig; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parameter.EnvConfig; +import org.apache.dolphinscheduler.plugin.task.dq.rule.parser.MappingColumn; +import org.apache.dolphinscheduler.plugin.task.util.MapUtils; +import org.apache.dolphinscheduler.spi.datasource.BaseConnectionParam; +import org.apache.dolphinscheduler.spi.enums.DbType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType; +import org.apache.dolphinscheduler.spi.task.dq.utils.Md5Utils; +import org.apache.dolphinscheduler.spi.task.paramparser.ParameterUtils; +import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import org.apache.commons.collections4.CollectionUtils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import com.fasterxml.jackson.databind.node.ArrayNode; + +/** + * RuleParserUtils + */ 
+public class RuleParserUtils { + + private RuleParserUtils() { + throw new IllegalStateException("Utility class"); + } + + private static final String AND_SRC_FILTER = "AND (${src_filter})"; + private static final String WHERE_SRC_FILTER = "WHERE (${src_filter})"; + private static final String AND_TARGET_FILTER = "AND (${target_filter})"; + private static final String WHERE_TARGET_FILTER = "WHERE (${target_filter})"; + + public static List getReaderConfigList( + Map inputParameterValue, + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException { + + List readerConfigList = new ArrayList<>(); + + //all the rule need the source config + if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getSourceConnectorType())) { + BaseConnectionParam sourceDataSource = + (BaseConnectionParam) DataSourceUtils.buildConnectionParams( + DbType.of(dataQualityTaskExecutionContext.getSourceType()), + dataQualityTaskExecutionContext.getSourceConnectionParams()); + BaseConfig sourceBaseConfig = new BaseConfig(); + sourceBaseConfig.setType(dataQualityTaskExecutionContext.getSourceConnectorType()); + Map config = new HashMap<>(); + if (sourceDataSource != null) { + config.put(DATABASE,sourceDataSource.getDatabase()); + config.put(TABLE,inputParameterValue.get(SRC_TABLE)); + config.put(URL,DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getSourceType()),sourceDataSource)); + config.put(USER,sourceDataSource.getUser()); + config.put(PASSWORD,sourceDataSource.getPassword()); + config.put(DRIVER, DataSourceUtils.getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getSourceType()))); + String outputTable = sourceDataSource.getDatabase() + "_" + inputParameterValue.get(SRC_TABLE); + config.put(OUTPUT_TABLE,outputTable); + inputParameterValue.put(SRC_TABLE,outputTable); + } + sourceBaseConfig.setConfig(config); + + readerConfigList.add(sourceBaseConfig); + } + + // MultiTableAccuracyRule need the target config + if 
(StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getTargetConnectorType())) { + BaseConnectionParam targetDataSource = + (BaseConnectionParam) DataSourceUtils.buildConnectionParams( + DbType.of(dataQualityTaskExecutionContext.getTargetType()), + dataQualityTaskExecutionContext.getTargetConnectionParams()); + BaseConfig targetBaseConfig = new BaseConfig(); + targetBaseConfig.setType(dataQualityTaskExecutionContext.getTargetConnectorType()); + Map config = new HashMap<>(); + if (targetDataSource != null) { + config.put(DATABASE,targetDataSource.getDatabase()); + config.put(TABLE,inputParameterValue.get(TARGET_TABLE)); + config.put(URL,DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getTargetType()),targetDataSource)); + config.put(USER,targetDataSource.getUser()); + config.put(PASSWORD,targetDataSource.getPassword()); + config.put(DRIVER, DataSourceUtils.getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getTargetType()))); + String outputTable = targetDataSource.getDatabase() + "_" + inputParameterValue.get(TARGET_TABLE); + config.put(OUTPUT_TABLE,outputTable); + inputParameterValue.put(TARGET_TABLE,outputTable); + } + targetBaseConfig.setConfig(config); + + readerConfigList.add(targetBaseConfig); + } + + return readerConfigList; + } + + public static int replaceExecuteSqlPlaceholder(List executeSqlList, + int index, Map inputParameterValueResult, + List transformerConfigList) { + List midExecuteSqlDefinitionList + = getExecuteSqlListByType(executeSqlList, ExecuteSqlType.MIDDLE); + + List statisticsExecuteSqlDefinitionList + = getExecuteSqlListByType(executeSqlList, ExecuteSqlType.STATISTICS); + + checkAndReplace(midExecuteSqlDefinitionList,inputParameterValueResult.get(SRC_FILTER),AND_SRC_FILTER); + checkAndReplace(midExecuteSqlDefinitionList,inputParameterValueResult.get(SRC_FILTER),WHERE_SRC_FILTER); + checkAndReplace(statisticsExecuteSqlDefinitionList,inputParameterValueResult.get(SRC_FILTER),AND_SRC_FILTER); + 
checkAndReplace(statisticsExecuteSqlDefinitionList,inputParameterValueResult.get(SRC_FILTER),WHERE_SRC_FILTER); + + checkAndReplace(midExecuteSqlDefinitionList,inputParameterValueResult.get(TARGET_FILTER),AND_TARGET_FILTER); + checkAndReplace(midExecuteSqlDefinitionList,inputParameterValueResult.get(TARGET_FILTER),WHERE_TARGET_FILTER); + checkAndReplace(statisticsExecuteSqlDefinitionList,inputParameterValueResult.get(TARGET_FILTER),AND_TARGET_FILTER); + checkAndReplace(statisticsExecuteSqlDefinitionList,inputParameterValueResult.get(TARGET_FILTER),WHERE_TARGET_FILTER); + + if (CollectionUtils.isNotEmpty(midExecuteSqlDefinitionList)) { + for (DqRuleExecuteSql executeSqlDefinition:midExecuteSqlDefinitionList) { + index = setTransformerConfig( + index, + inputParameterValueResult, + transformerConfigList, + executeSqlDefinition); + } + } + + if (CollectionUtils.isNotEmpty(statisticsExecuteSqlDefinitionList)) { + for (DqRuleExecuteSql executeSqlDefinition:statisticsExecuteSqlDefinitionList) { + index = setTransformerConfig( + index, + inputParameterValueResult, + transformerConfigList, + executeSqlDefinition); + } + } + + return index; + } + + private static int setTransformerConfig(int index, + Map inputParameterValueResult, + List transformerConfigList, + DqRuleExecuteSql executeSqlDefinition) { + Map config = new HashMap<>(); + config.put(INDEX,index++); + config.put(SQL, ParameterUtils.convertParameterPlaceholders(executeSqlDefinition.getSql(),inputParameterValueResult)); + config.put(OUTPUT_TABLE,executeSqlDefinition.getTableAlias()); + + BaseConfig transformerConfig = new BaseConfig(SQL,config); + transformerConfigList.add(transformerConfig); + return index; + } + + public static List getSingleTableCustomSqlTransformerConfigList(int index, + Map inputParameterValueResult) { + List list = new ArrayList<>(); + + Map config = new HashMap<>(); + config.put(INDEX,index + 1); + 
config.put(SQL,ParameterUtils.convertParameterPlaceholders(inputParameterValueResult.get(STATISTICS_EXECUTE_SQL),inputParameterValueResult)); + config.put(OUTPUT_TABLE,inputParameterValueResult.get(SRC_TABLE)); + inputParameterValueResult.put(STATISTICS_TABLE,inputParameterValueResult.get(SRC_TABLE)); + BaseConfig transformerConfig = new BaseConfig(SQL,config); + list.add(transformerConfig); + return list; + } + + private static String getCoalesceString(String table, String column) { + return "coalesce(" + table + "." + column + ", '')"; + } + + private static String getSrcColumnIsNullStr(String table,List columns) { + String[] columnList = new String[columns.size()]; + for (int i = 0; i < columns.size(); i++) { + String column = columns.get(i); + columnList[i] = table + "." + column + " IS NULL"; + } + return String.join(AND, columnList); + } + + public static Map getInputParameterMapFromEntryList(List defaultInputEntryList) { + + Map defaultInputParameterValue = new HashMap<>(); + for (DqRuleInputEntry inputEntry:defaultInputEntryList) { + defaultInputParameterValue.put(inputEntry.getField(),inputEntry.getValue()); + } + + return defaultInputParameterValue; + } + + public static List getWriterConfigList( + String sql, + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException { + + List writerConfigList = new ArrayList<>(); + if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getWriterConnectorType())) { + BaseConnectionParam writerDataSource = + (BaseConnectionParam) DataSourceUtils.buildConnectionParams( + DbType.of(dataQualityTaskExecutionContext.getWriterType()), + dataQualityTaskExecutionContext.getWriterConnectionParams()); + BaseConfig writerConfig = new BaseConfig(); + writerConfig.setType(dataQualityTaskExecutionContext.getWriterConnectorType()); + + Map config = new HashMap<>(); + if (writerDataSource != null) { + config.put(DATABASE,writerDataSource.getDatabase()); + 
config.put(TABLE,dataQualityTaskExecutionContext.getWriterTable()); + config.put(URL,DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getWriterType()),writerDataSource)); + config.put(USER,writerDataSource.getUser()); + config.put(PASSWORD,writerDataSource.getPassword()); + config.put(DRIVER, DataSourceUtils.getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getWriterType()))); + config.put(SQL,sql); + } + writerConfig.setConfig(config); + writerConfigList.add(writerConfig); + } + + return writerConfigList; + } + + public static void addStatisticsValueTableReaderConfig (List readerConfigList, + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { + if (dataQualityTaskExecutionContext.isComparisonNeedStatisticsValueTable()) { + List statisticsBaseConfigList = RuleParserUtils.getStatisticsValueConfigReaderList(dataQualityTaskExecutionContext); + readerConfigList.addAll(statisticsBaseConfigList); + } + } + + public static List getStatisticsValueConfigWriterList ( + String sql, + Map inputParameterValueResult, + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException { + + List writerConfigList = new ArrayList<>(); + if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getStatisticsValueConnectorType())) { + BaseConfig writerConfig = getStatisticsValueConfig(dataQualityTaskExecutionContext); + if (writerConfig != null) { + writerConfig.getConfig().put(SQL,ParameterUtils.convertParameterPlaceholders(sql,inputParameterValueResult)); + } + writerConfigList.add(writerConfig); + } + return writerConfigList; + } + + public static List getStatisticsValueConfigReaderList ( + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException { + + List readerConfigList = new ArrayList<>(); + if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getStatisticsValueConnectorType())) { + BaseConfig readerConfig = 
getStatisticsValueConfig(dataQualityTaskExecutionContext); + if (readerConfig != null) { + readerConfig.getConfig().put(OUTPUT_TABLE,dataQualityTaskExecutionContext.getStatisticsValueTable()); + } + readerConfigList.add(readerConfig); + } + return readerConfigList; + } + + public static BaseConfig getStatisticsValueConfig ( + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) throws DataQualityException { + BaseConfig baseConfig = null; + if (StringUtils.isNotEmpty(dataQualityTaskExecutionContext.getStatisticsValueConnectorType())) { + BaseConnectionParam writerDataSource = + (BaseConnectionParam) DataSourceUtils.buildConnectionParams( + DbType.of(dataQualityTaskExecutionContext.getStatisticsValueType()), + dataQualityTaskExecutionContext.getStatisticsValueWriterConnectionParams()); + baseConfig = new BaseConfig(); + baseConfig.setType(dataQualityTaskExecutionContext.getStatisticsValueConnectorType()); + + Map config = new HashMap<>(); + if (writerDataSource != null) { + config.put(DATABASE,writerDataSource.getDatabase()); + config.put(TABLE,dataQualityTaskExecutionContext.getStatisticsValueTable()); + config.put(URL,DataSourceUtils.getJdbcUrl(DbType.of(dataQualityTaskExecutionContext.getStatisticsValueType()),writerDataSource)); + config.put(USER,writerDataSource.getUser()); + config.put(PASSWORD,writerDataSource.getPassword()); + config.put(DRIVER, DataSourceUtils.getDatasourceDriver(DbType.of(dataQualityTaskExecutionContext.getWriterType()))); + } + + baseConfig.setConfig(config); + } + + return baseConfig; + } + + public static String getOnClause(List mappingColumnList,Map inputParameterValueResult) { + //get on clause + String[] columnList = new String[mappingColumnList.size()]; + for (int i = 0; i < mappingColumnList.size(); i++) { + MappingColumn column = mappingColumnList.get(i); + columnList[i] = getCoalesceString(inputParameterValueResult.get(SRC_TABLE),column.getSrcField()) + + column.getOperator() + + 
getCoalesceString(inputParameterValueResult.get(TARGET_TABLE),column.getTargetField()); + } + + return String.join(AND,columnList); + } + + public static String getWhereClause(List mappingColumnList,Map inputParameterValueResult) { + String srcColumnNotNull = "( NOT (" + getSrcColumnIsNullStr(inputParameterValueResult.get(SRC_TABLE),getSrcColumnList(mappingColumnList)) + " ))"; + String targetColumnIsNull = "( " + getSrcColumnIsNullStr(inputParameterValueResult.get(TARGET_TABLE),getTargetColumnList(mappingColumnList)) + " )"; + + return srcColumnNotNull + AND + targetColumnIsNull; + } + + public static List getWriterConfigList( + int index, + Map inputParameterValueResult, + List transformerConfigList, + DataQualityTaskExecutionContext dataQualityTaskExecutionContext, + String writerSql) throws DataQualityException { + List comparisonExecuteSqlList = + getExecuteSqlListByType(JSONUtils.toList(dataQualityTaskExecutionContext.getExecuteSqlList(),DqRuleExecuteSql.class), ExecuteSqlType.COMPARISON); + + if (CollectionUtils.isNotEmpty(comparisonExecuteSqlList)) { + DqRuleExecuteSql comparisonSql = comparisonExecuteSqlList.get(0); + inputParameterValueResult.put(COMPARISON_TABLE,comparisonSql.getTableAlias()); + + checkAndReplace(comparisonExecuteSqlList,inputParameterValueResult.get(SRC_FILTER),AND_SRC_FILTER); + checkAndReplace(comparisonExecuteSqlList,inputParameterValueResult.get(SRC_FILTER),WHERE_SRC_FILTER); + checkAndReplace(comparisonExecuteSqlList,inputParameterValueResult.get(TARGET_FILTER),AND_TARGET_FILTER); + checkAndReplace(comparisonExecuteSqlList,inputParameterValueResult.get(TARGET_FILTER),WHERE_TARGET_FILTER); + + for (DqRuleExecuteSql executeSqlDefinition:comparisonExecuteSqlList) { + index = setTransformerConfig( + index, + inputParameterValueResult, + transformerConfigList, + executeSqlDefinition); + } + } + + return getWriterConfigList( + ParameterUtils.convertParameterPlaceholders(writerSql,inputParameterValueResult), + 
dataQualityTaskExecutionContext + ); + } + + public static List getAllWriterConfigList ( + Map inputParameterValue, + DataQualityTaskExecutionContext context, + int index, + List transformerConfigList, + String writerSql, + String statisticsValueWriterSql) { + + List writerConfigList = RuleParserUtils.getWriterConfigList( + index, + inputParameterValue, + transformerConfigList, + context, + writerSql); + + writerConfigList.addAll( + RuleParserUtils.getStatisticsValueConfigWriterList( + statisticsValueWriterSql, + inputParameterValue, + context)); + + BaseConfig errorOutputWriter = RuleParserUtils.getErrorOutputWriter(inputParameterValue,context); + if (errorOutputWriter != null) { + writerConfigList.add(errorOutputWriter); + } + + return writerConfigList; + } + + public static List getExecuteSqlListByType ( + List allExecuteSqlList, ExecuteSqlType executeSqlType) { + if (CollectionUtils.isEmpty(allExecuteSqlList)) { + return allExecuteSqlList; + } + + return allExecuteSqlList + .stream() + .filter(x -> ExecuteSqlType.of(x.getType()) == executeSqlType) + .collect(Collectors.toList()); + } + + private static void checkAndReplace(List list, String checkValue, String replaceSrc) { + if (StringUtils.isEmpty(checkValue) && CollectionUtils.isNotEmpty(list)) { + for (DqRuleExecuteSql executeSqlDefinition:list) { + String sql = executeSqlDefinition.getSql(); + sql = sql.replace(replaceSrc,""); + executeSqlDefinition.setSql(sql); + } + } + } + + public static List getMappingColumnList(String mappingColumns) { + ArrayNode mappingColumnList = JSONUtils.parseArray(mappingColumns); + List list = new ArrayList<>(); + mappingColumnList.forEach(item -> { + MappingColumn column = new MappingColumn( + String.valueOf(item.get(SRC_FIELD)).replace("\"",""), + String.valueOf(item.get(OPERATOR)).replace("\""," "), + String.valueOf(item.get(TARGET_FIELD)).replace("\"","")); + list.add(column); + }); + + return list; + } + + public static List getSrcColumnList(List mappingColumns) { + List 
list = new ArrayList<>(); + mappingColumns.forEach(item -> + list.add(item.getSrcField()) + ); + + return list; + } + + public static List getTargetColumnList(List mappingColumns) { + List list = new ArrayList<>(); + mappingColumns.forEach(item -> + list.add(item.getTargetField()) + ); + + return list; + } + + public static EnvConfig getEnvConfig() { + EnvConfig envConfig = new EnvConfig(); + envConfig.setType(BATCH); + return envConfig; + } + + public static BaseConfig getErrorOutputWriter(Map inputParameterValueResult, + DataQualityTaskExecutionContext dataQualityTaskExecutionContext) { + List dqRuleExecuteSqlList = JSONUtils.toList(dataQualityTaskExecutionContext.getExecuteSqlList() + ,DqRuleExecuteSql.class); + + DqRuleExecuteSql errorOutputSql = null; + if (CollectionUtils.isEmpty(dqRuleExecuteSqlList)) { + return null; + } + + for (DqRuleExecuteSql executeSql : dqRuleExecuteSqlList) { + if (executeSql.isErrorOutputSql()) { + errorOutputSql = executeSql; + break; + } + } + + BaseConfig baseConfig = null; + if (StringUtils.isNotEmpty(inputParameterValueResult.get(ERROR_OUTPUT_PATH)) + && errorOutputSql != null) { + baseConfig = new BaseConfig(); + Map config = new HashMap<>(); + config.put(PATH,inputParameterValueResult.get(ERROR_OUTPUT_PATH)); + config.put(INPUT_TABLE,errorOutputSql.getTableAlias()); + baseConfig.setConfig(config); + baseConfig.setType(HDFS_FILE); + } + + return baseConfig; + } + + /** + * the unique code use to get the same type and condition task statistics value + * @param inputParameterValue + * @return + */ + public static String generateUniqueCode(Map inputParameterValue) { + + if (MapUtils.isEmpty(inputParameterValue)) { + return "-1"; + } + + Map newInputParameterValue = new HashMap<>(inputParameterValue); + + newInputParameterValue.remove(RULE_TYPE); + newInputParameterValue.remove(RULE_NAME); + newInputParameterValue.remove(CREATE_TIME); + newInputParameterValue.remove(UPDATE_TIME); + 
newInputParameterValue.remove(PROCESS_DEFINITION_ID); + newInputParameterValue.remove(PROCESS_INSTANCE_ID); + newInputParameterValue.remove(TASK_INSTANCE_ID); + newInputParameterValue.remove(CHECK_TYPE); + newInputParameterValue.remove(OPERATOR); + newInputParameterValue.remove(THRESHOLD); + newInputParameterValue.remove(FAILURE_STRATEGY); + newInputParameterValue.remove(DATA_TIME); + newInputParameterValue.remove(ERROR_OUTPUT_PATH); + newInputParameterValue.remove(COMPARISON_TYPE); + newInputParameterValue.remove(COMPARISON_NAME); + newInputParameterValue.remove(COMPARISON_TABLE); + newInputParameterValue.remove(PARAMETER_CURRENT_DATE); + newInputParameterValue.remove(PARAMETER_BUSINESS_DATE); + newInputParameterValue.remove(PARAMETER_DATETIME); + + StringBuilder sb = new StringBuilder(); + for (String value : newInputParameterValue.values()) { + sb.append(value); + } + + return Md5Utils.getMd5(sb.toString(),true); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/ProgramType.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/ProgramType.java new file mode 100644 index 0000000000..4a502b2a15 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/ProgramType.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/**
 * The kinds of programs a Spark data-quality task can launch.
 * Serialized ordinals: 0 = JAVA, 1 = SCALA, 2 = PYTHON — do not reorder.
 */
public enum ProgramType {

    JAVA,
    SCALA,
    PYTHON
}
+ */ + +package org.apache.dolphinscheduler.plugin.task.dq.utils.spark; + +import org.apache.dolphinscheduler.plugin.task.util.ArgsUtils; +import org.apache.dolphinscheduler.spi.task.ResourceInfo; +import org.apache.dolphinscheduler.spi.utils.StringUtils; + +import java.util.ArrayList; +import java.util.List; + +/** + * spark args utils + */ +public class SparkArgsUtils { + + private static final String SPARK_CLUSTER = "cluster"; + + private static final String SPARK_LOCAL = "local"; + + private static final String SPARK_ON_YARN = "yarn"; + + private SparkArgsUtils() { + throw new IllegalStateException("Utility class"); + } + + /** + * build args + * + * @param param param + * @return argument list + */ + public static List buildArgs(SparkParameters param) { + List args = new ArrayList<>(); + args.add(SparkConstants.MASTER); + + String deployMode = StringUtils.isNotEmpty(param.getDeployMode()) ? param.getDeployMode() : SPARK_CLUSTER; + if (!SPARK_LOCAL.equals(deployMode)) { + args.add(SPARK_ON_YARN); + args.add(SparkConstants.DEPLOY_MODE); + } + args.add(deployMode); + + ProgramType programType = param.getProgramType(); + String mainClass = param.getMainClass(); + if (programType != null && programType != ProgramType.PYTHON && StringUtils.isNotEmpty(mainClass)) { + args.add(SparkConstants.MAIN_CLASS); + args.add(mainClass); + } + + int driverCores = param.getDriverCores(); + if (driverCores > 0) { + args.add(SparkConstants.DRIVER_CORES); + args.add(String.format("%d", driverCores)); + } + + String driverMemory = param.getDriverMemory(); + if (StringUtils.isNotEmpty(driverMemory)) { + args.add(SparkConstants.DRIVER_MEMORY); + args.add(driverMemory); + } + + int numExecutors = param.getNumExecutors(); + if (numExecutors > 0) { + args.add(SparkConstants.NUM_EXECUTORS); + args.add(String.format("%d", numExecutors)); + } + + int executorCores = param.getExecutorCores(); + if (executorCores > 0) { + args.add(SparkConstants.EXECUTOR_CORES); + args.add(String.format("%d", 
executorCores)); + } + + String executorMemory = param.getExecutorMemory(); + if (StringUtils.isNotEmpty(executorMemory)) { + args.add(SparkConstants.EXECUTOR_MEMORY); + args.add(executorMemory); + } + + String appName = param.getAppName(); + if (StringUtils.isNotEmpty(appName)) { + args.add(SparkConstants.SPARK_NAME); + args.add(ArgsUtils.escape(appName)); + } + + String others = param.getOthers(); + if (!SPARK_LOCAL.equals(deployMode) && (StringUtils.isEmpty(others) || !others.contains(SparkConstants.SPARK_QUEUE))) { + String queue = param.getQueue(); + if (StringUtils.isNotEmpty(queue)) { + args.add(SparkConstants.SPARK_QUEUE); + args.add(queue); + } + } + + // --conf --files --jars --packages + if (StringUtils.isNotEmpty(others)) { + args.add(others); + } + + ResourceInfo mainJar = param.getMainJar(); + if (mainJar != null) { + args.add(mainJar.getRes()); + } + + String mainArgs = param.getMainArgs(); + if (StringUtils.isNotEmpty(mainArgs)) { + args.add(mainArgs); + } + + return args; + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/SparkConstants.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/SparkConstants.java new file mode 100644 index 0000000000..0834474b96 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/SparkConstants.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
/**
 * spark-submit command-line flag names used when assembling the
 * data-quality Spark application invocation.
 */
public class SparkConstants {

    private SparkConstants() {
        throw new IllegalStateException("Utility class");
    }

    /** --class CLASS_NAME: the application entry class (JAVA/SCALA only). */
    public static final String MAIN_CLASS = "--class";

    /** --name NAME: the application name shown in the cluster UI. */
    public static final String SPARK_NAME = "--name";

    /** --queue QUEUE: the YARN queue to submit to. */
    public static final String SPARK_QUEUE = "--queue";

    /** --deploy-mode client|cluster. */
    public static final String DEPLOY_MODE = "--deploy-mode";

    /** --driver-cores NUM: cores for the driver (cluster mode). */
    public static final String DRIVER_CORES = "--driver-cores";

    /** --driver-memory MEM: memory for the driver. */
    public static final String DRIVER_MEMORY = "--driver-memory";

    /** --master MASTER_URL (e.g. yarn, local). */
    public static final String MASTER = "--master";

    /** --num-executors NUM: number of executors to launch. */
    public static final String NUM_EXECUTORS = "--num-executors";

    /** --executor-cores NUM: cores per executor. */
    public static final String EXECUTOR_CORES = "--executor-cores";

    /** --executor-memory MEM: memory per executor. */
    public static final String EXECUTOR_MEMORY = "--executor-memory";

}
b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/main/java/org/apache/dolphinscheduler/plugin/task/dq/utils/spark/SparkParameters.java @@ -0,0 +1,241 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq.utils.spark; + +import org.apache.dolphinscheduler.spi.task.AbstractParameters; +import org.apache.dolphinscheduler.spi.task.ResourceInfo; + +import java.util.ArrayList; +import java.util.List; + +/** + * spark parameters + */ +public class SparkParameters extends AbstractParameters { + + /** + * main jar + */ + private ResourceInfo mainJar; + + /** + * main class + */ + private String mainClass; + + /** + * deploy mode + */ + private String deployMode; + + /** + * arguments + */ + private String mainArgs; + + /** + * driver-cores Number of cores used by the driver, only in cluster mode + */ + private int driverCores; + + /** + * driver-memory Memory for driver + */ + + private String driverMemory; + + /** + * num-executors Number of executors to launch + */ + private int numExecutors; + + /** + * executor-cores Number of cores per executor + */ + private int executorCores; + + /** + * Memory per executor + */ + private String executorMemory; + + 
/** + * app name + */ + private String appName; + + /** + * The YARN queue to submit to + */ + private String queue; + + /** + * other arguments + */ + private String others; + + /** + * program type + * 0 JAVA,1 SCALA,2 PYTHON + */ + private ProgramType programType; + + /** + * spark version + */ + private String sparkVersion; + + /** + * resource list + */ + private List resourceList = new ArrayList<>(); + + public ResourceInfo getMainJar() { + return mainJar; + } + + public void setMainJar(ResourceInfo mainJar) { + this.mainJar = mainJar; + } + + public String getMainClass() { + return mainClass; + } + + public void setMainClass(String mainClass) { + this.mainClass = mainClass; + } + + public String getDeployMode() { + return deployMode; + } + + public void setDeployMode(String deployMode) { + this.deployMode = deployMode; + } + + public String getMainArgs() { + return mainArgs; + } + + public void setMainArgs(String mainArgs) { + this.mainArgs = mainArgs; + } + + public int getDriverCores() { + return driverCores; + } + + public void setDriverCores(int driverCores) { + this.driverCores = driverCores; + } + + public String getDriverMemory() { + return driverMemory; + } + + public void setDriverMemory(String driverMemory) { + this.driverMemory = driverMemory; + } + + public int getNumExecutors() { + return numExecutors; + } + + public void setNumExecutors(int numExecutors) { + this.numExecutors = numExecutors; + } + + public int getExecutorCores() { + return executorCores; + } + + public void setExecutorCores(int executorCores) { + this.executorCores = executorCores; + } + + public String getExecutorMemory() { + return executorMemory; + } + + public void setExecutorMemory(String executorMemory) { + this.executorMemory = executorMemory; + } + + public String getAppName() { + return appName; + } + + public void setAppName(String appName) { + this.appName = appName; + } + + public String getQueue() { + return queue; + } + + public void setQueue(String queue) { + 
this.queue = queue; + } + + public String getOthers() { + return others; + } + + public void setOthers(String others) { + this.others = others; + } + + public List getResourceList() { + return resourceList; + } + + public void setResourceList(List resourceList) { + this.resourceList = resourceList; + } + + public ProgramType getProgramType() { + return programType; + } + + public void setProgramType(ProgramType programType) { + this.programType = programType; + } + + public String getSparkVersion() { + return sparkVersion; + } + + public void setSparkVersion(String sparkVersion) { + this.sparkVersion = sparkVersion; + } + + @Override + public boolean checkParameters() { + return mainJar != null && programType != null; + } + + @Override + public List getResourceFilesList() { + if (mainJar != null && !resourceList.contains(mainJar)) { + resourceList.add(mainJar); + } + return resourceList; + } + +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java new file mode 100644 index 0000000000..b23b45ae65 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityParameterTest.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq; + +import org.apache.dolphinscheduler.plugin.task.dq.utils.spark.SparkParameters; +import org.apache.dolphinscheduler.spi.params.base.ParamsOptions; +import org.apache.dolphinscheduler.spi.params.base.PluginParams; +import org.apache.dolphinscheduler.spi.params.base.TriggerType; +import org.apache.dolphinscheduler.spi.params.base.Validate; +import org.apache.dolphinscheduler.spi.params.input.InputParam; +import org.apache.dolphinscheduler.spi.params.input.InputParamProps; +import org.apache.dolphinscheduler.spi.params.select.SelectParam; +import org.apache.dolphinscheduler.spi.params.select.SelectParamProps; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * DataQualityParameterTest + */ +public class DataQualityParameterTest { + + private DataQualityParameters dataQualityParameters = null; + + @Before + public void before() { + dataQualityParameters = new DataQualityParameters(); + dataQualityParameters.setRuleId(1); + dataQualityParameters.setSparkParameters(new SparkParameters()); + } + + @Test + public void testCheckParameterNormal() { + + Map inputParameterValue = new HashMap<>(); + inputParameterValue.put("src_connector_type","JDBC"); + 
inputParameterValue.put("src_datasource_id","1"); + inputParameterValue.put("src_table","test1"); + inputParameterValue.put("src_filter","date=2012-10-05"); + inputParameterValue.put("src_field","id"); + + inputParameterValue.put("rule_type","1"); + inputParameterValue.put("process_definition_id","1"); + inputParameterValue.put("task_instance_id","1"); + inputParameterValue.put("check_type","1"); + inputParameterValue.put("threshold","1000"); + inputParameterValue.put("create_time","2012-10-05"); + inputParameterValue.put("update_time","2012-10-05"); + + dataQualityParameters.setRuleInputParameter(inputParameterValue); + + Assert.assertTrue(dataQualityParameters.checkParameters()); + } + + @Test + public void testRuleInputParameter() { + String formCreateJson = "[{\"field\":\"src_connector_type\",\"name\":\"源数据类型\"," + + "\"props\":{\"disabled\":false,\"multiple\":false,\"size\":\"small\"}," + + "\"type\":\"select\",\"title\":\"源数据类型\",\"value\":\"JDBC\"," + + "\"options\":[{\"label\":\"HIVE\",\"value\":\"HIVE\",\"disabled\":false}," + + "{\"label\":\"JDBC\",\"value\":\"JDBC\",\"disabled\":false}]}," + + "{\"props\":{\"disabled\":false,\"rows\":0,\"placeholder\":\"Please enter source table name\"," + + "\"size\":\"small\"},\"field\":\"src_table\",\"name\":\"源数据表\"," + + "\"type\":\"input\",\"title\":\"源数据表\",\"validate\":[{\"required\":true,\"type\":\"string\"," + + "\"trigger\":\"blur\"}]}]"; + + List pluginParamsList = new ArrayList<>(); + SelectParamProps selectParamProps = new SelectParamProps(); + selectParamProps.setMultiple(false); + selectParamProps.setDisabled(false); + selectParamProps.setSize("small"); + + SelectParam srcConnectorType = SelectParam.newBuilder("src_connector_type","源数据类型") + .setProps(selectParamProps) + .addOptions(new ParamsOptions("HIVE","HIVE",false)) + .addOptions(new ParamsOptions("JDBC","JDBC",false)) + .setValue("JDBC") + .build(); + + InputParamProps inputParamProps = new InputParamProps(); + 
inputParamProps.setPlaceholder("Please enter source table name"); + inputParamProps.setDisabled(false); + inputParamProps.setSize("small"); + inputParamProps.setRows(0); + + InputParam srcTable = InputParam.newBuilder("src_table","源数据表") + .setProps(inputParamProps) + .addValidate(Validate.newBuilder().setType("string").setRequired(true).setTrigger(TriggerType.BLUR.getTriggerType()).build()) + .build(); + + pluginParamsList.add(srcConnectorType); + pluginParamsList.add(srcTable); + + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + String result = null; + + try { + result = mapper.writeValueAsString(pluginParamsList); + } catch (JsonProcessingException e) { + Assert.fail(); + } + + Assert.assertEquals(formCreateJson,result); + } +} diff --git a/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java new file mode 100644 index 0000000000..c8c5cf2514 --- /dev/null +++ b/dolphinscheduler-task-plugin/dolphinscheduler-task-dataquality/src/test/java/org/apache/dolphinscheduler/plugin/task/dq/DataQualityTaskTest.java @@ -0,0 +1,1169 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.dolphinscheduler.plugin.task.dq; + +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.COMPARISON_TABLE; +import static org.apache.dolphinscheduler.spi.task.dq.utils.DataQualityConstants.SRC_FIELD; + +import org.apache.dolphinscheduler.plugin.task.dq.rule.RuleManager; +import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleExecuteSql; +import org.apache.dolphinscheduler.plugin.task.dq.rule.entity.DqRuleInputEntry; +import org.apache.dolphinscheduler.spi.params.base.FormType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ExecuteSqlType; +import org.apache.dolphinscheduler.spi.task.dq.enums.InputType; +import org.apache.dolphinscheduler.spi.task.dq.enums.OptionSourceType; +import org.apache.dolphinscheduler.spi.task.dq.enums.RuleType; +import org.apache.dolphinscheduler.spi.task.dq.enums.ValueType; +import org.apache.dolphinscheduler.spi.task.request.DataQualityTaskExecutionContext; +import org.apache.dolphinscheduler.spi.utils.JSONUtils; + +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Assert; +import org.junit.Test; + +/** + * DataQualityTaskTest + */ + +public class DataQualityTaskTest { + + @Test + public void testSingleTable() throws Exception { + DataQualityTaskExecutionContext dataQualityTaskExecutionContext = getSingleTableContext(); + + Map inputParameterValue = new HashMap<>(); + inputParameterValue.put("src_connector_type","0"); + 
inputParameterValue.put("src_datasource_id","2"); + inputParameterValue.put("src_table","src_result"); + inputParameterValue.put("check_type","0"); + inputParameterValue.put("operator","3"); + inputParameterValue.put("threshold","1"); + inputParameterValue.put("failure_strategy","0"); + inputParameterValue.put("comparison_type","1"); + inputParameterValue.put("comparison_name","10"); + inputParameterValue.put("rule_id","10"); + inputParameterValue.put("rule_type","0"); + inputParameterValue.put("rule_name","'表行数校验'"); + inputParameterValue.put("create_time","'2021-08-12 10:15:48'"); + inputParameterValue.put("update_time","'2021-08-12 10:15:48'"); + inputParameterValue.put("process_definition_id","21"); + inputParameterValue.put("process_instance_id","284"); + inputParameterValue.put("task_instance_id","287"); + inputParameterValue.put("data_time","'2021-08-12 10:15:48'"); + inputParameterValue.put("error_output_path","hdfs://192.168.0.1:8022/user/ods/data_quality_error_data/21_284_287"); + + RuleManager ruleManager = new RuleManager(inputParameterValue,dataQualityTaskExecutionContext); + String expect = "{\"name\":\"表行数校验\",\"env\":{\"type\":\"batch\",\"config\":null}," + + "\"readers\":[{\"type\":\"JDBC\",\"config\":" + + "{\"database\":\"test\",\"password\":\"test\",\"driver\":\"com.mysql.cj.jdbc.Driver\"," + + "\"user\":\"test\",\"output_table\":\"test_src_result\",\"table\":\"src_result\"," + + "\"url\":\"jdbc:mysql://localhost:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\"}}]," + + "\"transformers\":[{\"type\":\"sql\",\"config\":{\"index\":1," + + "\"output_table\":\"table_count\",\"sql\":\"SELECT COUNT(*) AS total FROM test_src_result \"}}]," + + "\"writers\":[{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\"," + + "\"driver\":\"com.mysql.cj.jdbc.Driver\",\"user\":\"test\",\"table\":\"dqc_result\"," + + 
"\"url\":\"jdbc:mysql://localhost:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\"," + + "\"sql\":\"select 0 as rule_type,'表行数校验' as rule_name,21 as process_definition_id,284 as process_instance_id," + + "287 as task_instance_id,table_count.total AS statistics_value,10 AS comparison_value,1 AS comparison_type," + + "0 as check_type,1 as threshold,3 as operator,0 as failure_strategy," + + "'hdfs://192.168.0.1:8022/user/ods/data_quality_error_data/21_284_287' as error_output_path," + + "'2021-08-12 10:15:48' as create_time,'2021-08-12 10:15:48' as update_time from table_count \"}}," + + "{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":\"com.mysql.cj.jdbc.Driver\"," + + "\"user\":\"test\",\"table\":\"dqc_statistics_value\",\"url\":" + + "\"jdbc:mysql://localhost:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\"," + + "\"sql\":\"select 21 as process_definition_id,287 as task_instance_id,10 as rule_id,'DN/MS5NLTSLVZ/++KEJ9BHPQSEN6/UY/EV5TWI1IRRY=' " + + "as unique_code,'table_count.total'AS statistics_name," + + "table_count.total AS statistics_value,'2021-08-12 10:15:48' as data_time,'2021-08-12 10:15:48' as create_time," + + "'2021-08-12 10:15:48' as update_time from table_count\"}}]}"; + Assert.assertEquals(expect, JSONUtils.toJsonString(ruleManager.generateDataQualityParameter())); + } + + private DataQualityTaskExecutionContext getSingleTableContext() { + DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext(); + + dataQualityTaskExecutionContext.setRuleName("表行数校验"); + dataQualityTaskExecutionContext.setRuleType(RuleType.SINGLE_TABLE.getCode()); + + List defaultInputEntryList = new ArrayList<>(); + + DqRuleInputEntry srcConnectorType = new DqRuleInputEntry(); + srcConnectorType.setTitle("源数据类型"); + srcConnectorType.setField("src_connector_type"); + 
srcConnectorType.setType(FormType.SELECT.getFormType()); + srcConnectorType.setCanEdit(true); + srcConnectorType.setShow(true); + srcConnectorType.setValue(null); + srcConnectorType.setPlaceholder("${src_connector_type}"); + srcConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode()); + srcConnectorType.setOptions(null); + srcConnectorType.setInputType(InputType.DEFAULT.getCode()); + srcConnectorType.setValueType(ValueType.NUMBER.getCode()); + srcConnectorType.setCreateTime(new Date()); + srcConnectorType.setUpdateTime(new Date()); + + DqRuleInputEntry srcDatasourceId = new DqRuleInputEntry(); + srcDatasourceId.setTitle("源数据源"); + srcDatasourceId.setField("src_datasource_id"); + srcDatasourceId.setType(FormType.CASCADER.getFormType()); + srcDatasourceId.setCanEdit(true); + srcDatasourceId.setShow(true); + srcDatasourceId.setValue(null); + srcDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode()); + srcDatasourceId.setInputType(InputType.DEFAULT.getCode()); + srcDatasourceId.setValueType(ValueType.NUMBER.getCode()); + srcDatasourceId.setCreateTime(new Date()); + srcDatasourceId.setUpdateTime(new Date()); + + DqRuleInputEntry srcTable = new DqRuleInputEntry(); + srcTable.setTitle("源数据表"); + srcTable.setField("src_table"); + srcTable.setType(FormType.INPUT.getFormType()); + srcTable.setCanEdit(true); + srcTable.setShow(true); + srcTable.setPlaceholder("Please enter source table name"); + srcTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcTable.setInputType(InputType.DEFAULT.getCode()); + srcTable.setValueType(ValueType.STRING.getCode()); + srcTable.setCreateTime(new Date()); + srcTable.setUpdateTime(new Date()); + + DqRuleInputEntry srcFilter = new DqRuleInputEntry(); + srcFilter.setTitle("源表过滤条件"); + srcFilter.setField("src_filter"); + srcFilter.setType(FormType.INPUT.getFormType()); + srcFilter.setCanEdit(true); + srcFilter.setShow(true); + srcFilter.setPlaceholder("Please enter filter expression"); + 
srcFilter.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcFilter.setInputType(InputType.DEFAULT.getCode()); + srcFilter.setValueType(ValueType.LIKE_SQL.getCode()); + srcFilter.setCreateTime(new Date()); + srcFilter.setUpdateTime(new Date()); + + DqRuleInputEntry srcField = new DqRuleInputEntry(); + srcField.setTitle("检测列"); + srcField.setField(SRC_FIELD); + srcField.setType(FormType.INPUT.getFormType()); + srcField.setCanEdit(true); + srcField.setShow(true); + srcField.setValue(""); + srcField.setPlaceholder("Please enter column, only single column is supported"); + srcField.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcField.setInputType(InputType.DEFAULT.getCode()); + srcField.setValueType(ValueType.STRING.getCode()); + srcField.setCreateTime(new Date()); + srcField.setUpdateTime(new Date()); + + DqRuleInputEntry statisticsName = new DqRuleInputEntry(); + statisticsName.setTitle("统计值"); + statisticsName.setField("statistics_name"); + statisticsName.setType(FormType.INPUT.getFormType()); + statisticsName.setCanEdit(false); + statisticsName.setShow(false); + statisticsName.setValue("table_count.total"); + statisticsName.setPlaceholder("${statistics_name}"); + statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + statisticsName.setInputType(InputType.STATISTICS.getCode()); + statisticsName.setValueType(ValueType.STRING.getCode()); + statisticsName.setCreateTime(new Date()); + statisticsName.setUpdateTime(new Date()); + + DqRuleInputEntry checkType = new DqRuleInputEntry(); + checkType.setTitle("检测方式"); + checkType.setField("check_type"); + checkType.setType(FormType.SELECT.getFormType()); + checkType.setCanEdit(true); + checkType.setShow(true); + checkType.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + checkType.setOptions("[{\"label\":\"比对值 - 统计值\",\"value\":\"0\"},{\"label\":\"统计值 - 比对值\",\"value\":\"1\"},{\"label\":\"统计值 / 比对值\"," + + "\"value\":\"2\"},{\"label\":\"(比对值-统计值) / 
比对值\",\"value\":\"3\"}]"); + checkType.setValue("0"); + checkType.setInputType(InputType.CHECK.getCode()); + checkType.setValueType(ValueType.STRING.getCode()); + checkType.setPlaceholder("检测类型"); + checkType.setCreateTime(new Date()); + checkType.setUpdateTime(new Date()); + + DqRuleInputEntry operator = new DqRuleInputEntry(); + operator.setTitle("操作符"); + operator.setField("operator"); + operator.setType(FormType.SELECT.getFormType()); + operator.setCanEdit(true); + operator.setShow(true); + operator.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + operator.setOptions("[{\"label\":\"=\",\"value\":\"0\"}," + + "{\"label\":\"<\",\"value\":\"1\"},{\"label\":\"<=\",\"value\":\"2\"}," + + "{\"label\":\">\",\"value\":\"3\"},{\"label\":\">=\",\"value\":\"4\"}," + + "{\"label\":\"!=\",\"value\":\"5\"}]"); + operator.setValue("0"); + operator.setInputType(InputType.CHECK.getCode()); + operator.setValueType(ValueType.STRING.getCode()); + operator.setPlaceholder("操作符"); + operator.setCreateTime(new Date()); + operator.setUpdateTime(new Date()); + + DqRuleInputEntry threshold = new DqRuleInputEntry(); + threshold.setTitle("阈值"); + threshold.setField("threshold"); + threshold.setType(FormType.INPUT.getFormType()); + threshold.setCanEdit(true); + threshold.setShow(true); + threshold.setPlaceholder("Please enter threshold, number is needed"); + threshold.setInputType(InputType.CHECK.getCode()); + threshold.setValueType(ValueType.NUMBER.getCode()); + threshold.setCreateTime(new Date()); + threshold.setUpdateTime(new Date()); + + DqRuleInputEntry afterFailure = new DqRuleInputEntry(); + afterFailure.setTitle("失败策略"); + afterFailure.setField("failure_strategy"); + afterFailure.setType(FormType.SELECT.getFormType()); + afterFailure.setCanEdit(true); + afterFailure.setShow(true); + afterFailure.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + afterFailure.setOptions("[{\"label\":\"告警\",\"value\":\"0\"},{\"label\":\"阻断\",\"value\":\"1\"}]"); + 
afterFailure.setValue("0"); + afterFailure.setInputType(InputType.CHECK.getCode()); + afterFailure.setValueType(ValueType.STRING.getCode()); + afterFailure.setPlaceholder("失败策略"); + afterFailure.setCreateTime(new Date()); + afterFailure.setUpdateTime(new Date()); + + defaultInputEntryList.add(checkType); + defaultInputEntryList.add(operator); + defaultInputEntryList.add(threshold); + defaultInputEntryList.add(afterFailure); + + defaultInputEntryList.add(srcConnectorType); + defaultInputEntryList.add(srcDatasourceId); + defaultInputEntryList.add(srcTable); + defaultInputEntryList.add(srcFilter); + defaultInputEntryList.add(srcField); + defaultInputEntryList.add(statisticsName); + + DqRuleExecuteSql executeSqlDefinition3 = new DqRuleExecuteSql(); + executeSqlDefinition3.setIndex(0); + executeSqlDefinition3.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})"); + executeSqlDefinition3.setTableAlias("table_count"); + executeSqlDefinition3.setType(ExecuteSqlType.STATISTICS.getCode()); + + List executeSqlList = new ArrayList<>(); + executeSqlList.add(executeSqlDefinition3); + dataQualityTaskExecutionContext.setExecuteSqlList(JSONUtils.toJsonString(executeSqlList)); + dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(defaultInputEntryList)); + dataQualityTaskExecutionContext.setSourceConnectorType("JDBC"); + dataQualityTaskExecutionContext.setSourceType(0); + dataQualityTaskExecutionContext.setSourceConnectionParams( + "{\"address\":\"jdbc:mysql://localhost:3306\"," + + "\"database\":\"test\"," + + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"}"); + + dataQualityTaskExecutionContext.setWriterType(0); + dataQualityTaskExecutionContext.setWriterConnectorType("JDBC"); + dataQualityTaskExecutionContext.setWriterTable("dqc_result"); + dataQualityTaskExecutionContext.setWriterConnectionParams( + "{\"address\":\"jdbc:mysql://localhost:3306\"," + + "\"database\":\"test\"," + 
+ "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"}"); + + dataQualityTaskExecutionContext.setStatisticsValueConnectorType("JDBC"); + dataQualityTaskExecutionContext.setStatisticsValueType(0); + dataQualityTaskExecutionContext.setStatisticsValueTable("dqc_statistics_value"); + dataQualityTaskExecutionContext.setStatisticsValueWriterConnectionParams( + "{\"address\":\"jdbc:mysql://localhost:3306\"," + + "\"database\":\"test\"," + + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"}"); + + dataQualityTaskExecutionContext.setCompareWithFixedValue(true); + return dataQualityTaskExecutionContext; + } + + @Test + public void testSingleTableCustomSql() throws Exception { + DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext(); + + dataQualityTaskExecutionContext.setRuleName("自定义SQL"); + dataQualityTaskExecutionContext.setRuleType(RuleType.SINGLE_TABLE_CUSTOM_SQL.getCode()); + + List defaultInputEntryList = new ArrayList<>(); + + DqRuleInputEntry srcConnectorType = new DqRuleInputEntry(); + srcConnectorType.setTitle("源数据类型"); + srcConnectorType.setField("src_connector_type"); + srcConnectorType.setType(FormType.SELECT.getFormType()); + srcConnectorType.setCanEdit(true); + srcConnectorType.setShow(true); + srcConnectorType.setValue(null); + srcConnectorType.setPlaceholder("${src_connector_type}"); + srcConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode()); + srcConnectorType.setOptions(null); + srcConnectorType.setInputType(InputType.DEFAULT.getCode()); + srcConnectorType.setValueType(ValueType.NUMBER.getCode()); + srcConnectorType.setCreateTime(new Date()); + srcConnectorType.setUpdateTime(new Date()); + + DqRuleInputEntry srcDatasourceId = new DqRuleInputEntry(); + srcDatasourceId.setTitle("源数据源"); + srcDatasourceId.setField("src_datasource_id"); + 
srcDatasourceId.setType(FormType.CASCADER.getFormType()); + srcDatasourceId.setCanEdit(true); + srcDatasourceId.setShow(true); + srcDatasourceId.setValue(null); + srcDatasourceId.setPlaceholder("${comparison_value}"); + srcDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode()); + srcDatasourceId.setInputType(InputType.DEFAULT.getCode()); + srcDatasourceId.setValueType(ValueType.NUMBER.getCode()); + srcConnectorType.setCreateTime(new Date()); + srcConnectorType.setUpdateTime(new Date()); + + DqRuleInputEntry srcTable = new DqRuleInputEntry(); + srcTable.setTitle("源数据表"); + srcTable.setField("src_table"); + srcTable.setType(FormType.INPUT.getFormType()); + srcTable.setCanEdit(true); + srcTable.setShow(true); + srcTable.setPlaceholder("Please enter source table name"); + srcTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcTable.setInputType(InputType.DEFAULT.getCode()); + srcTable.setValueType(ValueType.STRING.getCode()); + srcConnectorType.setCreateTime(new Date()); + srcConnectorType.setUpdateTime(new Date()); + + DqRuleInputEntry srcFilter = new DqRuleInputEntry(); + srcFilter.setTitle("源表过滤条件"); + srcFilter.setField("src_filter"); + srcFilter.setType(FormType.INPUT.getFormType()); + srcFilter.setCanEdit(true); + srcFilter.setShow(true); + srcFilter.setPlaceholder("Please enter source filter expression"); + srcFilter.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcFilter.setInputType(InputType.DEFAULT.getCode()); + srcFilter.setValueType(ValueType.LIKE_SQL.getCode()); + + DqRuleInputEntry statisticsName = new DqRuleInputEntry(); + statisticsName.setTitle("统计值名"); + statisticsName.setField("statistics_name"); + statisticsName.setType(FormType.INPUT.getFormType()); + statisticsName.setCanEdit(true); + statisticsName.setShow(true); + statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql"); + statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + 
statisticsName.setInputType(InputType.DEFAULT.getCode()); + statisticsName.setValueType(ValueType.STRING.getCode()); + + DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry(); + statisticsExecuteSql.setTitle("统计值计算SQL"); + statisticsExecuteSql.setField("statistics_execute_sql"); + statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType()); + statisticsExecuteSql.setCanEdit(true); + statisticsExecuteSql.setShow(true); + statisticsExecuteSql.setPlaceholder("Please enter the statistics execute sql"); + statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + statisticsExecuteSql.setValueType(ValueType.LIKE_SQL.getCode()); + + DqRuleInputEntry checkType = new DqRuleInputEntry(); + checkType.setTitle("检测方式"); + checkType.setField("check_type"); + checkType.setType(FormType.SELECT.getFormType()); + checkType.setCanEdit(true); + checkType.setShow(true); + checkType.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + checkType.setOptions("[{\"label\":\"比对值 - 统计值\",\"value\":\"0\"},{\"label\":\"统计值 - 比对值\",\"value\":\"1\"}," + + "{\"label\":\"统计值 / 比对值\",\"value\":\"2\"},{\"label\":\"(比对值-统计值) / 比对值\",\"value\":\"3\"}]"); + checkType.setValue("0"); + checkType.setInputType(InputType.CHECK.getCode()); + checkType.setValueType(ValueType.STRING.getCode()); + checkType.setPlaceholder("检测类型"); + + DqRuleInputEntry operator = new DqRuleInputEntry(); + operator.setTitle("操作符"); + operator.setField("operator"); + operator.setType(FormType.SELECT.getFormType()); + operator.setCanEdit(true); + operator.setShow(true); + operator.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + operator.setOptions("[{\"label\":\"=\",\"value\":\"0\"}," + + "{\"label\":\"<\",\"value\":\"1\"},{\"label\":\"<=\",\"value\":\"2\"}," + + "{\"label\":\">\",\"value\":\"3\"},{\"label\":\">=\",\"value\":\"4\"}," + + "{\"label\":\"!=\",\"value\":\"5\"}]"); + operator.setValue("0"); + operator.setInputType(InputType.CHECK.getCode()); + 
operator.setValueType(ValueType.STRING.getCode()); + operator.setPlaceholder("操作符"); + + DqRuleInputEntry threshold = new DqRuleInputEntry(); + threshold.setTitle("阈值"); + threshold.setField("threshold"); + threshold.setType(FormType.INPUT.getFormType()); + threshold.setCanEdit(true); + threshold.setShow(true); + threshold.setPlaceholder("Please enter threshold value, number is needed"); + threshold.setInputType(InputType.CHECK.getCode()); + threshold.setValueType(ValueType.NUMBER.getCode()); + + DqRuleInputEntry afterFailure = new DqRuleInputEntry(); + afterFailure.setTitle("失败策略"); + afterFailure.setField("failure_strategy"); + afterFailure.setType(FormType.SELECT.getFormType()); + afterFailure.setCanEdit(true); + afterFailure.setShow(true); + afterFailure.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + afterFailure.setOptions("[{\"label\":\"告警\",\"value\":\"0\"},{\"label\":\"阻断\",\"value\":\"1\"}]"); + afterFailure.setValue("0"); + afterFailure.setInputType(InputType.CHECK.getCode()); + afterFailure.setValueType(ValueType.STRING.getCode()); + afterFailure.setPlaceholder("失败策略"); + + defaultInputEntryList.add(checkType); + defaultInputEntryList.add(operator); + defaultInputEntryList.add(threshold); + defaultInputEntryList.add(afterFailure); + defaultInputEntryList.add(srcConnectorType); + defaultInputEntryList.add(srcDatasourceId); + defaultInputEntryList.add(srcTable); + defaultInputEntryList.add(statisticsName); + defaultInputEntryList.add(statisticsExecuteSql); + defaultInputEntryList.add(srcFilter); + + Map inputParameterValue = new HashMap<>(); + inputParameterValue.put("src_connector_type","0"); + inputParameterValue.put("src_datasource_id","2"); + inputParameterValue.put("src_table","person"); + inputParameterValue.put("statistics_name","miss"); + inputParameterValue.put("statistics_execute_sql","select count(*) as miss from ${src_table} where (sex = null or sex='') and age=1"); + inputParameterValue.put("src_filter","age=1"); + 
inputParameterValue.put("check_type","2"); + inputParameterValue.put("operator","3"); + inputParameterValue.put("threshold","50"); + inputParameterValue.put("failure_strategy","1"); + inputParameterValue.put("comparison_type","1"); + inputParameterValue.put("comparison_name","3"); + inputParameterValue.put("rule_id","1"); + inputParameterValue.put("rule_type","1"); + inputParameterValue.put("rule_name","'自定义SQL'"); + inputParameterValue.put("create_time","'2021-08-30 00:00:00'"); + inputParameterValue.put("update_time","'2021-08-30 00:00:00'"); + inputParameterValue.put("process_definition_id","1"); + inputParameterValue.put("process_instance_id","1"); + inputParameterValue.put("task_instance_id","1"); + inputParameterValue.put("data_time","'2021-08-30 00:00:00'"); + inputParameterValue.put("error_output_path","hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test2"); + + dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(defaultInputEntryList)); + dataQualityTaskExecutionContext.setSourceConnectorType("JDBC"); + dataQualityTaskExecutionContext.setSourceType(0); + dataQualityTaskExecutionContext.setSourceConnectionParams( + "{\"address\":\"jdbc:mysql://localhost:3306\"," + + "\"database\":\"test\"," + + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"}"); + + dataQualityTaskExecutionContext.setWriterType(1); + dataQualityTaskExecutionContext.setWriterConnectorType("JDBC"); + dataQualityTaskExecutionContext.setWriterTable("t_ds_dq_execute_result"); + dataQualityTaskExecutionContext.setWriterConnectionParams( + "{\"address\":\"jdbc:postgresql://localhost:5432\"," + + "\"database\":\"dolphinscheduler\"," + + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"," + + "\"other\":\"stringtype=unspecified&characterEncoding=UTF-8&allowMultiQueries=true\"}"); + + 
dataQualityTaskExecutionContext.setStatisticsValueConnectorType("JDBC"); + dataQualityTaskExecutionContext.setStatisticsValueType(1); + dataQualityTaskExecutionContext.setStatisticsValueTable("t_ds_dq_task_statistics_value"); + dataQualityTaskExecutionContext.setStatisticsValueWriterConnectionParams( + "{\"address\":\"jdbc:postgresql://localhost:5432\"," + + "\"database\":\"dolphinscheduler\"," + + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"," + + "\"other\":\"stringtype=unspecified&characterEncoding=UTF-8&allowMultiQueries=true\"}"); + + dataQualityTaskExecutionContext.setCompareWithFixedValue(true); + + RuleManager ruleManager = new RuleManager(inputParameterValue,dataQualityTaskExecutionContext); + String expect = "{\"name\":\"自定义SQL\",\"env\":{\"type\":\"batch\",\"config\":null},\"readers\":[{\"type\":\"JDBC\"," + + "\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":\"com.mysql.cj.jdbc.Driver\",\"user\":" + + "\"test\",\"output_table\":\"test_person\",\"table\":\"person\",\"url\":" + + "\"jdbc:mysql://localhost:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\"}}]," + + "\"transformers\":[{\"type\":\"sql\",\"config\":" + + "{\"index\":2,\"output_table\":\"test_person\",\"sql\":\"select count(*) as " + + "miss from test_person where (sex = null or sex='') and age=1\"}}],\"writers\":" + + "[{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\",\"password\":" + + "\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":" + + "\"t_ds_dq_execute_result\",\"url\":" + + "\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified&characterEncoding" + + "=UTF-8&allowMultiQueries=true\",\"sql\":\"select 1 as rule_type,'自定义SQL' as rule_name,1 " + + "as process_definition_id,1 as process_instance_id,1 as task_instance_id,miss AS " + + "statistics_value,3 AS comparison_value,1 
AS comparison_type,2 as check_type,50 as " + + "threshold,3 as operator,1 as failure_strategy,'hdfs://localhost:8022/user/ods/" + + "data_quality_error_data/1_1_test2' as error_output_path,'2021-08-30 00:00:00' as " + + "create_time,'2021-08-30 00:00:00' as update_time from ( test_person ) tmp1 \"}}," + + "{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\",\"password\":\"test\",\"driver\":" + + "\"org.postgresql.Driver\",\"user\":\"test\",\"table\":\"t_ds_dq_task_statistics_value\",\"url\":" + + "\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified&characterEncoding=" + + "UTF-8&allowMultiQueries=true\",\"sql\":\"select 1 as process_definition_id,1 as " + + "task_instance_id,1 as rule_id,'FNWZLNCPWWF4ZWKO/LYENOPL6JPV1SHPPWQ9YSYLOCU=' as unique_code,'miss'AS statistics_name,miss AS statistics_value," + + "'2021-08-30 00:00:00' as data_time,'2021-08-30 00:00:00' as create_time,'2021-08-30 00:00:00' " + + "as update_time from test_person\"}}]}"; + + Assert.assertEquals(expect,JSONUtils.toJsonString(ruleManager.generateDataQualityParameter())); + } + + @Test + public void testMultiTableComparison() throws Exception { + DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext(); + dataQualityTaskExecutionContext.setRuleName("跨表值比对"); + dataQualityTaskExecutionContext.setRuleType(RuleType.MULTI_TABLE_COMPARISON.getCode()); + + List defaultInputEntryList = new ArrayList<>(); + + DqRuleInputEntry srcConnectorType = new DqRuleInputEntry(); + srcConnectorType.setTitle("源数据类型"); + srcConnectorType.setField("src_connector_type"); + srcConnectorType.setType(FormType.SELECT.getFormType()); + srcConnectorType.setCanEdit(true); + srcConnectorType.setShow(true); + srcConnectorType.setValue(null); + srcConnectorType.setPlaceholder("${src_connector_type}"); + srcConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode()); + srcConnectorType.setOptions(null); + 
srcConnectorType.setInputType(InputType.DEFAULT.getCode()); + srcConnectorType.setValueType(ValueType.NUMBER.getCode()); + srcConnectorType.setCreateTime(new Date()); + srcConnectorType.setUpdateTime(new Date()); + + DqRuleInputEntry srcDatasourceId = new DqRuleInputEntry(); + srcDatasourceId.setTitle("源数据源"); + srcDatasourceId.setField("src_datasource_id"); + srcDatasourceId.setType(FormType.CASCADER.getFormType()); + srcDatasourceId.setCanEdit(true); + srcDatasourceId.setShow(true); + srcDatasourceId.setValue(null); + srcDatasourceId.setPlaceholder("${comparison_value}"); + srcDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode()); + srcDatasourceId.setInputType(InputType.DEFAULT.getCode()); + srcDatasourceId.setValueType(ValueType.NUMBER.getCode()); + srcConnectorType.setCreateTime(new Date()); + srcConnectorType.setUpdateTime(new Date()); + + DqRuleInputEntry srcTable = new DqRuleInputEntry(); + srcTable.setTitle("源数据表"); + srcTable.setField("src_table"); + srcTable.setType(FormType.INPUT.getFormType()); + srcTable.setCanEdit(true); + srcTable.setShow(true); + srcTable.setPlaceholder("Please enter source table name"); + srcTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcTable.setInputType(InputType.DEFAULT.getCode()); + srcTable.setValueType(ValueType.STRING.getCode()); + srcConnectorType.setCreateTime(new Date()); + srcConnectorType.setUpdateTime(new Date()); + + DqRuleInputEntry statisticsName = new DqRuleInputEntry(); + statisticsName.setTitle("统计值名"); + statisticsName.setField("statistics_name"); + statisticsName.setType(FormType.INPUT.getFormType()); + statisticsName.setCanEdit(true); + statisticsName.setShow(true); + statisticsName.setPlaceholder("Please enter statistics name, the alias in statistics execute sql"); + statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + statisticsName.setValueType(ValueType.STRING.getCode()); + statisticsName.setInputType(InputType.DEFAULT.getCode()); + + 
DqRuleInputEntry statisticsExecuteSql = new DqRuleInputEntry(); + statisticsExecuteSql.setTitle("统计值计算SQL"); + statisticsExecuteSql.setField("statistics_execute_sql"); + statisticsExecuteSql.setType(FormType.TEXTAREA.getFormType()); + statisticsExecuteSql.setCanEdit(true); + statisticsExecuteSql.setShow(true); + statisticsExecuteSql.setPlaceholder("Please enter statistics execute sql"); + statisticsExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + statisticsExecuteSql.setValueType(ValueType.LIKE_SQL.getCode()); + statisticsExecuteSql.setInputType(InputType.DEFAULT.getCode()); + + DqRuleInputEntry targetConnectorType = new DqRuleInputEntry(); + targetConnectorType.setTitle("目标数据类型"); + targetConnectorType.setField("target_connector_type"); + targetConnectorType.setType(FormType.SELECT.getFormType()); + targetConnectorType.setCanEdit(true); + targetConnectorType.setShow(true); + targetConnectorType.setValue("JDBC"); + targetConnectorType.setPlaceholder("Please select target connector type"); + targetConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode()); + targetConnectorType.setOptions(null); + targetConnectorType.setInputType(InputType.DEFAULT.getCode()); + + DqRuleInputEntry targetDatasourceId = new DqRuleInputEntry(); + targetDatasourceId.setTitle("目标数据源"); + targetDatasourceId.setField("target_datasource_id"); + targetDatasourceId.setType(FormType.SELECT.getFormType()); + targetDatasourceId.setCanEdit(true); + targetDatasourceId.setShow(true); + targetDatasourceId.setValue("1"); + targetDatasourceId.setPlaceholder("Please select target datasource"); + targetDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode()); + targetDatasourceId.setInputType(InputType.DEFAULT.getCode()); + + DqRuleInputEntry targetTable = new DqRuleInputEntry(); + targetTable.setTitle("目标数据表"); + targetTable.setField("target_table"); + targetTable.setType(FormType.INPUT.getFormType()); + targetTable.setCanEdit(true); + 
targetTable.setShow(true); + targetTable.setPlaceholder("Please enter target table"); + targetTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + targetTable.setValueType(ValueType.STRING.getCode()); + targetTable.setInputType(InputType.DEFAULT.getCode()); + + DqRuleInputEntry comparisonName = new DqRuleInputEntry(); + comparisonName.setTitle("比对值名"); + comparisonName.setField("comparison_name"); + comparisonName.setType(FormType.INPUT.getFormType()); + comparisonName.setCanEdit(true); + comparisonName.setShow(true); + comparisonName.setPlaceholder("Please enter comparison name, the alias in comparison execute sql"); + comparisonName.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + comparisonName.setValueType(ValueType.STRING.getCode()); + comparisonName.setInputType(InputType.DEFAULT.getCode()); + + DqRuleInputEntry comparisonExecuteSql = new DqRuleInputEntry(); + comparisonExecuteSql.setTitle("比对值计算SQL"); + comparisonExecuteSql.setField("comparison_execute_sql"); + comparisonExecuteSql.setType(FormType.TEXTAREA.getFormType()); + comparisonExecuteSql.setCanEdit(true); + comparisonExecuteSql.setShow(true); + comparisonExecuteSql.setPlaceholder("Please enter comparison execute sql"); + comparisonExecuteSql.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + comparisonExecuteSql.setValueType(ValueType.LIKE_SQL.getCode()); + comparisonExecuteSql.setInputType(InputType.DEFAULT.getCode()); + + DqRuleInputEntry checkType = new DqRuleInputEntry(); + checkType.setTitle("检测方式"); + checkType.setField("check_type"); + checkType.setType(FormType.SELECT.getFormType()); + checkType.setCanEdit(true); + checkType.setShow(true); + checkType.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + checkType.setOptions("[{\"label\":\"比对值 - 统计值\",\"value\":\"0\"},{\"label\":\"统计值 - 比对值\",\"value\":\"1\"}," + + "{\"label\":\"统计值 / 比对值\",\"value\":\"2\"},{\"label\":\"(比对值-统计值) / 比对值\",\"value\":\"3\"}]"); + checkType.setValue("0"); + 
checkType.setInputType(InputType.CHECK.getCode()); + checkType.setValueType(ValueType.STRING.getCode()); + checkType.setPlaceholder("检测类型"); + + DqRuleInputEntry operator = new DqRuleInputEntry(); + operator.setTitle("操作符"); + operator.setField("operator"); + operator.setType(FormType.SELECT.getFormType()); + operator.setCanEdit(true); + operator.setShow(true); + operator.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + operator.setOptions("[{\"label\":\"=\",\"value\":\"0\"}," + + "{\"label\":\"<\",\"value\":\"1\"},{\"label\":\"<=\",\"value\":\"2\"}," + + "{\"label\":\">\",\"value\":\"3\"},{\"label\":\">=\",\"value\":\"4\"}," + + "{\"label\":\"!=\",\"value\":\"5\"}]"); + operator.setValue("0"); + operator.setInputType(InputType.CHECK.getCode()); + operator.setValueType(ValueType.STRING.getCode()); + operator.setPlaceholder("操作符"); + + DqRuleInputEntry threshold = new DqRuleInputEntry(); + threshold.setTitle("阈值"); + threshold.setField("threshold"); + threshold.setType(FormType.INPUT.getFormType()); + threshold.setCanEdit(true); + threshold.setShow(true); + threshold.setInputType(InputType.CHECK.getCode()); + threshold.setValueType(ValueType.NUMBER.getCode()); + threshold.setPlaceholder("Please enter threshold, number is needed"); + + DqRuleInputEntry afterFailure = new DqRuleInputEntry(); + afterFailure.setTitle("失败策略"); + afterFailure.setField("failure_strategy"); + afterFailure.setType(FormType.SELECT.getFormType()); + afterFailure.setCanEdit(true); + afterFailure.setShow(true); + afterFailure.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + afterFailure.setOptions("[{\"label\":\"告警\",\"value\":\"0\"},{\"label\":\"阻断\",\"value\":\"1\"}]"); + afterFailure.setValue("0"); + afterFailure.setInputType(InputType.CHECK.getCode()); + afterFailure.setValueType(ValueType.STRING.getCode()); + afterFailure.setPlaceholder("失败策略"); + + defaultInputEntryList.add(checkType); + defaultInputEntryList.add(operator); + defaultInputEntryList.add(threshold); + 
defaultInputEntryList.add(afterFailure); + + defaultInputEntryList.add(srcConnectorType); + defaultInputEntryList.add(srcDatasourceId); + defaultInputEntryList.add(srcTable); + defaultInputEntryList.add(statisticsName); + defaultInputEntryList.add(statisticsExecuteSql); + + defaultInputEntryList.add(targetConnectorType); + defaultInputEntryList.add(targetDatasourceId); + defaultInputEntryList.add(targetTable); + defaultInputEntryList.add(comparisonName); + defaultInputEntryList.add(comparisonExecuteSql); + + dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(defaultInputEntryList)); + + Map inputParameterValue = new HashMap<>(); + inputParameterValue.put("src_connector_type","0"); + inputParameterValue.put("src_datasource_id","2"); + inputParameterValue.put("src_table","test1"); + inputParameterValue.put("statistics_name","src"); + inputParameterValue.put("statistics_execute_sql","select count(*) as src from ${src_table} where c1>20"); + inputParameterValue.put("target_connector_type","2"); + inputParameterValue.put("target_datasource_id","3"); + inputParameterValue.put("target_table","test1_1"); + inputParameterValue.put("comparison_name","target"); + inputParameterValue.put("comparison_execute_sql","select count(*) as target from ${target_table} where c1>20"); + inputParameterValue.put("check_type","1"); + inputParameterValue.put("operator","3"); + inputParameterValue.put("threshold","2"); + inputParameterValue.put("failure_strategy","0"); + inputParameterValue.put("rule_id","4"); + inputParameterValue.put("rule_type","3"); + inputParameterValue.put("rule_name","'跨表值比对'"); + inputParameterValue.put("create_time","'2021-08-25 00:00:00'"); + inputParameterValue.put("update_time","'2021-08-25 00:00:00'"); + inputParameterValue.put("process_definition_id","1"); + inputParameterValue.put("process_instance_id","1"); + inputParameterValue.put("task_instance_id","1"); + inputParameterValue.put("data_time","'2021-08-25 00:00:00'"); + 
inputParameterValue.put("error_output_path","hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_1"); + + dataQualityTaskExecutionContext.setSourceConnectorType("JDBC"); + dataQualityTaskExecutionContext.setSourceType(0); + dataQualityTaskExecutionContext.setSourceConnectionParams( + "{\"address\":\"jdbc:mysql://localhost:3306\"," + + "\"database\":\"test\"," + + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"}"); + + dataQualityTaskExecutionContext.setTargetConnectorType("HIVE"); + dataQualityTaskExecutionContext.setTargetType(2); + dataQualityTaskExecutionContext.setTargetConnectionParams( + "{\"address\":\"jdbc:hive2://localhost:10000\"," + + "\"database\":\"default\"," + + "\"jdbcUrl\":\"jdbc:hive2://localhost:10000/default\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"}"); + + dataQualityTaskExecutionContext.setWriterType(1); + dataQualityTaskExecutionContext.setWriterConnectorType("JDBC"); + dataQualityTaskExecutionContext.setWriterTable("t_ds_dq_execute_result"); + dataQualityTaskExecutionContext.setWriterConnectionParams( + "{\"address\":\"jdbc:postgresql://localhost:5432\"," + + "\"database\":\"dolphinscheduler\"," + + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"," + + "\"other\":\"stringtype=unspecified&characterEncoding=UTF-8&allowMultiQueries=true\"}"); + + String expect = "{\"name\":\"跨表值比对\",\"env\":{\"type\":\"batch\",\"config\":null},\"readers\"" + + ":[{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":" + + "\"com.mysql.cj.jdbc.Driver\",\"user\":\"test\",\"output_table\":\"test_test1\",\"table\":" + + "\"test1\",\"url\":\"jdbc:mysql://localhost:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\"}}," + + "{\"type\":\"HIVE\",\"config\":" + + 
"{\"database\":\"default\",\"password\":\"test\",\"driver\":\"org.apache.hive.jdbc.HiveDriver\",\"user\":" + + "\"test\",\"output_table\":\"default_test1_1\",\"table\":\"test1_1\",\"url\":" + + "\"jdbc:hive2://localhost:10000/default\"}}],\"transformers\":[],\"writers\":" + + "[{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\",\"password\":" + + "\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":" + + "\"t_ds_dq_execute_result\",\"url\":" + + "\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified&characterEncoding=UTF-8&allowMultiQueries=true\"," + + "\"sql\":\"select 3 as rule_type,'跨表值比对' as rule_name," + + "1 as process_definition_id,1 as process_instance_id,1 as task_instance_id,src AS statistics_value," + + "target AS comparison_value,0 AS comparison_type,1 as check_type,2 as threshold,3 as operator," + + "0 as failure_strategy,'hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_1' " + + "as error_output_path,'2021-08-25 00:00:00' as create_time,'2021-08-25 00:00:00' as update_time " + + "from ( select count(*) as src from test_test1 where c1>20 ) tmp1 join ( select count(*) as target from default_test1_1 " + + "where c1>20 ) tmp2\"}}]}"; + + RuleManager ruleManager = new RuleManager(inputParameterValue,dataQualityTaskExecutionContext); + Assert.assertEquals(expect,JSONUtils.toJsonString(ruleManager.generateDataQualityParameter())); + } + + @Test + public void testMultiTableAccuracy() throws Exception { + + DataQualityTaskExecutionContext dataQualityTaskExecutionContext = new DataQualityTaskExecutionContext(); + + List defaultInputEntryList = new ArrayList<>(); + + DqRuleInputEntry srcConnectorType = new DqRuleInputEntry(); + srcConnectorType.setTitle("源数据类型"); + srcConnectorType.setField("src_connector_type"); + srcConnectorType.setType(FormType.SELECT.getFormType()); + srcConnectorType.setCanEdit(true); + srcConnectorType.setShow(true); + srcConnectorType.setValue("JDBC"); + 
srcConnectorType.setPlaceholder("Please select source connector type"); + srcConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode()); + srcConnectorType.setOptions(null); + srcConnectorType.setInputType(InputType.DEFAULT.getCode()); + srcConnectorType.setValueType(ValueType.NUMBER.getCode()); + + DqRuleInputEntry srcDatasourceId = new DqRuleInputEntry(); + srcDatasourceId.setTitle("源数据源"); + srcDatasourceId.setField("src_datasource_id"); + srcDatasourceId.setType(FormType.SELECT.getFormType()); + srcDatasourceId.setCanEdit(true); + srcDatasourceId.setShow(true); + srcDatasourceId.setValue("1"); + srcDatasourceId.setPlaceholder("Please select source datasource"); + srcDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode()); + srcDatasourceId.setInputType(InputType.DEFAULT.getCode()); + srcDatasourceId.setValueType(ValueType.NUMBER.getCode()); + + DqRuleInputEntry srcTable = new DqRuleInputEntry(); + srcTable.setTitle("源数据表"); + srcTable.setField("src_table"); + srcTable.setType(FormType.INPUT.getFormType()); + srcTable.setCanEdit(true); + srcTable.setShow(true); + srcTable.setPlaceholder("Please enter source table"); + srcTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcTable.setInputType(InputType.DEFAULT.getCode()); + srcTable.setValueType(ValueType.STRING.getCode()); + + DqRuleInputEntry srcFilter = new DqRuleInputEntry(); + srcFilter.setTitle("源表过滤条件"); + srcFilter.setField("src_filter"); + srcFilter.setType(FormType.INPUT.getFormType()); + srcFilter.setCanEdit(true); + srcFilter.setShow(true); + srcFilter.setPlaceholder("Please enter source filter expression"); + srcFilter.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + srcFilter.setInputType(InputType.DEFAULT.getCode()); + srcFilter.setValueType(ValueType.LIKE_SQL.getCode()); + + DqRuleInputEntry targetConnectorType = new DqRuleInputEntry(); + targetConnectorType.setTitle("目标数据类型"); + targetConnectorType.setField("target_connector_type"); + 
targetConnectorType.setType(FormType.SELECT.getFormType()); + targetConnectorType.setCanEdit(true); + targetConnectorType.setShow(true); + targetConnectorType.setValue("JDBC"); + targetConnectorType.setPlaceholder("Please select target connector type"); + targetConnectorType.setOptionSourceType(OptionSourceType.DATASOURCE_TYPE.getCode()); + targetConnectorType.setOptions(null); + targetConnectorType.setInputType(InputType.DEFAULT.getCode()); + targetConnectorType.setValueType(ValueType.STRING.getCode()); + + DqRuleInputEntry targetDatasourceId = new DqRuleInputEntry(); + targetDatasourceId.setTitle("目标数据源"); + targetDatasourceId.setField("target_datasource_id"); + targetDatasourceId.setType(FormType.CASCADER.getFormType()); + targetDatasourceId.setCanEdit(true); + targetDatasourceId.setShow(true); + targetDatasourceId.setValue("1"); + targetDatasourceId.setPlaceholder("Please select target datasource"); + targetDatasourceId.setOptionSourceType(OptionSourceType.DATASOURCE_ID.getCode()); + targetDatasourceId.setInputType(InputType.DEFAULT.getCode()); + targetDatasourceId.setValueType(ValueType.NUMBER.getCode()); + + DqRuleInputEntry targetTable = new DqRuleInputEntry(); + targetTable.setTitle("目标数据表"); + targetTable.setField("target_table"); + targetTable.setType(FormType.INPUT.getFormType()); + targetTable.setCanEdit(true); + targetTable.setShow(true); + targetTable.setPlaceholder("Please enter target table"); + targetTable.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + targetTable.setInputType(InputType.DEFAULT.getCode()); + targetTable.setValueType(ValueType.STRING.getCode()); + + DqRuleInputEntry targetFilter = new DqRuleInputEntry(); + targetFilter.setTitle("目标表过滤条件"); + targetFilter.setField("target_filter"); + targetFilter.setType(FormType.INPUT.getFormType()); + targetFilter.setCanEdit(true); + targetFilter.setShow(true); + targetFilter.setPlaceholder("Please enter target filter expression"); + 
targetFilter.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + targetFilter.setInputType(InputType.DEFAULT.getCode()); + targetFilter.setValueType(ValueType.LIKE_SQL.getCode()); + + DqRuleInputEntry mappingColumns = new DqRuleInputEntry(); + mappingColumns.setTitle("检查列"); + mappingColumns.setField("mapping_columns"); + mappingColumns.setType(FormType.INPUT.getFormType()); + mappingColumns.setCanEdit(true); + mappingColumns.setShow(true); + mappingColumns.setPlaceholder("${mapping_columns}"); + mappingColumns.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + mappingColumns.setInputType(InputType.DEFAULT.getCode()); + mappingColumns.setValueType(ValueType.LIST.getCode()); + + DqRuleInputEntry statisticsName = new DqRuleInputEntry(); + statisticsName.setTitle("统计值"); + statisticsName.setField("statistics_name"); + statisticsName.setType(FormType.INPUT.getFormType()); + statisticsName.setCanEdit(false); + statisticsName.setShow(false); + statisticsName.setValue("miss_count.miss"); + statisticsName.setPlaceholder("${statistics_name}"); + statisticsName.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + statisticsName.setInputType(InputType.DEFAULT.getCode()); + statisticsName.setValueType(ValueType.STRING.getCode()); + + defaultInputEntryList.add(srcConnectorType); + defaultInputEntryList.add(srcDatasourceId); + defaultInputEntryList.add(srcTable); + defaultInputEntryList.add(srcFilter); + defaultInputEntryList.add(targetConnectorType); + defaultInputEntryList.add(targetDatasourceId); + defaultInputEntryList.add(targetTable); + defaultInputEntryList.add(targetFilter); + defaultInputEntryList.add(mappingColumns); + defaultInputEntryList.add(statisticsName); + + DqRuleExecuteSql executeSqlDefinition3 = new DqRuleExecuteSql(); + executeSqlDefinition3.setIndex(0); + executeSqlDefinition3.setSql("SELECT COUNT(*) AS total FROM ${src_table} WHERE (${src_filter})"); + executeSqlDefinition3.setTableAlias("total_count"); + 
executeSqlDefinition3.setType(ExecuteSqlType.MIDDLE.getCode()); + + DqRuleExecuteSql executeSqlDefinition1 = new DqRuleExecuteSql(); + executeSqlDefinition1.setIndex(0); + executeSqlDefinition1.setSql("SELECT ${src_table}.* FROM (SELECT * FROM ${src_table} WHERE (${src_filter})) " + + "${src_table} LEFT JOIN (SELECT * FROM ${target_table} WHERE (${target_filter})) " + + "${target_table} ON ${on_clause} WHERE ${where_clause}"); + executeSqlDefinition1.setTableAlias("miss_items"); + executeSqlDefinition1.setType(ExecuteSqlType.MIDDLE.getCode()); + executeSqlDefinition1.setErrorOutputSql(true); + + DqRuleExecuteSql executeSqlDefinition2 = new DqRuleExecuteSql(); + executeSqlDefinition2.setIndex(0); + executeSqlDefinition2.setSql("SELECT COUNT(*) AS miss FROM miss_items"); + executeSqlDefinition2.setTableAlias("miss_count"); + executeSqlDefinition2.setType(ExecuteSqlType.STATISTICS.getCode()); + + DqRuleInputEntry comparisonTitle = new DqRuleInputEntry(); + comparisonTitle.setTitle("比对值"); + comparisonTitle.setField("comparison_title"); + comparisonTitle.setType(FormType.INPUT.getFormType()); + comparisonTitle.setCanEdit(false); + comparisonTitle.setShow(true); + comparisonTitle.setPlaceholder("${comparison_title}"); + comparisonTitle.setValue("目标表总行数"); + + DqRuleInputEntry comparisonName = new DqRuleInputEntry(); + comparisonName.setTitle("比对值名"); + comparisonName.setField("comparison_name"); + comparisonName.setType(FormType.INPUT.getFormType()); + comparisonName.setCanEdit(false); + comparisonName.setShow(false); + comparisonName.setValue("total_count.total"); + comparisonName.setPlaceholder("${comparison_name}"); + + DqRuleInputEntry comparisonTable = new DqRuleInputEntry(); + comparisonTable.setField(COMPARISON_TABLE); + comparisonTable.setValue("total_count"); + + DqRuleInputEntry checkType = new DqRuleInputEntry(); + checkType.setTitle("检测方式"); + checkType.setField("check_type"); + checkType.setType(FormType.SELECT.getFormType()); + checkType.setCanEdit(true); 
+ checkType.setShow(true); + checkType.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + checkType.setOptions("[{\"label\":\"比对值 - 统计值\",\"value\":\"0\"},{\"label\":\"统计值 - 比对值\",\"value\":\"1\"},{\"label\":\"统计值 / 比对值\"," + + "\"value\":\"2\"},{\"label\":\"(比对值-统计值) / 比对值\",\"value\":\"3\"}]"); + checkType.setValue("0"); + checkType.setInputType(InputType.CHECK.getCode()); + checkType.setValueType(ValueType.STRING.getCode()); + checkType.setPlaceholder("检测类型"); + + DqRuleInputEntry operator = new DqRuleInputEntry(); + operator.setTitle("操作符"); + operator.setField("operator"); + operator.setType(FormType.SELECT.getFormType()); + operator.setCanEdit(true); + operator.setShow(true); + operator.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + operator.setOptions("[{\"label\":\"=\",\"value\":\"0\"}," + + "{\"label\":\"<\",\"value\":\"1\"},{\"label\":\"<=\",\"value\":\"2\"}," + + "{\"label\":\">\",\"value\":\"3\"},{\"label\":\">=\",\"value\":\"4\"},{\"label\":\"!=\",\"value\":\"5\"}]"); + operator.setValue("0"); + operator.setInputType(InputType.CHECK.getCode()); + operator.setValueType(ValueType.STRING.getCode()); + operator.setPlaceholder("操作符"); + + DqRuleInputEntry threshold = new DqRuleInputEntry(); + threshold.setTitle("阈值"); + threshold.setField("threshold"); + threshold.setType(FormType.INPUT.getFormType()); + threshold.setCanEdit(true); + threshold.setShow(true); + threshold.setInputType(InputType.CHECK.getCode()); + threshold.setValueType(ValueType.NUMBER.getCode()); + threshold.setPlaceholder("Please enter threshold, number is needed"); + + DqRuleInputEntry afterFailure = new DqRuleInputEntry(); + afterFailure.setTitle("失败策略"); + afterFailure.setField("failure_strategy"); + afterFailure.setType(FormType.SELECT.getFormType()); + afterFailure.setCanEdit(true); + afterFailure.setShow(true); + afterFailure.setOptionSourceType(OptionSourceType.DEFAULT.getCode()); + 
afterFailure.setOptions("[{\"label\":\"告警\",\"value\":\"0\"},{\"label\":\"阻断\",\"value\":\"1\"}]"); + afterFailure.setValue("0"); + afterFailure.setInputType(InputType.CHECK.getCode()); + afterFailure.setValueType(ValueType.STRING.getCode()); + afterFailure.setPlaceholder("失败策略"); + + defaultInputEntryList.add(checkType); + defaultInputEntryList.add(operator); + defaultInputEntryList.add(threshold); + defaultInputEntryList.add(afterFailure); + defaultInputEntryList.add(comparisonTitle); + defaultInputEntryList.add(comparisonName); + defaultInputEntryList.add(comparisonTable); + + List executeSqlList = new ArrayList<>(); + executeSqlList.add(executeSqlDefinition3); + executeSqlList.add(executeSqlDefinition1); + executeSqlList.add(executeSqlDefinition2); + dataQualityTaskExecutionContext.setExecuteSqlList(JSONUtils.toJsonString(executeSqlList)); + dataQualityTaskExecutionContext.setRuleInputEntryList(JSONUtils.toJsonString(defaultInputEntryList)); + + Map inputParameterValue = new HashMap<>(); + inputParameterValue.put("src_connector_type","0"); + inputParameterValue.put("src_datasource_id","2"); + inputParameterValue.put("src_table","demo_src"); + inputParameterValue.put("src_filter","age<100"); + inputParameterValue.put("target_connector_type","2"); + inputParameterValue.put("target_datasource_id","3"); + inputParameterValue.put("target_table","demo_src"); + inputParameterValue.put("target_filter","age<100"); + inputParameterValue.put("mapping_columns","[{\"src_field\":\"hour\",\"operator\":\"=\",\"target_field\":\"hour\"}]"); + inputParameterValue.put("check_type","2"); + inputParameterValue.put("operator","3"); + inputParameterValue.put("threshold","3"); + inputParameterValue.put("failure_strategy","0"); + inputParameterValue.put("comparison_type","7"); + inputParameterValue.put("rule_id","3"); + inputParameterValue.put("rule_type","2"); + inputParameterValue.put("rule_name","'跨表准确性'"); + inputParameterValue.put("create_time","'2021-08-30 00:00:00'"); + 
inputParameterValue.put("update_time","'2021-08-30 00:00:00'"); + inputParameterValue.put("process_definition_id","1"); + inputParameterValue.put("process_instance_id","1"); + inputParameterValue.put("task_instance_id","1"); + inputParameterValue.put("data_time","'2021-08-30 00:00:00'"); + inputParameterValue.put("error_output_path","hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test"); + + dataQualityTaskExecutionContext.setSourceConnectorType("JDBC"); + dataQualityTaskExecutionContext.setSourceType(0); + dataQualityTaskExecutionContext.setSourceConnectionParams( + "{\"address\":\"jdbc:mysql://localhost:3306\"," + + "\"database\":\"test\"," + + "\"jdbcUrl\":\"jdbc:mysql://localhost:3306/test\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"}"); + + dataQualityTaskExecutionContext.setTargetConnectorType("HIVE"); + dataQualityTaskExecutionContext.setTargetType(2); + dataQualityTaskExecutionContext.setTargetConnectionParams( + "{\"address\":\"jdbc:hive2://localhost:10000\"," + + "\"database\":\"default\"," + + "\"jdbcUrl\":\"jdbc:hive2://localhost:10000/default\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"}"); + + dataQualityTaskExecutionContext.setWriterType(1); + dataQualityTaskExecutionContext.setWriterConnectorType("JDBC"); + dataQualityTaskExecutionContext.setWriterTable("t_ds_dq_execute_result"); + dataQualityTaskExecutionContext.setWriterConnectionParams( + "{\"address\":\"jdbc:postgresql://localhost:5432\"," + + "\"database\":\"dolphinscheduler\"," + + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"," + + "\"other\":\"stringtype=unspecified&characterEncoding=UTF-8&allowMultiQueries=true\"}"); + + dataQualityTaskExecutionContext.setStatisticsValueConnectorType("JDBC"); + dataQualityTaskExecutionContext.setStatisticsValueType(1); + dataQualityTaskExecutionContext.setStatisticsValueTable("t_ds_dq_task_statistics_value"); + 
dataQualityTaskExecutionContext.setStatisticsValueWriterConnectionParams( + "{\"address\":\"jdbc:postgresql://localhost:5432\"," + + "\"database\":\"dolphinscheduler\"," + + "\"jdbcUrl\":\"jdbc:postgresql://localhost:5432/dolphinscheduler\"," + + "\"user\":\"test\"," + + "\"password\":\"test\"," + + "\"other\":\"stringtype=unspecified&characterEncoding=UTF-8&allowMultiQueries=true\"}"); + + dataQualityTaskExecutionContext.setRuleName("跨表准确性"); + dataQualityTaskExecutionContext.setRuleType(RuleType.MULTI_TABLE_ACCURACY.getCode()); + + String expect = "{\"name\":\"跨表准确性\",\"env\":{\"type\":\"batch\",\"config\":null},\"readers\":" + + "[{\"type\":\"JDBC\",\"config\":{\"database\":\"test\",\"password\":\"test\",\"driver\":" + + "\"com.mysql.cj.jdbc.Driver\",\"user\":\"test\",\"output_table\":\"test_demo_src\",\"table\":" + + "\"demo_src\",\"url\":\"jdbc:mysql://localhost:3306/test?allowLoadLocalInfile=false&autoDeserialize=false&allowLocalInfile=false&allowUrlInLocalInfile=false\"}}," + + "{\"type\":\"HIVE\",\"config\":" + + "{\"database\":\"default\",\"password\":\"test\",\"driver\":" + + "\"org.apache.hive.jdbc.HiveDriver\",\"user\":\"test\",\"output_table\":\"default_demo_src\",\"table\":" + + "\"demo_src\",\"url\":\"jdbc:hive2://localhost:10000/default\"}}],\"transformers\":" + + "[{\"type\":\"sql\",\"config\":{\"index\":1,\"output_table\":\"total_count\"," + + "\"sql\":\"SELECT COUNT(*) AS total FROM test_demo_src WHERE (age<100)\"}}," + + "{\"type\":\"sql\",\"config\":{\"index\":2,\"output_table\":\"miss_items\",\"sql\":" + + "\"SELECT test_demo_src.* FROM (SELECT * FROM test_demo_src WHERE (age<100)) " + + "test_demo_src LEFT JOIN (SELECT * FROM default_demo_src WHERE (age<100)) default_demo_src ON coalesce(test_demo_src.hour, '') =" + + " coalesce(default_demo_src.hour, '') WHERE ( NOT (test_demo_src.hour IS NULL )) AND " + + "( default_demo_src.hour IS NULL )\"}},{\"type\":\"sql\",\"config\":{\"index\":3,\"output_table\":\"miss_count\"," + + "\"sql\":\"SELECT 
COUNT(*) AS miss FROM miss_items\"}}],\"writers\":[{\"type\":\"JDBC\",\"config\":" + + "{\"database\":\"dolphinscheduler\",\"password\":\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":" + + "\"t_ds_dq_execute_result\",\"url\":\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified" + + "&characterEncoding=UTF-8&allowMultiQueries=true\",\"sql\":\"select 2 as rule_type,'跨表准确性' as rule_name,1 as process_definition_id," + + "1 as process_instance_id,1 as task_instance_id,miss_count.miss AS statistics_value,total_count.total AS comparison_value," + + "7 AS comparison_type,2 as check_type,3 as threshold,3 as operator,0 as failure_strategy," + + "'hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test' as error_output_path," + + "'2021-08-30 00:00:00' as create_time,'2021-08-30 00:00:00' as update_time from miss_count" + + " full join total_count\"}},{\"type\":\"JDBC\",\"config\":{\"database\":\"dolphinscheduler\"," + + "\"password\":\"test\",\"driver\":\"org.postgresql.Driver\",\"user\":\"test\",\"table\":" + + "\"t_ds_dq_task_statistics_value\",\"url\":\"jdbc:postgresql://localhost:5432/dolphinscheduler?stringtype=unspecified" + + "&characterEncoding=UTF-8&allowMultiQueries=true\",\"sql\":\"select 1 as process_definition_id,1 as task_instance_id," + + "3 as rule_id,'T4MB2XTVSL+VA/L6XCU1M/ELHKYOMGVNBBE5KHBXHHI=' as unique_code,'miss_count.miss'AS statistics_name,miss_count.miss " + + "AS statistics_value,'2021-08-30 00:00:00' as data_time," + + "'2021-08-30 00:00:00' as create_time,'2021-08-30 00:00:00' as update_time from miss_count\"}},{\"type\":\"hdfs_file\"," + + "\"config\":{\"path\":\"hdfs://localhost:8022/user/ods/data_quality_error_data/1_1_test\",\"input_table\":\"miss_items\"}}]}"; + + RuleManager ruleManager = new RuleManager(inputParameterValue,dataQualityTaskExecutionContext); + Assert.assertEquals(expect,JSONUtils.toJsonString(ruleManager.generateDataQualityParameter())); + } +} diff --git 
a/dolphinscheduler-task-plugin/pom.xml b/dolphinscheduler-task-plugin/pom.xml index 6a306ea7c4..e53425d3ce 100644 --- a/dolphinscheduler-task-plugin/pom.xml +++ b/dolphinscheduler-task-plugin/pom.xml @@ -41,6 +41,7 @@ dolphinscheduler-task-sqoop dolphinscheduler-task-procedure dolphinscheduler-task-pigeon + dolphinscheduler-task-dataquality dolphinscheduler-task-seatunnel diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss index a982dccbeb..640c86640d 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/canvas/taskbar.scss @@ -111,6 +111,9 @@ &.icos-switch { background-image: url("../images/task-icos/switch.png"); } + &.icos-data_quality { + background-image: url("../images/task-icos/data_quality.png"); + } } span { @@ -171,6 +174,9 @@ &.icos-switch { background-image: url("../images/task-icos/switch_hover.png"); } + &.icos-data_quality { + background-image: url("../images/task-icos/data_quality_hover.png"); + } } } } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js index 4026e139ca..9d583ea8e5 100755 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js @@ -329,12 +329,16 @@ const tasksType = { desc: 'CONDITIONS', color: '#E46F13' }, + DATA_QUALITY: { + desc: 'DATA_QUALITY', + color: '#E46F13' + }, SWITCH: { desc: 'SWITCH', color: '#E46F13' }, SEATUNNEL: { - desc: 'WATERDROP', + desc: 'SEATUNNEL', color: '#646465', helperLinkDisable: true } diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.scss old mode 100644 new mode 100755 diff --git 
a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue index b153c12855..1f80f3945c 100644 --- a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue @@ -450,6 +450,13 @@ :prev-tasks="prevTasks" > + + +
+ +
{{$t('Rule Name')}}
+
+ + + + +
+
+
+ +
+ +
{{$t('Deploy Mode')}}
+
+ + + + + +
+
+ +
{{$t('Driver Cores')}}
+
+ + +
+
{{$t('Driver Memory')}}
+
+ + +
+
+ +
{{$t('Executor Number')}}
+
+ + +
+
{{$t('Executor Memory')}}
+
+ + +
+
+ +
{{$t('Executor Cores')}}
+
+ + +
+
+ +
{{$t('Option Parameters')}}
+
+ + +
+
+ +
{{$t('Custom Parameters')}}
+
+ + +
+
+
+ + + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/images/task-icos/data_quality.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/images/task-icos/data_quality.png new file mode 100644 index 0000000000000000000000000000000000000000..bf2f83e206000b1ecf26a03490f84fa1d48d96bf GIT binary patch literal 1086 zcmeAS@N?(olHy`uVBq!ia0vp^DnP8p!3-p4i=A8uq!^2X+?^QKos)S9WN!}e32_Aq z{s)6qt5)sbzklY;ncKE)n?HX(kPBolTC@nrm^*jwk|j%kT%h=zIdc{+TnJ>(n>P>0 z1xf+AvuDqS$O0KaaiAKY6ofr%)+`_wNCLS)#Xts7B}5R&01Coo;o?9+2pggcq6{bq z;Q|>z02hR-gqsYJg>xYafFv9MrO*IS3Mc|6Au5qYAnG9s(8S@oNEIhiBc@~iJ}_Pf zjJ63SL4Lsuj7-cdtn3_|T--doeEb4J!Xly)QqnSV3W`c9s+wBbItIojrsh`GHnw*5 zPHyfVUfw=_{sBSZ5s@)*@yXdax%oxKC8cHM6*Udbt?gYsy^|(SpD};I!o|y0tXjKq z)3)t9cJJA{|M2k>XU?9#bmi)`8#iy=zH{&XgNKiwJbm{3#miUk-hcS?`OCNOKYsoG z^Y`D{H#0v2lh#~M7sn6@$)g8N(m$B$oysb=;laMOMrQjIs^1)uIiIy< z&8_b>*;DvZXK-ao&R#Zi<@Kh$n{D`4sQgy7fA#hFzxhY5uWC6}{e&+&@U%^N$RUN2 z(--;Gw;nmCbNy-IiqXjg0)j$HJVAzdj4{H;#K!!p-N4u$pMHd9umAIqtXdAeEkdR~Xu@*g3p{k8=m>$=aZn|RMa;jp#ald4m(EBj&s z+O$=kt~h0=F!$L+pW1-MO>YZab{&nJc`KovODbW$iQ9aivlkXL3AikkxSsN=WdfJh zra6;ud0stLFfXT7X0eLOYtHqDKUJMx5hHs+FV}B^onE+z)sszXKNGxPAJ3esJR#-M zBzKkbkFQQDI~gdeEk44ofy`glX(eb&kOJgaRmzg z|NpKP;_5B>uQm0*`6LkAp#Q(_wEx<3{%cJ6 zZ#e0{-t_-E6aH(?{BJn~3P4<- zEYJj?E{FmM8)hs}5TqC=1ylw$2OuGAKu&_O5iSH9-r}E>0gR!}k|4ie21X`k7FITP z4o)s^9zK2nK_OvLF>y&LSp`KUHFZrbZ5>@xGYd-_I|oN6XEzT|U;lu>;Lxz>xcG#` zq~w&;^t}9{;?jzmx`w8f)~>z@6DQ4_yKwQ6rRz6t-nwo3&Ru)GIWUH*Vj#`{3cDC(oY0eD(JI=PzHs|M>aq_n*K2o==U-0w$lCo-U3d5|U>R{*Lx8 zWH|oOKRS4M!2~1U6SmXC^ep*yPJPj2GVMZA(fO@T&b`-O{f*y#{PO0{wy$&kJgVFi z_v<>J+@<&zXP^K6qJB+-^`um2%JRs4*R~&e+`B5_w}`#Jm*2efZKrO1YHMArWieGb zcDYfOdSuS!{Mbr4SCJeMnYAz51X*{nFl+7N>|=T}C*5b>1?Nq_xAl#Q4w}9%-?ida;Q?b+#Kq=fz&oyqg?Qw>)vX z4f}%)Cg(zb$~OM}dbWe`DL@nw8WgTe~DWM4fOkEF( literal 0 HcmV?d00001 diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_DATA_QUALITY.png b/dolphinscheduler-ui/src/js/conf/home/pages/dag/img/toolbar_DATA_QUALITY.png new file mode 
100644 index 0000000000000000000000000000000000000000..931438a383834759a10261085f5e89dd955e0c34 GIT binary patch literal 960 zcmV;x13&zUP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D164^xK~zXf)s{(2 z6Hyq)J4?$HT&OJtEVxiXOvDxijfN0RJZOT%iyXXo&_IkHOiD~R=tWIDkZ5Ac!JtHM zCL|hTIG8{nq;W-3WJ_6;omRVmZKpHyX2y@sNEd2aLOJ-)%glT4oA;YjaFz6FZ$y6x@Y=noG`VDdNx@q(z=WZt@TTpmX$;x)tZ-TgBh&(kbg-&{UA$iz=V zV-Y|6{nyxxTc$)Qg65DwsYn9H*Ec`SwKty)u{@4paVBU=l1P}Z2paoG7$9Pzu-Y*i zb|OA;&J!+8o<_tAwl^Qm82_(!J~& zgN2S6Z^QW`pGFn|3RbudPPh&jG>7-=Zq`>U2bd6pLM(#BNXz3CL4zF$A{;bLt-YG< zR2HRt^2T0mF8naE0MUR40R3@vsj<#fS7q$8&$c(8J5#y0_LOnAZW|m@5+qs?G<-Fs zB$})}aK&*98u-)RIRpn2fv;9FT%Df4tG@BuZMLrtm&Mv6hBp#8j(CX?qsXBe2yr(a zcJ0Vi0aTg+u^bOBL!hN0JPZL)g=ZiRkOq=aHrCdd4n~%&DJIrnkLH3@N@9%*f_}!| z8Brl-QdFzuv_JPm#~ju%i`jh^=`l$$=naVuAT&i)`Ku z8j8O#f6hlwk$4t-$La^+`boc4Hg5(U78^~#6c`eN@*vufqyW~5Adtsbq#`Ln%kZmBzg|w)1VV~R!Ym;e=QRS`|pFNl50Z4vDnzTf} ilY650KmO-aDE', + code: 3 + }, + { + label: '>=', + code: 4 + }, + { + label: '!=', + code: 5 + } +] + +const ruleType = [ + { + code: -1, + label: `${i18n.$t('All')}` + }, + { + code: 0, + label: `${i18n.$t('Single Table')}` + }, + { + code: 1, + label: `${i18n.$t('Single Table Custom Sql')}` + }, + { + code: 2, + label: `${i18n.$t('Multi Table Accuracy')}` + }, + { + code: 3, + label: `${i18n.$t('Multi Table Comparison')}` + } +] + +const checkType = [ + { + label: `${i18n.$t('Expected - Actual')}`, + code: 0 + }, + { + label: `${i18n.$t('Actual - Expected')}`, + code: 1 + }, + { + label: `${i18n.$t('Actual / Expected')}`, + code: 2 + }, + { + label: `${i18n.$t('(Expected - Actual) / Expected')}`, + code: 3 + } +] + +const failureStrategy = [ + { + label: `${i18n.$t('Alert')}`, + code: 0 + }, + { + label: `${i18n.$t('Block')}`, + code: 1 + } +] + +export { + dataQualityTaskState, + operator, + ruleType, + checkType, + failureStrategy +} diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/conditions/result.vue 
b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/conditions/result.vue new file mode 100644 index 0000000000..a530d33bcc --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/conditions/result.vue @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/conditions/rule.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/conditions/rule.vue new file mode 100644 index 0000000000..cf603513ca --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/_source/conditions/rule.vue @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/index.vue new file mode 100644 index 0000000000..2bf6c326e6 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/index.vue @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/result/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/result/_source/list.vue new file mode 100644 index 0000000000..beeca3500a --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/result/_source/list.vue @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/result/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/result/index.vue new file mode 100644 index 0000000000..4f016ab1b3 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/result/index.vue @@ -0,0 +1,182 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/rule/_source/list.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/rule/_source/list.vue new file mode 100644 index 0000000000..13ccb70d80 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/rule/_source/list.vue @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + diff --git a/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/rule/index.vue b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/rule/index.vue new file mode 100644 index 0000000000..4347f7a6d9 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/pages/dataquality/pages/rule/index.vue @@ -0,0 +1,176 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + diff --git a/dolphinscheduler-ui/src/js/conf/home/router/index.js b/dolphinscheduler-ui/src/js/conf/home/router/index.js index 2ac01f9e40..da3068658b 100644 --- a/dolphinscheduler-ui/src/js/conf/home/router/index.js +++ b/dolphinscheduler-ui/src/js/conf/home/router/index.js @@ -24,6 +24,7 @@ import projects from './module/projects' import resource from './module/resource' import security from './module/security' import user from './module/user' +import dataquality from './module/dataquality' Vue.use(Router) @@ -40,6 +41,7 @@ const router = new Router({ ...projects, ...resource, ...datasource, + ...dataquality, ...security, ...user, ...monitor diff --git a/dolphinscheduler-ui/src/js/conf/home/router/module/dataquality.js b/dolphinscheduler-ui/src/js/conf/home/router/module/dataquality.js new file mode 100644 index 0000000000..ea2c9c8c09 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/router/module/dataquality.js @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import i18n from '@/module/i18n/index.js' + +const dataquality = [ + { + path: '/dataquality', + name: 'dataquality', + component: resolve => require(['../../pages/dataquality/index'], resolve), + meta: { + title: `${i18n.$t('DataQuality')}` + }, + redirect: { + name: 'data-quality-result' + }, + children: [ + { + path: '/dataquality/result', + name: 'data-quality-result', + component: resolve => require(['../../pages/dataquality/pages/result/index'], resolve), + meta: { + title: `${i18n.$t('DataQuality Result')}` + } + }, + { + path: '/dataquality/rule', + name: 'data-quality-rule', + component: resolve => require(['../../pages/dataquality/pages/rule/index'], resolve), + meta: { + title: `${i18n.$t('DataQuality Rule')}` + } + } + ] + } +] + +export default dataquality diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js index 64a83b21c5..e4928514bc 100644 --- a/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js +++ b/dolphinscheduler-ui/src/js/conf/home/store/dag/actions.js @@ -865,6 +865,60 @@ export default { }) }) }, + + getRuleInputEntryList ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get('data-quality/getRuleFormCreateJson', { + ruleId: payload + }, res => { + resolve(res) + }).catch(res => { + reject(res) + }) + }) + }, + + getRuleList ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get('data-quality/ruleList', {}, res => { + resolve(res) + }).catch(res => { + reject(res) + }) + }) + }, + + getDatasourceOptionsById ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get('data-quality/getDatasourceOptionsById', { + datasourceId: payload + }, res => { + resolve(res) + }).catch(res => { + reject(res) + }) + }) + }, + + getTablesById ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get('datasources/tables', payload, res => { + resolve(res) + }).catch(res => { + reject(res) + }) 
+ }) + }, + + getTableColumnsByIdAndName ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get('datasources/tableColumns', payload, res => { + resolve(res) + }).catch(res => { + reject(res) + }) + }) + }, /** * Delete Task Definition by code */ diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dataquality/actions.js b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/actions.js new file mode 100644 index 0000000000..c922a372d1 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/actions.js @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import io from '@/module/io' + +export default { + + /** + * get result list pages + */ + getResultListPage ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get('data-quality/result/page', payload, res => { + resolve(res.data) + }).catch(e => { + reject(e) + }) + }) + }, + + /** + * get rule list pages + */ + getDataQualityRuleListPage ({ state }, payload) { + return new Promise((resolve, reject) => { + io.get('data-quality/rule/page', payload, res => { + resolve(res.data) + }).catch(e => { + reject(e) + }) + }) + } +} diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dataquality/getters.js b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/getters.js new file mode 100644 index 0000000000..4213c7dd07 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/getters.js @@ -0,0 +1,18 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export default { +} diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dataquality/index.js b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/index.js new file mode 100644 index 0000000000..c7d4488633 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/index.js @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import actions from './actions' +import getters from './getters' +import mutations from './mutations' +import state from './state' + +export default { + strict: true, + namespaced: true, + state, + getters, + mutations, + actions +} diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dataquality/mutations.js b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/mutations.js new file mode 100644 index 0000000000..e84c864c3c --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/mutations.js @@ -0,0 +1,19 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +export default { +} diff --git a/dolphinscheduler-ui/src/js/conf/home/store/dataquality/state.js b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/state.js new file mode 100644 index 0000000000..4213c7dd07 --- /dev/null +++ b/dolphinscheduler-ui/src/js/conf/home/store/dataquality/state.js @@ -0,0 +1,18 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export default { +} diff --git a/dolphinscheduler-ui/src/js/conf/home/store/index.js b/dolphinscheduler-ui/src/js/conf/home/store/index.js index a7e5824ff0..0f8172ae79 100644 --- a/dolphinscheduler-ui/src/js/conf/home/store/index.js +++ b/dolphinscheduler-ui/src/js/conf/home/store/index.js @@ -24,6 +24,7 @@ import security from './security' import datasource from './datasource' import user from './user' import monitor from './monitor' +import dataquality from './dataquality' Vue.use(Vuex) export default new Vuex.Store({ modules: { @@ -34,6 +35,7 @@ export default new Vuex.Store({ security, datasource, user, - monitor + monitor, + dataquality } }) diff --git a/dolphinscheduler-ui/src/js/module/components/nav/nav.vue b/dolphinscheduler-ui/src/js/module/components/nav/nav.vue index a02b39abfb..da47348714 100644 --- a/dolphinscheduler-ui/src/js/module/components/nav/nav.vue +++ b/dolphinscheduler-ui/src/js/module/components/nav/nav.vue @@ -34,6 +34,13 @@ +
+ +