diff --git a/web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/AuthListDao.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/FieldsAnalyseDao.java similarity index 66% rename from web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/AuthListDao.java rename to core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/FieldsAnalyseDao.java index 49c2c30c..b710d4b9 100644 --- a/web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/AuthListDao.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/FieldsAnalyseDao.java @@ -16,19 +16,20 @@ package com.webank.wedatasphere.qualitis.dao; -import com.webank.wedatasphere.qualitis.entity.AuthList; -import com.webank.wedatasphere.qualitis.entity.AuthList; +import com.webank.wedatasphere.qualitis.entity.FieldsAnalyse; + +import java.util.List; /** - * @author howeye + * @author v_wenxuanzhang */ -public interface AuthListDao { +public interface FieldsAnalyseDao { /** - * Find auth list by appId - * @param appId + * Find all by rule ID list and data date list + * @param ruleIdList + * @param dataDateList + * @return */ - AuthList findByAppId(String appId); - + List< FieldsAnalyse > findByRuleIdInAndDataDateIn(List< Long > ruleIdList, List< Integer > dataDateList); } diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/TaskResultDao.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/TaskResultDao.java index 4c58c706..ee0a59b7 100644 --- a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/TaskResultDao.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/TaskResultDao.java @@ -152,4 +152,11 @@ public interface TaskResultDao { * @return */ List findByApplicationId(String applicationId); + + /** + * Find all by application ID list + * @param applicationIdList + * @return + */ + List< TaskResult> findByApplicationIdIn(List< String> applicationIdList); } diff --git 
a/web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/AuthListDaoImpl.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/FieldsAnalyseDaoImpl.java similarity index 57% rename from web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/AuthListDaoImpl.java rename to core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/FieldsAnalyseDaoImpl.java index 63296053..81796918 100644 --- a/web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/AuthListDaoImpl.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/FieldsAnalyseDaoImpl.java @@ -16,23 +16,25 @@ package com.webank.wedatasphere.qualitis.dao.impl; -import com.webank.wedatasphere.qualitis.dao.AuthListDao; -import com.webank.wedatasphere.qualitis.dao.repository.AuthListRepository; -import com.webank.wedatasphere.qualitis.entity.AuthList; +import com.webank.wedatasphere.qualitis.dao.FieldsAnalyseDao; +import com.webank.wedatasphere.qualitis.dao.repository.FieldsAnalyseRepository; +import com.webank.wedatasphere.qualitis.entity.FieldsAnalyse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; +import java.util.List; + /** - * @author howeye + * @author v_wenxuanzhang */ @Repository -public class AuthListDaoImpl implements AuthListDao { +public class FieldsAnalyseDaoImpl implements FieldsAnalyseDao { @Autowired - private AuthListRepository authListRepository; + private FieldsAnalyseRepository fieldsAnalyseRepository; @Override - public AuthList findByAppId(String appId) { - return authListRepository.findByAppId(appId); + public List< FieldsAnalyse > findByRuleIdInAndDataDateIn(List< Long > ruleIdList, List< Integer > dataDateList) { + return fieldsAnalyseRepository.findByRuleIdInAndDataDateIn(ruleIdList, dataDateList); } } diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/TaskResultDaoImpl.java 
b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/TaskResultDaoImpl.java index 9c839ee6..14d8c94b 100644 --- a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/TaskResultDaoImpl.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/impl/TaskResultDaoImpl.java @@ -110,4 +110,8 @@ public List findByApplicationId(String applicationId) { return resultRepository.findByApplicationId(applicationId); } + @Override + public List findByApplicationIdIn(List< String> applicationIdList) { + return resultRepository.findByApplicationIdIn(applicationIdList); + } } diff --git a/web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/AuthListRepository.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/FieldsAnalyseRepository.java similarity index 65% rename from web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/AuthListRepository.java rename to core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/FieldsAnalyseRepository.java index 665cdc5a..d3408101 100644 --- a/web/scheduler/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/AuthListRepository.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/FieldsAnalyseRepository.java @@ -16,20 +16,21 @@ package com.webank.wedatasphere.qualitis.dao.repository; -import com.webank.wedatasphere.qualitis.entity.AuthList; -import com.webank.wedatasphere.qualitis.entity.AuthList; +import com.webank.wedatasphere.qualitis.entity.FieldsAnalyse; import org.springframework.data.jpa.repository.JpaRepository; +import java.util.List; + /** - * @author howeye + * @author v_wenxuanzhang */ -public interface AuthListRepository extends JpaRepository { +public interface FieldsAnalyseRepository extends JpaRepository< FieldsAnalyse, Long > { /** - * Find auth list by appId - * @param appId + * Find all by rule ID list and data date list + * @param ruleIdList + * 
@param dataDateList * @return */ - AuthList findByAppId(String appId); - + List< FieldsAnalyse > findByRuleIdInAndDataDateIn(List< Long > ruleIdList, List< Integer > dataDateList); } diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/TaskResultRepository.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/TaskResultRepository.java index aac41353..398fc070 100644 --- a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/TaskResultRepository.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/dao/repository/TaskResultRepository.java @@ -52,7 +52,7 @@ public interface TaskResultRepository extends JpaRepository { * @param applicationId * @return */ - @Query("select avg(value) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (t.ruleMetricId = ?4) and t.applicationId != ?5 and t.saveResult = 1") + @Query("select avg(value) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (?4 IS NULL OR t.ruleMetricId = ?4) and t.applicationId != ?5 and t.saveResult = 1") Double findAvgByCreateTimeBetweenAndRuleAndMetricAndApplication(String begin, String end, Long ruleId, Long ruleMetricId, String applicationId); @@ -65,7 +65,7 @@ public interface TaskResultRepository extends JpaRepository { * @param applicationId * @return */ - @Query("select count(t.id) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (t.ruleMetricId = ?4) and t.applicationId != ?5 and t.saveResult = 1") + @Query("select count(t.id) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (?4 IS NULL OR t.ruleMetricId = ?4) and t.applicationId != ?5 and t.saveResult = 1") long countByCreateTimeBetweenAndRuleAndMetricAndApplication(String begin, String end, Long ruleId, Long ruleMetricId, String applicationId); /** @@ -76,7 +76,7 @@ public interface TaskResultRepository extends JpaRepository { * 
@param ruleMetricId * @return */ - @Query("select avg(value) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (t.ruleMetricId = ?4) and t.saveResult = 1") + @Query("select avg(value) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (?4 IS NULL OR t.ruleMetricId = ?4) and t.saveResult = 1") Double findAvgByCreateTimeBetweenAndRuleAndRuleMetric(String begin, String end, Long ruleId, Long ruleMetricId); /** @@ -88,7 +88,7 @@ public interface TaskResultRepository extends JpaRepository { * @param applicationId * @return */ - @Query("select count(value) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (t.ruleMetricId = ?4) and t.applicationId != ?5 and t.saveResult = 1") + @Query("select count(value) from TaskResult t where (t.createTime between ?1 and ?2) and t.ruleId = ?3 and (?4 IS NULL OR t.ruleMetricId = ?4) and t.applicationId != ?5 and t.saveResult = 1") long countByCreateTimeBetweenAndRuleAndRuleMetric(String start, String end, Long ruleId, Long ruleMetricId, String applicationId); /** @@ -138,7 +138,7 @@ public interface TaskResultRepository extends JpaRepository { * @param ruleMetricId * @return */ - @Query(value = "SELECT tr from TaskResult tr where tr.applicationId = ?1 and tr.ruleId = ?2 and tr.ruleMetricId = ?3") + @Query(value = "SELECT tr from TaskResult tr where tr.applicationId = ?1 and tr.ruleId = ?2 and (?3 IS NULL OR tr.ruleMetricId = ?3)") TaskResult findValue(String applicationId, Long ruleId, Long ruleMetricId); /** @@ -158,4 +158,11 @@ public interface TaskResultRepository extends JpaRepository { */ @Query(value = "SELECT tr from TaskResult tr where tr.applicationId = ?1") List findByApplicationId(String applicationId); + + /** + * Find all by application ID list + * @param applicationIdList + * @return + */ + List findByApplicationIdIn(List applicationIdList); } diff --git 
a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/FieldsAnalyse.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/FieldsAnalyse.java new file mode 100644 index 00000000..7c791931 --- /dev/null +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/FieldsAnalyse.java @@ -0,0 +1,180 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.qualitis.entity; + +import javax.persistence.*; +import java.math.BigDecimal; +import java.util.Date; + +/** + * @author v_wenxuanzhang + */ +@Entity +@Table(name = "qualitis_imsmetric_fields_analyse") +@IdClass(FieldsAnalysePrimaryKey.class) +public class FieldsAnalyse { + + @Id + @Column(name = "rule_id") + private Long ruleId; + + @Id + @Column(name = "data_date") + private Integer dataDate; + + @Column(name = "analyse_type", columnDefinition = "TINYINT(5)") + private Integer analyseType; + + @Column(name = "datasource_type", columnDefinition = "TINYINT(5)") + private Integer datasourceType; + + @Column(name = "database_name") + private String databaseName; + + @Column(name = "table_name") + private String tableName; + + @Column(name = "field_name") + private String fieldName; + + @Column(name = "value") + private BigDecimal value; + + @Column(name = "partition_attrs") + private String partitionAttrs; + + @Column(name = "create_time") + private Date createTime; + + @Column(name = "update_time") + 
private Date updateTime; + + @Column(name = "datasource_user") + private String datasourceUser; + + @Column(name = "remark") + private String remark; + + + public FieldsAnalyse() { + // Default Constructor + } + + public Long getRuleId() { + return ruleId; + } + + public void setRuleId(Long ruleId) { + this.ruleId = ruleId; + } + + public Integer getDataDate() { + return dataDate; + } + + public void setDataDate(Integer dataDate) { + this.dataDate = dataDate; + } + + public Integer getAnalyseType() { + return analyseType; + } + + public void setAnalyseType(Integer analyseType) { + this.analyseType = analyseType; + } + + public Integer getDatasourceType() { + return datasourceType; + } + + public void setDatasourceType(Integer datasourceType) { + this.datasourceType = datasourceType; + } + + public String getDatabaseName() { + return databaseName; + } + + public void setDatabaseName(String databaseName) { + this.databaseName = databaseName; + } + + public String getTableName() { + return tableName; + } + + public void setTableName(String tableName) { + this.tableName = tableName; + } + + public String getFieldName() { + return fieldName; + } + + public void setFieldName(String fieldName) { + this.fieldName = fieldName; + } + + public BigDecimal getValue() { + return value; + } + + public void setValue(BigDecimal value) { + this.value = value; + } + + public String getPartitionAttrs() { + return partitionAttrs; + } + + public void setPartitionAttrs(String partitionAttrs) { + this.partitionAttrs = partitionAttrs; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getDatasourceUser() { + return datasourceUser; + } + + public void setDatasourceUser(String datasourceUser) { + this.datasourceUser = datasourceUser; + } 
+ + public String getRemark() { + return remark; + } + + public void setRemark(String remark) { + this.remark = remark; + } +} diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/FieldsAnalysePrimaryKey.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/FieldsAnalysePrimaryKey.java new file mode 100644 index 00000000..fd0b221d --- /dev/null +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/FieldsAnalysePrimaryKey.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.qualitis.entity; + +import java.io.Serializable; +import java.util.Objects; + +/** + * @author v_wenxuanzhang + */ +public class FieldsAnalysePrimaryKey implements Serializable { + private Long ruleId; + private Integer dataDate; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FieldsAnalysePrimaryKey that = (FieldsAnalysePrimaryKey) o; + return ruleId.equals(that.ruleId) && + dataDate.equals(that.dataDate); + } + + @Override + public int hashCode() { + return Objects.hash(ruleId, dataDate); + } + + public Long getMetricId() { + return ruleId; + } + + public void setMetricId(Long metricId) { + this.ruleId = metricId; + } + + public Integer getDs() { + return dataDate; + } + + public void setDs(Integer ds) { + this.dataDate = ds; + } +} diff --git a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/TaskResult.java b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/TaskResult.java index 1a3d15a3..db313d93 100644 --- a/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/TaskResult.java +++ b/core/analysis/src/main/java/com/webank/wedatasphere/qualitis/entity/TaskResult.java @@ -33,6 +33,8 @@ public class TaskResult { private String applicationId; @Column(name = "rule_id") private Long ruleId; + @Column(name = "task_id") + private Long taskId; private String value; @Column(name = 
"result_type") private String resultType; @@ -85,6 +87,14 @@ public void setRuleId(Long ruleId) { this.ruleId = ruleId; } + public Long getTaskId() { + return taskId; + } + + public void setTaskId(Long taskId) { + this.taskId = taskId; + } + public String getValue() { return value; } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/constants/QualitisConstants.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/constants/QualitisConstants.java index 01f6b4e1..6a218b92 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/constants/QualitisConstants.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/constants/QualitisConstants.java @@ -1,20 +1,15 @@ package com.webank.wedatasphere.qualitis.constants; +import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; import org.apache.commons.lang3.time.FastDateFormat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.net.HttpURLConnection; +import javax.servlet.http.HttpServletRequest; import java.net.InetAddress; -import java.net.URL; import java.net.UnknownHostException; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.regex.Matcher; import java.util.regex.Pattern; /** @@ -25,6 +20,9 @@ public class QualitisConstants { private static final Logger LOGGER = LoggerFactory.getLogger(QualitisConstants.class); + public static final String LOCAL_IP = "127.0.0.1"; + public static final String UNKNOWN = "unknown"; + /** * 导入导出内置变量 */ @@ -58,6 +56,8 @@ public class QualitisConstants { * Ordinary num */ public static final int LENGTH_TWO = 2; + public static final int LENGTH_THREE = 3; + public static final int LENGTH_FOUR = 4; /** * Group execution num @@ -92,7 +92,7 @@ public class QualitisConstants { /** * Subsystem ID */ - public static final Integer SUB_SYSTEM_ID = 5375; + public static final 
String SUB_SYSTEM_ID = "5375"; /** * Key of compared value @@ -115,7 +115,22 @@ public class QualitisConstants { */ public static final String UNION_ALL = "All"; public static final Pattern DATA_SOURCE_ID = Pattern.compile("\\.\\(ID=[0-9]+\\{[0-9,]+\\}\\)"); - public static final Pattern DATA_SOURCE_NAME = Pattern.compile("\\.\\(NAME=[\\u4E00-\\u9FA5A-Za-z0-9_]+\\{[\\u4E00-\\u9FA5A-Za-z0-9_,]+\\}\\)"); + public static final Pattern DATA_SOURCE_NAME = Pattern.compile("\\.\\(NAME=[\\u4E00-\\u9FA5A-Za-z0-9_]+\\{*[\\u4E00-\\u9FA5A-Za-z0-9_,.()-]*\\}*\\)"); + /** + * example: NAME=linkis_datasource{db_env_name} or ID=201{db_env_name} + */ + public static final Pattern DATASOURCE_NAME_ENV_REGEX = Pattern.compile("NAME=[\\u4E00-\\u9FA5A-Za-z0-9_]+\\{*[\\u4E00-\\u9FA5A-Za-z0-9_,.()-]*\\}*"); + public static final Pattern DATASOURCE_ID_ENV_REGEX = Pattern.compile("ID=[0-9]+\\{[0-9,]+\\}"); + + /** + * The regex of variable name, when the users enter: ds_name[env1,env2] + */ + public static final Pattern DATASOURCE_DIFF_VARIABLE_PATTERN = Pattern.compile("[\\u4E00-\\u9FA5A-Za-z0-9_]+\\[[\\u4E00-\\u9FA5A-Za-z0-9_,.()-]+\\]"); + + /** + * To match value of number type + */ + public static final String NUMBER_REGEX = "^\\d+$"; /** * Date format @@ -137,22 +152,32 @@ public class QualitisConstants { public static final Integer EXECUTION_COMPLETED = 3; /** - * qualitis_template_default_input_meta, 0.23.0版本单表ID + * 指标采集项目名称 */ - public static final List SINGLE_TABLE = Arrays.asList(17, 18, 19, 20, 21, 22, 23, 33); + public static final String IMSMETRIC_PROJECT = "ims_omnis_prophet"; + /** - * qualitis_template_default_input_meta, 0.23.0版本跨表ID + * qualitis_template_default_input_meta, 0.23.0版本单表en_name */ - public static final List CROSS_TABLE = Arrays.asList(17, 18, 20, 24, 25, 26, 27, 28, 29, 30, 31, 32); + public static final List SINGLE_TABLE = Arrays.asList("database","table","fields","filter","enumerated_list range","numerical_range","express","standard_value_expression"); 
/** - * qualitis_template_default_input_meta, 0.23.0版本文件ID + * qualitis_template_default_input_meta, 0.23.0版本跨表en_name */ - public static final List FILE_TABLE = Arrays.asList(17, 18, 20); + + public static final List CROSS_TABLE = Arrays.asList("database","table","filter","left_database","left_table","right_database","right_table","left_filter","right_filter","join_express","compare_express","result_filter","left_collect_sql","right_collect_sql"); + /** + * qualitis_template_default_input_meta, 0.23.0版本文件en_name + */ + + public static final List FILE_TABLE = Arrays.asList("database","table","filter"); /** * 剔除旧数据的占位符与0.23.0版本input_type不匹配的情况 */ public static final List ELIMINATE_PLACEHOLDER = Arrays.asList(1, 7, 10, 20, 21, 22, 23, 25, 36, 37, 38); + /** + * 和input_type比较 + */ public static final List OVER_TABLE_TYPE = Arrays.asList("11", "12", "13", "14", "30", "31"); public static final String ROW_DATA_CONSISTENCY_VERIFICATION="行数据一致性校验"; @@ -204,7 +229,7 @@ public class QualitisConstants { public static final String FPS_DEFAULT_USER = "hadoop"; /** - * @Description:获取客户端内网ip + * Host */ public static String QUALITIS_SERVER_HOST; @@ -216,53 +241,6 @@ public class QualitisConstants { } } - /** - * @Description:获取客户端外网ip 此方法要接入互联网才行,内网不行 - **/ - public static String getPublicIp() { - try { - // 要获得html页面内容的地址 - String path = "http://www.net.cn/static/customercare/yourip.asp"; - // 创建url对象 - URL url = new URL(path); - // 打开连接 - HttpURLConnection conn = (HttpURLConnection) url.openConnection(); - // 设置url中文参数编码 - conn.setRequestProperty("contentType", "GBK"); - // 请求的时间 - conn.setConnectTimeout(5 * 1000); - // 请求方式 - conn.setRequestMethod("GET"); - InputStream inStream = conn.getInputStream(); - BufferedReader in = new BufferedReader(new InputStreamReader( - inStream, "GBK")); - StringBuffer buffer = new StringBuffer(); - String line = ""; - // 读取获取到内容的最后一行,写入 - while ((line = in.readLine()) != null) { - buffer.append(line); - } - List ips = new ArrayList(); - - 
//用正则表达式提取String字符串中的IP地址 - String regEx = "((2[0-4]\\d|25[0-5]|[01]?\\d\\d?)\\.){3}(2[0-4]\\d|25[0-5]|[01]?\\d\\d?)"; - String str = buffer.toString(); - Pattern p = Pattern.compile(regEx); - Matcher m = p.matcher(str); - while (m.find()) { - String result = m.group(); - ips.add(result); - } - String PublicIp = ips.get(0); - - // 返回公网IP值 - return PublicIp; - } catch (Exception e) { - LOGGER.error("获取公网IP连接超时"); - return ""; - } - } - public static final String DEFAULT_NODE_NAME = "qualitis_0000"; public static final String CHECKALERT_NODE_NAME_PREFIX = "checkalert"; @@ -271,8 +249,8 @@ public static String getPublicIp() { */ public static final String RULE_GROUP_FILTER_PLACEHOLDER = "${table_value_filter}"; - public static final Long EXPECT_LINES_NOT_REPEAT_ID = 2149L; - public static final Long EXPECT_DATA_NOT_REPEAT_ID = 4000L; + public static final String EXPECT_LINES_NOT_REPEAT_EN_NAME = "Primary Line Verification"; + public static final String EXPECT_DATA_NOT_REPEAT_EN_NAME = "Repeat data check"; /** * Execution param variables @@ -288,7 +266,7 @@ public static String getPublicIp() { public static final String QUALITIS_STARTUP_PARAM = "qualitis_startup_param"; public static final String QUALITIS_ENGINE_TYPE = "qualitis.linkis.engineType"; public static final String QUALITIS_MID_TABLE_REUSE = "mid_table_reuse"; - public static final String QUALITIS_UNION_ALL_SAVE = "union_all_save"; + public static final String QUALITIS_UNION_WAY = "union_way"; /** @@ -319,4 +297,83 @@ public static String getPublicIp() { public static final String AUTH_TYPE_ACCOUNT_PWD = "accountPwd"; public static final String AUTH_TYPE_DPM = "dpm"; -} + public static final String DEFAULT_AUTH_APP_ID = "linkis_id"; + + public static final String BDP_CLIENT_TOKEN = "bdp_client_token"; + + /** + * Spark 引擎复用上限 + */ + public static final String SPARK_ENGINE_REUSE_LIMIT = "wds.linkis.engineconn.max.task.execute.num"; + + /** + * Multitemplate en name + */ + public static final String 
MULTI_SOURCE_ACCURACY_TEMPLATE_NAME = "Field consistency check"; + public static final String MULTI_SOURCE_FULL_TEMPLATE_NAME = "Row data consistency check"; + public static final String MULTI_CLUSTER_CUSTOM_TEMPLATE_NAME = "Multi cluster custom field consistency check"; + public static final String SINGLE_CLUSTER_CUSTOM_TEMPLATE_NAME = "Single cluster custom field consistency check"; + public static final String MULTI_SOURCE_ACROSS_TEMPLATE_NAME = "Multi table rows consistensy"; + public static final String SINGLE_SOURCE_ACROSS_TEMPLATE_NAME = "Single table rows consistensy"; + public static final String CROSS_CLUSTER_TABLE_TEMPLATE_NAME = "Cross cluster table structure consistency"; + public static final String SINGLE_CLUSTER_TABLE_TEMPLATE_NAME = "Single cluster table structure consistency"; + + + public static final int ACTUAL_ENV_NAME_LENGTH = 100; + + public static boolean isAcrossCluster(String templateEnName) { + return MULTI_CLUSTER_CUSTOM_TEMPLATE_NAME.equals(templateEnName) || MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(templateEnName); + } + + public static boolean isCustomColumnConsistence(String templateEnName) { + return MULTI_CLUSTER_CUSTOM_TEMPLATE_NAME.equals(templateEnName) || SINGLE_CLUSTER_CUSTOM_TEMPLATE_NAME.equals(templateEnName); + } + public static boolean isTableRowsConsistency(String templateEnName) { + return MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(templateEnName) || SINGLE_SOURCE_ACROSS_TEMPLATE_NAME.equals(templateEnName); + } + + public static boolean isTableStructureConsistent(String templateEnName) { + return CROSS_CLUSTER_TABLE_TEMPLATE_NAME.equals(templateEnName) || SINGLE_CLUSTER_TABLE_TEMPLATE_NAME.equals(templateEnName); + } + + public static boolean isRepeatDataCheck(String templateEnName) { + return EXPECT_LINES_NOT_REPEAT_EN_NAME.equals(templateEnName) || EXPECT_DATA_NOT_REPEAT_EN_NAME.equals(templateEnName); + } + + /** + * Null table size + */ + public static final String NULL_TABLE_SIZE = "0B"; + + public static final 
String CMDB_KEY_DCN_NUM = "dcn_num"; + public static final String CMDB_KEY_LOGIC_AREA = "logic_area"; + + public static final String DCN_RANGE_TYPE_ALL = "all"; + + /** + * 获取ip地址 + */ + public static String getIp(HttpServletRequest request) { + String ip = request.getHeader("x-forwarded-for"); + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("Proxy-Client-IP"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getHeader("WL-Proxy-Client-IP"); + } + if (ip == null || ip.length() == 0 || UNKNOWN.equalsIgnoreCase(ip)) { + ip = request.getRemoteAddr(); + } + if (ip.contains(SpecCharEnum.COMMA.getValue())) { + ip = ip.split(SpecCharEnum.COMMA.getValue())[0]; + } + if (LOCAL_IP.equals(ip)) { + try { + ip = InetAddress.getLocalHost().getHostAddress(); + } catch (UnknownHostException e) { + LOGGER.error("Failed to get host info."); + } + } + return ip; + } +} \ No newline at end of file diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/constants/ResponseStatusConstants.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/constants/ResponseStatusConstants.java index 9b1168a6..1fd9e3dd 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/constants/ResponseStatusConstants.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/constants/ResponseStatusConstants.java @@ -11,6 +11,8 @@ public class ResponseStatusConstants { * 统一的接口响应状态码 */ public static final String OK = "200"; + public static final String BAD_REQUEST = "400"; public static final String SERVER_ERROR = "500"; + public static final String REQUEST_FORBIDDEN = "401"; } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/LinkisMetaDataManager.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/LinkisMetaDataManager.java index 8b893fe3..33a9b61b 100644 --- 
a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/LinkisMetaDataManager.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/LinkisMetaDataManager.java @@ -6,8 +6,10 @@ import com.webank.wedatasphere.qualitis.metadata.request.LinkisDataSourceRequest; import com.webank.wedatasphere.qualitis.metadata.request.ModifyDataSourceParameterRequest; import com.webank.wedatasphere.qualitis.metadata.response.datasource.LinkisDataSourceParamsResponse; +import com.webank.wedatasphere.qualitis.response.GeneralResponse; import java.util.List; +import java.util.Map; /** * @author v_minminghe@webank.com @@ -52,7 +54,7 @@ public interface LinkisMetaDataManager { * @throws UnExpectedRequestException * @throws MetaDataAcquireFailedException */ - List createDataSourceEnv(Integer inputType, Integer verifyType, List linkisDataSourceEnvRequestList, String clusterName, String authUser) throws UnExpectedRequestException, MetaDataAcquireFailedException; + List createDataSourceEnvAndSetEnvId(Integer inputType, Integer verifyType, List linkisDataSourceEnvRequestList, String clusterName, String authUser) throws UnExpectedRequestException, MetaDataAcquireFailedException; /** * modify Data Source Env @@ -91,4 +93,21 @@ public interface LinkisMetaDataManager { */ void deleteDataSource(Long linkisDataSourceId, String clusterName, String userName) throws UnExpectedRequestException, MetaDataAcquireFailedException; + + /** + * Getting mapping relation between name and id of dataSourceType + * @return key: name of dataSourceType, value: id of dataSourceType + */ + Map getDataSourceTypeNameAndIdMap(); + + /** + * connect + * + * @param linkisDataSourceId + * @param versionId + * @return + * @throws Exception + * @throws MetaDataAcquireFailedException + */ + GeneralResponse connect(Long linkisDataSourceId, Long versionId) throws Exception; } diff --git 
a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/MetaDataClient.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/MetaDataClient.java index cb191ae3..9df9135d 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/MetaDataClient.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/MetaDataClient.java @@ -394,6 +394,17 @@ GeneralResponse> getDataSourceConnectParams(String clusterNa */ GeneralResponse> deleteDataSource(String clusterName, String userName, Long dataSourceId) throws UnExpectedRequestException, MetaDataAcquireFailedException; + /** + * delete data source + * @param clusterName + * @param userName + * @param envId + * @return + * @throws UnExpectedRequestException + * @throws MetaDataAcquireFailedException + */ + GeneralResponse> deleteEnv(String clusterName, String userName, Long envId) throws UnExpectedRequestException, MetaDataAcquireFailedException; + /** * Get db by data source. 
* @param clusterName @@ -465,6 +476,17 @@ GeneralResponse> getDataSourceConnectParams(String clusterNa */ LinkisDataSourceInfoDetail getDataSourceInfoById(String clusterName, String userName, Long dataSourceId) throws Exception; + /** + * Get data source name by id + * @param clusterName + * @param userName + * @param dataSourceId + * @param versionId + * @return + * @throws Exception + */ + LinkisDataSourceInfoDetail getDataSourceInfoById(String clusterName, String userName, Long dataSourceId, Long versionId) throws Exception; + /** * Add udf * @param currentCluster @@ -645,16 +667,4 @@ String getUdfNewVersion(String currentCluster, String linkisUdfAdminUser, String */ void deployUdfNewVersion(String currentCluster, String linkisUdfAdminUser, Long udfId, String version) throws UnExpectedRequestException, IOException, JSONException, MetaDataAcquireFailedException; - - /** - * delete data source - * @param clusterName - * @param userName - * @param envId - * @return - * @throws UnExpectedRequestException - * @throws MetaDataAcquireFailedException - */ - GeneralResponse> deleteEnv(String clusterName, String userName, Long envId) throws UnExpectedRequestException, MetaDataAcquireFailedException; - } \ No newline at end of file diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/OperateCiService.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/OperateCiService.java index 180ceef7..4575dbc5 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/OperateCiService.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/OperateCiService.java @@ -1,12 +1,10 @@ package com.webank.wedatasphere.qualitis.metadata.client; import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; -import com.webank.wedatasphere.qualitis.metadata.response.DcnResponse; import com.webank.wedatasphere.qualitis.metadata.response.CmdbDepartmentResponse; 
import com.webank.wedatasphere.qualitis.metadata.response.DepartmentSubResponse; import com.webank.wedatasphere.qualitis.metadata.response.ProductResponse; import com.webank.wedatasphere.qualitis.metadata.response.SubSystemResponse; - import com.webank.wedatasphere.qualitis.response.GeneralResponse; import java.util.List; @@ -24,6 +22,14 @@ public interface OperateCiService { */ List getAllSubSystemInfo() throws UnExpectedRequestException; + /** + * Get the specific sub-system by its name + * @param subSystemName + * @return + * @throws UnExpectedRequestException + */ + String getSubSystemIdByName(String subSystemName) throws UnExpectedRequestException; + /** * Get all product info from http of cmdb, incloud: id,cn name. * @@ -52,8 +58,10 @@ public interface OperateCiService { /** * Get dcn * @param subSystemId + * @param dcnRangeType + * @param dcnRangeValues * @return * @throws UnExpectedRequestException */ - GeneralResponse getDcn(Long subSystemId) throws UnExpectedRequestException; + GeneralResponse getDcn(String subSystemId, String dcnRangeType, List dcnRangeValues) throws UnExpectedRequestException; } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/RuleClient.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/RuleClient.java index 610c6a02..9d883877 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/RuleClient.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/client/RuleClient.java @@ -47,6 +47,6 @@ public interface RuleClient { * @return * @throws MetaDataAcquireFailedException */ - DataInfo getTagList(String loginUser, int page, int size) throws MetaDataAcquireFailedException, UnExpectedRequestException; + DataInfo getTagList(String loginUser, int page, int size) throws MetaDataAcquireFailedException; } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/DataMapResponseKeyEnum.java 
b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/DataMapResponseKeyEnum.java index 1160ac2a..e2d71446 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/DataMapResponseKeyEnum.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/constant/DataMapResponseKeyEnum.java @@ -24,7 +24,4 @@ public String getKey() { return key; } - public void setKey(String key) { - this.key = key; - } } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserColumnByCsRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserColumnByCsRequest.java index 8a9e20d2..0e8cd064 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserColumnByCsRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserColumnByCsRequest.java @@ -93,4 +93,16 @@ public String getLoginUser() { public void setLoginUser(String loginUser) { this.loginUser = loginUser; } + + @Override + public String toString() { + return "GetUserColumnByCsRequest{" + + "startIndex=" + startIndex + + ", pageSize=" + pageSize + + ", csId='" + csId + '\'' + + ", contextKey='" + contextKey + '\'' + + ", clusterName='" + clusterName + '\'' + + ", loginUser='" + loginUser + '\'' + + '}'; + } } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserTableByCsIdRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserTableByCsIdRequest.java index f8a77a04..ab203724 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserTableByCsIdRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/GetUserTableByCsIdRequest.java @@ -70,4 +70,16 @@ public String getLoginUser() { public void setLoginUser(String loginUser) { this.loginUser = 
loginUser; } + + @Override + public String toString() { + return "GetUserTableByCsIdRequest{" + + "csId='" + csId + '\'' + + ", nodeName='" + nodeName + '\'' + + ", clusterName='" + clusterName + '\'' + + ", startIndex=" + startIndex + + ", pageSize=" + pageSize + + ", loginUser='" + loginUser + '\'' + + '}'; + } } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisConnectParamsRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisConnectParamsRequest.java index d41f2e42..d9719e85 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisConnectParamsRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisConnectParamsRequest.java @@ -17,10 +17,15 @@ public class LinkisConnectParamsRequest { /** * 连接参数 */ + @JsonProperty("connect_param") private String connectParam; + @JsonProperty("app_id") private String appId; + @JsonProperty("auth_type") private String authType; + @JsonProperty("object_id") private String objectId; + @JsonProperty("mk_private") private String mkPrivate; private String dk; @JsonProperty(value = "timestamp") diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisDataSourceEnvRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisDataSourceEnvRequest.java index bf203330..a1697908 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisDataSourceEnvRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisDataSourceEnvRequest.java @@ -16,17 +16,35 @@ public class LinkisDataSourceEnvRequest { private String envName; private String envDesc; private Long dataSourceTypeId; - private String database; + private String databaseInstance; + private String dcnNum; + private String logicArea; private Map connectParams = new 
HashMap<>(); @JsonIgnore private LinkisConnectParamsRequest connectParamsRequest; - public String getDatabase() { - return database; + public String getDcnNum() { + return dcnNum; } - public void setDatabase(String database) { - this.database = database; + public void setDcnNum(String dcnNum) { + this.dcnNum = dcnNum; + } + + public String getLogicArea() { + return logicArea; + } + + public void setLogicArea(String logicArea) { + this.logicArea = logicArea; + } + + public String getDatabaseInstance() { + return databaseInstance; + } + + public void setDatabaseInstance(String databaseInstance) { + this.databaseInstance = databaseInstance; } public Long getDataSourceTypeId() { diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisDataSourceRequest.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisDataSourceRequest.java index 9bccef2c..76e72e6a 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisDataSourceRequest.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/request/LinkisDataSourceRequest.java @@ -3,6 +3,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import java.util.HashMap; +import java.util.List; import java.util.Map; /** @@ -28,6 +29,26 @@ public class LinkisDataSourceRequest { private Map connectParams = new HashMap<>(); @JsonIgnore private LinkisConnectParamsRequest sharedConnectParams; + @JsonIgnore + private String dcnRangeType; + @JsonIgnore + private List dataSourceEnvs; + + public List getDataSourceEnvs() { + return dataSourceEnvs; + } + + public void setDataSourceEnvs(List dataSourceEnvs) { + this.dataSourceEnvs = dataSourceEnvs; + } + + public String getDcnRangeType() { + return dcnRangeType; + } + + public void setDcnRangeType(String dcnRangeType) { + this.dcnRangeType = dcnRangeType; + } public String getSubSystem() { return subSystem; diff --git 
a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/SubSystemResponse.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/SubSystemResponse.java index 9b6609da..e490d68e 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/SubSystemResponse.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/metadata/response/SubSystemResponse.java @@ -7,7 +7,7 @@ * @date 2021/3/2 10:54 */ public class SubSystemResponse { - private Integer subSystemId; + private String subSystemId; private String subSystemName; private String subSystemFullCnName; @@ -19,11 +19,11 @@ public class SubSystemResponse { @JsonProperty("ops_department_name") private String opsDepartmentName; - public Integer getSubSystemId() { + public String getSubSystemId() { return subSystemId; } - public void setSubSystemId(Integer subSystemId) { + public void setSubSystemId(String subSystemId) { this.subSystemId = subSystemId; } diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/response/RetResponse.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/response/RetResponse.java new file mode 100644 index 00000000..0d5f5fcb --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/response/RetResponse.java @@ -0,0 +1,51 @@ +package com.webank.wedatasphere.qualitis.response; + +import java.io.Serializable; + +/** + * @author v_minminghe@webank.com + * @date 2024-03-22 14:12 + * @description + */ +public class RetResponse implements Serializable { + + private int retCode; + private String retDetail; + private T data; + + public RetResponse(int retCode, String retDetail, T data) { + this.retCode = retCode; + this.retDetail = retDetail; + this.data = data; + } + + public RetResponse(T data) { + this.retCode = 0; + this.retDetail = "success"; + this.data = data; + } + + public int getRetCode() { + return retCode; + } + + public void setRetCode(int retCode) { 
+ this.retCode = retCode; + } + + public String getRetDetail() { + return retDetail; + } + + public void setRetDetail(String retDetail) { + this.retDetail = retDetail; + } + + public T getData() { + return data; + } + + public void setData(T data) { + this.data = data; + } +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/util/HtmlTableGeneratorUtils.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/util/HtmlTableGeneratorUtils.java new file mode 100644 index 00000000..a2fc39d9 --- /dev/null +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/util/HtmlTableGeneratorUtils.java @@ -0,0 +1,48 @@ +package com.webank.wedatasphere.qualitis.util; + +import java.util.List; + +/** + * @author v_gaojiedeng@webank.com + */ +public class HtmlTableGeneratorUtils { + + private int rows; + private int columns; + private List headers; + private List> data; + + public HtmlTableGeneratorUtils(int rows, int columns, List headers, List> data) { + this.rows = rows; + this.columns = columns; + this.headers = headers; + this.data = data; + } + + public String generateTable() { + StringBuilder table = new StringBuilder(); + // 添加table标签 + table.append("\n"); + + // 生成表头 + table.append("\n"); + for (int i = 1; i <= columns; i++) { + table.append("\n"); + } + table.append("\n"); + + // 生成表格数据 + for (List row : data) { + table.append("\n"); + for (String cellData : row) { + table.append("\n"); + } + table.append("\n"); + } + + table.append("
" + headers.get(i - 1) + "
" + cellData + "
"); + + return table.toString(); + } + +} diff --git a/core/common/src/main/java/com/webank/wedatasphere/qualitis/util/map/CustomObjectMapper.java b/core/common/src/main/java/com/webank/wedatasphere/qualitis/util/map/CustomObjectMapper.java index 43a7ae09..314937ff 100644 --- a/core/common/src/main/java/com/webank/wedatasphere/qualitis/util/map/CustomObjectMapper.java +++ b/core/common/src/main/java/com/webank/wedatasphere/qualitis/util/map/CustomObjectMapper.java @@ -6,13 +6,7 @@ import com.fasterxml.jackson.databind.type.MapType; import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; /** @@ -514,7 +508,7 @@ public static List transValues(Object values, Class clazz) { return Collections.emptyList(); } - Collection temps = new ArrayList(); + Collection temps = new ArrayList<>(); if (Collection.class.isAssignableFrom(values.getClass())) { temps.addAll((Collection) values); } else if (values.getClass().isArray()) { @@ -533,7 +527,7 @@ public static List transValues(Object values, Class clazz) { public static Map transMapValues(Map map, Class valueClazz) { if (map == null) { - return null; + return Collections.emptyMap(); } Map newMap = new LinkedHashMap(); diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/EngineTypeEnum.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/EngineTypeEnum.java index 2b26d59e..82e8a218 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/EngineTypeEnum.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/EngineTypeEnum.java @@ -10,7 +10,8 @@ public enum EngineTypeEnum { * 2 SPARK ENGINE */ DEFAULT_ENGINE(1, "shell"), - SPARK_ENGINE(2, "spark"); + SPARK_ENGINE(2, "spark"), + TRINO_ENGINE(3, "trino"); private Integer code; private 
String message; diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityJob.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityJob.java index 826d8f2b..142298a9 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityJob.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityJob.java @@ -30,6 +30,7 @@ public class DataQualityJob { private Boolean engineReuse; private String engineType; private Integer resultNum; + private Integer index; public DataQualityJob() { // Initial @@ -93,6 +94,14 @@ public void setResultNum(Integer resultNum) { this.resultNum = resultNum; } + public Integer getIndex() { + return index; + } + + public void setIndex(Integer index) { + this.index = index; + } + @Override public String toString() { return "DataQualityJob{" + diff --git a/core/scheduler/src/main/java/com/webank/wedatasphere/qualitis/config/SpecialProjectRuleConfig.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/SpecialProjectRuleConfig.java similarity index 100% rename from core/scheduler/src/main/java/com/webank/wedatasphere/qualitis/config/SpecialProjectRuleConfig.java rename to core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/SpecialProjectRuleConfig.java diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/TaskDataSourceConfig.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/TaskDataSourceConfig.java index 89691917..353b7eae 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/TaskDataSourceConfig.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/config/TaskDataSourceConfig.java @@ -14,8 +14,6 @@ public class TaskDataSourceConfig { private String username; @Value("${task.persistent.password}") private String password; - @Value("${task.persistent.address}") - private String 
mysqlAddress; @Value("${task.persistent.mysqlsec_open}") private Boolean mysqlsecOpen; @Value("${task.persistent.mysqlsec}") diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/MetricTypeEnum.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/MetricTypeEnum.java new file mode 100644 index 00000000..ca6a0c32 --- /dev/null +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/MetricTypeEnum.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.qualitis.constant; + + +public enum MetricTypeEnum { + /** + * metric type + */ + TABLE_STATISTICS(1, "表统计"), + ENUM_STATISTICS(2, "枚举统计"), + COLLECT_STATISTICS(3, "聚合统计"), + ORIGIN_STATISTICS(4, "原值统计"); + + private Integer code; + private String message; + + MetricTypeEnum(Integer code, String message) { + this.code = code; + this.message = message; + } + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } +} diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/ScalaCodeConstant.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/ScalaCodeConstant.java new file mode 100644 index 00000000..deb07855 --- /dev/null +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/ScalaCodeConstant.java @@ -0,0 +1,148 @@ +package com.webank.wedatasphere.qualitis.constant; + +import com.webank.wedatasphere.qualitis.rule.entity.RuleDataSource; +import java.text.MessageFormat; + +/** + * @author v_wenxuanzhang + */ +public class ScalaCodeConstant { + + public static final String BOTTOM_BAR = "_"; + public static final String VAL_STRING = "val %s = \"%s\""; + public static final String VAL_NUMBER = "val %s = %s"; + public static final String VAR_STRING = "var %s = \"%s\""; + public static final String VAR_NUMBER = "var %s = %s"; + public static final String NUMBER = "%s = %s"; + public static final String STRING = "%s = \"%s\""; + public static final String ERROR_MSG = "{0} = \"database_name=%s,table_name=%s,attr_name=%s,rowkey_name=%s,rowvalueenum_name=%s,calc_type=%s\".format({1},{2},{3},{4},{5},{6})"; + public static final String EXCEPTION_COLLECT = "case e: %s => %s = (%s + \",%s:\" + e.getMessage) +: %s"; + + public static final String QUERY_IDENTIFY_COMMON_SQL = "select metric_id from qualitis_imsmetric_identify where datasource_type = %s and database_name = '%s' " + + "and table_name = '%s' and attr_name = 
'\" + %s + \"' "; + public static final String QUERY_IDENTIFY_ENUM_SQL_1 = "select metric_id from qualitis_imsmetric_identify where datasource_type = %s and database_name = '%s' " + + "and table_name = '%s' and attr_name = '\" + %s + \"' and groupbyattr_names = '\" + %s + \"' "; + public static final String QUERY_IDENTIFY_BY_ENUM_SQL_2 = " and rowvalueenum_name = '%s' and calc_type = %s "; + public static final String QUERY_IDENTIFY_BY_TOTAL_SQL = " and groupbyattr_names = '%s' and rowkey_name = '%s' and calc_type = %s "; + public static final String QUERY_IDENTIFY_BY_ORIGIN_SQL = " and rowkey_name = '%s' "; + public static final String QUERY_IDENTIFY_BY_TABLE_SQL = " and calc_type = %s "; + + public static final String INSERT_IDENTIFY_TABLE_SQL = "insert into qualitis_imsmetric_identify(metric_type,datasource_type,database_name,table_name,attr_name," + + "create_time,update_time,datasource_user,ageing,partition_attrs,calc_type) values " + + "(%s,%s,'%s','%s','\" + %s + \"','\" + %s + \"','\" + %s + \"','%s','\" + %s + \"','\" + %s + \"'"; + public static final String INSERT_IDENTIFY_BY_ENUM_SQL = "insert into qualitis_imsmetric_identify(metric_type,datasource_type,database_name,table_name,attr_name,groupbyattr_names," + + "create_time,update_time,datasource_user,ageing,partition_attrs,calc_type,rowvalueenum_name) values " + + "(%s,%s,'%s','%s','\" + %s + \"','\"+ %s + \"','\" + %s + \"','\" + %s + \"','%s','\" + %s + \"','\" + %s + \"'"; + public static final String INSERT_IDENTIFY_BY_ORIGIN_SQL = "insert into qualitis_imsmetric_identify(metric_type,datasource_type,database_name,table_name,attr_name,calc_type," + + "create_time,update_time,datasource_user,ageing,partition_attrs,rowkey_name) values " + + "(%s,%s,'%s','%s','\" + %s + \"',7,'\" + %s + \"','\" + %s + \"','%s','\" + %s + \"','\" + %s + \"'"; + + public static final String INSERT_DATA_COMMON_SQL = "INSERT INTO qualitis_imsmetric_data (metric_id, metric_value, 
create_time,update_time,data_time,data_date,datasource_user,datasource_type) VALUES %s " + + "ON DUPLICATE KEY UPDATE metric_value = values(metric_value) , update_time = values(update_time)"; + public static final String INSERT_DATA_SQL_1 = "'\" + %s + \"', '\" + %s + \"', '\" + %s + \"'"; + public static final String INSERT_DATA_SQL_2 = " '%s' , %s \" + \") "; + public static final String NOT_SUPPORTED_METRIC_TYPE = "不支持的指标采集方式"; + + + public static String stateValString(String fieldName, String fieldValue) { + return format(VAL_STRING, fieldName, fieldValue); + } + + public static String stateValNumber(String fieldName, Object fieldValue) { + return format(VAL_NUMBER, fieldName, fieldValue); + } + + public static String stateVarString(String fieldName, String fieldValue) { + return format(VAR_STRING, fieldName, fieldValue); + } + + public static String stateVarNumber(String fieldName, Object fieldValue) { + return format(VAR_NUMBER, fieldName, fieldValue); + } + + public static String stateString(String fieldName, Object fieldValue) { + return format(STRING, fieldName, fieldValue); + } + + public static String stateNumber(String fieldName, Object fieldValue) { + return format(NUMBER, fieldName, fieldValue); + } + + public static String jointVariableName(String prefix, String suffixes) { + return prefix + BOTTOM_BAR + suffixes; + } + + public static String errorMsg(String fieldName, String databaseName, String tableName, String attrName, String rowkeyName, String rowvalueenumName, String calcType) { + return MessageFormat.format(ERROR_MSG, fieldName, databaseName, tableName, attrName, rowkeyName, rowvalueenumName, calcType); + } + + public static String getQueryIdentifySql(int metricTypeInt, RuleDataSource dataSource, String attrName) { + if (MetricTypeEnum.TABLE_STATISTICS.getCode() == metricTypeInt) { + return format(QUERY_IDENTIFY_COMMON_SQL, dataSource.getDatasourceType(), dataSource.getDbName(), + dataSource.getTableName(), attrName) + 
QUERY_IDENTIFY_BY_TABLE_SQL; + + } else if (MetricTypeEnum.ENUM_STATISTICS.getCode() == metricTypeInt) { + return format(QUERY_IDENTIFY_ENUM_SQL_1, dataSource.getDatasourceType(), dataSource.getDbName(), + dataSource.getTableName(), attrName, attrName) + QUERY_IDENTIFY_BY_ENUM_SQL_2; + + } else if (MetricTypeEnum.ORIGIN_STATISTICS.getCode() == metricTypeInt) { + return format(QUERY_IDENTIFY_COMMON_SQL, dataSource.getDatasourceType(), dataSource.getDbName(), + dataSource.getTableName(), attrName) + QUERY_IDENTIFY_BY_ORIGIN_SQL; + + } else if (MetricTypeEnum.COLLECT_STATISTICS.getCode() == metricTypeInt) { + return format(QUERY_IDENTIFY_COMMON_SQL + QUERY_IDENTIFY_BY_TOTAL_SQL, dataSource.getDatasourceType(), + dataSource.getDbName(), dataSource.getTableName(), attrName); + } else { + throw new RuntimeException(NOT_SUPPORTED_METRIC_TYPE); + } + } + + public static String getInsertIdentifySql(int metricTypeInt, RuleDataSource dataSource, String attrName, + String nowTime, String ageing, String partitionAttrs) { + + if (MetricTypeEnum.TABLE_STATISTICS.getCode() == metricTypeInt) { + return format(INSERT_IDENTIFY_TABLE_SQL, metricTypeInt, dataSource.getDatasourceType(), dataSource.getDbName(), + dataSource.getTableName(), attrName, nowTime, nowTime, dataSource.getProxyUser(), ageing, partitionAttrs) + ",%s)"; + + } else if (MetricTypeEnum.ENUM_STATISTICS.getCode() == metricTypeInt) { + return format(INSERT_IDENTIFY_BY_ENUM_SQL, metricTypeInt, dataSource.getDatasourceType(), + dataSource.getDbName(), dataSource.getTableName(), attrName, attrName, nowTime, nowTime, dataSource.getProxyUser(), ageing, partitionAttrs) + ",%s,'%s')"; + + } else if (MetricTypeEnum.ORIGIN_STATISTICS.getCode() == metricTypeInt) { + return format(INSERT_IDENTIFY_BY_ORIGIN_SQL, metricTypeInt, dataSource.getDatasourceType(), + dataSource.getDbName(), dataSource.getTableName(), attrName, nowTime, nowTime, dataSource.getProxyUser(), ageing, partitionAttrs) + ",'%s')"; + + } else if 
(MetricTypeEnum.COLLECT_STATISTICS.getCode() == metricTypeInt) { + return format(QUERY_IDENTIFY_COMMON_SQL + QUERY_IDENTIFY_BY_TOTAL_SQL, dataSource.getDatasourceType(), + dataSource.getDbName(), dataSource.getTableName(), attrName); + } else { + throw new RuntimeException(NOT_SUPPORTED_METRIC_TYPE); + } + } + + public static String getInsertDataSql(int metricTypeInt, RuleDataSource dataSource, String nowTime) { + + if (MetricTypeEnum.TABLE_STATISTICS.getCode() == metricTypeInt) { + return "(%s, %s, " + format(INSERT_DATA_SQL_1, nowTime, nowTime, nowTime) + ", %s, " + format(INSERT_DATA_SQL_2, dataSource.getProxyUser(), dataSource.getDatasourceType()); + } else if (MetricTypeEnum.ENUM_STATISTICS.getCode() == metricTypeInt) { + return "(%s, %s, " + format(INSERT_DATA_SQL_1, nowTime, nowTime, nowTime) + ", %s, " + format(INSERT_DATA_SQL_2, dataSource.getProxyUser(), dataSource.getDatasourceType()); + + } else if (MetricTypeEnum.ORIGIN_STATISTICS.getCode() == metricTypeInt) { + return "(%s, %s, " + format(INSERT_DATA_SQL_1, nowTime, nowTime, nowTime) + ", %s, " + format(INSERT_DATA_SQL_2, dataSource.getProxyUser(), dataSource.getDatasourceType()); + } else if (MetricTypeEnum.COLLECT_STATISTICS.getCode() == metricTypeInt) { + return "(%s, %s, " + format(INSERT_DATA_SQL_1, nowTime, nowTime, nowTime) + ", %s, " + format(INSERT_DATA_SQL_2, dataSource.getProxyUser(), dataSource.getDatasourceType()); + } else { + throw new RuntimeException(NOT_SUPPORTED_METRIC_TYPE); + } + } + + public static String exceptionCollect(String exception, String errorList, String remark, String errorMsg) { + return format(EXCEPTION_COLLECT, exception, errorList, errorMsg, remark, errorList); + } + + + public static String format(String template, Object... 
param) { + return String.format(template, param); + } + +} diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/UnionWayEnum.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/UnionWayEnum.java new file mode 100644 index 00000000..fa16d5f9 --- /dev/null +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/constant/UnionWayEnum.java @@ -0,0 +1,48 @@ +package com.webank.wedatasphere.qualitis.constant; + +/** + * @author v_minminghe@webank.com + * @date 2024-01-26 10:07 + * @description + */ +public enum UnionWayEnum { + + /** + * 两边数量一致的n个环境一一比对,产生n个结果 + */ + NO_COLLECT_CALCULATE(0, "无聚合计算"), + /** + * 两边数量一致的n个环境一一比对,聚合成1个结果 + */ + COLLECT_AFTER_CALCULATE(1, "先计算再聚合"), + /** + * 两边的n个环境各自聚合后,进行一次比对,产生1个结果 + */ + CALCULATE_AFTER_COLLECT(2, "先聚合再计算"); + + private Integer code; + private String message; + + UnionWayEnum(Integer code, String message) { + this.code = code; + this.message = message; + } + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } + + public static UnionWayEnum fromCode(Integer code) { + for (UnionWayEnum unionWayEnum: UnionWayEnum.values()) { + if (unionWayEnum.code.equals(code)) { + return unionWayEnum; + } + } + return UnionWayEnum.NO_COLLECT_CALCULATE; + } + +} diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/AbstractTemplateConverter.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/AbstractTemplateConverter.java index 27e428e5..57d22bab 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/AbstractTemplateConverter.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/AbstractTemplateConverter.java @@ -18,6 +18,7 @@ import com.webank.wedatasphere.qualitis.bean.DataQualityJob; import com.webank.wedatasphere.qualitis.bean.DataQualityTask; + import java.util.Date; import java.util.List; import 
java.util.Map; @@ -29,11 +30,13 @@ public abstract class AbstractTemplateConverter { /** * Convert Task into code that can be executed. + * * @param dataQualityTask * @param date * @param setFlag * @param execParams * @param runDate + * @param runToday * @param clusterType * @param dataSourceMysqlConnect * @param user @@ -41,10 +44,11 @@ public abstract class AbstractTemplateConverter { * @param rightCols * @param comelexCols * @param createUser + * @param projectId * @return * @throws Exception */ public abstract DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String setFlag, Map execParams, String runDate, - String clusterType, Map>> dataSourceMysqlConnect, String user, List leftCols, List rightCols, - List comelexCols,String createUser) throws Exception; + String runToday, String clusterType, Map>> dataSourceMysqlConnect, String user, List leftCols, List rightCols, + List comelexCols, String createUser, Long projectId) throws Exception; } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/SqlTemplateConverter.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/SqlTemplateConverter.java index ff336163..5db53b34 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/SqlTemplateConverter.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/converter/SqlTemplateConverter.java @@ -16,6 +16,8 @@ package com.webank.wedatasphere.qualitis.converter; +import com.webank.bsp.dpc.entity.AccountInfoSys; +import com.webank.bsp.dpc.util.AccountInfoObtainer; import com.webank.wedatasphere.qualitis.EngineTypeEnum; import com.webank.wedatasphere.qualitis.LocalConfig; import com.webank.wedatasphere.qualitis.bean.DataQualityJob; @@ -25,23 +27,33 @@ import com.webank.wedatasphere.qualitis.config.DpmConfig; import com.webank.wedatasphere.qualitis.config.FpsConfig; import com.webank.wedatasphere.qualitis.config.TaskDataSourceConfig; -import 
com.webank.wedatasphere.qualitis.constant.OptTypeEnum; -import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.constant.*; import com.webank.wedatasphere.qualitis.constants.QualitisConstants; import com.webank.wedatasphere.qualitis.entity.RuleMetric; import com.webank.wedatasphere.qualitis.exception.*; import com.webank.wedatasphere.qualitis.metadata.client.DataStandardClient; import com.webank.wedatasphere.qualitis.metadata.constant.RuleConstraintEnum; import com.webank.wedatasphere.qualitis.metadata.exception.MetaDataAcquireFailedException; +import com.webank.wedatasphere.qualitis.net.LocalNetwork; import com.webank.wedatasphere.qualitis.rule.constant.*; +import com.webank.wedatasphere.qualitis.rule.dao.StandardValueVersionDao; import com.webank.wedatasphere.qualitis.rule.entity.*; +import com.webank.wedatasphere.qualitis.scheduled.constant.RuleTypeEnum; import com.webank.wedatasphere.qualitis.translator.AbstractTranslator; import com.webank.wedatasphere.qualitis.util.CryptoUtils; import com.webank.wedatasphere.qualitis.util.DateExprReplaceUtil; import com.webank.wedatasphere.qualitis.util.DateUtils; +import com.webank.wedatasphere.qualitis.util.MyStringEscaper; import com.webank.wedatasphere.qualitis.util.QualitisCollectionUtils; import com.webank.wedatasphere.qualitis.util.map.CustomObjectMapper; +import net.sf.jsqlparser.JSQLParserException; +import net.sf.jsqlparser.parser.CCJSqlParserUtil; +import net.sf.jsqlparser.schema.Table; +import net.sf.jsqlparser.statement.Statement; +import net.sf.jsqlparser.statement.StatementVisitorAdapter; +import net.sf.jsqlparser.statement.select.*; import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.collections.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.util.Strings; import org.slf4j.Logger; @@ -51,6 +63,8 @@ import org.springframework.stereotype.Component; import java.io.IOException; +import 
java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; import java.text.SimpleDateFormat; import java.util.*; import java.util.regex.Matcher; @@ -78,28 +92,59 @@ public class SqlTemplateConverter extends AbstractTemplateConverter { @Autowired private FpsConfig fpsConfig; + @Value("${task.persistent.dbName}") + private String resultDbName; + @Value("${task.persistent.tableName}") + private String resultTableName; + @Value("${task.new_value.tableName}") + private String newValueTableName; + + @Autowired + private StandardValueVersionDao standardValueVersionDao; @Autowired private DataStandardClient dataStandardClient; @Value("${linkis.sql.communalTableName:common_table}") private String commonTableName; + @Value("${intellect.check.project_name:}") + private String intellectCheckProjectName; + + @Value("${intellect.check.fields_count_project_name:}") + private String intellectCheckFieldsProjectName; + + @Value("${intellect.check.table_collect_template_name}") + private String intellectCheckTableTemplateName; + + @Value("${intellect.check.enum_collect_template_name}") + private String intellectCheckEnumTemplateName; + + @Value("${intellect.check.origin_collect_template_name}") + private String intellectCheckOriginTemplateName; + + @Value("${intellect.check.total_collect_template_name}") + private String intellectCheckTotalTemplateName; + + @Value("${task.execute.trino_column_size:100}") + private Integer trinoColumnSize; + private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile(".*\\$\\{(.*)}.*"); private static final Pattern AGGREGATE_FUNC_PATTERN = Pattern.compile("[a-zA-Z]+\\([0-9a-zA-Z_]+\\)"); + private static final String FRONT_HALF = "[ `~!@#$%^&*()+=|{}':;',\\[\\]"; + private static final String POSTERIOR_HALF = "<>/?~!@#¥%……&*()——+|{}【】‘;:”“’。,、?]|\n|\r|\t"; + private static final Pattern MID_TABLE_NAME_PATTERN = Pattern.compile(FRONT_HALF + POSTERIOR_HALF); private static final String SAVE_MID_TABLE_NAME_PLACEHOLDER = 
"${TABLE_NAME}"; private static final String SPARK_SQL_TEMPLATE_PLACEHOLDER = "${SQL}"; public static final String VARIABLE_NAME_PLACEHOLDER = "${VARIABLE}"; + private static final String DATABASE_PLACEHOLDER = "${database}"; + private static final String TABLE_PLACEHOLDER = "${table}"; private static final String FILTER_PLACEHOLDER = "${filter}"; private static final String FILTER_LEFT_PLACEHOLDER = "${filter_left}"; private static final String FILTER_RIGHT_PLACEHOLDER = "${filter_right}"; private static final Integer SINGLE_RULE = 1; private static final Integer CUSTOM_RULE = 2; private static final Integer MUL_SOURCE_RULE = 3; - /** - * Multi table solve. - */ - private static final Long MUL_SOURCE_ACCURACY_TEMPLATE_ID = 17L; - private static final Long MUL_SOURCE_FULL_TEMPLATE_ID = 20L; + /** * Dpm properties. */ @@ -186,6 +231,7 @@ public class SqlTemplateConverter extends AbstractTemplateConverter { * @param setFlag * @param execParams * @param runDate + * @param runToday * @param clusterType * @param dataSourceMysqlConnect * @param user @@ -193,6 +239,7 @@ public class SqlTemplateConverter extends AbstractTemplateConverter { * @param rightCols * @param complexCols * @param createUser + * @param projectId * @return * @throws ConvertException * @throws DataQualityTaskException @@ -201,20 +248,22 @@ public class SqlTemplateConverter extends AbstractTemplateConverter { */ @Override public DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String setFlag, Map execParams, String runDate - , String clusterType, Map>> dataSourceMysqlConnect, String user, List leftCols, List rightCols, List complexCols, String createUser) throws Exception { + , String runToday, String clusterType, Map>> dataSourceMysqlConnect, String user, List leftCols, List rightCols, List complexCols, String createUser, Long projectId) throws Exception { - boolean withSpark = CollectionUtils.isNotEmpty(complexCols) && Boolean.FALSE.equals(taskDataSourceConfig.getHiveSortUdfOpen()); 
+ boolean withSpark = Boolean.FALSE.equals(taskDataSourceConfig.getHiveSortUdfOpen()); LOGGER.info("Start to convert template to actual code, task: " + dataQualityTask); if (null == dataQualityTask || dataQualityTask.getRuleTaskDetails().isEmpty()) { throw new DataQualityTaskException("Task can not be null or empty"); } DataQualityJob job = new DataQualityJob(); + job.setTaskId(dataQualityTask.getTaskId()); + job.setIndex(dataQualityTask.getIndex()); if (StringUtils.isNotBlank(setFlag)) { LOGGER.info("Start to solve with set flag. Spark set conf string: {}", setFlag); String[] setStrs = setFlag.split(SpecCharEnum.DIVIDER.getValue()); for (String str : setStrs) { - job.getJobCode().add("spark.sql(\"set " + str + "\")"); + job.getJobCode().add("spark.sql(\"SET " + str + "\")"); } LOGGER.info("Finish to solve with set flag."); } @@ -223,7 +272,7 @@ public DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String String queueName = ""; boolean engineReUse = true; boolean midTableReUse = true; - boolean unionAllForSaveResult = false; + int unionWay = 0; if (StringUtils.isNotBlank(startupParam)) { String[] startupParams = startupParam.split(SpecCharEnum.DIVIDER.getValue()); @@ -255,18 +304,14 @@ public DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String startupParam = startupParam.replace("mid_table_reuse=false", ""); } } - if ("union_all_save".equals(key)) { - if ("true".equals(value)) { - unionAllForSaveResult = true; - startupParam = startupParam.replace("union_all_save=true", ""); - } else { - unionAllForSaveResult = false; - startupParam = startupParam.replace("union_all_save=false", ""); - } + if ("union_way".equals(key)) { + unionWay = Integer.valueOf(value); + startupParam = startupParam.replace("union_way=" + value, ""); } - if ("qualitis.linkis.engineType".equals(key) && !EngineTypeEnum.DEFAULT_ENGINE.getMessage().equals(value)) { + if (QualitisConstants.QUALITIS_ENGINE_TYPE.equals(key) && 
!EngineTypeEnum.DEFAULT_ENGINE.getMessage().equals(value)) { engineType = value; + startupParam = startupParam.replace(QualitisConstants.QUALITIS_ENGINE_TYPE + SpecCharEnum.EQUAL.getValue() + value, ""); } if ("wds.linkis.rm.yarnqueue".equals(key)) { @@ -280,24 +325,30 @@ public DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String if (execParams.keySet().contains(QualitisConstants.QUALITIS_MID_TABLE_REUSE) && Boolean.FALSE.equals(Boolean.parseBoolean(execParams.get(QualitisConstants.QUALITIS_MID_TABLE_REUSE)))) { midTableReUse = false; } - if (execParams.keySet().contains(QualitisConstants.QUALITIS_UNION_ALL_SAVE) && Boolean.TRUE.equals(Boolean.parseBoolean(execParams.get(QualitisConstants.QUALITIS_UNION_ALL_SAVE)))) { - unionAllForSaveResult = true; + if (execParams.keySet().contains(QualitisConstants.QUALITIS_UNION_WAY)) { + unionWay = Integer.valueOf(execParams.getOrDefault(QualitisConstants.QUALITIS_UNION_WAY, UnionWayEnum.NO_COLLECT_CALCULATE.getCode().toString())); } if (execParams.keySet().contains(QualitisConstants.QUALITIS_ENGINE_TYPE) && !EngineTypeEnum.DEFAULT_ENGINE.getMessage().equals(execParams.get(QualitisConstants.QUALITIS_ENGINE_TYPE))) { - engineType = EngineTypeEnum.SPARK_ENGINE.getMessage(); + engineType = execParams.get(QualitisConstants.QUALITIS_ENGINE_TYPE); } List initSentence = abstractTranslator.getInitSentence(); - job.getJobCode().addAll(initSentence); + if (! EngineTypeEnum.TRINO_ENGINE.getMessage().equals(engineType) || withSpark) { + job.getJobCode().addAll(initSentence); + job.getJobCode().add("spark.sql(\"SET spark.sql.hive.convertMetastoreOrc=false\")"); + } List envNames = new ArrayList<>(); boolean shareConnect = CollectionUtils.isNotEmpty(dataQualityTask.getConnectShare()); - List communalSentence = getCommunalSentence(dataQualityTask, envNames); - job.getJobCode().addAll(communalSentence); + List communalSentence = new ArrayList<>(); + if (! 
EngineTypeEnum.TRINO_ENGINE.getMessage().equals(engineType) || withSpark) { + communalSentence = getCommunalSentence(dataQualityTask, envNames, runDate, runToday, engineType); + job.getJobCode().addAll(communalSentence); + } int count = 0; for (RuleTaskDetail ruleTaskDetail : dataQualityTask.getRuleTaskDetails()) { - if (Boolean.TRUE.equals(ruleTaskDetail.getRule().getUnionAll())) { - unionAllForSaveResult = true; + if (Objects.nonNull(ruleTaskDetail.getRule().getUnionWay())) { + unionWay = ruleTaskDetail.getRule().getUnionWay(); } // Get current rule left cols and right cols. List currentRuleLeftCols = new ArrayList<>(); @@ -314,18 +365,35 @@ public DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String job.getJobCode().addAll(fpsCodes); // Handle hive engine task depend on startup param(qualitis.linkis.engineType=shell,spark(default)) - if (CollectionUtils.isEmpty(fpsCodes) && !withSpark && taskDataSourceConfig.getMysqlsecOpen() && EngineTypeEnum.DEFAULT_ENGINE.getMessage().equals(engineType) && MUL_SOURCE_FULL_TEMPLATE_ID.equals(ruleTaskDetail.getRule().getTemplate().getId()) && CollectionUtils.isEmpty(dataSourceMysqlConnect.keySet())) { - job.getJobCode().clear(); - // Hql - List codes = generateShellSqlByTask(ruleTaskDetail.getRule(), date, dataQualityTask.getApplicationId(), dataQualityTask.getCreateTime(), new StringBuilder(dataQualityTask.getPartition()), count, runDate, currentRuleLeftCols, currentRuleRightCols, complexCols, queueName, createUser); - job.setEngineType(EngineTypeEnum.DEFAULT_ENGINE.getMessage()); - job.getJobCode().addAll(codes); - continue; + if (CollectionUtils.isEmpty(complexCols) && CollectionUtils.isEmpty(fpsCodes) && !withSpark && taskDataSourceConfig.getMysqlsecOpen()) { + boolean generated = false; + if (EngineTypeEnum.DEFAULT_ENGINE.getMessage().equals(engineType) && QualitisConstants.MULTI_SOURCE_FULL_TEMPLATE_NAME.equals(ruleTaskDetail.getRule().getTemplate().getEnName()) + && 
RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(ruleTaskDetail.getRule().getTemplate().getTemplateType()) + && CollectionUtils.isEmpty(dataSourceMysqlConnect.keySet())) { + job.getJobCode().clear(); + // Hql + List codes = generateShellSqlByTask(ruleTaskDetail.getRule(), date, dataQualityTask.getApplicationId(), dataQualityTask.getCreateTime(), new StringBuilder(dataQualityTask.getPartition()), count, runDate, runToday, currentRuleLeftCols, currentRuleRightCols, complexCols, queueName, createUser); + job.setEngineType(EngineTypeEnum.DEFAULT_ENGINE.getMessage()); + job.getJobCode().addAll(codes); + generated = true; + } else if (EngineTypeEnum.TRINO_ENGINE.getMessage().equals(engineType) && ! QualitisConstants.isAcrossCluster(ruleTaskDetail.getRule().getTemplate().getEnName())) { + // Tsql + List codes = generateTrinoSqlByTask(job, ruleTaskDetail.getRule(), date, dataQualityTask.getApplicationId(), dataQualityTask.getCreateTime(), new StringBuilder(dataQualityTask.getPartition()), execParams, runDate, runToday, currentRuleLeftCols, currentRuleRightCols, complexCols, queueName, createUser); + job.setEngineType(EngineTypeEnum.TRINO_ENGINE.getMessage()); + job.getJobCode().addAll(codes); + generated = true; + } + if (generated) { + continue; + } + } + String sharePart = ""; + if (StringUtils.isNotEmpty(dataQualityTask.getDbShare()) && StringUtils.isNotEmpty(dataQualityTask.getTableShare())) { + sharePart = dataQualityTask.getDbShare() + SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + dataQualityTask.getTableShare(); } - List codes = generateSparkSqlByTask(job, ruleTaskDetail.getRule(), date, dataQualityTask.getApplicationId(), ruleTaskDetail.getMidTableName() - , dataQualityTask.getCreateTime(), new StringBuilder(dataQualityTask.getPartition()), execParams, runDate, dataSourceMysqlConnect, user, midTableReUse - , unionAllForSaveResult, currentRuleLeftCols, currentRuleRightCols, complexCols, createUser, shareConnect, dataQualityTask.getDbShare() + 
SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + dataQualityTask.getTableShare()); + , dataQualityTask.getCreateTime(), new StringBuilder(dataQualityTask.getPartition()), execParams, runDate, runToday, dataSourceMysqlConnect, user, midTableReUse + , unionWay, currentRuleLeftCols, currentRuleRightCols, complexCols, createUser, shareConnect, sharePart); job.setEngineType(EngineTypeEnum.SPARK_ENGINE.getMessage()); job.getJobCode().addAll(codes); @@ -345,20 +413,41 @@ public DataQualityJob convert(DataQualityTask dataQualityTask, Date date, String } } LOGGER.info("Succeed to convert all rule into actual scala code."); - job.setTaskId(dataQualityTask.getTaskId()); job.setStartupParam(startupParam); job.setEngineReuse(engineReUse); return job; } - private List getCommunalSentence(DataQualityTask dataQualityTask, List envNames) throws UnExpectedRequestException { + private List getCommunalSentence(DataQualityTask dataQualityTask, List envNames, String runDate, String runToday, String engineType) throws UnExpectedRequestException { List sqlList = new ArrayList<>(); if (StringUtils.isEmpty(dataQualityTask.getDbShare()) || StringUtils.isEmpty(dataQualityTask.getTableShare())) { return sqlList; } - List columnList = new ArrayList<>(); + // 自定义规则不使用公共缓存 + try { + Rule rule = dataQualityTask.getRuleTaskDetails().get(0).getRule(); + if(compareProjectName(rule)){ + return sqlList; + } + if(StringUtils.isNotBlank(intellectCheckFieldsProjectName) && + intellectCheckFieldsProjectName.equals(rule.getProject().getName())){ + return sqlList; + } + } catch (Exception e) { + LOGGER.error("compare projectName error. 
can't get rule info"); + } + List columnList = new ArrayList<>(); String filterPart = dataQualityTask.getFilterShare(); + if (StringUtils.isNotEmpty(filterPart) && StringUtils.isNotEmpty(runDate)) { + filterPart = filterPart.replace("${run_date}", runDate); +// filterPart = filterPart.replace("${run_date_std}", runDate); + } + if (StringUtils.isNotEmpty(filterPart) && StringUtils.isNotEmpty(runToday)) { + filterPart = filterPart.replace("${run_today}", runToday); +// filterPart = filterPart.replace("${run_today_std}", runToday); + } + String fromPart = dataQualityTask.getDbShare() + SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + dataQualityTask.getTableShare(); String selectPart = "*"; @@ -371,7 +460,6 @@ private List getCommunalSentence(DataQualityTask dataQualityTask, List getCommunalSentence(DataQualityTask dataQualityTask, List getCommunalSentence(DataQualityTask dataQualityTask, List generateShellSqlByTask(Rule rule, Date date, String applicationId, String createTime, StringBuilder partition, int count - , String runDate, List leftCols, List rightCols, List complexCols, String queueName, String createUser) throws Exception { + , String runDate, String runToday, List leftCols, List rightCols, List complexCols, String queueName, String createUser) throws Exception { List sqlList = new ArrayList<>(); @@ -420,11 +511,10 @@ private List generateShellSqlByTask(Rule rule, Date date, String applica Map ruleMetricMap = collectRuleMetric(rule); Map dbTableMap = new HashMap<>(4); Map filters = new HashMap<>(2); - StringBuilder realFilter = new StringBuilder(); StringBuilder realColumn = new StringBuilder(); - templateMidTableAction = getMultiDatasourceFiltesAndUpdateMidTableAction(rule, templateMidTableAction, date, filters); - replaceVariable(templateMidTableAction, inputMetaRuleVariables, partition.toString(), realFilter, realColumn, dbTableMap, date, createUser); + templateMidTableAction = getMultiDatasourceFiltesAndUpdateMidTableAction(rule, templateMidTableAction, date, 
filters, leftCols, rightCols, complexCols, null, runDate, runToday, false); + replaceVariable(templateMidTableAction, inputMetaRuleVariables, partition.toString(), realColumn, dbTableMap, date, rule.getStandardValueVersionId(), createUser, "", runDate, runToday, EngineTypeEnum.DEFAULT_ENGINE.getMessage()); // If partition is not specified, replace with filter in rule configuration. if (StringUtils.isBlank(partition.toString())) { @@ -460,7 +550,7 @@ private List generateShellSqlByTask(Rule rule, Date date, String applica if (StringUtils.isNotEmpty(queueName)) { setQueue.append("set mapreduce.job.queuename=").append(queueName).append(";"); } - String hiveSql = "count_result_" + count + "=`hive -S -e \"" + setQueue.toString() + createFunc.toString() + "select count(1) as diff_count from (select line_md5, count(1) as md5_count from (select md5(concat_ws(''," + leftConcat.toString() + ")) as line_md5 from " + dbTableMap.get("left_database") + dbTableMap.get("left_table") + " where " + filters.get("left_table") + ") left_tmp group by left_tmp.line_md5) qulaitis_left_tmp ${contrast_type} (select line_md5, count(1) as md5_count from (select md5(concat_ws(''," + rightConcat.toString() + ")) as line_md5 from " + dbTableMap.get("right_database") + dbTableMap.get("right_table") + " where " + filters.get("right_table") + ") right_tmp group by right_tmp.line_md5) qulaitis_right_tmp ON (qulaitis_left_tmp.line_md5 = qulaitis_right_tmp.line_md5 AND qulaitis_left_tmp.md5_count = qulaitis_right_tmp.md5_count) where (qulaitis_left_tmp.line_md5 is null AND qulaitis_left_tmp.md5_count is null) OR (qulaitis_right_tmp.line_md5 is null AND qulaitis_right_tmp.md5_count is null) ${outer_filter};\"`"; + String hiveSql = "count_result_" + count + "=`hive -e \"" + setQueue.toString() + createFunc.toString() + "select count(1) as diff_count from (select line_md5, count(1) as md5_count from (select md5(concat_ws(''," + leftConcat.toString() + ")) as line_md5 from " + 
dbTableMap.get("left_database") + dbTableMap.get("left_table") + " where " + filters.get("left_table") + ") left_tmp group by left_tmp.line_md5) qulaitis_left_tmp ${contrast_type} (select line_md5, count(1) as md5_count from (select md5(concat_ws(''," + rightConcat.toString() + ")) as line_md5 from " + dbTableMap.get("right_database") + dbTableMap.get("right_table") + " where " + filters.get("right_table") + ") right_tmp group by right_tmp.line_md5) qulaitis_right_tmp ON (qulaitis_left_tmp.line_md5 = qulaitis_right_tmp.line_md5 AND qulaitis_left_tmp.md5_count = qulaitis_right_tmp.md5_count) where (qulaitis_left_tmp.line_md5 is null AND qulaitis_left_tmp.md5_count is null) OR (qulaitis_right_tmp.line_md5 is null AND qulaitis_right_tmp.md5_count is null) ${outer_filter};\"`"; hiveSql = hiveSql.replace("${contrast_type}", ContrastTypeEnum.getJoinType(rule.getContrastType())); if (StringUtils.isNotEmpty(partition.toString())) { hiveSql = hiveSql.replace("${outer_filter}", "AND (" + partition.toString() + ")"); @@ -478,7 +568,11 @@ private List generateShellSqlByTask(Rule rule, Date date, String applica sqlList.add(mysqlConn); String ruleVersion = rule.getWorkFlowVersion() == null ? 
"" : rule.getWorkFlowVersion(); if (StringUtils.isEmpty(runDate)) { - runDate = "-1"; + if (StringUtils.isNotBlank(runToday)) { + runDate = runToday; + } else { + runDate = "-1"; + } } String insertSql = "sql_" + count + "=\"INSERT INTO qualitis_application_task_result (application_id, create_time, result_type, rule_id, value, rule_metric_id, run_date, version) VALUES('" + applicationId + "', '" + createTime + "', 'Long', " + rule.getId() + ", $count_value_" + count + ", -1, " + runDate + ", '" + ruleVersion + "');\""; if (CollectionUtils.isNotEmpty(ruleMetricMap.values())) { @@ -489,6 +583,264 @@ private List generateShellSqlByTask(Rule rule, Date date, String applica return sqlList; } + private List generateTrinoSqlByTask(DataQualityJob job, Rule rule, Date date, String applicationId, String createTime, StringBuilder partition, Map execParams, String runDate, String runToday, List leftCols, List rightCols, List complexCols, String queueName, String createUser) throws UnExpectedRequestException, ConvertException, MetaDataAcquireFailedException { + List sqlList = new ArrayList<>(); + Map filters = new HashMap<>(2); + + // Collect rule metric and build in save sentence sql. 
+ Map ruleMetricMap = collectRuleMetric(rule); + + // Get SQL from template after remove '\n' + String templateMidTableAction = rule.getTemplate().getMidTableAction().replace("\n", " "); + // trino compatibility handling start + LOGGER.info("Before handle trino schema: {}", templateMidTableAction); + templateMidTableAction = templateMidTableAction.replace("${fields} not regexp '${regexp}'", "regexp_like(cast(${fields} as varchar), '${regexp}')"); + templateMidTableAction = templateMidTableAction.replace("trim(${fields}) = ''", "trim(cast(${fields} as varchar)) = ''"); + templateMidTableAction = templateMidTableAction.replace("concat_ws(',',${fields})", "concat_ws(',',cast(${fields} as varchar))"); + LOGGER.info("After handle trino schema: {}", templateMidTableAction); + // trino compatibility handling end + + templateMidTableAction = templateMidTableAction.replace("${database}.${table}", "hive.${database}.${table}"); + templateMidTableAction = templateMidTableAction.replace("${left_database}.${left_table}", "hive.${left_database}.${left_table}"); + templateMidTableAction = templateMidTableAction.replace("${right_database}.${right_table}", "hive.${right_database}.${right_table}"); + + if (MUL_SOURCE_RULE.intValue() == rule.getRuleType()) { + if (QualitisConstants.MULTI_SOURCE_ACCURACY_TEMPLATE_NAME.equals(rule.getTemplate().getEnName())) { + templateMidTableAction = templateMidTableAction.replace("hive.${left_database}.${left_table}", "(select * from " + "hive.${left_database}.${left_table} where " + "${filter_left})"); + templateMidTableAction = templateMidTableAction.replace("hive.${right_database}.${right_table}", "(select * from " + "hive.${right_database}.${right_table} where " + "${filter_right})"); + templateMidTableAction = "SELECT * from (" + templateMidTableAction + ") tmp3 where tmp3.compare_result = 0"; + } + templateMidTableAction = getMultiDatasourceFiltesAndUpdateMidTableAction(rule, templateMidTableAction, date, filters, leftCols, rightCols, 
complexCols, job.getIndex(), + runDate, runToday, true); + } else if (CUSTOM_RULE.intValue() == rule.getRuleType()) { + templateMidTableAction = customMidTableActionUpdate(rule, templateMidTableAction, date, execParams, partition, ruleMetricMap, runDate, runToday, EngineTypeEnum.TRINO_ENGINE.getMessage()); + } + + if (Boolean.TRUE.equals(rule.getTemplate().getFilterFields()) && RuleTemplateTypeEnum.SINGLE_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType())) { + String filterColName = rule.getRuleDataSources().stream().filter(dataSource -> dataSource.getDatasourceIndex() == null).iterator().next().getColName(); + List filterColNameList = new ArrayList<>(); + if (StringUtils.isNotEmpty(filterColName)) { + StringBuilder repeat = new StringBuilder(); + String[] realColumns = filterColName.split(SpecCharEnum.VERTICAL_BAR.getValue()); + for (String column : realColumns) { + String[] colInfo = column.split(SpecCharEnum.COLON.getValue()); + String colName = colInfo[0]; + filterColNameList.add("cast(" + colName + " as varchar)"); + } + repeat.append("select md5, count(1) as md5_count from (select md5(cast(concat_ws('', ").append(Strings.join(filterColNameList, ',')).append(") as varbinary)) as md5 from (" + templateMidTableAction + ") template_source) template_group_source group by template_group_source.md5 having count(*) > 1"); + templateMidTableAction = repeat.toString(); + } + } + + // Get statistics meta + Set templateStatisticsInputMetas = rule.getTemplate().getStatisticAction(); + // Get select input meta + List inputMetaRuleVariables = rule.getRuleVariables().stream().filter(ruleVariable -> ruleVariable.getInputActionStep().equals(InputActionStepEnum.TEMPLATE_INPUT_META.getCode())).collect(Collectors.toList()); + + // If partition is not specified, replace with filter from rule datasource. 
+ if (StringUtils.isBlank(partition.toString())) { + fillPartitionWithRuleConfiguration(partition, rule, templateMidTableAction, inputMetaRuleVariables); + } + + // Get dbs and tables + Map dbTableMap = new HashMap<>(4); + // Get column and filter + StringBuilder realColumn = new StringBuilder(); + // Get template sql and replace all replaceholders + String midTableAction = replaceVariable(templateMidTableAction, inputMetaRuleVariables, partition.toString(), realColumn, dbTableMap, date, rule.getStandardValueVersionId(), createUser, "", runDate, runToday, EngineTypeEnum.TRINO_ENGINE.getMessage()); + + if (QualitisConstants.SINGLE_CLUSTER_CUSTOM_TEMPLATE_NAME.equals(rule.getTemplate().getEnName())) { + midTableAction = addHivePrefixToTables(midTableAction); + midTableAction = "SELECT * from (" + midTableAction + ") tmp3 where tmp3.compare_result = 0"; + } + + StringBuilder trinoSql = new StringBuilder(); + trinoSql.append("insert into mysql").append(SpecCharEnum.PERIOD_NO_ESCAPE.getValue()).append(resultDbName).append(SpecCharEnum.PERIOD_NO_ESCAPE.getValue()).append(resultTableName); + trinoSql.append(" (application_id, create_time, result_type, rule_id, value, rule_metric_id, run_date, version) "); + + String ruleVersion = rule.getWorkFlowVersion() == null ? 
"" : rule.getWorkFlowVersion(); + + if (StringUtils.isEmpty(runDate)) { + if (StringUtils.isNotBlank(runToday)) { + runDate = runToday; + } else { + runDate = "-1"; + } + } + Long ruleMetricId = -1L; + + if (ruleMetricMap != null && ruleMetricMap.size() > 0) { + for (String key : ruleMetricMap.keySet()) { + if (null != ruleMetricMap.get(key)) { + ruleMetricId = ruleMetricMap.get(key); + } + + if (CUSTOM_RULE.intValue() == rule.getRuleType()) { + key = key.replace("-", "_"); + StringBuilder customTrinoSql = new StringBuilder(trinoSql.toString()); + customTrinoSql.append("select ") + .append("'").append(applicationId).append("', ") + .append("'").append(createTime).append("', ") + .append("'").append("Long").append("', ") + .append(rule.getId()).append(", ") + .append("CAST(").append(key).append(" AS VARCHAR)").append(", ") + .append(ruleMetricId).append(", ") + .append(runDate).append(", ") + .append("'").append(ruleVersion).append("'") + .append(" from ").append("(") + .append(midTableAction) + .append(") tmp;"); + sqlList.add(customTrinoSql.toString()); + } + } + if (CUSTOM_RULE.intValue() == rule.getRuleType()) { + return sqlList; + } + } + + trinoSql.append("select ") + .append("'").append(applicationId).append("', ") + .append("'").append(createTime).append("', ") + .append("'").append("Long").append("', ") + .append(rule.getId()).append(", ") + .append("${value}").append(", ") + .append(ruleMetricId).append(", ") + .append(runDate).append(", ") + .append("'").append(ruleVersion).append("'") + .append(" from ").append("(") + .append(midTableAction) + .append(") tmp;"); + + for (TemplateStatisticsInputMeta templateStatisticsInputMeta : templateStatisticsInputMetas) { + String functionName = templateStatisticsInputMeta.getFuncName(); + String value = templateStatisticsInputMeta.getValue(); + + String tsql = trinoSql.toString().replace("${value}", "CAST(" + functionName + "(" + value + ")" + " AS VARCHAR)"); + sqlList.add(tsql); + } + + // New value save + Set 
templateMidTableInputMetas = rule.getTemplate().getTemplateMidTableInputMetas(); + boolean saveNewValue = templateMidTableInputMetas.stream().anyMatch(templateMidTableInputMeta -> Boolean.TRUE.equals(templateMidTableInputMeta.getWhetherNewValue())); + if (saveNewValue) { + StringBuilder basicNewValueSql = new StringBuilder(); + basicNewValueSql.append("insert into mysql").append(SpecCharEnum.PERIOD_NO_ESCAPE.getValue()).append(resultDbName).append(SpecCharEnum.PERIOD_NO_ESCAPE.getValue()).append(newValueTableName); + basicNewValueSql.append(" (rule_id, status, create_user, rule_version, create_time, result_value) "); + + boolean numRangeNewValue = templateMidTableInputMetas.stream().anyMatch(templateMidTableInputMeta -> TemplateInputTypeEnum.INTERMEDIATE_EXPRESSION.getCode().equals(templateMidTableInputMeta.getInputType())); + boolean enumListNewValue = templateMidTableInputMetas.stream().anyMatch(templateMidTableInputMeta -> TemplateInputTypeEnum.LIST.getCode().equals(templateMidTableInputMeta.getInputType()) || TemplateInputTypeEnum.STANDARD_VALUE_EXPRESSION.getCode().equals(templateMidTableInputMeta.getInputType())); + + if (numRangeNewValue) { + StringBuilder numRangeNewValueSql = new StringBuilder(basicNewValueSql.toString()); + numRangeNewValueSql.append("select ") + .append(rule.getId()).append(", ") + .append("1, ") + .append("'").append(createUser).append("', ") + .append("'").append(ruleVersion).append("', ") + .append("'").append(createTime).append("', ") + .append("CAST(custom_column AS VARCHAR)") + .append(" from ").append("(") + .append(midTableAction) + .append(") tmp;"); + sqlList.add(numRangeNewValueSql.toString()); + } + + if (enumListNewValue) { + StringBuilder numRangeNewValueSql = new StringBuilder(basicNewValueSql.toString()); + numRangeNewValueSql.append("select ") + .append(rule.getId()).append(", ") + .append("1, ") + .append("'").append(createUser).append("', ") + .append("'").append(ruleVersion).append("', ") + 
.append("'").append(createTime).append("', ") + .append("concat_ws(',', cast(" + realColumn.toString() + " as varchar))") + .append(" from ").append("(") + .append(midTableAction) + .append(") tmp;"); + sqlList.add(numRangeNewValueSql.toString()); + } + } + + + return sqlList; + } + + private String addHivePrefixToTables(String midTableAction) throws ConvertException{ + try { + Statement statement = CCJSqlParserUtil.parse(midTableAction); + statement.accept(new StatementVisitorAdapter() { + @Override + public void visit(Select select) { + select.getSelectBody().accept(new SelectVisitorAdapter() { + @Override + public void visit(PlainSelect plainSelect) { + processFromItem(plainSelect.getFromItem()); + if (plainSelect.getJoins() != null) { + plainSelect.getJoins().forEach(join -> processFromItem(join.getRightItem())); + } + } + + @Override + public void visit(SetOperationList setOpList) { + for (SelectBody selectBody : setOpList.getSelects()) { + selectBody.accept(this); + } + } + + @Override + public void visit(WithItem withItem) { + withItem.getSubSelect().getSelectBody().accept(this); + } + }); + } + }); + return statement.toString(); + } catch (JSQLParserException e) { + throw new ConvertException("Your sql cannot be parsed to get db and table, sql: " + midTableAction); + } + } + + private void processFromItem(FromItem fromItem) { + if (fromItem instanceof Table) { + Table table = (Table) fromItem; + modifyTable(table); + } else if (fromItem instanceof SubSelect) { + SubSelect subSelect = (SubSelect) fromItem; + subSelect.getSelectBody().accept(new SelectVisitorAdapter() { + @Override + public void visit(PlainSelect plainSelect) { + processFromItem(plainSelect.getFromItem()); + if (plainSelect.getJoins() != null) { + plainSelect.getJoins().forEach(join -> processFromItem(join.getRightItem())); + } + } + + @Override + public void visit(SetOperationList setOpList) { + for (SelectBody selectBody : setOpList.getSelects()) { + selectBody.accept(this); + } + } + + 
@Override + public void visit(WithItem withItem) { + withItem.getSubSelect().getSelectBody().accept(this); + } + }); + } + } + + private void modifyTable(Table table) { + String originalTableName = table.getFullyQualifiedName(); + if (originalTableName.contains(".")) { + String[] parts = originalTableName.split("\\.", 2); + String dbName = parts[0]; + String tableName = parts[1]; + table.setSchemaName("hive." + dbName); + table.setName(tableName); + } + } + private List dropHiveTable(Rule rule) { List codes = new ArrayList<>(); LOGGER.info("Drop fps temp table after select."); @@ -622,7 +974,7 @@ public List generateTempHiveTable(Rule rule, String user) { * @param runDate * @param dataSourceMysqlConnect * @param midTableReUse - * @param unionAllForSaveResult + * @param unionWay * @param leftCols * @param rightCols * @param complexCols @@ -634,8 +986,10 @@ public List generateTempHiveTable(Rule rule, String user) { * @throws RuleVariableNotFoundException */ private List generateSparkSqlByTask(DataQualityJob job, Rule rule, Date date, String applicationId, String midTableName, String createTime - , StringBuilder partition, Map execParams, String runDate, Map>> dataSourceMysqlConnect, String user - , boolean midTableReUse, boolean unionAllForSaveResult, List leftCols, List rightCols, List complexCols, String createUser, boolean shareConnect, String shareFromPart) throws ConvertException, RuleVariableNotSupportException, RuleVariableNotFoundException, UnExpectedRequestException, MetaDataAcquireFailedException { + , StringBuilder partition, Map execParams, String runDate, String runToday, Map>> dataSourceMysqlConnect, String user + , boolean midTableReUse, int unionWay, List leftCols, List rightCols, List complexCols + , String createUser, boolean shareConnect, String shareFromPart) + throws ConvertException, RuleVariableNotSupportException, RuleVariableNotFoundException, UnExpectedRequestException, MetaDataAcquireFailedException { List sqlList = new ArrayList<>(); Map 
filters = new HashMap<>(2); @@ -644,15 +998,36 @@ private List generateSparkSqlByTask(DataQualityJob job, Rule rule, Date // Get SQL from template after remove '\n' String templateMidTableAction = rule.getTemplate().getMidTableAction().replace("\n", " "); + // Replace execution variable parameters + if(compareProjectName(rule) || intellectCheckFieldsProjectName.equals(rule.getProject().getName())) { + if (!MapUtils.isEmpty(execParams)) { + for (Map.Entry< String, String > entry : execParams.entrySet()) { + String expressKey = entry.getKey(); + String expressValue = entry.getValue(); + templateMidTableAction = templateMidTableAction.replace("${" + expressKey + "}", expressValue); + } + if (execParams.containsKey("partition_attr") && execParams.containsKey("partition_day")) { + String filter = ""; + if (execParams.get("partition_day").contains(",")) { + filter = execParams.get("partition_attr") + " in (" + execParams.get("partition_day") + ")"; + } else { + filter = execParams.get("partition_attr") + " = " + execParams.get("partition_day"); + } + templateMidTableAction = templateMidTableAction.replace("${partition_filter}", filter); + } + } + } String templateEnName = StringUtils.isNotEmpty(rule.getTemplate().getEnName()) ? 
rule.getTemplate().getEnName() : "defaultCheckDF"; if (MUL_SOURCE_RULE.intValue() == rule.getRuleType()) { - templateMidTableAction = getMultiDatasourceFiltesAndUpdateMidTableAction(rule, templateMidTableAction, date, filters); + templateMidTableAction = getMultiDatasourceFiltesAndUpdateMidTableAction(rule, templateMidTableAction, date, filters, leftCols, rightCols, complexCols, job.getIndex(), + runDate, runToday, false); } else if (CUSTOM_RULE.intValue() == rule.getRuleType()) { - templateMidTableAction = customMidTableActionUpdate(rule, templateMidTableAction, date, execParams, partition, ruleMetricMap); + templateMidTableAction = customMidTableActionUpdate(rule, templateMidTableAction, date, execParams, partition, ruleMetricMap, runDate, runToday, EngineTypeEnum.SPARK_ENGINE.getMessage()); templateEnName = "customCheckDF"; } + // Get statistics meta List statisticsRuleVariables = rule.getRuleVariables().stream().filter(ruleVariable -> ruleVariable.getInputActionStep().equals(InputActionStepEnum.STATISTICS_ARG.getCode())).collect(Collectors.toList()); // Get select input meta @@ -667,9 +1042,8 @@ private List generateSparkSqlByTask(DataQualityJob job, Rule rule, Date Map dbTableMap = new HashMap<>(4); // Get column and filter StringBuilder realColumn = new StringBuilder(); - StringBuilder realFilter = new StringBuilder(); // Get template sql and replace all replaceholders - String midTableAction = replaceVariable(templateMidTableAction, inputMetaRuleVariables, partition.toString(), realFilter, realColumn, dbTableMap, date, createUser); + String midTableAction = replaceVariable(templateMidTableAction, inputMetaRuleVariables, partition.toString(), realColumn, dbTableMap, date, rule.getStandardValueVersionId(), createUser, shareFromPart, runDate, runToday, EngineTypeEnum.SPARK_ENGINE.getMessage()); // Prepare for multiple rule List> sourceConnect = new ArrayList<>(); @@ -677,14 +1051,19 @@ private List generateSparkSqlByTask(DataQualityJob job, Rule rule, Date 
prepareDecrptedConnectParamForMultipleRule(sourceConnect, targetConnect, dataSourceMysqlConnect, rule); Map selectResult = new LinkedHashMap<>(rule.getRuleDataSources().size()); - String partOfVariableName = templateEnName.replace(" ", "") + SpecCharEnum.EQUAL.getValue() + rule.getName(); - handleRuleSelectSql(rule, midTableName, partition, partOfVariableName, runDate, dataSourceMysqlConnect, sqlList, filters, dbTableMap, midTableAction, sourceConnect, targetConnect, selectResult, midTableReUse, unionAllForSaveResult, leftCols, rightCols, complexCols, shareConnect, shareFromPart); + String result = checkRuleNameWhetherContainSpecialCharacters(rule.getName()); + String partOfVariableName = templateEnName.replace(" ", "") + SpecCharEnum.EQUAL.getValue() + result; + + formatEnvNameForSpark(sourceConnect); + formatEnvNameForSpark(targetConnect); + + handleRuleSelectSql(rule, midTableName, partition, partOfVariableName, runDate, runToday, dataSourceMysqlConnect, sqlList, filters, dbTableMap, midTableAction, sourceConnect, targetConnect, selectResult, midTableReUse, unionWay, leftCols, rightCols, complexCols, shareConnect, shareFromPart, execParams, job.getIndex(), job.getEngineType()); Set templateMidTableInputMetas = rule.getTemplate().getTemplateMidTableInputMetas(); boolean saveNewValue = templateMidTableInputMetas.stream().anyMatch(templateMidTableInputMeta -> Boolean.TRUE.equals(templateMidTableInputMeta.getWhetherNewValue())); boolean numRangeNewValue = saveNewValue && templateMidTableInputMetas.stream().anyMatch(templateMidTableInputMeta -> TemplateInputTypeEnum.INTERMEDIATE_EXPRESSION.getCode().equals(templateMidTableInputMeta.getInputType())); boolean enumListNewValue = saveNewValue && templateMidTableInputMetas.stream().anyMatch(templateMidTableInputMeta -> TemplateInputTypeEnum.LIST.getCode().equals(templateMidTableInputMeta.getInputType()) || TemplateInputTypeEnum.STANDARD_VALUE_EXPRESSION.getCode().equals(templateMidTableInputMeta.getInputType())); - 
sqlList.addAll(saveStatisticAndSaveMySqlSentence(rule.getWorkFlowVersion() != null ? rule.getWorkFlowVersion() : "", rule.getId(), ruleMetricMap, rule.getTemplate().getStatisticAction(), applicationId, statisticsRuleVariables, createTime, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, user, realColumn, enumListNewValue, numRangeNewValue, selectResult, unionAllForSaveResult)); + sqlList.addAll(saveStatisticAndSaveMySqlSentence(rule.getWorkFlowVersion() != null ? rule.getWorkFlowVersion() : "", rule.getId(), ruleMetricMap, rule.getTemplate().getStatisticAction(), applicationId, job.getTaskId(), statisticsRuleVariables, createTime, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, runToday, user, realColumn, enumListNewValue, numRangeNewValue, selectResult, unionWay)); job.setResultNum(selectResult.size()); return sqlList; } @@ -725,57 +1104,124 @@ private Map collectRuleMetric(Rule rule) { return ruleMetricMap; } - private void handleRuleSelectSql(Rule rule, String midTableName, StringBuilder partition, String partOfVariableName, String runDate, Map>> dataSourceMysqlConnect - , List sqlList, Map filters, Map dbTableMap, String midTableAction, List> sourceConnect, List> targetConnect - , Map selectResult, boolean midTableReUse, boolean unionAllForSaveResult, List leftCols, List rightCols, List complexCols, boolean shareConnect, String shareFromPart) throws UnExpectedRequestException { + private void formatEnvNameForSpark(List> sourceConnect) { + if (CollectionUtils.isEmpty(sourceConnect)) { + return; + } + for (Map connectMap: sourceConnect) { + if (connectMap.containsKey("envName")) { + String envNameForSpark = sparkEnvNameAdapter((String) connectMap.get("envName")); + connectMap.put("envName", envNameForSpark); + } + } + } + + /** + * before: UR1-10.108.192.127-15202(epccmaindb_G-DCN_9F1_set_2) + * after: UR11010819212715202 + * @param envName + * @return + */ + private String sparkEnvNameAdapter(String envName) { + if 
(StringUtils.isBlank(envName)) { + return envName; + } + String input = envName; + if (input.indexOf("(") != -1 && input.indexOf(")") != -1) { + input = StringUtils.substring(input, 0, input.indexOf("(")); + } + // Remove special characters + return input.replaceAll("[^a-zA-Z0-9_]", ""); + } + - boolean systemCompareTemplate = rule.getTemplate().getId().longValue() == MUL_SOURCE_ACCURACY_TEMPLATE_ID.longValue() || rule.getTemplate().getId().longValue() == MUL_SOURCE_FULL_TEMPLATE_ID.longValue(); + private void handleRuleSelectSql(Rule rule, String midTableName, StringBuilder partition, String partOfVariableName, String runDate, String runToday, Map>> dataSourceMysqlConnect + , List sqlList, Map filters, Map dbTableMap, String midTableAction, List> sourceConnect, List> targetConnect + , Map selectResult, boolean midTableReUse, int unionWay, List leftCols, List rightCols, List complexCols, boolean shareConnect, String shareFromPart + , Map execParams, Integer dataSourceIndex, String engineType) throws UnExpectedRequestException { + String templateEnName = rule.getTemplate().getEnName(); + boolean systemCompareTemplate = (QualitisConstants.MULTI_SOURCE_ACCURACY_TEMPLATE_NAME.equals(templateEnName) && RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType())) || (QualitisConstants.MULTI_SOURCE_FULL_TEMPLATE_NAME.equals(templateEnName) && RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType())) + || (QualitisConstants.MULTI_CLUSTER_CUSTOM_TEMPLATE_NAME.equals(templateEnName) && RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType())) + || (QualitisConstants.SINGLE_CLUSTER_CUSTOM_TEMPLATE_NAME.equals(templateEnName) && RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType())) + || (QualitisConstants.MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(templateEnName) && 
RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType())) + || (QualitisConstants.SINGLE_SOURCE_ACROSS_TEMPLATE_NAME.equals(templateEnName) && RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType())); + + + if ((UnionWayEnum.COLLECT_AFTER_CALCULATE.getCode().equals(unionWay) || UnionWayEnum.NO_COLLECT_CALCULATE.getCode().equals(unionWay)) + && CollectionUtils.isNotEmpty(sourceConnect) && CollectionUtils.isNotEmpty(targetConnect) && sourceConnect.size() != targetConnect.size()) { + throw new UnExpectedRequestException("Source envs'size can not be different from target envs'size."); + } + + boolean unionAllForSaveResult = UnionWayEnum.COLLECT_AFTER_CALCULATE.getCode().equals(unionWay); if (systemCompareTemplate && dbTableMap.size() > 0) { - if (rule.getTemplate().getId().longValue() == MUL_SOURCE_ACCURACY_TEMPLATE_ID.longValue()) { + if (QualitisConstants.MULTI_SOURCE_ACCURACY_TEMPLATE_NAME.equals(templateEnName)) { if (CollectionUtils.isNotEmpty(sourceConnect) && CollectionUtils.isNotEmpty(targetConnect)) { - for (Iterator> sourceIterator = sourceConnect.iterator(), targetIterator = targetConnect.iterator(); sourceIterator.hasNext() && targetIterator.hasNext(); ) { - Map sourceConnectMap = sourceIterator.next(); - Map targetConnectMap = targetIterator.next(); - String sourceEnvName = (String) sourceConnectMap.get("envName"); - String targetEnvName = (String) targetConnectMap.get("envName"); - if (StringUtils.isEmpty(sourceEnvName) || StringUtils.isEmpty(targetEnvName)) { - continue; + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceAccuracyFromSqlList(midTableAction, dbTableMap, filters + , partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + , sourceConnect, targetConnect, selectResult)); + } else { + for (Iterator> sourceIterator = sourceConnect.iterator(), targetIterator = targetConnect.iterator(); 
sourceIterator.hasNext() && targetIterator.hasNext(); ) { + Map sourceConnectMap = sourceIterator.next(); + Map targetConnectMap = targetIterator.next(); + String sourceEnvName = (String) sourceConnectMap.get("envName"); + String targetEnvName = (String) targetConnectMap.get("envName"); + if (StringUtils.isEmpty(sourceEnvName) || StringUtils.isEmpty(targetEnvName)) { + continue; + } + sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName + targetEnvName, sourceConnectMap, targetConnectMap, selectResult)); } - sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName + targetEnvName, sourceConnectMap, targetConnectMap, selectResult)); } String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); } else if (CollectionUtils.isNotEmpty(sourceConnect) && CollectionUtils.isEmpty(targetConnect)) { - for (Iterator> sourceIterator = sourceConnect.iterator(); sourceIterator.hasNext(); ) { - Map sourceConnectMap = sourceIterator.next(); - String sourceEnvName = (String) sourceConnectMap.get("envName"); - if (StringUtils.isEmpty(sourceEnvName)) { - continue; + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceAccuracyFromSqlList(midTableAction, dbTableMap, filters + , partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + , sourceConnect, targetConnect, selectResult)); + } else { + for (Iterator> sourceIterator = sourceConnect.iterator(); sourceIterator.hasNext(); ) { + Map sourceConnectMap = sourceIterator.next(); + String sourceEnvName = (String) sourceConnectMap.get("envName"); + if (StringUtils.isEmpty(sourceEnvName)) { + continue; + } + 
sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName, sourceIterator.next(), null, selectResult)); } - sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName, sourceIterator.next(), null, selectResult)); } String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); } else if (CollectionUtils.isNotEmpty(targetConnect) && CollectionUtils.isEmpty(sourceConnect)) { - for (Iterator> targetIterator = targetConnect.iterator(); targetIterator.hasNext(); ) { - Map targetConnectMap = targetIterator.next(); - String targetEnvName = (String) targetConnectMap.get("envName"); - if (StringUtils.isEmpty(targetEnvName)) { - continue; + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceAccuracyFromSqlList(midTableAction, dbTableMap, filters + , partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + , sourceConnect, targetConnect, selectResult)); + } else { + for (Iterator> targetIterator = targetConnect.iterator(); targetIterator.hasNext(); ) { + Map targetConnectMap = targetIterator.next(); + String targetEnvName = (String) targetConnectMap.get("envName"); + if (StringUtils.isEmpty(targetEnvName)) { + continue; + } + sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + targetEnvName, null, targetIterator.next(), selectResult)); } - sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + targetEnvName, null, targetIterator.next(), selectResult)); } String lastVariable = 
getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); } else { - sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], null, null, selectResult)); - - if (Boolean.TRUE.equals(rule.getTemplate().getSaveMidTable())) { + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceAccuracyFromSqlList(midTableAction, dbTableMap, filters + , partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + , sourceConnect, targetConnect, selectResult)); + } else { + sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], null, null, selectResult)); + } + if ((StringUtils.isNotEmpty(rule.getAbnormalDatabase())) && ! MID_TABLE_NAME_PATTERN.matcher(midTableName).find()) { sqlList.addAll(getSaveMidTableSentenceSettings()); - sqlList.addAll(getSaveMidTableSentence(midTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, midTableReUse)); + sqlList.addAll(getSaveMidTableSentence(midTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, runToday, midTableReUse)); } } - } else if (rule.getTemplate().getId().longValue() == MUL_SOURCE_FULL_TEMPLATE_ID.longValue()) { + } else if (QualitisConstants.MULTI_SOURCE_FULL_TEMPLATE_NAME.equals(rule.getTemplate().getEnName())) { sqlList.add("val UUID = java.util.UUID.randomUUID.toString"); // Import sql function. 
sqlList.addAll(getImportSql()); @@ -799,73 +1245,174 @@ private void handleRuleSelectSql(Rule rule, String midTableName, StringBuilder p } } if (CollectionUtils.isNotEmpty(sourceConnect) && CollectionUtils.isNotEmpty(targetConnect)) { - for (Iterator> sourceIterator = sourceConnect.iterator(), targetIterator = targetConnect.iterator(); sourceIterator.hasNext() && targetIterator.hasNext(); ) { - Map sourceConnectMap = sourceIterator.next(); - Map targetConnectMap = targetIterator.next(); - String sourceEnvName = (String) sourceConnectMap.get("envName"); - String targetEnvName = (String) targetConnectMap.get("envName"); - if (StringUtils.isEmpty(sourceEnvName) || StringUtils.isEmpty(targetEnvName)) { - continue; + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getSpecialTransformSqlList(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], partition.toString(), filters, Strings.join(columns, ',') + , sourceConnect, targetConnect, rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); + } else { + for (Iterator> sourceIterator = sourceConnect.iterator(), targetIterator = targetConnect.iterator(); sourceIterator.hasNext() && targetIterator.hasNext(); ) { + Map sourceConnectMap = sourceIterator.next(); + Map targetConnectMap = targetIterator.next(); + String sourceEnvName = (String) sourceConnectMap.get("envName"); + String targetEnvName = (String) targetConnectMap.get("envName"); + if (StringUtils.isEmpty(sourceEnvName) || StringUtils.isEmpty(targetEnvName)) { + continue; + } + sqlList.addAll(getSpecialTransformSql(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName + targetEnvName, partition.toString(), filters, Strings.join(columns, ',') + , sourceConnectMap, targetConnectMap, rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); } - sqlList.addAll(getSpecialTransformSql(dbTableMap, 
partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName + targetEnvName, partition.toString(), filters, Strings.join(columns, ',') - , sourceIterator.next(), targetIterator.next(), rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); } String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); } else if (CollectionUtils.isNotEmpty(sourceConnect) && CollectionUtils.isEmpty(targetConnect)) { - for (Iterator> sourceIterator = sourceConnect.iterator(); sourceIterator.hasNext(); ) { - Map sourceConnectMap = sourceIterator.next(); - String sourceEnvName = (String) sourceConnectMap.get("envName"); - if (StringUtils.isEmpty(sourceEnvName)) { - continue; + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getSpecialTransformSqlList(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], partition.toString(), filters, Strings.join(columns, ',') + , sourceConnect, null, rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); + } else { + for (Iterator> sourceIterator = sourceConnect.iterator(); sourceIterator.hasNext(); ) { + Map sourceConnectMap = sourceIterator.next(); + String sourceEnvName = (String) sourceConnectMap.get("envName"); + if (StringUtils.isEmpty(sourceEnvName)) { + continue; + } + sqlList.addAll(getSpecialTransformSql(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName, partition.toString(), filters, Strings.join(columns, ',') + , sourceIterator.next(), null, rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); } - sqlList.addAll(getSpecialTransformSql(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName, partition.toString(), filters, Strings.join(columns, ',') - , sourceIterator.next(), 
null, rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); } String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); } else if (CollectionUtils.isEmpty(sourceConnect) && CollectionUtils.isNotEmpty(targetConnect)) { - for (Iterator> targetIterator = targetConnect.iterator(); targetIterator.hasNext(); ) { - Map targetConnectMap = targetIterator.next(); - String targetEnvName = (String) targetConnectMap.get("envName"); - if (StringUtils.isEmpty(targetEnvName)) { - continue; + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getSpecialTransformSqlList(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], partition.toString(), filters, Strings.join(columns, ',') + , null, targetConnect, rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); + } else { + for (Iterator> targetIterator = targetConnect.iterator(); targetIterator.hasNext(); ) { + Map targetConnectMap = targetIterator.next(); + String targetEnvName = (String) targetConnectMap.get("envName"); + if (StringUtils.isEmpty(targetEnvName)) { + continue; + } + sqlList.addAll(getSpecialTransformSql(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + targetEnvName, partition.toString(), filters, Strings.join(columns, ',') + , null, targetIterator.next(), rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); } - sqlList.addAll(getSpecialTransformSql(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + targetEnvName, partition.toString(), filters, Strings.join(columns, ',') - , null, targetIterator.next(), rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); } - String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), 
partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); } else { sqlList.addAll(getSpecialTransformSql(dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], partition.toString(), filters, Strings.join(columns, ',') - , null, null, rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); + , null, null, rule.getContrastType(), leftCols, rightCols, complexCols, selectResult)); + + if (StringUtils.isNotEmpty(midTableName) && ! MID_TABLE_NAME_PATTERN.matcher(midTableName).find()) { + sqlList.addAll(getSaveMidTableSentenceSettings()); + sqlList.addAll(getSaveMidTableSentence(midTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, runToday, midTableReUse)); + } + } + } else if (QualitisConstants.MULTI_CLUSTER_CUSTOM_TEMPLATE_NAME.equals(rule.getTemplate().getEnName()) || QualitisConstants.SINGLE_CLUSTER_CUSTOM_TEMPLATE_NAME.equals(rule.getTemplate().getEnName())) { + if (CollectionUtils.isNotEmpty(sourceConnect) && CollectionUtils.isNotEmpty(targetConnect)) { + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceCustomFromSqlList(midTableAction, dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] , sourceConnect, targetConnect, selectResult)); + } else { + for (Iterator> sourceIterator = sourceConnect.iterator(), targetIterator = targetConnect.iterator(); sourceIterator.hasNext() && targetIterator.hasNext(); ) { + Map sourceConnectMap = sourceIterator.next(); + Map targetConnectMap = targetIterator.next(); + String sourceEnvName = (String) sourceConnectMap.get("envName"); + String targetEnvName = (String) targetConnectMap.get("envName"); + if (StringUtils.isEmpty(sourceEnvName) || StringUtils.isEmpty(targetEnvName)) { + continue; + } + sqlList.addAll(getMultiSourceCustomFromSql(midTableAction, dbTableMap, 
partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName + targetEnvName, sourceConnectMap, targetConnectMap, selectResult)); + } + } - if (Boolean.TRUE.equals(rule.getTemplate().getSaveMidTable())) { + String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); + unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); + } else if (CollectionUtils.isNotEmpty(sourceConnect) && CollectionUtils.isEmpty(targetConnect)) { + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceCustomFromSqlList(midTableAction, dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] , sourceConnect, targetConnect, selectResult)); + } else { + for (Iterator> sourceIterator = sourceConnect.iterator(); sourceIterator.hasNext(); ) { + Map sourceConnectMap = sourceIterator.next(); + String sourceEnvName = (String) sourceConnectMap.get("envName"); + if (StringUtils.isEmpty(sourceEnvName)) { + continue; + } + sqlList.addAll(getMultiSourceCustomFromSql(midTableAction, dbTableMap + , partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName + , sourceConnectMap, null, selectResult)); + } + } + String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); + unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); + } else if (CollectionUtils.isNotEmpty(targetConnect) && CollectionUtils.isEmpty(sourceConnect)) { + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceCustomFromSqlList(midTableAction, dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] , sourceConnect, targetConnect, selectResult)); + } else { + for (Iterator> targetIterator = targetConnect.iterator(); targetIterator.hasNext(); ) { + Map 
targetConnectMap = targetIterator.next(); + String targetEnvName = (String) targetConnectMap.get("envName"); + if (StringUtils.isEmpty(targetEnvName)) { + continue; + } + sqlList.addAll(getMultiSourceCustomFromSql(midTableAction, dbTableMap + , partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + targetEnvName + , null, targetConnectMap, selectResult)); + } + } + String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); + unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); + } else { + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceCustomFromSqlList(midTableAction, dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] , sourceConnect, targetConnect, selectResult)); + } else { + sqlList.addAll(getMultiSourceCustomFromSql(midTableAction, dbTableMap, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], null, null, selectResult)); + } + if (StringUtils.isNotEmpty(midTableName) && ! 
MID_TABLE_NAME_PATTERN.matcher(midTableName).find()) { sqlList.addAll(getSaveMidTableSentenceSettings()); - sqlList.addAll(getSaveMidTableSentence(midTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, midTableReUse)); + sqlList.addAll(getSaveMidTableSentence(midTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, runToday, midTableReUse)); } } + } else if (QualitisConstants.MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(rule.getTemplate().getEnName()) || QualitisConstants.SINGLE_SOURCE_ACROSS_TEMPLATE_NAME.equals(rule.getTemplate().getEnName()) ) { + sqlList.add("// 生成规则 " + partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + " 的校验查询代码"); + if (QualitisConstants.LEFT_INDEX.equals(dataSourceIndex) && CollectionUtils.isNotEmpty(sourceConnect)) { + getTableRowsWithEnvs(partOfVariableName, sourceConnect, midTableAction, sqlList); + } else if (QualitisConstants.RIGHT_INDEX.equals(dataSourceIndex) && CollectionUtils.isNotEmpty(targetConnect)) { + getTableRowsWithEnvs(partOfVariableName, targetConnect, midTableAction, sqlList); + } else { + sqlList.add(getSparkSqlSentence(midTableAction, partOfVariableName, "", "", "", RuleTypeEnum.CUSTOM_RULE.getCode().equals(rule.getRuleType()))); + } + String variableFormer = getVariableNameByRule(partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[0], partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); + String variableLatter = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); + formatSchema(sqlList, partOfVariableName, variableFormer, variableLatter); } else { if (CollectionUtils.isNotEmpty(sourceConnect) && CollectionUtils.isNotEmpty(targetConnect)) { - for (Iterator> sourceIterator = sourceConnect.iterator(), targetIterator = targetConnect.iterator(); sourceIterator.hasNext() && targetIterator.hasNext(); ) { - Map sourceConnectMap = sourceIterator.next(); - Map targetConnectMap = 
targetIterator.next(); - String sourceEnvName = (String) sourceConnectMap.get("envName"); - String targetEnvName = (String) targetConnectMap.get("envName"); - if (StringUtils.isEmpty(sourceEnvName) || StringUtils.isEmpty(targetEnvName)) { - continue; + if (UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceAccuracyFromSqlList(midTableAction, dbTableMap, filters + , partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + , sourceConnect, targetConnect, selectResult)); + } else { + for (Iterator> sourceIterator = sourceConnect.iterator(), targetIterator = targetConnect.iterator(); sourceIterator.hasNext() && targetIterator.hasNext(); ) { + Map sourceConnectMap = sourceIterator.next(); + Map targetConnectMap = targetIterator.next(); + String sourceEnvName = (String) sourceConnectMap.get("envName"); + String targetEnvName = (String) targetConnectMap.get("envName"); + if (StringUtils.isEmpty(sourceEnvName) || StringUtils.isEmpty(targetEnvName)) { + continue; + } + sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName + targetEnvName, sourceConnectMap, targetConnectMap, selectResult)); } - sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + sourceEnvName + targetEnvName, sourceIterator.next(), targetIterator.next(), selectResult)); } String lastVariable = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + "Last"); unionAllSaveResult(lastVariable, selectResult, sqlList, unionAllForSaveResult); } else { - sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], null, null, selectResult)); - - if (Boolean.TRUE.equals(rule.getTemplate().getSaveMidTable())) { + if 
(UnionWayEnum.CALCULATE_AFTER_COLLECT.getCode().equals(unionWay)) { + sqlList.addAll(getMultiSourceAccuracyFromSqlList(midTableAction, dbTableMap, filters + , partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + , null, null, selectResult)); + } else { + sqlList.addAll(getMultiSourceAccuracyfromSql(midTableAction, dbTableMap, filters, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], null, null, selectResult)); + } + if (StringUtils.isNotEmpty(midTableName) && ! MID_TABLE_NAME_PATTERN.matcher(midTableName).find()) { sqlList.addAll(getSaveMidTableSentenceSettings()); - sqlList.addAll(getSaveMidTableSentence(midTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, midTableReUse)); + sqlList.addAll(getSaveMidTableSentence(midTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, runToday, midTableReUse)); } } } @@ -904,8 +1451,47 @@ private void handleRuleSelectSql(Rule rule, String midTableName, StringBuilder p } else { decryptedMysqlInfo = decryptMysqlInfo(dataSourceMysqlConnect.get(ruleDataSource.getId())); } - sqlList.addAll(generateSparkSqlAndSaveSentence(midTableAction, midTableName, rule, partOfVariableName, decryptedMysqlInfo, runDate, selectResult, midTableReUse, unionAllForSaveResult, filterFields.toString(), tableEnvs, shareConnect, shareFromPart)); + sqlList.addAll(generateSparkSqlAndSaveSentence(midTableAction, midTableName, rule, partOfVariableName, decryptedMysqlInfo, runDate, runToday, selectResult, midTableReUse, unionAllForSaveResult, filterFields.toString(), tableEnvs, shareConnect, shareFromPart, execParams)); + } + } + + private void getTableRowsWithEnvs(String partOfVariableName, List> connect, String midTableAction, List sqlList) { + List varList = new ArrayList<>(connect.size()); + String prefix = getVariableNameByRule(partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[0], partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); + for (Iterator> connectIterator = 
connect.iterator(); connectIterator.hasNext(); ) { + Map connectMap = connectIterator.next(); + String envName = (String) connectMap.get("envName"); + if (StringUtils.isEmpty(envName)) { + continue; + } + + String host = (String) connectMap.get("host"); + String port = (String) connectMap.get("port"); + String user = (String) connectMap.get("username"); + String pwd = (String) connectMap.get("password"); + String dataType = (String) connectMap.get("dataType"); + String var = prefix + "_" + envName; + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, midTableAction).replace(VARIABLE_NAME_PLACEHOLDER, var) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + + sqlList.add(str); + varList.add(var); + } + StringBuilder stringBuilder = new StringBuilder("val " + prefix + " = "); + boolean firstVar = true; + for (String varName : varList) { + if (firstVar) { + stringBuilder.append(varName); + firstVar = false; + } else { + stringBuilder.append(".unionAll(").append(varName).append(")"); + } } + sqlList.add(stringBuilder.toString()); } private String preSelectEnvsSql(RuleDataSource ruleDataSource, List parsedRuleDataSource, List> tableEnvs, Map>> dataSourceMysqlConnect @@ -984,6 +1570,7 @@ private List> decryptMysqlInfo(List> con currentConnectParams.put("password", password); } else { currentConnectParams.put("password", CryptoUtils.decode(password)); + currentConnectParams.put("needDecode", "false"); } connParamMapsReal.add(currentConnectParams); } @@ -993,31 +1580,31 @@ private List> decryptMysqlInfo(List> con } private Map getUserNameAndPassword(Map connectParams) throws UnExpectedRequestException { -// String appId = (String) connectParams.get("appid"); -// String objectId = (String) connectParams.get("objectid"); -// String timestamp = (String) connectParams.get("timestamp"); -// -// String dk = 
(String) connectParams.get("dk"); -// String datasourceInf = LocalNetwork.getNetCardName(); -// AccountInfoObtainer obtainer = new AccountInfoObtainer(dpmConfig.getDatasourceServer(), dpmConfig.getDatasourcePort(), datasourceInf); -// obtainer.init(); -// try { -// AccountInfoSys accountInfoSys = obtainer.getAccountInfo_system(dk, timestamp, dpmConfig.getDatasourceSystemAppId() -// , dpmConfig.getDatasourceSystemAppKey(), appId, objectId); -// String userName = accountInfoSys.getName(); -// String passwordTest = accountInfoSys.getPassword(); -// -// connectParams.put("username", userName); -// connectParams.put("password", passwordTest); -// } catch (AccountInfoObtainException e) { -// LOGGER.error(e.getMessage(), e); -// throw new UnExpectedRequestException("{&FAILED_TO_GET_USERNAME_PASSWORD}", 500); -// } + String appId = (String) connectParams.get("appid"); + String objectId = (String) connectParams.get("objectid"); + String timestamp = (String) connectParams.get("timestamp"); + + String dk = (String) connectParams.get("dk"); + String datasourceInf = LocalNetwork.getNetCardName(); + AccountInfoObtainer obtainer = new AccountInfoObtainer(dpmConfig.getDatasourceServer(), dpmConfig.getDatasourcePort(), datasourceInf, false); + obtainer.init(); + try { + AccountInfoSys accountInfoSys = obtainer.getAccountInfo_system(dk, timestamp, dpmConfig.getDatasourceSystemAppId() + , dpmConfig.getDatasourceSystemAppKey(), appId, objectId, "fakeIp", 0); + String userName = accountInfoSys.getName(); + String passwordTest = accountInfoSys.getPassword(); + + connectParams.put("username", userName); + connectParams.put("password", passwordTest); + } catch (Exception e) { + LOGGER.error(e.getMessage(), e); + throw new UnExpectedRequestException("{&FAILED_TO_GET_USERNAME_PASSWORD}", 500); + } return connectParams; } private String customMidTableActionUpdate(Rule rule, String templateMidTableAction, Date date, Map execParams, - StringBuilder partition, Map ruleMetricMap) throws 
UnExpectedRequestException { + StringBuilder partition, Map ruleMetricMap, String runDate, String runToday, String engineType) throws UnExpectedRequestException { if (StringUtils.isNotBlank(rule.getCsId())) { templateMidTableAction = templateMidTableAction.replace(RuleConstraintEnum.CUSTOM_DATABASE_PREFIS.getValue().concat(SpecCharEnum.PERIOD.getValue()), ""); } @@ -1031,6 +1618,15 @@ private String customMidTableActionUpdate(Rule rule, String templateMidTableActi String value = entry.getValue(); templateMidTableAction = templateMidTableAction.replace("${" + key + "}", value); } + if (StringUtils.isNotBlank(runDate)) { + templateMidTableAction = templateMidTableAction.replace("${run_date}", runDate); +// templateMidTableAction = templateMidTableAction.replace("${run_date_std}", runDate); + } + if (StringUtils.isNotBlank(runToday)) { + templateMidTableAction = templateMidTableAction.replace("${run_today}", runToday); +// templateMidTableAction = templateMidTableAction.replace("${run_today_std}", runToday); + } + templateMidTableAction = DateExprReplaceUtil.replaceRunDate(date, templateMidTableAction); Set ruleMetricNames = ruleMetricMap.keySet(); @@ -1038,75 +1634,339 @@ private String customMidTableActionUpdate(Rule rule, String templateMidTableActi String cleanRuleMetricName = ruleMetricName.replace("-", "_"); templateMidTableAction = templateMidTableAction.replace(ruleMetricName, cleanRuleMetricName); } - + templateMidTableAction = MyStringEscaper.escapeStringForQuotes(templateMidTableAction); + if (EngineTypeEnum.TRINO_ENGINE.getMessage().equals(engineType)) { + Set ruleDataSourceSet = rule.getRuleDataSources(); + if (CollectionUtils.isNotEmpty(ruleDataSourceSet)) { + for (RuleDataSource ruleDataSource : ruleDataSourceSet) { + if (StringUtils.isNotBlank(ruleDataSource.getDbName()) && StringUtils.isNotBlank(ruleDataSource.getTableName())) { + templateMidTableAction = templateMidTableAction.replace(SpecCharEnum.EMPTY.getValue() + ruleDataSource.getDbName() + 
SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + ruleDataSource.getTableName() + , SpecCharEnum.EMPTY.getValue() + "hive." + ruleDataSource.getDbName() + SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + ruleDataSource.getTableName()); + } + } + } + } return templateMidTableAction; } - private String getMultiDatasourceFiltesAndUpdateMidTableAction(Rule rule, String templateMidTableAction, Date date, Map filters) throws UnExpectedRequestException { + private String getMultiDatasourceFiltesAndUpdateMidTableAction(Rule rule, String templateMidTableAction, Date date, Map filters, List leftCols, List rightCols, List complexCols, Integer datasourceIndex, String runDate, String runToday, boolean trino) throws UnExpectedRequestException { Set ruleDataSources = rule.getRuleDataSources(); + if (datasourceIndex != null) { + return handleWithAcrossClusterDatasource(templateMidTableAction, rule.getRuleDataSources(), date, datasourceIndex, runDate, runToday); + } for (RuleDataSource ruleDataSource : ruleDataSources) { + String filter = ruleDataSource.getFilter(); + if (StringUtils.isNotEmpty(filter) && StringUtils.isNotBlank(runDate)) { + filter = filter.replace("${run_date}", runDate); +// filter = filter.replace("${run_date_std}", runDate); + } + if (StringUtils.isNotEmpty(filter) && StringUtils.isNotBlank(runToday)) { + filter = filter.replace("${run_today}", runToday); +// filter = filter.replace("${run_today_std}", runToday); + } + if (ruleDataSource.getDatasourceIndex().equals(0)) { - String leftFilter = ruleDataSource.getFilter(); - leftFilter = DateExprReplaceUtil.replaceFilter(date, leftFilter); - templateMidTableAction = templateMidTableAction.replace(FILTER_LEFT_PLACEHOLDER, leftFilter); - filters.put("left_table", leftFilter); + filter = DateExprReplaceUtil.replaceFilter(date, filter); + templateMidTableAction = templateMidTableAction.replace(FILTER_LEFT_PLACEHOLDER, filter); + filters.put("left_table", filter); } else { - String rightFilter = ruleDataSource.getFilter(); - 
rightFilter = DateExprReplaceUtil.replaceFilter(date, rightFilter); - templateMidTableAction = templateMidTableAction.replace(FILTER_RIGHT_PLACEHOLDER, rightFilter); - filters.put("right_table", rightFilter); + filter = DateExprReplaceUtil.replaceFilter(date, filter); + templateMidTableAction = templateMidTableAction.replace(FILTER_RIGHT_PLACEHOLDER, filter); + filters.put("right_table", filter); } } - if (rule.getTemplate().getId().longValue() != MUL_SOURCE_ACCURACY_TEMPLATE_ID.longValue()) { - return templateMidTableAction; - } - List ruleDataSourceMappings = rule.getRuleDataSourceMappings().stream() - .filter(ruleDataSourceMapping -> ruleDataSourceMapping.getMappingType() != null && ruleDataSourceMapping.getMappingType().equals(MappingTypeEnum.MATCHING_FIELDS.getCode())).collect(Collectors.toList()); - - if (CollectionUtils.isNotEmpty(ruleDataSourceMappings)) { - StringBuilder compareColumns = new StringBuilder(); - int indexCol = 1; - for (RuleDataSourceMapping ruleDataSourceMapping : ruleDataSourceMappings) { - compareColumns.append(ruleDataSourceMapping.getLeftStatement()).append(" AS ").append("col" + indexCol).append(", "); - indexCol++; - compareColumns.append(ruleDataSourceMapping.getRightStatement()).append(" AS ").append("col" + indexCol).append(", "); - indexCol++; - } - int index = templateMidTableAction.indexOf("CASE WHEN"); + StringBuilder leftConcat = new StringBuilder(); + List> leftConcatList = new ArrayList<>(); + for (String col : leftCols) { + if (trino) { + if (CollectionUtils.isEmpty(leftConcatList)) { + List leftSubConcatList = new ArrayList<>(); + leftSubConcatList.add("coalesce(cast(" + col + " as varchar), '')"); + leftConcatList.add(leftSubConcatList); + continue; + } - templateMidTableAction = new StringBuffer(templateMidTableAction).insert(index, compareColumns.toString()).toString(); - } - return templateMidTableAction; - } + if (leftConcatList.get(leftConcatList.size() - 1).size() < trinoColumnSize) { + 
leftConcatList.get(leftConcatList.size() - 1).add("coalesce(cast(" + col + " as varchar), '')"); + } else { + List leftSubConcatList = new ArrayList<>(); + leftSubConcatList.add("coalesce(cast(" + col + " as varchar), '')"); + leftConcatList.add(leftSubConcatList); + } - private String fillPartitionWithRuleConfiguration(StringBuilder partition, Rule rule, String templateMidTableAction, List inputMetaRuleVariables) { - if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.SINGLE_SOURCE_TEMPLATE.getCode())) { - partition.append(new ArrayList<>(rule.getRuleDataSources()).get(0).getFilter()); - } else if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.CUSTOM.getCode())) { - // Replace placeholder. - if (StringUtils.isNotEmpty(rule.getWhereContent())) { - partition.append(rule.getWhereContent()); + continue; } - } else if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode())) { - // Replace placeholder. - List filterVariable = inputMetaRuleVariables.stream().filter( - r -> r.getTemplateMidTableInputMeta().getInputType().equals(TemplateInputTypeEnum.COMPARISON_RESULTS_FOR_FILTER.getCode()) - ).collect(Collectors.toList()); - if (CollectionUtils.isNotEmpty(filterVariable)) { - partition.append(filterVariable.iterator().next().getValue()); + if (Boolean.TRUE.equals(taskDataSourceConfig.getHiveSortUdfOpen()) && CollectionUtils.isNotEmpty(complexCols) && complexCols.contains(col)) { + leftConcat.append("nvl(").append(taskDataSourceConfig.getHiveSortUdf()).append("(").append(col).append("),''),"); + continue; } + leftConcat.append("nvl(cast(").append(col).append(" as string),''),"); } - return templateMidTableAction; - } - + if (StringUtils.isNotBlank(leftConcat.toString())) { + leftConcat.deleteCharAt(leftConcat.length() - 1); + } + StringBuilder leftTrinoConcat = new StringBuilder(); + if (CollectionUtils.isNotEmpty(leftConcatList)) { + if (leftConcatList.size() == 1) { + 
leftTrinoConcat.append("md5(cast(concat("); + leftTrinoConcat.append(StringUtils.join(leftConcatList.iterator().next(), SpecCharEnum.COMMA.getValue())); + leftTrinoConcat.append(") as varbinary)) as line_md5"); + } else { + leftTrinoConcat.append("concat("); + for (List subLeftTrinoConcat : leftConcatList) { + StringBuilder innerLeftTrinoConcat = new StringBuilder(); + if (subLeftTrinoConcat.size() == 1) { + innerLeftTrinoConcat.append("md5(cast(").append(StringUtils.join(subLeftTrinoConcat, SpecCharEnum.COMMA.getValue())).append(" as varbinary))").append(SpecCharEnum.COMMA.getValue()); + } else { + innerLeftTrinoConcat.append("md5(cast(concat(").append(StringUtils.join(subLeftTrinoConcat, SpecCharEnum.COMMA.getValue())).append(") as varbinary))").append(SpecCharEnum.COMMA.getValue()); + } + leftTrinoConcat.append(innerLeftTrinoConcat.toString()); + } + leftTrinoConcat.deleteCharAt(leftTrinoConcat.length() - 1); + leftTrinoConcat.append(") as line_md5"); + } + } + + StringBuilder rightConcat = new StringBuilder(); + List> rightConcatList = new ArrayList<>(); + for (String col : rightCols) { + if (trino) { + if (CollectionUtils.isEmpty(rightConcatList)) { + List rightSubConcatList = new ArrayList<>(); + rightSubConcatList.add("coalesce(cast(" + col + " as varchar), '')"); + rightConcatList.add(rightSubConcatList); + continue; + } + + if (rightConcatList.get(rightConcatList.size() - 1).size() < trinoColumnSize) { + rightConcatList.get(rightConcatList.size() - 1).add("coalesce(cast(" + col + " as varchar), '')"); + } else { + List rightSubConcatList = new ArrayList<>(); + rightSubConcatList.add("coalesce(cast(" + col + " as varchar), '')"); + rightConcatList.add(rightSubConcatList); + } + continue; + } + if (Boolean.TRUE.equals(taskDataSourceConfig.getHiveSortUdfOpen()) && CollectionUtils.isNotEmpty(complexCols) && complexCols.contains(col)) { + rightConcat.append("nvl(").append(taskDataSourceConfig.getHiveSortUdf()).append("(").append(col).append("),''),"); + 
continue; + } + rightConcat.append("nvl(cast(").append(col).append(" as string),''),"); + } + if (StringUtils.isNotBlank(rightConcat.toString())) { + rightConcat.deleteCharAt(rightConcat.length() - 1); + } + StringBuilder rightTrinoConcat = new StringBuilder(); + if (CollectionUtils.isNotEmpty(rightConcatList)) { + if (rightConcatList.size() == 1) { + rightTrinoConcat.append("md5(cast(concat("); + rightTrinoConcat.append(StringUtils.join(rightConcatList.iterator().next(), SpecCharEnum.COMMA.getValue())); + rightTrinoConcat.append(") as varbinary)) as line_md5"); + } else { + rightTrinoConcat.append("concat("); + for (List subrightTrinoConcat : rightConcatList) { + StringBuilder innerrightTrinoConcat = new StringBuilder(); + if (subrightTrinoConcat.size() == 1) { + innerrightTrinoConcat.append("md5(cast(").append(StringUtils.join(subrightTrinoConcat, SpecCharEnum.COMMA.getValue())).append(" as varbinary))").append(SpecCharEnum.COMMA.getValue()); + } else { + innerrightTrinoConcat.append("md5(cast(concat(").append(StringUtils.join(subrightTrinoConcat, SpecCharEnum.COMMA.getValue())).append(") as varbinary))").append(SpecCharEnum.COMMA.getValue()); + } + rightTrinoConcat.append(innerrightTrinoConcat.toString()); + } + rightTrinoConcat.deleteCharAt(rightTrinoConcat.length() - 1); + rightTrinoConcat.append(") as line_md5"); + } + } + + templateMidTableAction = templateMidTableAction.replace("${left_columns}", StringUtils.isNotBlank(leftTrinoConcat.toString()) ? leftTrinoConcat.toString() : leftConcat.toString()); + templateMidTableAction = templateMidTableAction.replace("${right_columns}", StringUtils.isNotBlank(rightTrinoConcat.toString()) ? 
rightTrinoConcat.toString() : rightConcat.toString()); + if (QualitisConstants.MULTI_SOURCE_FULL_TEMPLATE_NAME.equals(rule.getTemplate().getEnName()) && RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode().equals(rule.getTemplate().getTemplateType())) { + return templateMidTableAction; + } + if (CollectionUtils.isEmpty(rule.getRuleDataSourceMappings())) { + return templateMidTableAction; + } + List ruleDataSourceMappings = rule.getRuleDataSourceMappings().stream() + .filter(ruleDataSourceMapping -> ruleDataSourceMapping.getMappingType() != null && ruleDataSourceMapping.getMappingType().equals(MappingTypeEnum.MATCHING_FIELDS.getCode())).collect(Collectors.toList()); + + if (CollectionUtils.isNotEmpty(ruleDataSourceMappings)) { + StringBuilder compareColumns = new StringBuilder(); + int indexCol = 1; + for (RuleDataSourceMapping ruleDataSourceMapping : ruleDataSourceMappings) { + if (StringUtils.isNotEmpty(ruleDataSourceMapping.getLeftStatement())) { + compareColumns.append(ruleDataSourceMapping.getLeftStatement()).append(" AS ").append("col" + indexCol).append(", "); + indexCol++; + } + if (StringUtils.isNotEmpty(ruleDataSourceMapping.getRightStatement())) { + compareColumns.append(ruleDataSourceMapping.getRightStatement()).append(" AS ").append("col" + indexCol).append(", "); + indexCol++; + } + } + + int index = templateMidTableAction.indexOf("CASE WHEN"); + + templateMidTableAction = new StringBuffer(templateMidTableAction).insert(index, compareColumns.toString()).toString(); + } + return templateMidTableAction; + } + + private static String handleWithAcrossClusterDatasource(String templateMidTableAction, Set ruleDataSources, Date date, + Integer datasourceIndex, String runDate, String runToday) throws UnExpectedRequestException { + for (RuleDataSource ruleDataSource : ruleDataSources) { + String filter = ruleDataSource.getFilter(); + if (datasourceIndex.equals(ruleDataSource.getDatasourceIndex()) && StringUtils.isNotBlank(filter)) { + if 
(StringUtils.isNotBlank(runDate)) { + filter = filter.replace("${run_date}", runDate); +// filter = filter.replace("${run_date_std}", runDate); + } + if (StringUtils.isNotBlank(runToday)) { + filter = filter.replace("${run_today}", runToday); +// filter = filter.replace("${run_today_std}", runToday); + } + + filter = DateExprReplaceUtil.replaceFilter(date, filter); + templateMidTableAction = templateMidTableAction.replace(FILTER_PLACEHOLDER, filter); + if (StringUtils.isNotEmpty(ruleDataSource.getDbName())) { + templateMidTableAction = templateMidTableAction.replace(DATABASE_PLACEHOLDER, ruleDataSource.getDbName()); + } else { + templateMidTableAction = templateMidTableAction.replace(DATABASE_PLACEHOLDER + SpecCharEnum.PERIOD_NO_ESCAPE.getValue(), ""); + } + if (StringUtils.isNotEmpty(ruleDataSource.getTableName())) { + templateMidTableAction = templateMidTableAction.replace(TABLE_PLACEHOLDER, ruleDataSource.getTableName()); + } + return templateMidTableAction; + } + } + return templateMidTableAction; + } + + private String fillPartitionWithRuleConfiguration(StringBuilder partition, Rule rule, String templateMidTableAction, List inputMetaRuleVariables) { + if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.SINGLE_SOURCE_TEMPLATE.getCode())) { + partition.append(new ArrayList<>(rule.getRuleDataSources()).get(0).getFilter()); + } else if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.CUSTOM.getCode())) { + // Replace placeholder. + if (StringUtils.isNotEmpty(rule.getWhereContent())) { + partition.append(rule.getWhereContent()); + } + } else if (rule.getTemplate().getTemplateType().equals(RuleTemplateTypeEnum.MULTI_SOURCE_TEMPLATE.getCode())) { + // Replace placeholder. 
+ List filterVariable = inputMetaRuleVariables.stream().filter( + r -> r.getTemplateMidTableInputMeta().getInputType().equals(TemplateInputTypeEnum.COMPARISON_RESULTS_FOR_FILTER.getCode()) + ).collect(Collectors.toList()); + if (CollectionUtils.isNotEmpty(filterVariable)) { + partition.append(filterVariable.iterator().next().getValue()); + } + } + return templateMidTableAction; + } + + private List getMultiSourceAccuracyFromSqlList(String midTableAction, Map dbTableMap, Map filters, String partOfOriginVariableName + , List> sourceConnects, List> targetConnects, Map selectResult) { + // sql + List transformSql = new ArrayList<>(); + StringBuilder envName = new StringBuilder(); + List sourceSqlVariableNameList = new ArrayList<>(); + StringBuilder sourceSql = new StringBuilder(); + sourceSql.append("select *").append(" from ") + .append(dbTableMap.get("left_database")).append(dbTableMap.get("left_table")) + .append(" where ").append(filters.get("left_table")); + StringBuilder targetSql = new StringBuilder(); + targetSql.append("select *").append(" from ") + .append(dbTableMap.get("right_database")).append(dbTableMap.get("right_table")) + .append(" where ").append(filters.get("right_table")); + +// connect to databases with envs' connection info: spark.read.format("jdbc").option("driver","com.mysql.jdbc.Driver").option + if (CollectionUtils.isNotEmpty(sourceConnects)) { + for (Map sourceConnect: sourceConnects) { + String sourceEnvName = (String) sourceConnect.get("envName"); + String partOfVariableName = partOfOriginVariableName + sourceEnvName; + String host = (String) sourceConnect.get("host"); + String port = (String) sourceConnect.get("port"); + String user = (String) sourceConnect.get("username"); + String pwd = (String) sourceConnect.get("password"); + String dataType = (String) sourceConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDFLeft_" + 
partOfVariableName) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + envName.append("[").append((String) sourceConnect.get("envName")).append("]"); + sourceSqlVariableNameList.add("originalDFLeft_" + partOfVariableName); + } + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDFLeft_" + partOfOriginVariableName).replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString())); + sourceSqlVariableNameList.add("originalDFLeft_" + partOfOriginVariableName); + } +// merge multi-environment with unionAll + String sourceCollectSqlName = "originalDFLeft_" + partOfOriginVariableName; + StringBuilder sourceCollectSql = new StringBuilder("val " + sourceCollectSqlName + " = "); + sourceCollectSql.append(sourceSqlVariableNameList.get(0)); + for (int i = 1; i < sourceSqlVariableNameList.size(); i++) { + sourceCollectSql.append(".union(" + sourceSqlVariableNameList.get(i) + ")"); + } + transformSql.add(sourceCollectSql.toString()); +// register to a temporary table with multi-environment + transformSql.add(sourceCollectSqlName + ".registerTempTable(\"tmp1_" + partOfOriginVariableName + dbTableMap.get("left_table") + "\")"); + + List targetSqlVariableNameList = new ArrayList<>(); + if (CollectionUtils.isNotEmpty(targetConnects)) { + for (Map targetConnect : targetConnects) { + String targetEnvName = (String) targetConnect.get("envName"); + String partOfVariableName = partOfOriginVariableName + targetEnvName; + String host = (String) targetConnect.get("host"); + String port = (String) targetConnect.get("port"); + String user = (String) targetConnect.get("username"); + String pwd = (String) targetConnect.get("password"); + String dataType = (String) targetConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, 
targetSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDFRight_" + partOfVariableName) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + envName.append("[").append((String) targetConnect.get("envName")).append("]"); + targetSqlVariableNameList.add("originalDFRight_" + partOfVariableName); + } + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDFRight_" + partOfOriginVariableName).replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString())); + targetSqlVariableNameList.add("originalDFRight_" + partOfOriginVariableName); + } +// merge multi-environment + String targetCollectSqlName = "originalDFRight_" + partOfOriginVariableName; + StringBuilder targetCollectSql = new StringBuilder("val " + targetCollectSqlName + " = "); + targetCollectSql.append(targetSqlVariableNameList.get(0)); + for (int i = 1; i < targetSqlVariableNameList.size(); i++) { + targetCollectSql.append(".union(" + targetSqlVariableNameList.get(i) + ")"); + } + transformSql.add(targetCollectSql.toString()); +// register to temporary table + transformSql.add(targetCollectSqlName + ".registerTempTable(\"tmp2_" + partOfOriginVariableName + dbTableMap.get("right_table") + "\")"); + +// compare left database and right databases' result with their temporary table + String commonJoin = midTableAction + .replace(dbTableMap.get("left_database") + dbTableMap.get("left_table") + " ", "tmp1_" + partOfOriginVariableName + dbTableMap.get("left_table") + " ") + .replace(dbTableMap.get("right_database") + dbTableMap.get("right_table") + " ", "tmp2_" + partOfOriginVariableName + dbTableMap.get("right_table") + " "); + String variableFormer = getVariableNameByRule(OptTypeEnum.ORIGINAL_STATISTIC_DF.getMessage(), partOfOriginVariableName); + String joinSql = "val " + variableFormer + " = 
spark.sql(\"" + commonJoin + "\")"; + transformSql.add(joinSql); + String variableLatter = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfOriginVariableName); + // Select compare_result = 0 + transformSql.add("val " + variableLatter + " = " + variableFormer + ".where(" + variableFormer + "(\"compare_result\") === 0)"); + selectResult.put(variableLatter, envName.toString()); + + return transformSql; + } + private List getMultiSourceAccuracyfromSql(String midTableAction, Map dbTableMap, Map filters, String partOfVariableName , Map sourceConnect, Map targetConnect, Map selectResult) { // Solve partition, value, hash value List transformSql = new ArrayList<>(); StringBuilder sourceSql = new StringBuilder(); StringBuilder targetSql = new StringBuilder(); - StringBuilder envName = new StringBuilder(); sourceSql.append("select *").append(" from ") @@ -1165,6 +2025,354 @@ private List getMultiSourceAccuracyfromSql(String midTableAction, Map getMultiSourceCustomFromSqlList(String midTableAction, Map dbTableMap, String partOfOriginVariableName + , List> sourceConnects, List> targetConnects, Map selectResult) { + List transformSql = new ArrayList<>(); + StringBuilder sourceSql = new StringBuilder(); + StringBuilder targetSql = new StringBuilder(); + StringBuilder envName = new StringBuilder(); + List sourceSqlVariableNameList = new ArrayList<>(); + + String leftCollectSql = dbTableMap.get("left_collect_sql"); + String rightCollectSql = dbTableMap.get("right_collect_sql"); + + sourceSql.append(leftCollectSql); + targetSql.append(rightCollectSql); + +// spark.read.format("jdbc").option("driver","com.mysql.jdbc.Driver").option + if (CollectionUtils.isNotEmpty(sourceConnects)) { + for (Map sourceConnect: sourceConnects) { + String sourceEnvName = (String) sourceConnect.get("envName"); + String partOfVariableName = partOfOriginVariableName + sourceEnvName; + String host = (String) sourceConnect.get("host"); + String port = (String) sourceConnect.get("port"); + 
String user = (String) sourceConnect.get("username"); + String pwd = (String) sourceConnect.get("password"); + String dataType = (String) sourceConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDFLeft_" + partOfVariableName) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + envName.append("[").append((String) sourceConnect.get("envName")).append("]"); + sourceSqlVariableNameList.add("originalDFLeft_" + partOfVariableName); + } + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDFLeft_" + partOfOriginVariableName).replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString())); + sourceSqlVariableNameList.add("originalDFLeft_" + partOfOriginVariableName); + } +// 合并环境 + String sourceCollectSqlName = "originalDFLeft_" + partOfOriginVariableName; + StringBuilder sourceCollectSql = new StringBuilder("val " + sourceCollectSqlName + " = "); + sourceCollectSql.append(sourceSqlVariableNameList.get(0)); + for (int i = 1; i < sourceSqlVariableNameList.size(); i++) { + sourceCollectSql.append(".union(" + sourceSqlVariableNameList.get(i) + ")"); + } + transformSql.add(sourceCollectSql.toString()); +// 注册hive临时表 + transformSql.add(sourceCollectSqlName + ".registerTempTable(\"tmp1_" + partOfOriginVariableName + dbTableMap.get("left_table") + "\")"); + + List targetSqlVariableNameList = new ArrayList<>(); + if (CollectionUtils.isNotEmpty(targetConnects)) { + for (Map targetConnect : targetConnects) { + String targetEnvName = (String) targetConnect.get("envName"); + String partOfVariableName = partOfOriginVariableName + targetEnvName; + String host = (String) targetConnect.get("host"); + String port = (String) targetConnect.get("port"); + String user = 
(String) targetConnect.get("username"); + String pwd = (String) targetConnect.get("password"); + String dataType = (String) targetConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDFRight_" + partOfVariableName) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + envName.append("[").append((String) targetConnect.get("envName")).append("]"); + targetSqlVariableNameList.add("originalDFRight_" + partOfVariableName); + } + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDFRight_" + partOfOriginVariableName).replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString())); + targetSqlVariableNameList.add("originalDFRight_" + partOfOriginVariableName); + } +// 合并环境 + String targetCollectSqlName = "originalDFRight_" + partOfOriginVariableName; + StringBuilder targetCollectSql = new StringBuilder("val " + targetCollectSqlName + " = "); + targetCollectSql.append(targetSqlVariableNameList.get(0)); + for (int i = 1; i < targetSqlVariableNameList.size(); i++) { + targetCollectSql.append(".union(" + targetSqlVariableNameList.get(i) + ")"); + } + transformSql.add(targetCollectSql.toString()); +// 注册hive临时表 + transformSql.add(targetCollectSqlName + ".registerTempTable(\"tmp2_" + partOfOriginVariableName + dbTableMap.get("right_table") + "\")"); + + String commonJoin = midTableAction + .replaceFirst("\\Q" + dbTableMap.get("left_collect_sql") + "\\E", "tmp1_" + partOfOriginVariableName + dbTableMap.get("left_table")) + .replaceFirst("\\Q" + dbTableMap.get("right_collect_sql") + "\\E", "tmp2_" + partOfOriginVariableName + dbTableMap.get("right_table")); + + String variableFormer = getVariableNameByRule(OptTypeEnum.ORIGINAL_STATISTIC_DF.getMessage(), 
partOfOriginVariableName); + String joinSql = "val " + variableFormer + " = spark.sql(\"" + commonJoin + "\")"; + transformSql.add(joinSql); + String variableLatter = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfOriginVariableName); + // Select compare_result = 0 + transformSql.add("val " + variableLatter + " = " + variableFormer + ".where(" + variableFormer + "(\"compare_result\") === 0)"); + selectResult.put(variableLatter, envName.toString()); + return transformSql; + } + + /** + * + * @param midTableAction + * @param dbTableMap + * @param partOfVariableName templateEnName + sourceEnvName + targetEnvName + * @param sourceConnect + * @param targetConnect + * @param selectResult + * @return + */ + private List getMultiSourceCustomFromSql(String midTableAction, Map dbTableMap, String partOfVariableName + , Map sourceConnect, Map targetConnect, Map selectResult) { + // Solve partition, value, hash value + List transformSql = new ArrayList<>(); + StringBuilder sourceSql = new StringBuilder(); + StringBuilder targetSql = new StringBuilder(); + StringBuilder envName = new StringBuilder(); + + sourceSql.append(dbTableMap.get("left_collect_sql")); + targetSql.append(dbTableMap.get("right_collect_sql")); + + if (sourceConnect != null && sourceConnect.size() > 0) { + String host = (String) sourceConnect.get("host"); + String port = (String) sourceConnect.get("port"); + String user = (String) sourceConnect.get("username"); + String pwd = (String) sourceConnect.get("password"); + String dataType = (String) sourceConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDFLeft_" + partOfVariableName) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + 
envName.append("[").append((String) sourceConnect.get("envName")).append("]"); + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDFLeft_" + partOfVariableName).replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString())); + } + if (targetConnect != null && targetConnect.size() > 0) { + String host = (String) targetConnect.get("host"); + String port = (String) targetConnect.get("port"); + String user = (String) targetConnect.get("username"); + String pwd = (String) targetConnect.get("password"); + String dataType = (String) targetConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDFRight_" + partOfVariableName) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + envName.append("[").append((String) targetConnect.get("envName")).append("]"); + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDFRight_" + partOfVariableName).replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString())); + } + String tmpTemplateName1 = "tmp1_" + partOfVariableName; + String tmpTemplateName2 = "tmp2_" + partOfVariableName; + + transformSql.add("originalDFLeft_" + partOfVariableName + ".registerTempTable(\"" + tmpTemplateName1 + "\")"); + transformSql.add("originalDFRight_" + partOfVariableName + ".registerTempTable(\"" + tmpTemplateName2 + "\")"); + +// To replace respectively the repeating part between left sql and right sql + String commonJoin = midTableAction + .replaceFirst("\\Q" + dbTableMap.get("left_collect_sql") + "\\E", tmpTemplateName1) + .replaceFirst("\\Q" + dbTableMap.get("right_collect_sql") + "\\E", tmpTemplateName2); + + String variableFormer = 
getVariableNameByRule(OptTypeEnum.ORIGINAL_STATISTIC_DF.getMessage(), partOfVariableName); + String joinSql = "val " + variableFormer + " = spark.sql(\"" + commonJoin + "\")"; + transformSql.add(joinSql); + String variableLatter = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName); + // Select compare_result = 0 + transformSql.add("val " + variableLatter + " = " + variableFormer + ".where(" + variableFormer + "(\"compare_result\") === 0)"); + selectResult.put(variableLatter, envName.toString()); + return transformSql; + } + + private List getSpecialTransformSqlList(Map dbTableMap, String partOfOriginalVariableName, String filter, Map filters, String columns + , List> sourceConnects, List> targetConnects, Integer contrastType, List leftCols, List rightCols, List complexCols, Map selectResult) { + List transformSql = new ArrayList<>(); + StringBuilder envName = new StringBuilder(); + + StringBuilder tmpRegisterTableLeft = new StringBuilder(); + StringBuilder tmpRegisterTableRight = new StringBuilder(); + tmpRegisterTableLeft.append("md5_table_left_" + partOfOriginalVariableName); + tmpRegisterTableRight.append("md5_table_right_" + partOfOriginalVariableName); + +// sql + if (CollectionUtils.isNotEmpty(sourceConnects)) { + List unionLeftVariableNameList = new ArrayList<>(); + for (Map sourceConnect: sourceConnects) { + StringBuilder sourceSql = new StringBuilder(); + String partOfVariableName = (String) sourceConnect.get("envName"); + if (StringUtils.isNotBlank(columns)) { + sourceSql.append("select ").append(columns); + } else { + if (taskDataSourceConfig.getHiveSortUdfOpen() && CollectionUtils.isNotEmpty(complexCols)) { + List leftColsReal = new ArrayList<>(leftCols.size()); + sourceSql.append("select "); + for (String col : leftCols) { + if (complexCols.contains(col)) { + leftColsReal.add(taskDataSourceConfig.getHiveSortUdf() + "(" + col + ")"); + } else { + leftColsReal.add(col); + } + } + sourceSql.append(String.join(",", 
leftColsReal)); + } else { + sourceSql.append("select *"); + } + } + sourceSql.append(" from ").append(dbTableMap.get("left_database")).append(dbTableMap.get("left_table")).append(" where ").append(filters.get("left_table")); + if (sourceConnect != null && sourceConnect.size() > 0) { + String host = (String) sourceConnect.get("host"); + String port = (String) sourceConnect.get("port"); + String user = (String) sourceConnect.get("username"); + String pwd = (String) sourceConnect.get("password"); + String dataType = (String) sourceConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDFLeft_" + partOfVariableName) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + + envName.append("[").append((String) sourceConnect.get("envName")).append("]"); + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDFLeft_" + partOfVariableName).replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sourceSql.toString())); + } + +// na.fill + transformSql.add("val fillNullDFLeft_" + partOfVariableName + " = originalDFLeft_" + partOfVariableName + ".na.fill(UUID)"); + transformSql.add("val qualitis_names_left_" + partOfVariableName + " = fillNullDFLeft_" + partOfVariableName + ".schema.fieldNames"); + transformSql.add("val fillNullWithFullLineWithHashDF_left_" + partOfVariableName + " = fillNullDFLeft_" + partOfVariableName + ".withColumn(\"qualitis_full_line_value\", to_json(struct($\"*\"))).withColumn(\"qualitis_full_line_hash_value\", md5(to_json(struct($\"*\"))))"); + transformSql.add("val qualitis_names_left_" + partOfVariableName + "_buffer = qualitis_names_left_" + partOfVariableName + ".toBuffer"); + transformSql.add("val finalDF_left_" + partOfVariableName + " = 
fillNullWithFullLineWithHashDF_left_" + partOfVariableName + ".drop(qualitis_names_left_" + partOfVariableName + ":_*)"); + unionLeftVariableNameList.add("finalDF_left_" + partOfVariableName); + } + String sourceCollectSqlName = "collect_fillNullDFLeft_" + partOfOriginalVariableName; + if (unionLeftVariableNameList.size() > 1) { + StringBuilder sourceCollectSql = new StringBuilder("val " + sourceCollectSqlName + " = "); + sourceCollectSql.append(unionLeftVariableNameList.get(0)); + for (int i = 1; i < unionLeftVariableNameList.size(); i++) { + sourceCollectSql.append(".union(" + unionLeftVariableNameList.get(i) + ")"); + } + transformSql.add(sourceCollectSql.toString()); + } else { + sourceCollectSqlName = unionLeftVariableNameList.get(0); + } + transformSql.add(sourceCollectSqlName + ".registerTempTable(\"" + tmpRegisterTableLeft + "\")"); + } + + if (CollectionUtils.isNotEmpty(targetConnects)) { + List unionRightVariableNameList = new ArrayList<>(); + for (Map targetConnect: targetConnects) { + StringBuilder targetSql = new StringBuilder(); + String partOfVariableName = (String) targetConnect.get("envName"); + if (StringUtils.isNotBlank(columns)) { + targetSql.append("select ").append(columns); + } else { + if (taskDataSourceConfig.getHiveSortUdfOpen() && CollectionUtils.isNotEmpty(complexCols)) { + List rightColsReal = new ArrayList<>(rightCols.size()); + targetSql.append("select "); + for (String col : rightCols) { /* fix: was leftCols — copy-paste from the source-side branch; the right-side column list must be built from rightCols */ + if (complexCols.contains(col)) { + rightColsReal.add(taskDataSourceConfig.getHiveSortUdf() + "(" + col + ")"); + } else { + rightColsReal.add(col); + } + } + targetSql.append(String.join(",", rightColsReal)); + } else { + targetSql.append("select *"); + } + } + targetSql.append(" from ").append(dbTableMap.get("right_database")).append(dbTableMap.get("right_table")).append(" where ").append(filters.get("right_table")); + if (targetConnect != null && targetConnect.size() > 0) { + String host = (String) targetConnect.get("host"); + String port = 
(String) targetConnect.get("port"); + String user = (String) targetConnect.get("username"); + String pwd = (String) targetConnect.get("password"); + String dataType = (String) targetConnect.get("dataType"); + String str = SPARK_MYSQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString()).replace(VARIABLE_NAME_PLACEHOLDER, "originalDFRight_" + partOfVariableName) + .replace("${JDBC_DRIVER}", JDBC_DRIVER.get(dataType)) + .replace("${MYSQL_IP}", host) + .replace("${MYSQL_PORT}", port) + .replace("${MYSQL_USER}", user) + .replace("${MYSQL_PASSWORD}", pwd); + transformSql.add(str); + + envName.append("[").append((String) targetConnect.get("envName")).append("]"); + } else { + transformSql.add(SPARK_SQL_TEMPLATE.replace(VARIABLE_NAME_PLACEHOLDER, "originalDFRight_" + partOfVariableName).replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, targetSql.toString())); + } + +// na.fill + transformSql.add("val fillNullDFRight_" + partOfVariableName + " = originalDFRight_" + partOfVariableName + ".na.fill(UUID)"); + transformSql.add("val qualitis_names_right_" + partOfVariableName + " = fillNullDFRight_" + partOfVariableName + ".schema.fieldNames"); + transformSql.add("val fillNullWithFullLineWithHashDF_right_" + partOfVariableName + " = fillNullDFRight_" + partOfVariableName + ".withColumn(\"qualitis_full_line_value\", to_json(struct($\"*\"))).withColumn(\"qualitis_full_line_hash_value\", md5(to_json(struct($\"*\"))))"); + transformSql.add("val qualitis_names_right_" + partOfVariableName + "_buffer = qualitis_names_right_" + partOfVariableName + ".toBuffer"); + transformSql.add("val finalDF_right_" + partOfVariableName + " = fillNullWithFullLineWithHashDF_right_" + partOfVariableName + ".drop(qualitis_names_right_" + partOfVariableName + ":_*)"); + unionRightVariableNameList.add("finalDF_right_" + partOfVariableName); + } + + String targetCollectSqlName = "collect_fillNullDFRight_" + partOfOriginalVariableName; + if (unionRightVariableNameList.size() > 1) { + StringBuilder 
targetCollectSql = new StringBuilder("val " + targetCollectSqlName + " = "); + targetCollectSql.append(unionRightVariableNameList.get(0)); + for (int i = 1; i < unionRightVariableNameList.size(); i++) { + targetCollectSql.append(".union(" + unionRightVariableNameList.get(i) + ")"); + } + transformSql.add(targetCollectSql.toString()); + } else { + targetCollectSqlName = unionRightVariableNameList.get(0); + } + transformSql.add(targetCollectSqlName + ".registerTempTable(\"" + tmpRegisterTableRight + "\")"); + } + + String originalVariableName = getVariableNameByRule(OptTypeEnum.ORIGINAL_STATISTIC_DF.getMessage(), partOfOriginalVariableName); + String joinSql = "val " + originalVariableName + " = spark.sql(\"SELECT qulaitis_left_tmp.qualitis_full_line_hash_value as left_full_hash_line, qulaitis_left_tmp.qualitis_mul_db_accuracy_num as left_full_line_num, qulaitis_right_tmp.qualitis_full_line_hash_value as right_full_hash_line, qulaitis_right_tmp.qualitis_mul_db_accuracy_num as right_full_line_num FROM (SELECT qualitis_full_line_hash_value, count(1) as qualitis_mul_db_accuracy_num FROM " + tmpRegisterTableLeft.toString() + " WHERE true group by qualitis_full_line_hash_value) qulaitis_left_tmp ${contrast_type} (SELECT qualitis_full_line_hash_value, count(1) as qualitis_mul_db_accuracy_num FROM " + tmpRegisterTableRight.toString() + " WHERE true group by qualitis_full_line_hash_value) qulaitis_right_tmp ON (qulaitis_left_tmp.qualitis_full_line_hash_value = qulaitis_right_tmp.qualitis_full_line_hash_value AND qulaitis_left_tmp.qualitis_mul_db_accuracy_num = qulaitis_right_tmp.qualitis_mul_db_accuracy_num) WHERE (qulaitis_right_tmp.qualitis_full_line_hash_value is null AND qulaitis_right_tmp.qualitis_mul_db_accuracy_num is null) OR (qulaitis_left_tmp.qualitis_full_line_hash_value is null AND qulaitis_left_tmp.qualitis_mul_db_accuracy_num is null) ${outer_filter}\")"; + joinSql = joinSql.replace("${contrast_type}", ContrastTypeEnum.getJoinType(contrastType)); + if 
(StringUtils.isNotEmpty(filter)) { + joinSql = joinSql.replace("${outer_filter}", "AND (" + filter + ")"); + } else { + joinSql = joinSql.replace("${outer_filter}", ""); + } + transformSql.add(joinSql); + String statisticVariableName = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfOriginalVariableName); + if (fpsConfig.getLightweightQuery()) { + String leftVariableName = getVariableNameByRule(OptTypeEnum.LEFT_JOIN_STATISTIC_DF.getMessage(), partOfOriginalVariableName); + String rightVariableName = getVariableNameByRule(OptTypeEnum.RIGHT_JOIN_STATISTIC_DF.getMessage(), partOfOriginalVariableName); + + transformSql.add(originalVariableName + ".registerTempTable(\"md5_table_total_" + partOfOriginalVariableName + "\")"); + String joinSqlWithLeft = "val " + leftVariableName + " = spark.sql(\"\"\"SELECT \"left\" as source, " + tmpRegisterTableLeft.toString() + ".qualitis_full_line_value as full_line, md5_table_total_" + partOfOriginalVariableName + ".left_full_line_num FROM " + tmpRegisterTableLeft.toString() + " full outer join md5_table_total_" + partOfOriginalVariableName + " on " + tmpRegisterTableLeft.toString() + ".qualitis_full_line_hash_value = md5_table_total_" + partOfOriginalVariableName + ".left_full_hash_line where " + tmpRegisterTableLeft.toString() + ".qualitis_full_line_hash_value is not null and md5_table_total_" + partOfOriginalVariableName + ".left_full_hash_line is not null\"\"\")"; + String joinSqlWithRight = "val " + rightVariableName + " = spark.sql(\"\"\"SELECT \"right\" as source, " + tmpRegisterTableRight.toString() + ".qualitis_full_line_value as full_line, md5_table_total_" + partOfOriginalVariableName + ".right_full_line_num FROM " + tmpRegisterTableRight.toString() + " full outer join md5_table_total_" + partOfOriginalVariableName + " on " + tmpRegisterTableRight.toString() + ".qualitis_full_line_hash_value = md5_table_total_" + partOfOriginalVariableName + ".right_full_hash_line where " + 
tmpRegisterTableRight.toString() + ".qualitis_full_line_hash_value is not null and md5_table_total_" + partOfOriginalVariableName + ".right_full_hash_line is not null\"\"\")"; + + transformSql.add(joinSqlWithLeft); + transformSql.add(joinSqlWithRight); + + transformSql.add("val " + statisticVariableName + " = " + leftVariableName + ".union(" + rightVariableName + ")"); + } else { + transformSql.add("val " + statisticVariableName + " = " + originalVariableName); + } + if (StringUtils.isNotEmpty(envName)) { + selectResult.put(statisticVariableName, envName.toString()); + } + + return transformSql; + } + private List getSpecialTransformSql(Map dbTableMap, String partOfVariableName, String filter, Map filters, String columns , Map sourceConnect, Map targetConnect, Integer contrastType, List leftCols, List rightCols, List complexCols, Map selectResult) { @@ -1177,7 +2385,7 @@ private List getSpecialTransformSql(Map dbTableMap, Stri // Full line to MD5 with dataframe api transformation. StringBuilder tmpRegisterTableLeft = new StringBuilder(); StringBuilder tmpRegisterTableRight = new StringBuilder(); - fuleLineToHashLine(transformSql, partOfVariableName, tmpRegisterTableLeft, tmpRegisterTableRight); + fullLineToHashLine(transformSql, partOfVariableName, tmpRegisterTableLeft, tmpRegisterTableRight); String originalVariableName = getVariableNameByRule(OptTypeEnum.ORIGINAL_STATISTIC_DF.getMessage(), partOfVariableName); String joinSql = "val " + originalVariableName + " = spark.sql(\"SELECT qulaitis_left_tmp.qualitis_full_line_hash_value as left_full_hash_line, qulaitis_left_tmp.qualitis_mul_db_accuracy_num as left_full_line_num, qulaitis_right_tmp.qualitis_full_line_hash_value as right_full_hash_line, qulaitis_right_tmp.qualitis_mul_db_accuracy_num as right_full_line_num FROM (SELECT qualitis_full_line_hash_value, count(1) as qualitis_mul_db_accuracy_num FROM " + tmpRegisterTableLeft.toString() + " WHERE true group by qualitis_full_line_hash_value) qulaitis_left_tmp 
${contrast_type} (SELECT qualitis_full_line_hash_value, count(1) as qualitis_mul_db_accuracy_num FROM " + tmpRegisterTableRight.toString() + " WHERE true group by qualitis_full_line_hash_value) qulaitis_right_tmp ON (qulaitis_left_tmp.qualitis_full_line_hash_value = qulaitis_right_tmp.qualitis_full_line_hash_value AND qulaitis_left_tmp.qualitis_mul_db_accuracy_num = qulaitis_right_tmp.qualitis_mul_db_accuracy_num) WHERE (qulaitis_right_tmp.qualitis_full_line_hash_value is null AND qulaitis_right_tmp.qualitis_mul_db_accuracy_num is null) OR (qulaitis_left_tmp.qualitis_full_line_hash_value is null AND qulaitis_left_tmp.qualitis_mul_db_accuracy_num is null) ${outer_filter}\")"; joinSql = joinSql.replace("${contrast_type}", ContrastTypeEnum.getJoinType(contrastType)); @@ -1283,7 +2491,7 @@ private void handleSourceAndTargetSql(Map dbTableMap, Map transformSql, String partOfVariableName, StringBuilder tmpRegisterTableLeft, StringBuilder tmpRegisterTableRight) { + private void fullLineToHashLine(List transformSql, String partOfVariableName, StringBuilder tmpRegisterTableLeft, StringBuilder tmpRegisterTableRight) { transformSql.add("val fillNullDFLeft_" + partOfVariableName + " = originalDFLeft_" + partOfVariableName + ".na.fill(UUID)"); transformSql.add("val qualitis_names_left_" + partOfVariableName + " = fillNullDFLeft_" + partOfVariableName + ".schema.fieldNames"); transformSql.add("val fillNullWithFullLineWithHashDF_left_" + partOfVariableName + " = fillNullDFLeft_" + partOfVariableName + ".withColumn(\"qualitis_full_line_value\", to_json(struct($\"*\"))).withColumn(\"qualitis_full_line_hash_value\", md5(to_json(struct($\"*\"))))"); @@ -1311,11 +2519,379 @@ private List getImportSql() { } private List saveStatisticAndSaveMySqlSentence(String workFlowVersion, Long ruleId, Map ruleMetricIds - , Set templateStatisticsInputMetas, String applicationId, List ruleVariables, String createTime - , String partOfVariableName, String runDate, String user, StringBuilder 
realColumn, boolean enumListNewValue, boolean numRangeNewValue, - Map selectResult, boolean unionAllForSaveResult) throws RuleVariableNotSupportException, RuleVariableNotFoundException { - return abstractTranslator.persistenceTranslate(workFlowVersion, ruleId, ruleMetricIds, templateStatisticsInputMetas, applicationId, ruleVariables, createTime - , partOfVariableName, runDate, user, realColumn, enumListNewValue, numRangeNewValue, selectResult, unionAllForSaveResult); + , Set templateStatisticsInputMetas, String applicationId, Long taskId, + List ruleVariables, String createTime + , String partOfVariableName, String runDate,String runToday,String user, StringBuilder realColumn, boolean enumListNewValue, boolean numRangeNewValue, + Map selectResult, int unionWay) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + return abstractTranslator.persistenceTranslate(workFlowVersion, ruleId, ruleMetricIds, templateStatisticsInputMetas, applicationId, taskId, ruleVariables, createTime + , partOfVariableName, runDate, runToday, user, realColumn, enumListNewValue, numRangeNewValue, selectResult, unionWay); + } + + public static String jointVariableName(String prefix, String suffixes) { + return ScalaCodeConstant.jointVariableName(prefix, suffixes); + } + + public static String stateValInitString(String fieldName,String fieldValue) { + return ScalaCodeConstant.stateValString(fieldName, fieldValue); + } + + public static String stateVarInitString(String fieldName,String fieldValue) { + return ScalaCodeConstant.stateVarString(fieldName, fieldValue); + } + + public static String stateValInitNumber(String fieldName, Object fieldValue) { + return ScalaCodeConstant.stateValNumber(fieldName, fieldValue); + } + + public static String stateVarInitNumber(String fieldName, Object fieldValue) { + return ScalaCodeConstant.stateVarNumber(fieldName, fieldValue); + } + + public static String stateNumber(String fieldName, Object fieldValue) { + return 
ScalaCodeConstant.stateNumber(fieldName, fieldValue); + } + + public static String stateString(String fieldName, Object fieldValue) { + return ScalaCodeConstant.stateString(fieldName, fieldValue); + } + + public static String stateVarInitEmptyString(String fieldName) { + return ScalaCodeConstant.stateVarString(fieldName, ""); + } + + private RuleDataSource getRuleDataSource(Rule rule){ + // dataSources info + Set ruleDataSources = rule.getRuleDataSources(); + Iterator it = ruleDataSources.iterator(); + RuleDataSource ruleDataSource = new RuleDataSource(); + while(it.hasNext()){ + ruleDataSource = it.next(); + } + return ruleDataSource; + } + + private long initExecParams(List sparkSqlList, Map execParams, String partitionAttr){ + if(MapUtils.isEmpty(execParams)){ + return 0; + } + long dataTime = 0; + for (Map.Entry entry : execParams.entrySet()) { + String entryKey = String.valueOf(entry.getKey()); + String entryValue = String.valueOf(entry.getValue()); + if ("partition_day".equals(entryKey)) { + dataTime = DateExprReplaceUtil.getDateTimeSeconds(entryValue); + } + if (StringUtils.isNotBlank(partitionAttr) && "partition_attr".equals(entryKey)) { + sparkSqlList.add(stateValInitString(partitionAttr, entryValue)); + } + } + return dataTime; + } + + private void analyseFieldsCountCode(List< String> sparkSqlList, Rule rule, String partOfVariableName, Map< String, String> execParams,String variableFormer) { + if(StringUtils.isBlank(intellectCheckFieldsProjectName) ||! 
intellectCheckFieldsProjectName.equals(rule.getProject().getName())){ + return ; + } + String variableNamePrefix = getVariableNameByRule(partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[0], partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); + String partitionAttr = jointVariableName(variableNamePrefix, "partition_attr"); + String values = jointVariableName(variableNamePrefix, "values"); + String array = jointVariableName(variableNamePrefix, "array"); + RuleDataSource ruleDataSource = getRuleDataSource(rule); + int datasourceTypeInt = ruleDataSource.getDatasourceType(); + String databaseNameStr = ruleDataSource.getDbName(); + String tableNameStr = ruleDataSource.getTableName(); + String proxyUserStr = ruleDataSource.getProxyUser(); + long dataTime = initExecParams(sparkSqlList, execParams, partitionAttr); + Long ruleId = rule.getId(); + + String rolName = ""; + Set variableSet = rule.getRuleVariables(); + for (RuleVariable ruleVariable : variableSet) { + String placeholder = ruleVariable.getTemplateMidTableInputMeta().getPlaceholder(); + if("fields".equals(placeholder) || "${fields}".equals(placeholder)){ + rolName = ruleVariable.getValue(); + } + } + String nowTime = DateUtils.now(); + sparkSqlList.add(stateValInitNumber(array,variableFormer + ".collect")); + sparkSqlList.add(stateValInitNumber(values, array+ "(0)(0).toString")); + + sparkSqlList.add(stateValInitNumber("conn",abstractTranslator.getDataSourceConn())); + sparkSqlList.add(stateVarInitNumber("stmt", "conn.prepareStatement(\"\",1)")); + String analyseFieldsUpsertSql = "\"INSERT INTO qualitis_imsmetric_fields_analyse (rule_id, analyse_type, datasource_type," + + "database_name,table_name,field_name,value,data_date,create_time,update_time,datasource_user,partition_attrs,remark) VALUES %s " + + "ON DUPLICATE KEY UPDATE value = values(value) , update_time = values(update_time)\""; + String analyseFieldsUpsertSqlValue = "(" + ruleId +", 1, " + datasourceTypeInt + ",'" + databaseNameStr + 
"','" + + tableNameStr + "','" + rolName +"',%s," + dataTime + ",'" + nowTime + "','" + nowTime+ "','" + proxyUserStr +"','%s','') "; + String finalSql = String.format(analyseFieldsUpsertSql, analyseFieldsUpsertSqlValue); + sparkSqlList.add("stmt.executeUpdate(" + finalSql + ".format("+values+"," +partitionAttr+"))"); + } + + private void handleCustomRuleCode(List sparkSqlList, Rule rule, String partOfVariableName,Map execParams) { + if(compareProjectName(rule)){ + String variableNamePrefix = getVariableNameByRule(partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[0], partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); + String ageing = jointVariableName(variableNamePrefix, "ageing"); + String partitionAttr = jointVariableName(variableNamePrefix, "partition_attr"); + // dataSources info + Set ruleDataSources = rule.getRuleDataSources(); + Iterator it = ruleDataSources.iterator(); + RuleDataSource ruleDataSource = new RuleDataSource(); + while(it.hasNext()){ + ruleDataSource = it.next(); + } + int datasourceTypeInt = ruleDataSource.getDatasourceType(); + String databaseNameStr = ruleDataSource.getDbName(); + String tableNameStr = ruleDataSource.getTableName(); + String proxyUserStr = ruleDataSource.getProxyUser(); + + // init exec params + int enumMaxLength = 1000; + int batchInsertSize = 1000; + long dataTime = 0; + if(!execParams.isEmpty()) { + for (Map.Entry entry : execParams.entrySet()) { + String entryKey = String.valueOf(entry.getKey()); + String entryValue = String.valueOf(entry.getValue()); + if ("partition_day".equals(entryKey)) { + dataTime = DateExprReplaceUtil.getDateTimeSeconds(entryValue); + } + if ("ageing".equals(entryKey)) { + sparkSqlList.add(stateValInitString(ageing, entryValue)); + } + if ("partition_attr".equals(entryKey)) { + sparkSqlList.add(stateValInitString(partitionAttr, entryValue)); + } + if ("enum_max_length".equals(entryKey)) { + try { + if(StringUtils.isNotBlank(entryValue)){ + enumMaxLength = 
Integer.valueOf(entryValue); + } + } catch (Exception e) { + LOGGER.error("exec param error"); + } + } + if ("batch_insert_size".equals(entryKey)) { + try { + if(StringUtils.isNotBlank(entryValue)){ + batchInsertSize = Integer.valueOf(entryValue); + } + } catch (Exception e) { + LOGGER.error("exec param error"); + } + } + } + } + String rolName = ""; + Set variableSet = rule.getRuleVariables(); + for (RuleVariable ruleVariable : variableSet) { + String placeholder = ruleVariable.getTemplateMidTableInputMeta().getPlaceholder(); + if("fields".equals(placeholder) || "${fields}".equals(placeholder)){ + rolName = ruleVariable.getValue(); + } + } + + // add spark sql + int metricTypeInt = 1; + String templateEnName = rule.getTemplate().getEnName(); + if(intellectCheckTableTemplateName.equals(templateEnName)){ + metricTypeInt = MetricTypeEnum.TABLE_STATISTICS.getCode(); + }else if(intellectCheckEnumTemplateName.equals(templateEnName)){ + metricTypeInt = MetricTypeEnum.ENUM_STATISTICS.getCode(); + }else if(intellectCheckOriginTemplateName.equals(templateEnName)){ + metricTypeInt = MetricTypeEnum.ORIGIN_STATISTICS.getCode(); + }else{ + throw new RuntimeException(ScalaCodeConstant.NOT_SUPPORTED_METRIC_TYPE); + } + + String datasourceType = jointVariableName(variableNamePrefix, "datasource_type"); + String databaseName = jointVariableName(variableNamePrefix, "database_name"); + String tableName = jointVariableName(variableNamePrefix, "table_name"); + String metricType = jointVariableName(variableNamePrefix, "metric_type"); + String attrName = jointVariableName(variableNamePrefix, "attr_name"); + String rowkeyName = jointVariableName(variableNamePrefix, "rowkey_name"); + String proxyUser = jointVariableName(variableNamePrefix, "proxy_user"); + String dataArray = jointVariableName(variableNamePrefix, "array"); + String errorList = jointVariableName(variableNamePrefix, "error_list"); + String nowTime = jointVariableName(variableNamePrefix, "now_time"); + String insertList = 
jointVariableName(variableNamePrefix, "insert_list"); + String metricValue = jointVariableName(variableNamePrefix,"value"); + String metricId = jointVariableName(variableNamePrefix,"metric_id"); + String calcType = jointVariableName(variableNamePrefix,"calcType"); + String errorMsg = jointVariableName(variableNamePrefix,"error_msg"); + String rowvalueenumName = jointVariableName(variableNamePrefix, "rowvalueenum_name"); + String dsDateTime = jointVariableName(variableNamePrefix, "ds_date_time"); + String querySql = jointVariableName(variableNamePrefix, "query_sql"); + String insertSql = jointVariableName(variableNamePrefix, "insert_sql"); + String insertCommonData = jointVariableName(variableNamePrefix, "insert_common_data"); + String insertData = jointVariableName(variableNamePrefix, "insert_data"); + String groupbyattrNames = jointVariableName(variableNamePrefix, "groupbyattr_names"); + String identifyRs = jointVariableName(variableNamePrefix, "identify_rs"); + String metricIdRs = jointVariableName(variableNamePrefix, "metricid_rs"); + String splitList = jointVariableName(variableNamePrefix, "split_list"); + + sparkSqlList.add("import scala.math.BigDecimal"); + sparkSqlList.add(stateValInitNumber("conn",abstractTranslator.getDataSourceConn())); + sparkSqlList.add(stateVarInitNumber("stmt", "conn.prepareStatement(\"\",1)")); + sparkSqlList.add(stateValInitNumber(dataArray,variableNamePrefix + ".collect")); + sparkSqlList.add(stateValInitNumber(datasourceType, datasourceTypeInt)); + sparkSqlList.add(stateValInitString(databaseName, databaseNameStr)); + sparkSqlList.add(stateValInitString(tableName, tableNameStr)); + sparkSqlList.add(stateValInitString(proxyUser, proxyUserStr)); + sparkSqlList.add(stateValInitNumber(metricType, metricTypeInt)); + sparkSqlList.add(stateVarInitString(attrName, rolName)); + sparkSqlList.add(stateVarInitEmptyString(rowkeyName + " : String")); + sparkSqlList.add(stateVarInitEmptyString(rowvalueenumName + " : String")); + 
sparkSqlList.add(stateVarInitEmptyString(groupbyattrNames + " : String")); + sparkSqlList.add(stateValInitString(nowTime, DateUtils.now())); + sparkSqlList.add(stateVarInitNumber(calcType,0)); + sparkSqlList.add(stateVarInitNumber(metricId,0.0)); + sparkSqlList.add(stateVarInitNumber(metricValue , "BigDecimal(\"0.0\")")); + sparkSqlList.add(stateVarInitNumber(insertList + " : List[String]" ,"List()")); + sparkSqlList.add(stateVarInitNumber(errorList + " : List[String]" ,"List()")); + sparkSqlList.add(stateValInitString(insertCommonData, ScalaCodeConstant.INSERT_DATA_COMMON_SQL)); + sparkSqlList.add(stateValInitString(insertData, ScalaCodeConstant.getInsertDataSql(metricTypeInt, ruleDataSource, nowTime))); + + sparkSqlList.add(stateVarInitEmptyString(errorMsg)); + sparkSqlList.add(ScalaCodeConstant.errorMsg(errorMsg, databaseName, tableName, attrName, rowkeyName, rowvalueenumName, calcType)); + if(MetricTypeEnum.ORIGIN_STATISTICS.getCode() == metricTypeInt) { + sparkSqlList.add(stateValInitString(querySql, ScalaCodeConstant.getQueryIdentifySql(metricTypeInt, ruleDataSource, attrName))); + sparkSqlList.add(stateValInitString(insertSql, ScalaCodeConstant.getInsertIdentifySql(metricTypeInt, ruleDataSource, attrName, nowTime, ageing, partitionAttr))); + sparkSqlList.add("import java.text.SimpleDateFormat"); + sparkSqlList.add(stateVarInitNumber("sdf", "new SimpleDateFormat(\"yyyyMMdd HH:mm:ss\")")); + sparkSqlList.add("try{"); + sparkSqlList.add("for(i <- 0 to " + dataArray + ".length - 1){"); + sparkSqlList.add("try{"); + sparkSqlList.add(stateNumber(metricValue, "BigDecimal(" + dataArray + "(i)(0).toString)")); + sparkSqlList.add(stateNumber(calcType, dataArray + "(i)(1).toString.toInt")); + sparkSqlList.add(stateNumber(rowkeyName,dataArray + "(i)(2).toString")); + sparkSqlList.add(ScalaCodeConstant.errorMsg(errorMsg, databaseName, tableName, attrName, rowkeyName, rowvalueenumName, calcType)); + sparkSqlList.add(stateValInitNumber(dsDateTime,"sdf.parse(" + dataArray + 
"(i)(3).toString + \" 00:00:00\").getTime/1000")); + sparkSqlList.add(stateVarInitNumber(identifyRs, "stmt.executeQuery(" + querySql + ".format(" + rowkeyName + "))")); + sparkSqlList.add("if(" + identifyRs + ".next()){"); + sparkSqlList.add(stateNumber(metricId, identifyRs + ".getInt(\"metric_id\")")); + sparkSqlList.add("}else{"); + sparkSqlList.add(stateNumber("stmt", "conn.prepareStatement(" + insertSql + ".format(" + rowkeyName + "), 1)")); + sparkSqlList.add("stmt.executeUpdate()"); + sparkSqlList.add(stateVarInitNumber(metricIdRs, "stmt.getGeneratedKeys()")); + sparkSqlList.add("if(" + metricIdRs + ".next()){"); + sparkSqlList.add(stateNumber(metricId, metricIdRs + ".getInt(1)")); + sparkSqlList.add("}"); + sparkSqlList.add("}"); + sparkSqlList.add(stateNumber(insertList, insertData + ".format(" + metricId + "," + metricValue + "," + dsDateTime +") +: " + insertList)); + sparkSqlList.add("} catch {"); + }else if(MetricTypeEnum.TABLE_STATISTICS.getCode() == metricTypeInt) { + sparkSqlList.add(stateValInitString(querySql, ScalaCodeConstant.getQueryIdentifySql(metricTypeInt, ruleDataSource, attrName))); + sparkSqlList.add(stateValInitString(insertSql, ScalaCodeConstant.getInsertIdentifySql(metricTypeInt, ruleDataSource, attrName, nowTime, ageing, partitionAttr))); + sparkSqlList.add("try{"); + sparkSqlList.add("for(i <- 0 to " + dataArray + ".length - 1){"); + sparkSqlList.add("for(j <- 0 to " + dataArray + "(i).length - 1){"); + sparkSqlList.add("try{"); + // 无数据补零 + sparkSqlList.add("try{"); + sparkSqlList.add("if(j == 0){"); + sparkSqlList.add(stateNumber(calcType,1)); + sparkSqlList.add("}else if(j == 1){"); + sparkSqlList.add(stateNumber(calcType,5)); + sparkSqlList.add("}"); + sparkSqlList.add(stateNumber(metricValue, "BigDecimal(" + dataArray + "(i)(j).toString)")); + sparkSqlList.add("} catch {"); + sparkSqlList.add("case e: Exception => {"); + sparkSqlList.add(stateNumber(metricValue, "BigDecimal(\"0.0\")")); + sparkSqlList.add("}}"); + 
sparkSqlList.add(ScalaCodeConstant.errorMsg(errorMsg, databaseName, tableName, attrName, rowkeyName, rowvalueenumName, calcType)); + sparkSqlList.add(stateVarInitNumber(identifyRs, "stmt.executeQuery(" + querySql + ".format(" + calcType + "))")); + sparkSqlList.add("if(" + identifyRs + ".next()){"); + sparkSqlList.add(stateNumber(metricId, identifyRs + ".getInt(\"metric_id\")")); + sparkSqlList.add("}else{"); + sparkSqlList.add(stateNumber("stmt", "conn.prepareStatement(" + insertSql + ".format(" + calcType + "), 1)")); + sparkSqlList.add("stmt.executeUpdate()"); + sparkSqlList.add(stateVarInitNumber(metricIdRs, "stmt.getGeneratedKeys()")); + sparkSqlList.add("if(" + metricIdRs + ".next()){"); + sparkSqlList.add(stateNumber(metricId, metricIdRs + ".getInt(1)")); + sparkSqlList.add("}"); + sparkSqlList.add("}"); + sparkSqlList.add(stateNumber(insertList, insertData + ".format(" + metricId + "," + metricValue + "," + dataTime +") +: " + insertList)); + sparkSqlList.add("} catch {"); + + }else if(MetricTypeEnum.ENUM_STATISTICS.getCode() == metricTypeInt){ + sparkSqlList.add(stateValInitString(querySql, ScalaCodeConstant.getQueryIdentifySql(metricTypeInt, ruleDataSource, attrName))); + sparkSqlList.add(stateValInitString(insertSql, ScalaCodeConstant.getInsertIdentifySql(metricTypeInt, ruleDataSource, attrName, nowTime, ageing, partitionAttr))); + + + sparkSqlList.add("if(" + dataArray + ".length > " + enumMaxLength + "){"); + sparkSqlList.add("throw new RuntimeException(\"枚举类型规则计算数据量已超过最大长度,请检查分组字段\")"); + sparkSqlList.add("}"); + sparkSqlList.add("try{"); + sparkSqlList.add("for(i <- 0 to " + dataArray + ".length - 1){"); + sparkSqlList.add("for(j <- 0 to " + dataArray + "(i).length - 2){"); + sparkSqlList.add("try{"); + sparkSqlList.add("if(j == 0){"); + sparkSqlList.add(stateNumber(calcType,5)); + sparkSqlList.add("}else if(j == 1){"); + sparkSqlList.add(stateNumber(calcType,8)); + sparkSqlList.add("}"); + sparkSqlList.add(ScalaCodeConstant.errorMsg(errorMsg, 
databaseName, tableName, attrName, rowkeyName, rowkeyName, calcType)); + sparkSqlList.add("try{"); + sparkSqlList.add(stateNumber(rowvalueenumName,dataArray + "(i)(2).toString")); + sparkSqlList.add("} catch {"); + sparkSqlList.add("case e: Exception => {"); + sparkSqlList.add(stateNumber(rowvalueenumName,"\"NULL\"")); +// sparkSqlList.add(stateNumber(errorList, "(" + errorMsg + "+\",name为空,\") +: " + errorList)); + sparkSqlList.add("}"); + sparkSqlList.add("}"); + sparkSqlList.add("try{"); + sparkSqlList.add(stateNumber(metricValue, "BigDecimal(" + dataArray + "(i)(j).toString)")); + sparkSqlList.add("} catch {"); + sparkSqlList.add("case e: Exception => throw new NullPointerException(\"metric_value值为空\")"); + sparkSqlList.add("}"); + sparkSqlList.add(ScalaCodeConstant.errorMsg(errorMsg, databaseName, tableName, attrName, rowkeyName, rowvalueenumName, calcType)); + sparkSqlList.add(stateVarInitNumber(identifyRs, "stmt.executeQuery(" + querySql + ".format(" + rowvalueenumName + "," + calcType +"))")); + sparkSqlList.add("if(" + identifyRs + ".next()){"); + sparkSqlList.add(stateNumber(metricId, identifyRs + ".getInt(\"metric_id\")")); + sparkSqlList.add("}else{"); + sparkSqlList.add(stateNumber("stmt", "conn.prepareStatement(" + insertSql + ".format(" + calcType + "," + rowvalueenumName +"), 1)")); + sparkSqlList.add("stmt.executeUpdate()"); + sparkSqlList.add(stateVarInitNumber(metricIdRs, "stmt.getGeneratedKeys()")); + sparkSqlList.add("if(" + metricIdRs + ".next()){"); + sparkSqlList.add(stateNumber(metricId, metricIdRs + ".getInt(1)")); + sparkSqlList.add("}"); + sparkSqlList.add("}"); + sparkSqlList.add(stateNumber(insertList, insertData + ".format(" + metricId + "," + metricValue + "," + dataTime +") +: " + insertList)); + sparkSqlList.add("} catch {"); + + } + + sparkSqlList.add(ScalaCodeConstant.exceptionCollect("NullPointerException", errorList, "指标数据为空", errorMsg)); + sparkSqlList.add(ScalaCodeConstant.exceptionCollect("NumberFormatException", errorList, 
"指标数据为空", errorMsg)); + sparkSqlList.add(ScalaCodeConstant.exceptionCollect("Exception", errorList, "scala执行异常", errorMsg)); + sparkSqlList.add("}"); + sparkSqlList.add("}"); + if(MetricTypeEnum.TABLE_STATISTICS.getCode() == metricTypeInt || MetricTypeEnum.ENUM_STATISTICS.getCode() == metricTypeInt) { + sparkSqlList.add("}"); + } + + sparkSqlList.add("try{"); + sparkSqlList.add(insertList + ".grouped(" + batchInsertSize + ").toList.foreach { " + splitList + " =>"); + sparkSqlList.add("stmt.executeUpdate(" + insertCommonData + ".format(" + splitList + ".reverse.mkString(\",\")))"); + sparkSqlList.add("}"); + sparkSqlList.add("}catch{"); + sparkSqlList.add(ScalaCodeConstant.exceptionCollect("Exception", errorList, "写入指标数据异常", errorMsg)); + sparkSqlList.add("}"); + sparkSqlList.add("if(!" + errorList + ".isEmpty){"); + sparkSqlList.add("if(" + metricType + " == 1 && " + errorList + ".reverse.mkString(\"\\n\").indexOf(\"指标数据为空:\") != -1" + "){"); + sparkSqlList.add(variableNamePrefix + " = " + variableNamePrefix + ".na.drop()"); + sparkSqlList.add("}else{"); + sparkSqlList.add("throw new RuntimeException(" + errorList + ".reverse.mkString(\"\\n\"))"); + sparkSqlList.add("}"); + sparkSqlList.add("}"); + sparkSqlList.add("}catch{"); + sparkSqlList.add("case e: Exception => throw e"); + sparkSqlList.add("}finally{"); + sparkSqlList.add("stmt.close()"); + sparkSqlList.add("conn.close()"); + sparkSqlList.add("}"); + + } } /** @@ -1327,6 +2903,7 @@ private List saveStatisticAndSaveMySqlSentence(String workFlowVersion, L * @param partOfVariableName * @param connParamMaps * @param runDate + * @param runToday * @param selectResult * @param midTableReUse * @param unionAllForSaveResult @@ -1337,10 +2914,10 @@ private List saveStatisticAndSaveMySqlSentence(String workFlowVersion, L * @return */ private List generateSparkSqlAndSaveSentence(String sql, String saveTableName, Rule rule, String partOfVariableName, List> connParamMaps - , String runDate, Map selectResult, boolean 
midTableReUse, boolean unionAllForSaveResult, String filterFields, List> tableEnvs, boolean shareConnect, String shareFromPart) { + , String runDate, String runToday, Map selectResult, boolean midTableReUse, boolean unionAllForSaveResult, String filterFields, List> tableEnvs, boolean shareConnect, String shareFromPart, Map execParams) { String sparkSqlSentence; List sparkSqlList = new ArrayList<>(); - boolean linePrimaryRepeat = QualitisConstants.EXPECT_LINES_NOT_REPEAT_ID.equals(rule.getTemplate().getId()) || QualitisConstants.EXPECT_DATA_NOT_REPEAT_ID.equals(rule.getTemplate().getId()); + boolean linePrimaryRepeat = QualitisConstants.isRepeatDataCheck(rule.getTemplate().getEnName()); if (CollectionUtils.isEmpty(connParamMaps)) { if (CollectionUtils.isNotEmpty(tableEnvs)) { @@ -1350,7 +2927,7 @@ private List generateSparkSqlAndSaveSentence(String sql, String saveTabl StringBuilder envName = new StringBuilder(); for (String replaceStr : subList) { String[] subStrs = replaceStr.split(SpecCharEnum.COLON.getValue()); - envName.append("[").append(subStrs[2]).append("]"); + envName.append("[").append(subStrs[2].split(SpecCharEnum.MINUS.getValue())[0]).append("]"); String registerTable = subStrs[1]; String realTable = subStrs[0]; @@ -1358,11 +2935,11 @@ private List generateSparkSqlAndSaveSentence(String sql, String saveTabl } String partOfVariableNameWithEnv = partOfVariableName + envName.toString().replace("[", "").replace("]", ""); sparkSqlList.add("// 生成规则 " + partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + " 的校验查询代码"); - sparkSqlSentence = getSparkSqlSentence(sql, partOfVariableNameWithEnv, "", "", ""); + sparkSqlSentence = getSparkSqlSentence(sql, partOfVariableNameWithEnv, "", "", "", RuleTypeEnum.CUSTOM_RULE.getCode().equals(rule.getRuleType())); sparkSqlList.add(sparkSqlSentence); - String variableFormer = getVariableNameByRule(partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[0], partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); - 
String variableLatter = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); + String variableFormer = getVariableNameByRule(partOfVariableNameWithEnv.split(SpecCharEnum.EQUAL.getValue())[0], partOfVariableNameWithEnv.split(SpecCharEnum.EQUAL.getValue())[1]); + String variableLatter = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableNameWithEnv.split(SpecCharEnum.EQUAL.getValue())[1]); formatSchema(sparkSqlList, partOfVariableName, variableFormer, variableLatter); selectResult.put(variableLatter, envName.toString()); @@ -1373,10 +2950,18 @@ private List generateSparkSqlAndSaveSentence(String sql, String saveTabl if (linePrimaryRepeat) { sparkSqlList.add("val UUID = java.util.UUID.randomUUID.toString"); } - sparkSqlSentence = getSparkSqlSentence(sql, partOfVariableName, filterFields, shareFromPart, ""); + if(compareProjectName(rule)){ + sparkSqlSentence = getSparkSqlSentence(sql, partOfVariableName, filterFields); + }else if(StringUtils.isNotBlank(intellectCheckFieldsProjectName) && intellectCheckFieldsProjectName.equals(rule.getProject().getName())){ + sparkSqlSentence = getSparkSqlSentence(sql, partOfVariableName, filterFields); + }else{ + sparkSqlSentence = getSparkSqlSentence(sql, partOfVariableName, filterFields, shareFromPart, "", RuleTypeEnum.CUSTOM_RULE.getCode().equals(rule.getRuleType())); + } LOGGER.info("Succeed to generate spark sql. 
sentence: {}", sparkSqlSentence); sparkSqlList.add("// 生成规则 " + partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + " 的校验查询代码"); sparkSqlList.add(sparkSqlSentence); + // 特殊处理规则生成scala代码 + handleCustomRuleCode(sparkSqlList, rule, partOfVariableName,execParams); if (linePrimaryRepeat) { handleLinePrimaryRepeat(sparkSqlList, partOfVariableName); @@ -1385,29 +2970,24 @@ private List generateSparkSqlAndSaveSentence(String sql, String saveTabl String variableFormer = getVariableNameByRule(partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[0], partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); String variableLatter = getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1]); formatSchema(sparkSqlList, partOfVariableName, variableFormer, variableLatter); + analyseFieldsCountCode(sparkSqlList, rule, partOfVariableName,execParams, variableFormer); - if (Boolean.TRUE.equals(rule.getTemplate().getSaveMidTable())) { + if (StringUtils.isNotEmpty(saveTableName) && ! MID_TABLE_NAME_PATTERN.matcher(saveTableName).find()) { sparkSqlList.addAll(getSaveMidTableSentenceSettings()); - sparkSqlList.addAll(getSaveMidTableSentence(saveTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, midTableReUse)); + sparkSqlList.addAll(getSaveMidTableSentence(saveTableName, partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1], runDate, runToday, midTableReUse)); LOGGER.info("Succeed to generate spark sql. sentence."); } } return sparkSqlList; } else { + boolean saveMidTable = StringUtils.isNotEmpty(saveTableName) && ! MID_TABLE_NAME_PATTERN.matcher(saveTableName).find(); // Repeat with envs. When polymerization, repeat one more time. 
- selectResult.putAll(getSparkSqlSententceWithMysqlConnParams(sql, partOfVariableName, connParamMaps, sparkSqlList, linePrimaryRepeat, rule.getTemplate().getSaveMidTable(), saveTableName, runDate, midTableReUse, unionAllForSaveResult, filterFields, shareConnect, shareFromPart)); + selectResult.putAll(getSparkSqlSententceWithMysqlConnParams(rule, sql, partOfVariableName, connParamMaps, sparkSqlList, linePrimaryRepeat, saveMidTable, saveTableName, runDate, runToday, midTableReUse, unionAllForSaveResult, filterFields, shareConnect, shareFromPart)); } return sparkSqlList; } - private void handleLinePrimaryRepeat(List sparkSqlList, Integer count) { - sparkSqlList.add("val fillNullDF_" + count + " = " + getVariableName(count) + ".na.fill(UUID)"); - sparkSqlList.add("val fillNullWithFullLineWithHashDF_" + count + " = fillNullDF_" + count + ".withColumn(\"qualitis_full_line_value\", to_json(struct($\"*\"))).withColumn(\"md5\", md5(to_json(struct($\"*\"))))"); - sparkSqlList.add("fillNullWithFullLineWithHashDF_" + count + ".registerTempTable(\"tmp_table_" + count + "\")"); - sparkSqlList.add("val " + getVariableName(count) + " = spark.sql(\"select md5, count(1) as md5_count from tmp_table_" + count + " group by md5 having count(*) > 1\")"); - } - private void handleLinePrimaryRepeat(List sparkSqlList, String fullName) { String suffix = fullName.split(SpecCharEnum.EQUAL.getValue())[1]; sparkSqlList.add("val fillNullDF_" + suffix + " = " + getVariableNameByRule(fullName.split(SpecCharEnum.EQUAL.getValue())[0], suffix) + ".na.fill(UUID)"); @@ -1416,11 +2996,11 @@ private void handleLinePrimaryRepeat(List sparkSqlList, String fullName) sparkSqlList.add("val " + getVariableNameByRule(fullName.split(SpecCharEnum.EQUAL.getValue())[0], suffix) + " = spark.sql(\"select md5, count(1) as md5_count from tmp_table_" + suffix + " group by md5 having count(*) > 1\")"); } - private Map getSparkSqlSententceWithMysqlConnParams(String sql, String partOfVariableName, List> connParamMaps, List 
sparkSqlList - , boolean linePrimaryRepeat, Boolean saveMidTable, String saveTableName, String runDate, boolean midTableReUse, boolean unionAllForSaveResult, String filterFields, boolean shareConnect, String shareFromPart) { + private Map getSparkSqlSententceWithMysqlConnParams(Rule rule, String sql, String partOfVariableName, List> connParamMaps, List sparkSqlList + , boolean linePrimaryRepeat, Boolean saveMidTable, String saveTableName, String runDate, String runToday, boolean midTableReUse, boolean unionAllForSaveResult, String filterFields, boolean shareConnect, String shareFromPart) { Map selectResult = new HashMap<>(connParamMaps.size()); for (Map connParams : connParamMaps) { - String envName = (String) connParams.get("envName"); + String envName = ((String) connParams.get("envName")).split(SpecCharEnum.MINUS.getValue())[0]; if (StringUtils.isEmpty(envName)) { continue; } @@ -1429,7 +3009,7 @@ private Map getSparkSqlSententceWithMysqlConnParams(String sql, if (shareConnect) { sparkSqlList.add("// 生成规则 " + partOfVariableName.split(SpecCharEnum.EQUAL.getValue())[1] + ",在环境 " + envName + " 的校验查询代码"); - sparkSqlList.add(getSparkSqlSentence(sql, tmpVariableName, filterFields, shareFromPart, SpecCharEnum.BOTTOM_BAR.getValue() + envName)); + sparkSqlList.add(getSparkSqlSentence(sql, tmpVariableName, filterFields, shareFromPart, SpecCharEnum.BOTTOM_BAR.getValue() + envName, RuleTypeEnum.CUSTOM_RULE.getCode().equals(rule.getRuleType()))); } else { String tmp = sql.replace("\"", "\\\""); @@ -1461,7 +3041,7 @@ private Map getSparkSqlSententceWithMysqlConnParams(String sql, if (saveMidTable) { sparkSqlList.addAll(getSaveMidTableSentenceSettings()); - sparkSqlList.addAll(getSaveMidTableSentence(saveTableName, runDate, midTableReUse, selectResult)); + sparkSqlList.addAll(getSaveMidTableSentence(saveTableName, runDate, runToday, midTableReUse, selectResult)); } return selectResult; @@ -1493,7 +3073,7 @@ private void unionAllSaveResult(String lastVariable, Map selectR } } - 
private List getSaveMidTableSentence(String saveMidTableName, String runDate, boolean midTableReUse, Map selectResult) { + private List getSaveMidTableSentence(String saveMidTableName, String runDate, String runToday, boolean midTableReUse, Map selectResult) { SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd"); Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.DATE, -7); @@ -1519,13 +3099,13 @@ private List getSaveMidTableSentence(String saveMidTableName, String run for (Map.Entry entry : selectResult.entrySet()) { String key = entry.getKey(); String value = entry.getValue(); - saveSqls.addAll(parsefirstHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_INSERT_OVERWRITE_PARTITION_WITH_ENV, value, key, saveMidTableName, runDate, date, format)); + saveSqls.addAll(parsefirstHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_INSERT_OVERWRITE_PARTITION_WITH_ENV, value, key, saveMidTableName, runDate, runToday, date, format)); } saveSqls.add(ELSE_EXIST); for (Map.Entry entry : selectResult.entrySet()) { String key = entry.getKey(); String value = entry.getValue(); - saveSqls.addAll(parseSecondHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_CREATE_WITH_ENV, value, key, saveMidTableName, runDate, date, format)); + saveSqls.addAll(parseSecondHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_CREATE_WITH_ENV, value, key, saveMidTableName, runDate, runToday, date, format)); } saveSqls.add(END_EXIST); return saveSqls; @@ -1543,9 +3123,9 @@ private void formatSchema(List sparkSqlList, String variableFormer, Stri private void formatSchema(List sparkSqlList, String prefix, String variableFormer, String variableLatter) { if (QualitisConstants.BDAP.equals(localConfig.getCluster())) { prefix = prefix.split(SpecCharEnum.EQUAL.getValue())[1]; - String str1 = "val " + prefix + "_schemas = " + variableFormer + ".schema.fields.map(f => f.name).toList"; - String str2 = "val " + prefix + "_replacedSchemas = " + prefix + "_schemas.map(s => s.replaceAll(\"[()]\", \"\")).toList"; - String str3 = "val " + 
variableLatter + " = " + variableFormer + ".toDF(" + prefix + "_replacedSchemas: _*)"; + String str1 = "val " + "schemas_" + prefix + " = " + variableFormer + ".schema.fields.map(f => f.name).toList"; + String str2 = "val replacedSchemas_" + prefix + " = schemas_" + prefix + ".map(s => s.replaceAll(\"[()]\", \"\")).toList"; + String str3 = "val " + variableLatter + " = " + variableFormer + ".toDF(" + "replacedSchemas_" + prefix + ": _*)"; sparkSqlList.add(str1); sparkSqlList.add(str2); sparkSqlList.add(str3); @@ -1563,7 +3143,7 @@ private List getSaveMidTableSentenceSettings() { return settings; } - private List getSaveMidTableSentence(String saveMidTableName, Integer count, String runDate, boolean midTableReUse) { + private List getSaveMidTableSentence(String saveMidTableName, Integer count, String runDate, String runToday, boolean midTableReUse) { SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd"); Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.DATE, -7); @@ -1586,15 +3166,15 @@ private List getSaveMidTableSentence(String saveMidTableName, Integer co String foreachDrop = "partition_list_" + saveMidTableName.replace(SpecCharEnum.PERIOD_NO_ESCAPE.getValue(), SpecCharEnum.BOTTOM_BAR.getValue()) + ".foreach(f => spark.sql(\"alter table " + saveMidTableName + " drop if exists partition (qualitis_partition_key=\" + f + \")\"))"; saveSqls.add(foreachDrop); - saveSqls.addAll(parsefirstHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_INSERT_OVERWRITE_PARTITION, "", getVariableName(count), saveMidTableName, runDate, date, format)); + saveSqls.addAll(parsefirstHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_INSERT_OVERWRITE_PARTITION, "", getVariableName(count), saveMidTableName, runDate, runToday, date, format)); saveSqls.add(ELSE_EXIST); - saveSqls.addAll(parseSecondHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_CREATE, "", getVariableName(count), saveMidTableName, runDate, date, format)); + saveSqls.addAll(parseSecondHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_CREATE, "", 
getVariableName(count), saveMidTableName, runDate, runToday, date, format)); saveSqls.add(END_EXIST); return saveSqls; } - private List getSaveMidTableSentence(String saveMidTableName, String partOfVariableName, String runDate, boolean midTableReUse) { + private List getSaveMidTableSentence(String saveMidTableName, String partOfVariableName, String runDate, String runToday, boolean midTableReUse) { SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd"); Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.DATE, -7); @@ -1617,25 +3197,25 @@ private List getSaveMidTableSentence(String saveMidTableName, String par String foreachDrop = "partition_list_" + saveMidTableName.replace(SpecCharEnum.PERIOD_NO_ESCAPE.getValue(), SpecCharEnum.BOTTOM_BAR.getValue()) + ".foreach(f => spark.sql(\"alter table " + saveMidTableName + " drop if exists partition (qualitis_partition_key=\" + f + \")\"))"; saveSqls.add(foreachDrop); - saveSqls.addAll(parsefirstHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_INSERT_OVERWRITE_PARTITION, "", getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName), saveMidTableName, runDate, date, format)); + saveSqls.addAll(parsefirstHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_INSERT_OVERWRITE_PARTITION, "", getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName), saveMidTableName, runDate, runToday, date, format)); saveSqls.add(ELSE_EXIST); - saveSqls.addAll(parseSecondHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_CREATE, "", getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName), saveMidTableName, runDate, date, format)); + saveSqls.addAll(parseSecondHalf(SAVE_MID_TABLE_SENTENCE_TEMPLATE_CREATE, "", getVariableNameByRule(OptTypeEnum.STATISTIC_DF.getMessage(), partOfVariableName), saveMidTableName, runDate, runToday, date, format)); saveSqls.add(END_EXIST); return saveSqls; } private List parsefirstHalf(String saveMidTableSentenceTemplateInsertOverwritePartition, String envName, String 
val, String saveMidTableName - , String runDate, Date date, SimpleDateFormat format) { + , String runDate, String runToday, Date date, SimpleDateFormat format) { List saveSqls = new ArrayList<>(); - + String result = getLastRunTime(runDate, runToday, date, format); if (StringUtils.isEmpty(envName) && StringUtils.isEmpty(val)) { saveSqls.add(saveMidTableSentenceTemplateInsertOverwritePartition - .replace("${QUALITIS_PARTITION_KEY}", StringUtils.isBlank(runDate) ? format.format(date) : runDate) + .replace("${QUALITIS_PARTITION_KEY}", result) .replace(SAVE_MID_TABLE_NAME_PLACEHOLDER, saveMidTableName).replace(VARIABLE_NAME_PLACEHOLDER, val)); } else { saveSqls.add(saveMidTableSentenceTemplateInsertOverwritePartition - .replace("${QUALITIS_PARTITION_KEY}", StringUtils.isBlank(runDate) ? format.format(date) : runDate) + .replace("${QUALITIS_PARTITION_KEY}", result) .replace("${QUALITIS_PARTITION_KEY_ENV}", envName) .replace(SAVE_MID_TABLE_NAME_PLACEHOLDER, saveMidTableName).replace(VARIABLE_NAME_PLACEHOLDER, val)); } @@ -1643,37 +3223,47 @@ private List parsefirstHalf(String saveMidTableSentenceTemplateInsertOve return saveSqls; } - private List parseSecondHalf(String saveMidTableSentenceTemplateCreate, String envName, String val, String saveMidTableName, String runDate + public String getLastRunTime(String runDate, String runToday, Date date, SimpleDateFormat format) { + if (StringUtils.isNotBlank(runDate)) { + return runDate; + } + return StringUtils.isNotBlank(runToday) ? 
runToday : format.format(date); + } + + private List parseSecondHalf(String saveMidTableSentenceTemplateCreate, String envName, String val, String saveMidTableName, String runDate, String runToday , Date date, SimpleDateFormat format) { List saveSqls = new ArrayList<>(); + String result = getLastRunTime(runDate, runToday, date, format); if (StringUtils.isEmpty(envName) && StringUtils.isEmpty(val)) { saveSqls.add( - saveMidTableSentenceTemplateCreate.replace("${QUALITIS_PARTITION_KEY}", StringUtils.isBlank(runDate) ? format.format(date) : runDate) + saveMidTableSentenceTemplateCreate.replace("${QUALITIS_PARTITION_KEY}", result) .replace(SAVE_MID_TABLE_NAME_PLACEHOLDER, saveMidTableName).replace(VARIABLE_NAME_PLACEHOLDER, val)); } else { saveSqls.add( - saveMidTableSentenceTemplateCreate.replace("${QUALITIS_PARTITION_KEY}", StringUtils.isBlank(runDate) ? format.format(date) : runDate) + saveMidTableSentenceTemplateCreate.replace("${QUALITIS_PARTITION_KEY}", result) .replace("${QUALITIS_PARTITION_KEY_ENV}", envName) .replace(SAVE_MID_TABLE_NAME_PLACEHOLDER, saveMidTableName).replace(VARIABLE_NAME_PLACEHOLDER, val)); } return saveSqls; } - private String getSparkSqlSentence(String sql, Integer count, String filterFields) { - sql = sql.replace("\"", "\\\""); + private String getSparkSqlSentence(String sql, String fullName, String filterFields, String shareFromPart, String envName, boolean isCustomRule) { + if (! 
isCustomRule) { + sql = sql.replace("\"", "\\\""); + } + + if (StringUtils.isNotEmpty(shareFromPart)) { + sql = sql.replace(shareFromPart, commonTableName + envName); + } String str = SPARK_SQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sql); if (StringUtils.isNotEmpty(filterFields)) { str += filterFields; } - return str.replace(VARIABLE_NAME_PLACEHOLDER, getVariableName(count)); + return str.replace(VARIABLE_NAME_PLACEHOLDER, getVariableNameByRule(fullName.split(SpecCharEnum.EQUAL.getValue())[0], fullName.split(SpecCharEnum.EQUAL.getValue())[1])); } - private String getSparkSqlSentence(String sql, String fullName, String filterFields, String shareFromPart, String envName) { + private String getSparkSqlSentence(String sql, String fullName, String filterFields) { sql = sql.replace("\"", "\\\""); - - if (StringUtils.isNotEmpty(shareFromPart)) { - sql = sql.replace(shareFromPart, commonTableName + envName); - } String str = SPARK_SQL_TEMPLATE.replace(SPARK_SQL_TEMPLATE_PLACEHOLDER, sql); if (StringUtils.isNotEmpty(filterFields)) { str += filterFields; @@ -1687,24 +3277,37 @@ private String getSparkSqlSentence(String sql, String fullName, String filterFie * @param template * @param variables * @param filter - * @param realFilter * @param realColumn * @param dbTableMap for pick up source db.table & target db.table * @param date + * @param standardValueVersionId + * @param shareFromPart + * @param runDate + * @param runToday + * @param engineType * @return * @throws ConvertException */ - private String replaceVariable(String template, List variables, String filter, StringBuilder realFilter, StringBuilder realColumn - , Map dbTableMap, Date date, String createUser) throws ConvertException, UnExpectedRequestException, MetaDataAcquireFailedException { + private String replaceVariable(String template, List variables, String filter, StringBuilder realColumn, Map dbTableMap, Date date, Long standardValueVersionId, String createUser, String shareFromPart, String runDate, 
String runToday, String engineType) throws ConvertException, UnExpectedRequestException, MetaDataAcquireFailedException { String sqlAction = template; + + if (StringUtils.isNotEmpty(shareFromPart)) { + sqlAction = sqlAction.replace(FILTER_PLACEHOLDER, "true"); + } if (StringUtils.isNotBlank(filter)) { + if (StringUtils.isNotBlank(runDate)) { + filter = filter.replace("${run_date}", runDate); +// filter = filter.replace("${run_date_std}", runDate); + } + if (StringUtils.isNotBlank(runToday)) { + filter = filter.replace("${run_today}", runToday); +// filter = filter.replace("${run_today_std}", runToday); + } + String tmpfilter = DateExprReplaceUtil.replaceFilter(date, filter); sqlAction = sqlAction.replace(FILTER_PLACEHOLDER, tmpfilter); - realFilter.append(tmpfilter); LOGGER.info("Succeed to replace {} into {}", FILTER_PLACEHOLDER, tmpfilter); - } else { - realFilter.append("true"); } for (RuleVariable ruleVariable : variables) { String midInputMetaPlaceHolder = ruleVariable.getTemplateMidTableInputMeta().getPlaceholder(); @@ -1726,8 +3329,73 @@ private String replaceVariable(String template, List variables, St } else { dbTableMap.put("right_database", ""); } + } else if ("left_collect_sql".equals(midInputMetaPlaceHolder)) { + String value = ruleVariable.getValue(); + if (StringUtils.isNotEmpty(value) && StringUtils.isNotBlank(runDate)) { + value = value.replace("${run_date}", runDate); +// value = value.replace("${run_date_std}", runDate); + } + if (StringUtils.isNotEmpty(value) && StringUtils.isNotBlank(runToday)) { + value = value.replace("${run_today}", runToday); +// value = value.replace("${run_today_std}", runToday); + } + ruleVariable.setOriginValue(DateExprReplaceUtil.replaceRunDate(date, value)); + dbTableMap.put("left_collect_sql", ruleVariable.getOriginValue()); + } else if ("right_collect_sql".equals(midInputMetaPlaceHolder)) { + String value = ruleVariable.getValue(); + if (StringUtils.isNotEmpty(value) && StringUtils.isNotBlank(runDate)) { + value = 
value.replace("${run_date}", runDate); +// value = value.replace("${run_date_std}", runDate); + } + if (StringUtils.isNotEmpty(value) && StringUtils.isNotBlank(runToday)) { + value = value.replace("${run_today}", runToday); +// value = value.replace("${run_today_std}", runToday); + } + ruleVariable.setOriginValue(DateExprReplaceUtil.replaceRunDate(date, value)); + dbTableMap.put("right_collect_sql", ruleVariable.getOriginValue()); } else if (TemplateInputTypeEnum.FIELD.getCode().equals(ruleVariable.getTemplateMidTableInputMeta().getInputType()) && Boolean.TRUE.equals(ruleVariable.getTemplateMidTableInputMeta().getFieldMultipleChoice())) { realColumn.append(ruleVariable.getValue()); + } else if (TemplateInputTypeEnum.STANDARD_VALUE_EXPRESSION.getCode().equals(ruleVariable.getTemplateMidTableInputMeta().getInputType()) && standardValueVersionId != null) { + LOGGER.info("Start to check current standard value version is or not the most new version. Version ID: " + standardValueVersionId); + StandardValueVersion standardValueVersion = standardValueVersionDao.findById(standardValueVersionId); + // DMS 实时数据同步 如何保证拉取全量的dms的"编码取值" size临时采用Integer.MAX_VALUE + // 1.没有编码,提交异常返回; 2.有编码,但编码为空串,覆盖 + if (standardValueVersion != null) { + LOGGER.info("Start to Real time synchronization of DMS encoding values"); + if (StringUtils.isNotBlank(standardValueVersion.getCode())) { + Map standardCodeTable = dataStandardClient.getStandardCodeTable(0, Integer.MAX_VALUE, createUser, standardValueVersion.getCode()); + List> encodingValueInfo = (List>) standardCodeTable.get("content"); + if (CollectionUtils.isEmpty(encodingValueInfo)) { + throw new UnExpectedRequestException("{&GET_ENCODING_VALUE_FROM_DATASHAPIS_IS_EMPTY}"); + } + + StringBuilder temp = new StringBuilder(); + for (Map map : encodingValueInfo) { + if (map.get("codeTableValue") == null) { + continue; + } + + if (!"".equals(map.get("codeTableValue").toString())) { + temp.append("'" + map.get("codeTableValue").toString() + 
"'").append(SpecCharEnum.COMMA.getValue()); + } + } + + String result = temp != null && temp.length() > 0 ? temp.deleteCharAt(temp.length() - 1).toString() : ""; + standardValueVersion.setContent(result); + standardValueVersionDao.saveStandardValueVersion(standardValueVersion); + if (StringUtils.isNotBlank(result) && EngineTypeEnum.TRINO_ENGINE.getMessage().equals(engineType) && "standard_value".equals(midInputMetaPlaceHolder) && ! result.contains("'") && ! result.contains("\"")) { + result = StringUtils.join(Arrays.asList(result.split(SpecCharEnum.COMMA.getValue())).stream().map(ele -> "'" + ele + "'").collect(Collectors.toList()), SpecCharEnum.COMMA.getValue()); + } + sqlAction = sqlAction.replaceAll(placeHolder, result); + } else { + String result = standardValueVersion.getContent(); + if (StringUtils.isNotBlank(result) && EngineTypeEnum.TRINO_ENGINE.getMessage().equals(engineType) && "standard_value".equals(midInputMetaPlaceHolder) && ! result.contains("'") && ! result.contains("\"")) { + result = StringUtils.join(Arrays.asList(result.split(SpecCharEnum.COMMA.getValue())).stream().map(ele -> "'" + ele + "'").collect(Collectors.toList()), SpecCharEnum.COMMA.getValue()); + } + sqlAction = sqlAction.replaceAll(placeHolder, result); + } + continue; + } } // Fix issue of wedget node in the front. 
if ("\\$\\{fields}".equals(placeHolder)) { @@ -1743,7 +3411,16 @@ private String replaceVariable(String template, List variables, St if (ruleVariable.getValue() == null || "".equals(ruleVariable.getValue())) { sqlAction = sqlAction.replaceAll(placeHolder + ".", ""); } else { - sqlAction = sqlAction.replaceAll(placeHolder, ruleVariable.getValue()); + if (placeHolder.contains("left_collect_sql") || placeHolder.contains("right_collect_sql")) { + sqlAction = sqlAction.replaceAll(placeHolder, ruleVariable.getOriginValue()); + } else { + String ruleVariableValue = ruleVariable.getValue(); + if (StringUtils.isNotBlank(ruleVariableValue) && EngineTypeEnum.TRINO_ENGINE.getMessage().equals(engineType) && "enumerated_list".equals(midInputMetaPlaceHolder) && ! ruleVariableValue.contains("'") && ! ruleVariableValue.contains("\"")) { + ruleVariableValue = StringUtils.join(Arrays.asList(ruleVariableValue.split(SpecCharEnum.COMMA.getValue())).stream().map(ele -> "'" + ele + "'").collect(Collectors.toList()), SpecCharEnum.COMMA.getValue()); + } + + sqlAction = sqlAction.replaceAll(placeHolder, ruleVariableValue); + } } LOGGER.info("Succeed to replace {} into {}", placeHolder, ruleVariable.getValue()); } @@ -1781,4 +3458,37 @@ public String getVariableName(Integer count) { public String getVariableNameByRule(String optPhase, String partOfVariableName) { return optPhase + "Of" + partOfVariableName; } + + /** + * check rule name does it contain special characters and replace it to hash value + * + * @param ruleName + * @return + */ + private String checkRuleNameWhetherContainSpecialCharacters(String ruleName) { + String resultRuleName = MID_TABLE_NAME_PATTERN.matcher(ruleName).find() ? 
+ MID_TABLE_NAME_PATTERN.matcher(ruleName).replaceAll("") + SpecCharEnum.BOTTOM_BAR.getValue() + generateShortHash(ruleName).toLowerCase() : ruleName; + if (StringUtils.isNotBlank(resultRuleName)) { + return resultRuleName; + } else { + return ruleName; + } + } + + public static String generateShortHash(String input) { + try { + MessageDigest md = MessageDigest.getInstance("MD5"); + byte[] hash = md.digest(input.getBytes()); + String base64Encoded = Base64.getEncoder().encodeToString(hash); + return base64Encoded.substring(0, 6); + } catch (NoSuchAlgorithmException e) { + LOGGER.error(e.getMessage(), e); + } + return ""; + } + + private boolean compareProjectName(Rule rule){ + return intellectCheckProjectName.equals(rule.getProject().getName()); + } + } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/AbstractTranslator.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/AbstractTranslator.java index 45af8e49..3f24f4a5 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/AbstractTranslator.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/AbstractTranslator.java @@ -36,27 +36,37 @@ public abstract class AbstractTranslator { * @param ruleMetricMaps * @param templateStatisticsInputMetas * @param applicationId + * @param taskId * @param ruleVariables * @param createTime * @param partOfVariableName * @param date * @param runDate + * @param runToday * @param realColumn * @param enumListNewValue * @param numRangeNewValue * @param selectResult - * @param unionAllForSaveResult + * @param unionWay * @return * @throws RuleVariableNotSupportException * @throws RuleVariableNotFoundException */ public abstract List persistenceTranslate(String persistenceTranslate, Long ruleId, Map ruleMetricMaps - , Set templateStatisticsInputMetas, String applicationId, List ruleVariables, String createTime - , String partOfVariableName, String date, String 
runDate, StringBuilder realColumn, boolean enumListNewValue, boolean numRangeNewValue, Map selectResult, boolean unionAllForSaveResult) throws RuleVariableNotSupportException, RuleVariableNotFoundException; + , Set templateStatisticsInputMetas, String applicationId, Long taskId, + List ruleVariables, String createTime + , String partOfVariableName, String date, String runDate,String runToday,StringBuilder realColumn, boolean enumListNewValue, boolean numRangeNewValue, + Map selectResult, int unionWay) throws RuleVariableNotSupportException, RuleVariableNotFoundException; /** * Generate initial statement. * @return */ public abstract List getInitSentence(); + + /** + * get DataSource conn. + * @return + */ + public abstract String getDataSourceConn(); } diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/JdbcTranslator.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/JdbcTranslator.java index 9e394b19..8b6f76b5 100644 --- a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/JdbcTranslator.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/translator/JdbcTranslator.java @@ -16,8 +16,11 @@ package com.webank.wedatasphere.qualitis.translator; +import bsp.encrypt.EncryptUtil; import com.webank.wedatasphere.qualitis.config.TaskDataSourceConfig; import com.webank.wedatasphere.qualitis.constant.OptTypeEnum; +import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.constant.UnionWayEnum; import com.webank.wedatasphere.qualitis.converter.SqlTemplateConverter; import com.webank.wedatasphere.qualitis.dao.RuleMetricDao; import com.webank.wedatasphere.qualitis.exception.RuleVariableNotFoundException; @@ -26,6 +29,8 @@ import com.webank.wedatasphere.qualitis.rule.constant.StatisticsValueTypeEnum; import com.webank.wedatasphere.qualitis.rule.entity.RuleVariable; import 
com.webank.wedatasphere.qualitis.rule.entity.TemplateStatisticsInputMeta; +import java.util.HashMap; +import java.util.stream.Collectors; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; @@ -40,8 +45,12 @@ import javax.ws.rs.core.Context; import java.text.ParseException; import java.text.SimpleDateFormat; -import java.util.*; -import java.util.stream.Collectors; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Generate scala code of connecting mysql and save data into mysql @@ -59,7 +68,7 @@ public class JdbcTranslator extends AbstractTranslator { @Value("${task.persistent.username}") private String mysqlUsername; @Value("${task.persistent.password}") - private String mysqlSecret; + private String mysqlPassword; @Value("${task.persistent.address}") private String mysqlAddress; @Value("${task.persistent.tableName}") @@ -68,8 +77,6 @@ public class JdbcTranslator extends AbstractTranslator { private String newValueTableName; @Value("${task.new_value.save}") private String newValueTableSave; - @Value("${task.persistent.encrypt: false}") - private Boolean isEncrypt; @Autowired private RuleMetricDao ruleMetricDao; @@ -81,6 +88,7 @@ public class JdbcTranslator extends AbstractTranslator { private static final String STATISTICS_VALUE_FIELD_NAME = "value"; private static final String STATISTICS_RULE_ID_FIELD_NAME = "rule_id"; private static final String STATISTICS_APPLICATION_ID_FIELD_NAME = "application_id"; + private static final String STATISTICS_TASK_ID_FIELD_NAME = "task_id"; private static final String STATISTICS_RULE_METRIC_ID_FIELD_NAME = "rule_metric_id"; private static final String STATISTICS_RUN_DATE_FIELD_NAME = "run_date"; private static final String STATISTICS_ENV_NAME_FIELD_NAME = "env_name"; @@ -101,6 +109,7 @@ public class JdbcTranslator extends AbstractTranslator { private 
static final String STATISTICS_RUN_DATE_PLACEHOLDER = "${RUN_DATE}"; private static final String STATISTICS_ENV_NAME_PLACEHOLDER = "${ENV_NAME}"; private static final String STATISTICS_APPLICATION_ID_PLACEHOLDER = "${APPLICATION_ID}"; + private static final String STATISTICS_TASK_ID_PLACEHOLDER = "${TASK_ID}"; private static final String STATISTICS_RULE_METRIC_ID_PLACEHOLDER = "${RULE_METRIC_ID}"; private static final String STATISTICS_RESULT_TYPE_PLACEHOLDER = "${RESULT_TYPE}"; private static final String STATISTICS_CREATE_TIME_PLACEHOLDER = "${CREATE_TIME}"; @@ -120,32 +129,36 @@ public class JdbcTranslator extends AbstractTranslator { private static final Logger LOGGER = LoggerFactory.getLogger(JdbcTranslator.class); + private HttpServletRequest httpServletRequest; public JdbcTranslator(@Context HttpServletRequest httpServletRequest) { this.httpServletRequest = httpServletRequest; } + @Override + public String getDataSourceConn() { + return "DriverManager.getConnection(\"" + mysqlAddress + "\", \"" + mysqlUsername + "\", \"" + mysqlPassword + "\")"; + } + /** * Initial statement */ @PostConstruct public void init() { usernamePropSentence = PROP_VARIABLE_NAME + ".setProperty(\"user\", \"" + mysqlUsername + "\");"; - if (isEncrypt) { -// String passwordPrivateKey = taskDataSourceConfig.getPrivateKey(); -// try { -// mysqlSecret = EncryptUtil.decrypt(passwordPrivateKey, taskDataSourceConfig.getPassword()); -// } catch (Exception e) { -// LOGGER.error("Decrypt mysqlsec password exception.", e); -// } - } else { - mysqlSecret = taskDataSourceConfig.getPassword(); + String passwordPrivateKey = taskDataSourceConfig.getPrivateKey(); + String password = taskDataSourceConfig.getPassword(); + try { + mysqlPassword = EncryptUtil.decrypt(passwordPrivateKey, password); + } catch (Exception e) { + LOGGER.error("Decrypt mysqlsec password exception.", e); } - passwordPropSentence = PROP_VARIABLE_NAME + ".setProperty(\"password\", \"" + mysqlSecret + "\");"; + 
passwordPropSentence = PROP_VARIABLE_NAME + ".setProperty(\"password\", \"" + mysqlPassword + "\");"; statisticsAndSaveResultTemplate = SqlTemplateConverter.VARIABLE_NAME_PLACEHOLDER + ".selectExpr(\"" + STATISTICS_VALUE_PLACEHOLDER + " as " + STATISTICS_VALUE_FIELD_NAME + "\", \"'" + STATISTICS_APPLICATION_ID_PLACEHOLDER + "' as " + STATISTICS_APPLICATION_ID_FIELD_NAME + "\", \"'" + + STATISTICS_TASK_ID_PLACEHOLDER + "' as " + STATISTICS_TASK_ID_FIELD_NAME + "\", \"'" + STATISTICS_RESULT_TYPE_PLACEHOLDER + "' as " + STATISTICS_RESULT_FILED_TYPE + "\", \"'" + STATISTICS_RULE_ID_PLACEHOLDER + "' as " + STATISTICS_RULE_ID_FIELD_NAME + "\", \"'" + STATISTICS_VERSION_PLACEHOLDER + "' as " + STATISTICS_VERSION + "\", \"'" + @@ -186,6 +199,7 @@ public void init() { * @param ruleMetricMaps * @param templateStatisticsInputMetas * @param applicationId + * @param taskId * @param ruleVariables * @param createTime * @param partOfVariableName @@ -195,17 +209,20 @@ public void init() { * @param enumListNewValue * @param numRangeNewValue * @param selectResult - * @param unionAllForSaveResult + * @param unionWay * @return * @throws RuleVariableNotSupportException * @throws RuleVariableNotFoundException */ @Override - public List persistenceTranslate(String workFlowVersion, Long ruleId, Map ruleMetricMaps, Set templateStatisticsInputMetas, - String applicationId, List ruleVariables, String createTime, String partOfVariableName, String runDate, String user, StringBuilder realColumn, boolean enumListNewValue, boolean numRangeNewValue, Map selectResult, boolean unionAllForSaveResult) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + public List persistenceTranslate(String workFlowVersion, Long ruleId, Map ruleMetricMaps, + Set templateStatisticsInputMetas, + String applicationId, Long taskId, List ruleVariables, String createTime, String partOfVariableName, String runDate,String runToday, + String user, StringBuilder realColumn, boolean enumListNewValue, 
boolean numRangeNewValue, Map selectResult, + int unionWay) throws RuleVariableNotSupportException, RuleVariableNotFoundException { List list = new ArrayList<>(); - list.addAll(getStatisticsAndSaveSentence(workFlowVersion,ruleId, ruleMetricMaps, templateStatisticsInputMetas, applicationId, ruleVariables, createTime, partOfVariableName - , runDate, user, realColumn, enumListNewValue, numRangeNewValue, selectResult, unionAllForSaveResult)); + list.addAll(getStatisticsAndSaveSentence(workFlowVersion, ruleId, ruleMetricMaps, templateStatisticsInputMetas, applicationId, taskId, ruleVariables, createTime, partOfVariableName + , runDate, runToday, user, realColumn, enumListNewValue, numRangeNewValue, selectResult, unionWay)); return list; } @@ -226,22 +243,25 @@ private String getDriver() { * @param ruleMetricMap * @param templateStatisticsInputMetas * @param applicationId + * @param taskId * @param ruleVariables * @param createTime * @param partOfVariableName * @param runDate + * @param runToday * @param realColumn * @param enumListNewValue * @param numRangeNewValue * @param selectResult - * @param unionAllForSaveResult + * @param unionWay * @return * @throws RuleVariableNotSupportException * @throws RuleVariableNotFoundException */ private List getStatisticsAndSaveSentence(String workFlowVersion, Long ruleId, Map ruleMetricMap - , Set templateStatisticsInputMetas, String applicationId, List ruleVariables - , String createTime, String partOfVariableName, String runDate, String user, StringBuilder realColumn, boolean enumListNewValue, boolean numRangeNewValue, Map selectResult, boolean unionAllForSaveResult) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + , Set templateStatisticsInputMetas, String applicationId, Long taskId, List ruleVariables + , String createTime, String partOfVariableName, String runDate, String runToday, String user, StringBuilder realColumn, boolean enumListNewValue, + boolean numRangeNewValue, Map selectResult, int 
unionWay) throws RuleVariableNotSupportException, RuleVariableNotFoundException { List list = new ArrayList<>(); Map newRuleMetricMap = new HashMap<>(2); @@ -252,22 +272,31 @@ private List getStatisticsAndSaveSentence(String workFlowVersion, Long r newRuleMetricMap.put(key.replace("-", "_"), value); } } + if (StringUtils.isNotBlank(runToday) && StringUtils.isBlank(runDate)) { + runDate = runToday; + } if (StringUtils.isBlank(runDate)) { - sentenceWithoutRunDate(workFlowVersion, templateStatisticsInputMetas, ruleVariables, list, applicationId, createTime, partOfVariableName, ruleId, newRuleMetricMap, user, enumListNewValue, numRangeNewValue, realColumn, selectResult, unionAllForSaveResult); + sentenceWithoutRunDate(workFlowVersion, templateStatisticsInputMetas, ruleVariables, list, applicationId, taskId, createTime, partOfVariableName, ruleId, newRuleMetricMap, user, enumListNewValue, numRangeNewValue, realColumn, selectResult, unionWay); } else { - sentenceWithRunDate(workFlowVersion, templateStatisticsInputMetas, ruleVariables, list, applicationId, createTime, partOfVariableName, ruleId, newRuleMetricMap, user, runDate, enumListNewValue, numRangeNewValue, realColumn, selectResult, unionAllForSaveResult); + sentenceWithRunDate(workFlowVersion, templateStatisticsInputMetas, ruleVariables, list, applicationId, taskId, createTime, partOfVariableName, ruleId, newRuleMetricMap, user, runDate, enumListNewValue, numRangeNewValue, realColumn, selectResult, unionWay); } return list; } private void sentenceWithRunDate(String workFlowVersion, Set templateStatisticsInputMetas, - List ruleVariables, List list, String applicationId, String createTime, String partOfVariableName, Long ruleId, Map ruleMetricMap, - String user, String runDate, boolean enumListNewValue, boolean numRangeNewValue, StringBuilder realColumn, Map selectResult, boolean unionAllForSaveResult) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + List ruleVariables, List list, String 
applicationId, Long taskId, String createTime, + String partOfVariableName, Long ruleId, Map ruleMetricMap, + String user, String runDate, boolean enumListNewValue, boolean numRangeNewValue, StringBuilder realColumn, Map selectResult, + int unionWay) throws RuleVariableNotSupportException, RuleVariableNotFoundException { Date runRealDate; try { - runRealDate = new SimpleDateFormat("yyyyMMdd").parse(runDate); + if (runDate.contains(SpecCharEnum.MINUS.getValue())) { + runRealDate = new SimpleDateFormat("yyyy-MM-dd").parse(runDate); + } else { + runRealDate = new SimpleDateFormat("yyyyMMdd").parse(runDate); + } } catch (ParseException e) { LOGGER.error(e.getMessage(), e); throw new RuleVariableNotSupportException("{&FAILED_TO_PARSE_RUN_DATE}"); @@ -280,12 +309,12 @@ private void sentenceWithRunDate(String workFlowVersion, Set varList = selectResult.keySet().stream().collect(Collectors.toList()); for (String variable : varList) { - // 聚合处理 - if (unionAllForSaveResult) { - constructStaticSqlWithRunDate(templateStatisticsInputMetas, ruleVariables, runRealDate, applicationId, createTime, ruleId, variable, selectResult.get(variable), workFlowVersion, ruleMetricMap, list); + // 如果聚合处理结果,说明只有一个校验结果,那么只需调用一次constructStaticSqlWithRunDate + if (UnionWayEnum.COLLECT_AFTER_CALCULATE.getCode().equals(unionWay)) { + constructStaticSqlWithRunDate(templateStatisticsInputMetas, ruleVariables, runRealDate, applicationId, taskId, createTime, ruleId, variable, selectResult.get(variable), workFlowVersion, ruleMetricMap, list); break; } - constructStaticSqlWithRunDate(templateStatisticsInputMetas, ruleVariables, runRealDate, applicationId, createTime, ruleId, variable, selectResult.get(variable), workFlowVersion, ruleMetricMap, list); + constructStaticSqlWithRunDate(templateStatisticsInputMetas, ruleVariables, runRealDate, applicationId, taskId, createTime, ruleId, variable, selectResult.get(variable), workFlowVersion, ruleMetricMap, list); } list.add("} catch {"); list.add("\tcase e: 
Exception => println(\"JDBC operations failed because of \", e.getMessage())"); @@ -297,7 +326,7 @@ private void sentenceWithRunDate(String workFlowVersion, Set println(\"JDBC operations failed because of \", e.getMessage())"); @@ -309,8 +338,10 @@ private void sentenceWithRunDate(String workFlowVersion, Set templateStatisticsInputMetas, List ruleVariables, Date runRealDate - , String applicationId, String createTime, Long ruleId, String variable, String envName, String workFlowVersion, Map ruleMetricMap, List list) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + private void constructStaticSqlWithRunDate(Set templateStatisticsInputMetas, List ruleVariables, + Date runRealDate + , String applicationId, Long taskId, String createTime, Long ruleId, String variable, String envName, String workFlowVersion, + Map ruleMetricMap, List list) throws RuleVariableNotSupportException, RuleVariableNotFoundException { for (TemplateStatisticsInputMeta s : templateStatisticsInputMetas) { String funcName = s.getFuncName(); String value = getValue(ruleVariables, s); @@ -318,6 +349,7 @@ private void constructStaticSqlWithRunDate(Set temp .replace(STATISTICS_VALUE_PLACEHOLDER, funcName + "(" + value + ")") .replace(STATISTICS_RESULT_TYPE_PLACEHOLDER, s.getResultType()) .replace(STATISTICS_APPLICATION_ID_PLACEHOLDER, applicationId) + .replace(STATISTICS_TASK_ID_PLACEHOLDER, taskId + "") .replace(STATISTICS_CREATE_TIME_PLACEHOLDER, createTime) .replace(STATISTICS_RULE_ID_PLACEHOLDER, ruleId + "") .replace(STATISTICS_ENV_NAME_PLACEHOLDER, envName) @@ -358,84 +390,88 @@ private String judgeRuleMetricMap(Long ruleId, Map ruleMetricMap, persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, ruleMetricMap.get(value) + ""); selectSql.append("val selectSql").append("_").append(variable) .append(" = \"(select * from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and (run_date = 
").append(runRealDate.getTime()).append(")") .append(" and rule_metric_id = ").append(ruleMetricMap.get(value)) - .append(" and save_result = 1") .append(" and env_name = '").append(envName).append("'") - .append(" and (run_date = ").append(runRealDate.getTime()) - .append(")) qualitis_tmp_table\""); + .append(" and save_result = 1") + .append(") qualitis_tmp_table\""); updateSql.append("val updateSql").append("_").append(variable) .append(" = \"update ").append(resultTableName).append(" set value = \"").append(" + ").append(realValueName).append(" + ").append("\" where rule_id = ").append(ruleId) + .append(" and (run_date = ").append(runRealDate.getTime()).append(")") .append(" and rule_metric_id = ").append(ruleMetricMap.get(value)) - .append(" and save_result = 1") .append(" and env_name = '").append(envName).append("'") - .append(" and (run_date = ").append(runRealDate.getTime()) - .append(")\""); + .append(" and save_result = 1") + .append("\""); } else { if (CollectionUtils.isNotEmpty(ruleMetricMap.values())) { persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, ruleMetricMap.values().iterator().next() + ""); selectSql.append("val selectSql").append("_").append(variable) .append(" = \"(select * from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and (run_date = ").append(runRealDate.getTime()).append(")") .append(" and rule_metric_id = ").append(ruleMetricMap.values().iterator().next()) - .append(" and save_result = 1") .append(" and env_name = '").append(envName).append("'") - .append(" and (run_date = ").append(runRealDate.getTime()) - .append(")) qualitis_tmp_table\""); + .append(" and save_result = 1") + .append(") qualitis_tmp_table\""); updateSql.append("val updateSql").append("_").append(variable) .append(" = \"update ").append(resultTableName).append(" set value = \"").append(" + ").append(realValueName).append(" + ").append("\" where rule_id = ").append(ruleId) + .append(" and (run_date 
= ").append(runRealDate.getTime()).append(")") .append(" and rule_metric_id = ").append(ruleMetricMap.values().iterator().next()) - .append(" and save_result = 1") .append(" and env_name = '").append(envName).append("'") - .append(" and (run_date = ").append(runRealDate.getTime()) - .append(")\""); + .append(" and save_result = 1") + .append("\""); } else { persistSentence = persistSentence.replace(STATISTICS_RULE_METRIC_ID_PLACEHOLDER, "-1"); selectSql.append("val selectSql").append("_").append(variable) .append(" = \"(select * from ").append(resultTableName).append(" where rule_id = ").append(ruleId) + .append(" and (run_date = ").append(runRealDate.getTime()).append(")") .append(" and rule_metric_id = ").append("-1") - .append(" and save_result = 1") .append(" and env_name = '").append(envName).append("'") - .append(" and (run_date = ").append(runRealDate.getTime()) - .append(")) qualitis_tmp_table\""); + .append(" and save_result = 1") + .append(") qualitis_tmp_table\""); updateSql.append("val updateSql").append("_").append(variable) .append(" = \"update ").append(resultTableName).append(" set value = \"").append(" + ").append(realValueName).append(" + ").append("\" where rule_id = ").append(ruleId) + .append(" and (run_date = ").append(runRealDate.getTime()).append(")") .append(" and rule_metric_id = ").append("-1") - .append(" and save_result = 1") .append(" and env_name = '").append(envName).append("'") - .append(" and (run_date = ").append(runRealDate.getTime()) - .append(")\""); + .append(" and save_result = 1") + .append("\""); } } return persistSentence; } private void sentenceWithoutRunDate(String workFlowVersion, Set templateStatisticsInputMetas - , List ruleVariables, List list, String applicationId, String createTime, String partOfVariableName, Long ruleId - , Map ruleMetricMap, String user, boolean enumListNewValue, boolean numRangeNewValue, StringBuilder realColumn, Map selectResult, boolean unionAllForSaveResult) throws 
RuleVariableNotSupportException, RuleVariableNotFoundException { + , List ruleVariables, List list, String applicationId, Long taskId, String createTime, + String partOfVariableName, Long ruleId + , Map ruleMetricMap, String user, boolean enumListNewValue, boolean numRangeNewValue, StringBuilder realColumn, + Map selectResult, int unionWay) throws RuleVariableNotSupportException, RuleVariableNotFoundException { if (selectResult != null && CollectionUtils.isNotEmpty(selectResult.keySet())) { List varList = selectResult.keySet().stream().collect(Collectors.toList()); for (String variable : varList) { - // 聚合处理 - if (unionAllForSaveResult) { - constructStaticSql(templateStatisticsInputMetas, ruleVariables, applicationId, createTime, partOfVariableName, ruleId, workFlowVersion, ruleMetricMap, list, variable, selectResult.get(variable)); + // 如何是聚合处理,说明只有一个产出的结果值,那么只需确保通知一次即可 + if (UnionWayEnum.COLLECT_AFTER_CALCULATE.getCode().equals(unionWay)) { + constructStaticSql(templateStatisticsInputMetas, ruleVariables, applicationId, taskId, createTime, partOfVariableName, ruleId, workFlowVersion, ruleMetricMap, list, variable, selectResult.get(variable)); // Handle new value handleNewValue(workFlowVersion, user, realColumn, createTime, partOfVariableName, ruleId, list, enumListNewValue, numRangeNewValue, variable); break; } - constructStaticSql(templateStatisticsInputMetas, ruleVariables, applicationId, createTime, partOfVariableName, ruleId, workFlowVersion, ruleMetricMap, list, variable, selectResult.get(variable)); + constructStaticSql(templateStatisticsInputMetas, ruleVariables, applicationId, taskId, createTime, partOfVariableName, ruleId, workFlowVersion, ruleMetricMap, list, variable, selectResult.get(variable)); // Handle new value handleNewValue(workFlowVersion, user, realColumn, createTime, partOfVariableName, ruleId, list, enumListNewValue, numRangeNewValue, variable); } return; } - constructStaticSql(templateStatisticsInputMetas, ruleVariables, applicationId, 
createTime, partOfVariableName, ruleId, workFlowVersion, ruleMetricMap, list, "", ""); + constructStaticSql(templateStatisticsInputMetas, ruleVariables, applicationId, taskId, createTime, partOfVariableName, ruleId, workFlowVersion, ruleMetricMap, list, "", ""); // Handle new value handleNewValue(workFlowVersion, user, realColumn, createTime, partOfVariableName, ruleId, list, enumListNewValue, numRangeNewValue, ""); } - private void constructStaticSql(Set templateStatisticsInputMetas, List ruleVariables, String applicationId - , String createTime, String partOfVariableName, Long ruleId, String workFlowVersion, Map ruleMetricMap, List list, String realVariable, String envName) throws RuleVariableNotSupportException, RuleVariableNotFoundException { + private void constructStaticSql(Set templateStatisticsInputMetas, List ruleVariables, + String applicationId + , Long taskId, String createTime, String partOfVariableName, Long ruleId, String workFlowVersion, Map ruleMetricMap, + List list, String realVariable, String envName) throws RuleVariableNotSupportException, RuleVariableNotFoundException { for (TemplateStatisticsInputMeta s : templateStatisticsInputMetas) { String funcName = s.getFuncName(); String value = getValue(ruleVariables, s); @@ -443,6 +479,7 @@ private void constructStaticSql(Set templateStatist .replace(STATISTICS_VALUE_PLACEHOLDER, funcName + "(" + value + ")") .replace(STATISTICS_RESULT_TYPE_PLACEHOLDER, s.getResultType()) .replace(STATISTICS_APPLICATION_ID_PLACEHOLDER, applicationId) + .replace(STATISTICS_TASK_ID_PLACEHOLDER, taskId + "") .replace(STATISTICS_CREATE_TIME_PLACEHOLDER, createTime) .replace(STATISTICS_RULE_ID_PLACEHOLDER, ruleId + "") .replace(STATISTICS_ENV_NAME_PLACEHOLDER, envName + "") diff --git a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/util/DateExprReplaceUtil.java b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/util/DateExprReplaceUtil.java index e19208d8..9f3720fc 100644 --- 
a/core/converter/src/main/java/com/webank/wedatasphere/qualitis/util/DateExprReplaceUtil.java +++ b/core/converter/src/main/java/com/webank/wedatasphere/qualitis/util/DateExprReplaceUtil.java @@ -31,6 +31,7 @@ /** * Replace variable like ${yyyyMMdd} - N + * * @author howeye */ public class DateExprReplaceUtil { @@ -45,12 +46,14 @@ private DateExprReplaceUtil() { private static final Pattern DIGITAL_PATTERN = Pattern.compile("[0-9]+"); private static final Pattern CUSTOM_PLACEHOLODER_PATTERN = Pattern.compile("\\$\\{[^ ]*}"); - private static final Map RUN_DATE_FORMAT = new HashMap(2); + private static final Map RUN_DATE_FORMAT = new HashMap(2); static { - RUN_DATE_FORMAT.put("run_date","yyyyMMdd"); + RUN_DATE_FORMAT.put("run_date", "yyyyMMdd"); RUN_DATE_FORMAT.put("run_date_std", "yyyy-MM-dd"); RUN_DATE_FORMAT.put("run_today_h_std", "yyyy-MM-dd HH"); + RUN_DATE_FORMAT.put("run_today", "yyyyMMdd"); + RUN_DATE_FORMAT.put("run_today_std", "yyyy-MM-dd"); } private static final Logger LOGGER = LoggerFactory.getLogger(DateExprReplaceUtil.class); @@ -58,6 +61,7 @@ private DateExprReplaceUtil() { /** * Expr statement replace function * ds=${yyyyMMdd} - 1 and + * * @param source * @return */ @@ -109,8 +113,9 @@ public static String replaceRunDate(Date date, String midTableAction) throws UnE Matcher matcher = CUSTOM_PLACEHOLODER_PATTERN.matcher(midTableAction); while (matcher.find()) { String replaceStr = matcher.group(); - boolean legalSystemParams = replaceStr.contains("run_date") || replaceStr.contains("run_date_std") || replaceStr.contains("run_today_h_std"); - if (! 
legalSystemParams) { + boolean legalSystemParams = replaceStr.contains("run_date") || replaceStr.contains("run_date_std") + || replaceStr.contains("run_today") || replaceStr.contains("run_today_std") || replaceStr.contains("run_today_h_std"); + if (!legalSystemParams) { throw new UnExpectedRequestException("Custom placeholoder must be system variables."); } String currentParam = replaceStr.substring(2, replaceStr.length() - 1); @@ -122,7 +127,7 @@ public static String replaceRunDate(Date date, String midTableAction) throws UnE calendar.setTime(date); calendar.add(Calendar.DATE, 0 - forwayDay - 1); dateStr = new SimpleDateFormat(RUN_DATE_FORMAT.get(keys[0])).format(calendar.getTime()); - } else if ("run_today_h_std".equals(currentParam)){ + } else if ("run_today_h_std".equals(currentParam) || "run_today".equals(currentParam) || "run_today_std".equals(currentParam)) { calendar.setTime(date); dateStr = new SimpleDateFormat(RUN_DATE_FORMAT.get(currentParam)).format(calendar.getTime()); } else { @@ -141,8 +146,9 @@ public static String replaceFilter(Date date, String filter) throws UnExpectedRe Matcher matcher = CUSTOM_PLACEHOLODER_PATTERN.matcher(filter); while (matcher.find()) { String replaceStr = matcher.group(); - boolean legalSystemParams = replaceStr.contains("run_date") || replaceStr.contains("run_date_std") || replaceStr.contains("run_today_h_std"); - if (! 
legalSystemParams) { + boolean legalSystemParams = replaceStr.contains("run_date") || replaceStr.contains("run_date_std") + || replaceStr.contains("run_today") || replaceStr.contains("run_today_std") || replaceStr.contains("run_today_h_std"); + if (!legalSystemParams) { throw new UnExpectedRequestException("Custom placeholoder must be system variables."); } String currentParam = replaceStr.substring(2, replaceStr.length() - 1); @@ -154,7 +160,7 @@ public static String replaceFilter(Date date, String filter) throws UnExpectedRe calendar.setTime(date); calendar.add(Calendar.DATE, 0 - forwayDay - 1); dateStr = new SimpleDateFormat(RUN_DATE_FORMAT.get(keys[0])).format(calendar.getTime()); - } else if ("run_today_h_std".equals(currentParam)){ + } else if ("run_today_h_std".equals(currentParam) || "run_today".equals(currentParam) || "run_today_std".equals(currentParam)) { calendar.setTime(date); dateStr = new SimpleDateFormat(RUN_DATE_FORMAT.get(currentParam)).format(calendar.getTime()); } else { @@ -168,4 +174,60 @@ public static String replaceFilter(Date date, String filter) throws UnExpectedRe return filter; } + + public static Long getDateTimeFilterSeconds(String filter) { + Date date = new Date(); + long midnightMillis = 0; + Matcher matcher = CUSTOM_PLACEHOLODER_PATTERN.matcher(filter); + while (matcher.find()) { + String replaceStr = matcher.group(); + boolean legalSystemParams = replaceStr.contains("run_date") || replaceStr.contains("run_date_std") || replaceStr.contains("run_today_h_std"); + if (!legalSystemParams) { + Calendar calendar = Calendar.getInstance(); + calendar.set(Calendar.HOUR_OF_DAY, 0); + calendar.set(Calendar.MINUTE, 0); + calendar.set(Calendar.SECOND, 0); + calendar.set(Calendar.MILLISECOND, 0); + return calendar.getTimeInMillis() / 1000; + } + String currentParam = replaceStr.substring(2, replaceStr.length() - 1); + Calendar calendar = Calendar.getInstance(); + if (currentParam.contains(SpecCharEnum.MINUS.getValue())) { + String[] keys = 
currentParam.split(SpecCharEnum.MINUS.getValue()); + int forwayDay = Integer.parseInt(keys[1]); + calendar.set(Calendar.DATE, calendar.get(Calendar.DAY_OF_MONTH) + (0 - forwayDay - 1)); + } else if ("run_today_h_std".equals(currentParam)) { + calendar.set(Calendar.DATE, calendar.get(Calendar.DAY_OF_MONTH)); + } else { + calendar.setTime(date); + calendar.set(Calendar.DATE, calendar.get(Calendar.DAY_OF_MONTH) - 1); + } + calendar.set(Calendar.HOUR_OF_DAY, 0); + calendar.set(Calendar.MINUTE, 0); + calendar.set(Calendar.SECOND, 0); + calendar.set(Calendar.MILLISECOND, 0); + midnightMillis = calendar.getTimeInMillis(); + + } + return midnightMillis / 1000; + } + + public static Long getDateTimeSeconds(String dates) { + long dataTime = 0; + SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd"); + Date date; + try { + date = dateFormat.parse(dates); + Calendar calendar = Calendar.getInstance(); + calendar.setTime(date); + calendar.set(Calendar.HOUR_OF_DAY, 0); + calendar.set(Calendar.MINUTE, 0); + calendar.set(Calendar.SECOND, 0); + // 当天零点时间戳 + dataTime = calendar.getTime().getTime() / 1000; + } catch (Exception e) { + LOGGER.error("Failed to parse date parameter."); + } + return dataTime; + } } diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityTask.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityTask.java index 11ec43a1..84c89f44 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityTask.java +++ b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/bean/DataQualityTask.java @@ -41,6 +41,7 @@ public class DataQualityTask { private String filterShare; private String columnShare; private List> connectShare; + private Integer index; public DataQualityTask() { } @@ -178,6 +179,14 @@ public void setConnectShare(List> connectShare) { this.connectShare = connectShare; } + public Integer getIndex() { + return index; + } + + public void setIndex(Integer 
index) { + this.index = index; + } + @Override public String toString() { return "DataQualityTask{" + @@ -193,6 +202,7 @@ public String toString() { ", tableShare='" + tableShare + '\'' + ", filterShare='" + filterShare + '\'' + ", columnShare='" + columnShare + '\'' + + ", index='" + index + '\'' + '}'; } } diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/AbstractTaskDivider.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/AbstractTaskDivider.java index 2cad4b38..c14f87c7 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/AbstractTaskDivider.java +++ b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/AbstractTaskDivider.java @@ -32,6 +32,9 @@ public abstract class AbstractTaskDivider { /** * Divided rules into multi-task. + * + * @param clusterName + * @param datasourceIndex * @param rules * @param applicationId * @param createTime @@ -48,7 +51,9 @@ public abstract class AbstractTaskDivider { * @throws UnExpectedRequestException * @throws MetaDataAcquireFailedException */ - public abstract List divide(List rules, String applicationId, String createTime, String partition, Date date, + public abstract List divide(String clusterName, Integer datasourceIndex, List rules, String applicationId, + String createTime, + String partition, Date date, Map> databaseMap, Map>> dataSourceMysqlConnect, String user, Integer threshold, String splitBy, String startupParam) throws ArgumentException, UnExpectedRequestException, MetaDataAcquireFailedException; diff --git a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/SameDataSourceTaskDivider.java b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/SameDataSourceTaskDivider.java index b9590a12..2f4bdc35 100644 --- a/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/SameDataSourceTaskDivider.java +++ 
b/core/divider/src/main/java/com/webank/wedatasphere/qualitis/divider/SameDataSourceTaskDivider.java @@ -18,7 +18,9 @@ import com.webank.wedatasphere.qualitis.bean.DataQualityTask; import com.webank.wedatasphere.qualitis.bean.RuleTaskDetail; +import com.webank.wedatasphere.qualitis.constants.QualitisConstants; import com.webank.wedatasphere.qualitis.exception.ArgumentException; +import com.webank.wedatasphere.qualitis.rule.constant.RuleTemplateTypeEnum; import com.webank.wedatasphere.qualitis.rule.dao.ExecutionParametersDao; import com.webank.wedatasphere.qualitis.rule.entity.Rule; import com.webank.wedatasphere.qualitis.rule.entity.RuleDataSource; @@ -49,12 +51,13 @@ public class SameDataSourceTaskDivider extends AbstractTaskDivider { @Override - public List divide(List rules, String applicationId, String createTime, String partition, Date date, Map> ruleReplaceInfo + public List divide(String clusterName, Integer datasourceIndex, List rules, String applicationId, + String createTime, String partition, + Date date, Map> ruleReplaceInfo , Map>> dataSourceMysqlConnect, String user, Integer threshold, String splitBy, String startupParam) throws ArgumentException { LOGGER.info("Start to classify rules by datasource"); Map> sameDataSourceRule = new HashMap<>(4); Map keyUsers = new HashMap<>(2); - StringBuilder columns = new StringBuilder(); for (Rule rule : rules) { StringBuilder realUser = new StringBuilder(); if (StringUtils.isEmpty(splitBy) && StringUtils.isNotEmpty(rule.getExecutionParametersName())) { @@ -63,7 +66,7 @@ public List divide(List rules, String applicationId, Stri splitBy = concurrentcyGranularity.split(":")[1]; } } - String key = getKey(rule, user, realUser, partition, splitBy, columns); + String key = getKey(rule, user, realUser, partition, splitBy); if (ruleReplaceInfo.get(rule.getId()).get("qualitis_startup_param") != null && StringUtils.isNotEmpty((String) ruleReplaceInfo.get(rule.getId()).get("qualitis_startup_param"))) { key = key + ":" + 
ruleReplaceInfo.get(rule.getId()).get("qualitis_startup_param"); @@ -81,14 +84,15 @@ public List divide(List rules, String applicationId, Stri LOGGER.info("Succeed to classify rules by datasource maybe contains static params. Result: {}", sameDataSourceRule.keySet().stream().collect(Collectors.joining(","))); List result = new ArrayList<>(); - handleSameDataSourceRule(applicationId, createTime, user, keyUsers, partition, ruleReplaceInfo, dataSourceMysqlConnect, threshold, sameDataSourceRule, result, startupParam, columns.toString()); + handleSameDataSourceRule(datasourceIndex, applicationId, createTime, keyUsers, partition, ruleReplaceInfo, dataSourceMysqlConnect, threshold, sameDataSourceRule, result, startupParam); LOGGER.info("Succeed to divide all rules into tasks. Result: {}", result); return result; } - private void handleSameDataSourceRule(String applicationId, String createTime, String user + private void handleSameDataSourceRule(Integer datasourceIndex, String applicationId, String createTime , Map keyUsers, String partition, Map> ruleReplaceInfo - , Map>> dataSourceMysqlConnect, Integer threshold, Map> sameDataSourceRule, List result, String startupParam, String columns) throws ArgumentException { + , Map>> dataSourceMysqlConnect, Integer threshold, Map> sameDataSourceRule, + List result, String startupParam) throws ArgumentException { for (String key : sameDataSourceRule.keySet()) { List ruleList = sameDataSourceRule.get(key); @@ -115,11 +119,7 @@ private void handleSameDataSourceRule(String applicationId, String createTime, S String tableName = generateTable(rule); String database = (String) ruleReplaceInfo.get(rule.getId()).get("qualitis_abnormal_database"); - if (database.equals(user.concat("_ind")) && StringUtils.isNotBlank(proxyUser) && database.contains("_ind")) { - database = proxyUser.concat("_ind"); - } - - String midTableName = database + "." + tableName; + String midTableName = StringUtils.isNotEmpty(database) ? (database + "." 
+ tableName) : ""; LOGGER.info("Rule detail list size is: {}", ruleTaskDetails.size()); if (ruleTaskDetails.size() < threshold) { @@ -128,8 +128,10 @@ private void handleSameDataSourceRule(String applicationId, String createTime, S List ruleTaskDetailCopy = new ArrayList<>(); ruleTaskDetailCopy.addAll(ruleTaskDetails); DataQualityTask tmp = new DataQualityTask(applicationId, createTime, partition, ruleTaskDetailCopy); - checkAndSaveStartupParamAndShareData(tmp, dynamicParam, startupParam, proxyUser, key, partition, currentRuleDataSource, dataSourceMysqlConnect, columns); - + checkAndSaveStartupParamAndShareData(tmp, dynamicParam, startupParam, proxyUser, key, partition, currentRuleDataSource, dataSourceMysqlConnect, ""); + if (datasourceIndex != null) { + tmp.setIndex(datasourceIndex); + } result.add(tmp); ruleTaskDetails = new ArrayList<>(); LOGGER.info("Create new rule detail list"); @@ -138,8 +140,10 @@ private void handleSameDataSourceRule(String applicationId, String createTime, S } if (ruleTaskDetails.size() > 0) { DataQualityTask tmp = new DataQualityTask(applicationId, createTime, partition, ruleTaskDetails); - checkAndSaveStartupParamAndShareData(tmp, dynamicParam, startupParam, proxyUser, key, partition, currentRuleDataSource, dataSourceMysqlConnect, columns); - + checkAndSaveStartupParamAndShareData(tmp, dynamicParam, startupParam, proxyUser, key, partition, currentRuleDataSource, dataSourceMysqlConnect, ""); + if (datasourceIndex != null) { + tmp.setIndex(datasourceIndex); + } result.add(tmp); LOGGER.info("Succeed to divide rules: {} into a task {}", ruleIdList, tmp); } @@ -185,8 +189,8 @@ private String generateTable(Rule rule) { return name.toString(); } - private String getKey(Rule rule, String user, StringBuilder realUser, String partition, String splitBy, StringBuilder columns) { - List ruleDataSourceList = rule.getRuleDataSources().stream().filter(dataSource -> StringUtils.isNotBlank(dataSource.getDbName()) && 
StringUtils.isNotBlank(dataSource.getTableName())).collect(Collectors.toList()); + private String getKey(Rule rule, String user, StringBuilder realUser, String partition, String splitBy) { + List ruleDataSourceList = rule.getRuleDataSources().stream().filter(dataSource -> (StringUtils.isNotBlank(dataSource.getDbName()) && StringUtils.isNotBlank(dataSource.getTableName())) || StringUtils.isNotEmpty(dataSource.getCollectSql())).collect(Collectors.toList()); if (CollectionUtils.isNotEmpty(ruleDataSourceList)) { RuleDataSource ruleDataSource = ruleDataSourceList.iterator().next(); @@ -203,9 +207,6 @@ private String getKey(Rule rule, String user, StringBuilder realUser, String par if (StringUtils.isEmpty(partition)) { partition = ruleDataSource.getFilter(); } - if (StringUtils.isNotEmpty(ruleDataSource.getColName())) { - columns.append(ruleDataSource.getColName()).append("|"); - } String envNames = "."; List ruleDataSourceEnvs = ruleDataSource.getRuleDataSourceEnvs(); if (CollectionUtils.isNotEmpty(ruleDataSourceEnvs)) { diff --git a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/RequestLinkis.java b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/RequestLinkis.java index b90eaa5d..cf101b5c 100644 --- a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/RequestLinkis.java +++ b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/RequestLinkis.java @@ -39,6 +39,7 @@ public class RequestLinkis { private static final Logger LOGGER = LoggerFactory.getLogger(RequestLinkis.class); private static final String STATUS = "status"; + private static final String MESSAGE = "message"; @Autowired private LinkisConfig linkisConfig; @@ -111,8 +112,8 @@ private String extractMessage(RetryContext context) { if (StringUtils.isNotEmpty(messageJson)) { try { Map msgMap = objectMapper.readValue(messageJson, Map.class); - if (msgMap.containsKey("message")) { - return msgMap.get("message"); + if 
(msgMap.containsKey(MESSAGE)) { + return msgMap.get(MESSAGE); } } catch (IOException e) { LOGGER.error(e.getMessage(), e); @@ -259,8 +260,8 @@ private Map finishLog(AskLinkisParameter askLinkisParameter, Map LOGGER.info("traceId: {} Finished to {}, url: {}, authUser: {}, response: {}", traceId, askLinkisParameter.getLogmessage(), askLinkisParameter.getUrl(), askLinkisParameter.getAuthUser(), response); if (!checkResponse(response)) { String content = null; - if (response.containsKey("message")) { - content = response.get("message").toString(); + if (response.containsKey(MESSAGE)) { + content = response.get(MESSAGE).toString(); } String errorMsg = String.format("Error! Can not get meta data from linkis, traceId: %s, authUser: %s, exception: %s", traceId, askLinkisParameter.getAuthUser(), content); LOGGER.error(errorMsg); diff --git a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/config/OperateCiConfig.java b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/config/OperateCiConfig.java deleted file mode 100644 index 1a7ef718..00000000 --- a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/config/OperateCiConfig.java +++ /dev/null @@ -1,125 +0,0 @@ -package com.webank.wedatasphere.qualitis.client.config; - -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Configuration; - -/** - * @author allenzhou@webank.com - * @date 2021/3/1 17:56 - */ -@Configuration -public class OperateCiConfig { - @Value("${cmdb.host}") - private String host; - - @Value("${cmdb.url}") - private String url; - - @Value("${cmdb.integrateUrl}") - private String integrateUrl; - - @Value("${cmdb.userAuthKey}") - private String userAuthKey; - - @Value("${cmdb.newUserAuthKey}") - private String newUserAuthKey; - - @Value("${cmdb.onlySlave}") - private Boolean onlySlave; - - @Value("${ef.host}") - private String efHost; - - @Value("${ef.url}") - private String efUrl; - - 
@Value("${ef.app_id}") - private String efAppId; - - @Value("${ef.app_token}") - private String efAppToken; - - public OperateCiConfig() { - // Do nothing. - } - - public String getUserAuthKey() { - return userAuthKey; - } - - public void setUserAuthKey(String userAuthKey) { - this.userAuthKey = userAuthKey; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public String getUrl() { - return url; - } - - public void setUrl(String url) { - this.url = url; - } - - public String getIntegrateUrl() { - return integrateUrl; - } - - public void setIntegrateUrl(String integrateUrl) { - this.integrateUrl = integrateUrl; - } - - public String getNewUserAuthKey() { - return newUserAuthKey; - } - - public void setNewUserAuthKey(String newUserAuthKey) { - this.newUserAuthKey = newUserAuthKey; - } - - public Boolean getOnlySlave() { - return onlySlave; - } - - public void setOnlySlave(Boolean onlySlave) { - this.onlySlave = onlySlave; - } - - public String getEfHost() { - return efHost; - } - - public void setEfHost(String efHost) { - this.efHost = efHost; - } - - public String getEfUrl() { - return efUrl; - } - - public void setEfUrl(String efUrl) { - this.efUrl = efUrl; - } - - public String getEfAppId() { - return efAppId; - } - - public void setEfAppId(String efAppId) { - this.efAppId = efAppId; - } - - public String getEfAppToken() { - return efAppToken; - } - - public void setEfAppToken(String efAppToken) { - this.efAppToken = efAppToken; - } -} diff --git a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/DataStandardClientImpl.java b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/DataStandardClientImpl.java index 8717182e..e5df912c 100644 --- a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/DataStandardClientImpl.java +++ b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/DataStandardClientImpl.java 
@@ -25,6 +25,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Map; @@ -367,7 +368,7 @@ private String hashWithDataMap(String str) throws UnExpectedRequestException { LOGGER.error(e.getMessage(), e); throw new UnExpectedRequestException("A error occured when pick up a algorithm of hash to construct datamap http request.", 500); } - md.update(str.getBytes()); + md.update(str.getBytes(StandardCharsets.UTF_8)); byte[] digest = md.digest(); String hashStr = DatatypeConverter.printHexBinary(digest).toLowerCase(); return hashStr; diff --git a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/LinkisMetaDataManagerImpl.java b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/LinkisMetaDataManagerImpl.java index 4aeaa711..11083a16 100644 --- a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/LinkisMetaDataManagerImpl.java +++ b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/LinkisMetaDataManagerImpl.java @@ -1,7 +1,7 @@ package com.webank.wedatasphere.qualitis.client.impl; import com.google.common.collect.Maps; -import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.config.LinkisConfig; import com.webank.wedatasphere.qualitis.constants.QualitisConstants; import com.webank.wedatasphere.qualitis.constants.ResponseStatusConstants; import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; @@ -12,6 +12,7 @@ import com.webank.wedatasphere.qualitis.metadata.request.LinkisDataSourceEnvRequest; import com.webank.wedatasphere.qualitis.metadata.request.LinkisDataSourceRequest; import com.webank.wedatasphere.qualitis.metadata.request.ModifyDataSourceParameterRequest; +import 
com.webank.wedatasphere.qualitis.metadata.response.datasource.LinkisDataSourceInfoDetail; import com.webank.wedatasphere.qualitis.metadata.response.datasource.LinkisDataSourceParamsResponse; import com.webank.wedatasphere.qualitis.response.GeneralResponse; import com.webank.wedatasphere.qualitis.util.CryptoUtils; @@ -45,6 +46,8 @@ public class LinkisMetaDataManagerImpl implements LinkisMetaDataManager { @Autowired private MetaDataClient metaDataClient; + @Autowired + private LinkisConfig linkisConfig; private final ObjectMapper objectMapper = new ObjectMapper(); @@ -53,6 +56,7 @@ public class LinkisMetaDataManagerImpl implements LinkisMetaDataManager { @Override public Long createDataSource(LinkisDataSourceRequest linkisDataSourceRequest, String cluster, String authUser) throws UnExpectedRequestException, MetaDataAcquireFailedException { String dataSourceJson = createDatasourceJson(linkisDataSourceRequest); + LOGGER.info("To create datasource to Linkis, request body: {}", dataSourceJson); GeneralResponse> generalResponse; try { generalResponse = metaDataClient.createDataSource(cluster, authUser, dataSourceJson); @@ -69,6 +73,7 @@ public Long createDataSource(LinkisDataSourceRequest linkisDataSourceRequest, St @Override public Long modifyDataSource(LinkisDataSourceRequest linkisDataSourceRequest, String cluster, String authUser) throws UnExpectedRequestException, MetaDataAcquireFailedException { String dataSourceJson = createDatasourceJson(linkisDataSourceRequest); + LOGGER.info("To modify datasource to Linkis, request body: {}", dataSourceJson); GeneralResponse> generalResponse; try { generalResponse = metaDataClient.modifyDataSource(cluster, authUser, linkisDataSourceRequest.getLinkisDataSourceId(), dataSourceJson); @@ -83,8 +88,8 @@ public Long modifyDataSource(LinkisDataSourceRequest linkisDataSourceRequest, St } @Override - public List createDataSourceEnv(Integer inputType, Integer verifyType, List linkisDataSourceEnvRequestList, String clusterName, String 
authUser) throws UnExpectedRequestException, MetaDataAcquireFailedException { - String envJson = createDatasourceEnvJson(inputType, verifyType, linkisDataSourceEnvRequestList); + public List createDataSourceEnvAndSetEnvId(Integer inputType, Integer verifyType, List linkisDataSourceEnvRequestList, String clusterName, String authUser) throws UnExpectedRequestException, MetaDataAcquireFailedException { + String envJson = createDatasourceEnvJson(verifyType, linkisDataSourceEnvRequestList); GeneralResponse> datasourceEnvResponse; try { LOGGER.info("createDataSourceEnv, request body: {}", envJson); @@ -114,7 +119,7 @@ public List createDataSourceEnv(Integer inputType, I @Override public List modifyDataSourceEnv(Integer inputType, Integer verifyType, List linkisDataSourceEnvRequestList, String clusterName, String authUser) throws UnExpectedRequestException, MetaDataAcquireFailedException { - String envJson = createDatasourceEnvJson(inputType, verifyType, linkisDataSourceEnvRequestList); + String envJson = createDatasourceEnvJson(verifyType, linkisDataSourceEnvRequestList); GeneralResponse> datasourceEnvResponse; try { LOGGER.info("modifyDataSourceEnv, request body: {}", envJson); @@ -177,12 +182,11 @@ public void deleteDataSource(Long linkisDataSourceId, String clusterName, String } - private String createDatasourceEnvJson(Integer inputType, Integer verifyType, List dataSourceEnvList) throws UnExpectedRequestException { + private String createDatasourceEnvJson(Integer verifyType, List dataSourceEnvList) throws UnExpectedRequestException { if (CollectionUtils.isEmpty(dataSourceEnvList)) { return StringUtils.EMPTY; } boolean isShared = isShared(verifyType); - boolean isAutoInput = isAutoInput(inputType); for (LinkisDataSourceEnvRequest dataSourceEnv : dataSourceEnvList) { if (Objects.isNull(dataSourceEnv.getConnectParamsRequest())) { LOGGER.warn("Lack of connect parameter, envName: {}", dataSourceEnv.getEnvName()); @@ -190,19 +194,18 @@ private String 
createDatasourceEnvJson(Integer inputType, Integer verifyType, Li } LinkisConnectParamsRequest connectParamsRequest = dataSourceEnv.getConnectParamsRequest(); Map connectParamMap = new HashMap<>(); - if (isAutoInput) { - connectParamMap.put("database", dataSourceEnv.getDatabase()); - } if (!isShared) { String authType = connectParamsRequest.getAuthType(); connectParamMap.put("authType", authType); if (QualitisConstants.AUTH_TYPE_ACCOUNT_PWD.equals(authType)) { connectParamMap.put("username", connectParamsRequest.getUsername()); - connectParamMap.put("password", CryptoUtils.encode(connectParamsRequest.getPassword())); + if (StringUtils.isNotEmpty(connectParamsRequest.getPassword())) { + connectParamMap.put("password", CryptoUtils.encode(connectParamsRequest.getPassword())); + } } else if (QualitisConstants.AUTH_TYPE_DPM.equals(authType)) { connectParamMap.put("appid", connectParamsRequest.getAppId()); connectParamMap.put("objectid", connectParamsRequest.getObjectId()); - connectParamMap.put("mkPrivate", connectParamsRequest.getMkPrivate()); + connectParamMap.put("dk", connectParamsRequest.getDk()); } } connectParamMap.put("host", connectParamsRequest.getHost()); @@ -251,6 +254,44 @@ private String createDatasourceJson(LinkisDataSourceRequest linkisDataSourceRequ } } + @Override + public Map getDataSourceTypeNameAndIdMap() { + Map typeNameAndIdMap = new HashMap<>(); + try { + GeneralResponse> generalResponse = metaDataClient.getAllDataSourceTypes(linkisConfig.getDatasourceCluster(), linkisConfig.getDatasourceAdmin()); + if (ResponseStatusConstants.OK.equals(generalResponse.getCode()) + && generalResponse.getData() != null + && generalResponse.getData().containsKey("typeList")) { + List> typeListMap = (List>) generalResponse.getData().get("typeList"); + typeListMap.forEach(typeMap -> { + typeNameAndIdMap.put(MapUtils.getString(typeMap, "name"), Long.valueOf(MapUtils.getString(typeMap, "id"))); + }); + } + } catch (UnExpectedRequestException e) { + LOGGER.error("Failed 
to query all dataSource types. ", e); + } catch (MetaDataAcquireFailedException e) { + LOGGER.error("Failed to query all dataSource types. ", e); + } + return typeNameAndIdMap; + } + + @Override + public GeneralResponse connect(Long linkisDataSourceId, Long versionId) throws Exception { + LinkisDataSourceInfoDetail linkisDataSourceInfoDetail = metaDataClient.getDataSourceInfoById(linkisConfig.getDatasourceCluster(), linkisConfig.getDatasourceAdmin(), linkisDataSourceId, versionId); + String dataSourceJson = objectMapper.writeValueAsString(linkisDataSourceInfoDetail); + + try { + GeneralResponse> resultMap = metaDataClient.connectDataSource(linkisConfig.getDatasourceCluster(), linkisConfig.getDatasourceAdmin(), dataSourceJson); + if (!resultMap.getData().containsKey("ok")) { + return resultMap; + } + } catch (MetaDataAcquireFailedException e) { + String errorMsg = "环境连接失败"; + throw new MetaDataAcquireFailedException(errorMsg, 500); + } + return new GeneralResponse(ResponseStatusConstants.OK, "Connected!", null); + } + private void validateConnectParams(Map connectParams) throws UnExpectedRequestException { validateKey(connectParams, "subSystem"); validateKey(connectParams, "share"); diff --git a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/MetaDataClientImpl.java b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/MetaDataClientImpl.java index 860091f9..f2ad769a 100644 --- a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/MetaDataClientImpl.java +++ b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/MetaDataClientImpl.java @@ -24,6 +24,7 @@ import com.webank.wedatasphere.qualitis.config.LinkisConfig; import com.webank.wedatasphere.qualitis.constant.LinkisResponseKeyEnum; import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.constants.ResponseStatusConstants; import 
com.webank.wedatasphere.qualitis.dao.ClusterInfoDao; import com.webank.wedatasphere.qualitis.entity.ClusterInfo; import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; @@ -51,6 +52,8 @@ import java.net.URLDecoder; import java.nio.charset.Charset; import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -507,7 +510,7 @@ public GeneralResponse> getAllDataSourceTypes(String cluster Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get all datasource types", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get all datasource types", data); } @Override @@ -521,7 +524,7 @@ public GeneralResponse> getDataSourceEnv(String clusterName, Map response = gainResponseLinkisByGet(clusterInfo, authUser, url, "get data source env by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource env", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource env", data); } @Override @@ -535,7 +538,7 @@ public GeneralResponse> createDataSourceEnvBatch(String clus Map response = gainResponseLinkisByPostBringJsonArray(clusterInfo, authUser, url, "batch create data source env param by user and cluster by linkis." 
, new JSONArray(datasourceEnvs)); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to create datasource env connect params", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to create datasource env connect params", data); } @Override @@ -550,7 +553,7 @@ public GeneralResponse> modifyDataSourceEnvBatch(String clus Map response = gainResponseLinkisByPutBringJsonArray(clusterInfo, authUser, url, "modify data source env by user and cluster by linkis." ,new JSONArray(datasourceEnvs)); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to modify datasource", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to modify datasource", data); } @Override @@ -564,7 +567,7 @@ public GeneralResponse> getDatasourceEnvById(String clusterN Map response = gainResponseLinkisByGet(clusterInfo, authUser, url, "get data source env by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource version", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource version", data); } @Override @@ -588,13 +591,13 @@ public GeneralResponse> getDataSourceInfoPage(String cluster Map response = gainResponseLinkisByGet(clusterInfo, authUser, url, "get data source info by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource info", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource info", data); } @Override public GeneralResponse> getDataSourceInfoByIds(String clusterName, String userName, List dataSourceIds) throws UnExpectedRequestException, MetaDataAcquireFailedException, IOException { if (CollectionUtils.isEmpty(dataSourceIds)) { - 
return new GeneralResponse<>("200", "Success to get datasource info by ids", null); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource info by ids", null); } // Check existence of cluster name ClusterInfo clusterInfo = checkClusterNameExists(clusterName); @@ -606,7 +609,7 @@ public GeneralResponse> getDataSourceInfoByIds(String cluste Map response = gainResponseLinkisByGet(clusterInfo, userName, url, "get data source info by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource info by ids", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource info by ids", data); } @Override @@ -619,7 +622,7 @@ public GeneralResponse> getDataSourceVersions(String cluster Map response = gainResponseLinkisByGet(clusterInfo, authUser, url, "get data source versions by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource version", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource version", data); } @Override @@ -635,7 +638,7 @@ public GeneralResponse> getDataSourceInfoDetail(String clust Map response = gainResponseLinkisByGetRetry(clusterInfo, authUser, url, "get data source info detail by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource detail info", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource detail info", data); } @Override @@ -649,7 +652,7 @@ public GeneralResponse> getDataSourceInfoDetailByName(String Map response = gainResponseLinkisByGet(clusterInfo, authUser, url, "get data source info detail by user and cluster and name by linkis."); Map data = (Map) 
response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource info detail by datasource name", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource info detail by datasource name", data); } @Override @@ -662,7 +665,7 @@ public GeneralResponse> getDataSourceKeyDefine(String cluste Map response = gainResponseLinkisByGet(clusterInfo, authUser, url, "get data source key define by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource key define", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource key define", data); } @Override @@ -675,7 +678,7 @@ public GeneralResponse> connectDataSource(String clusterName Map response = gainResponseLinkisByPostBringJson(clusterInfo, authUser, url, "connect data source by user and cluster by linkis.",new JSONObject(jsonRequest)); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "{&CONNECT_SUCCESS}", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "{&CONNECT_SUCCESS}", data); } @@ -693,7 +696,7 @@ public GeneralResponse> getDataSourceConnectParams(String cl Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to get datasource connect params", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to get datasource connect params", data); } @Override @@ -706,7 +709,7 @@ public GeneralResponse> publishDataSource(String clusterName Map response = gainResponseLinkisByPost(clusterInfo, authUser, url, "publish data source by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to publish datasource", data); + return new 
GeneralResponse<>(ResponseStatusConstants.OK, "Success to publish datasource", data); } @Override @@ -719,7 +722,7 @@ public GeneralResponse> expireDataSource(String clusterName, Map response = gainResponseLinkisByPut(clusterInfo, authUser, url, "expire data source by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to expire datasource", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to expire datasource", data); } @Override @@ -732,7 +735,7 @@ public GeneralResponse> modifyDataSource(String clusterName, Map response = gainResponseLinkisByPutBringJson(clusterInfo, authUser, url, "modify data source by user and cluster by linkis.",new JSONObject(jsonRequest)); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to modify datasource", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to modify datasource", data); } @Override @@ -746,7 +749,7 @@ public GeneralResponse> modifyDataSourceParam(String cluster Map response = gainResponseLinkisByPostBringJson(clusterInfo, authUser, url, "modify data source param by user and cluster by linkis.",new JSONObject(jsonRequest)); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to modify datasource connect params", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to modify datasource connect params", data); } @Override @@ -759,7 +762,7 @@ public GeneralResponse> createDataSource(String clusterName, Map response = gainResponseLinkisByPostBringJson(clusterInfo, authUser, url, "create data source by user and cluster by linkis.", new JSONObject(jsonRequest)); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to create datasource", data); + return new 
GeneralResponse<>(ResponseStatusConstants.OK, "Success to create datasource", data); } @Override @@ -771,7 +774,19 @@ public GeneralResponse> deleteDataSource(String clusterName, Map response = gainResponseLinkisByDelete(clusterInfo, userName, url, "delete data source by user and cluster by linkis."); Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to delete datasource", data); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to delete datasource", data); + } + + @Override + public GeneralResponse> deleteEnv(String clusterName, String userName, Long envId) throws UnExpectedRequestException, MetaDataAcquireFailedException { + // Check existence of cluster name + ClusterInfo clusterInfo = checkClusterNameExists(clusterName); + // send request to get dbs + String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getEnvDelete()).toString().replace("{ENV_ID}", envId.toString()); + Map response = gainResponseLinkisByDelete(clusterInfo, userName, url, "delete env by user and cluster by linkis."); + + Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); + return new GeneralResponse<>(ResponseStatusConstants.OK, "Success to delete env", data); } @Override @@ -920,6 +935,18 @@ public LinkisDataSourceInfoDetail getDataSourceInfoById(String clusterName, Stri return objectMapper.readValue(infoJson, LinkisDataSourceInfoDetail.class); } + @Override + public LinkisDataSourceInfoDetail getDataSourceInfoById(String clusterName, String userName, Long dataSourceId, Long versionId) throws Exception { + GeneralResponse> generalResponse = getDataSourceInfoDetail(clusterName, userName, dataSourceId, versionId); + if (MapUtils.isEmpty(generalResponse.getData()) || !generalResponse.getData().containsKey(INFO)) { + throw new MetaDataAcquireFailedException("Failed to acquire data source by id"); + } + Map infoMap = (Map) generalResponse.getData().get("info"); + ObjectMapper 
objectMapper = new ObjectMapper(); + String infoJson = objectMapper.writeValueAsString(infoMap); + return objectMapper.readValue(infoJson, LinkisDataSourceInfoDetail.class); + } + @Override public Long addUdf(String currentCluster, String userName, Map requestBody) throws UnExpectedRequestException, IOException, JSONException, MetaDataAcquireFailedException { ClusterInfo clusterInfo = checkClusterNameExists(currentCluster); @@ -1242,18 +1269,6 @@ public void deployUdfNewVersion(String clusterName, String userName, Long udfId, return; } - @Override - public GeneralResponse> deleteEnv(String clusterName, String userName, Long envId) throws UnExpectedRequestException, MetaDataAcquireFailedException { - // Check existence of cluster name - ClusterInfo clusterInfo = checkClusterNameExists(clusterName); - // send request to get dbs - String url = getPath(clusterInfo.getLinkisAddress()).path(linkisConfig.getEnvDelete()).toString().replace("{ENV_ID}", envId.toString()); - Map response = gainResponseLinkisByDelete(clusterInfo, userName, url, "delete env by user and cluster by linkis."); - - Map data = (Map) response.get(LinkisResponseKeyEnum.DATA.getKey()); - return new GeneralResponse<>("200", "Success to delete env", data); - } - private ClusterInfo checkClusterNameExists(String clusterName) throws UnExpectedRequestException { ClusterInfo currentClusterInfo = clusterInfoCache.getIfPresent(clusterName); diff --git a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/OperateCiServiceImpl.java b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/OperateCiServiceImpl.java index 81624dd5..21a149cf 100644 --- a/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/OperateCiServiceImpl.java +++ b/core/meta_data/src/main/java/com/webank/wedatasphere/qualitis/client/impl/OperateCiServiceImpl.java @@ -2,14 +2,17 @@ import com.google.common.collect.Lists; import 
com.webank.wedatasphere.qualitis.client.config.MetricPropertiesConfig; -import com.webank.wedatasphere.qualitis.client.config.OperateCiConfig; import com.webank.wedatasphere.qualitis.client.constant.OperateEnum; import com.webank.wedatasphere.qualitis.client.request.OperateRequest; +import com.webank.wedatasphere.qualitis.config.OperateCiConfig; import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.constants.QualitisConstants; +import com.webank.wedatasphere.qualitis.constants.ResponseStatusConstants; import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; import com.webank.wedatasphere.qualitis.metadata.client.OperateCiService; import com.webank.wedatasphere.qualitis.metadata.response.*; import com.webank.wedatasphere.qualitis.response.GeneralResponse; +import com.webank.wedatasphere.qualitis.util.map.CustomObjectMapper; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; import org.apache.commons.lang3.StringUtils; @@ -17,7 +20,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; @@ -29,6 +31,7 @@ import javax.ws.rs.core.UriBuilder; import java.io.IOException; import java.util.*; +import java.util.stream.Collectors; /** * @author allenzhou@webank.com @@ -45,21 +48,10 @@ public class OperateCiServiceImpl implements OperateCiService { @Autowired private RestTemplate restTemplate; - @Value("${department.data_source_from: custom}") - private String departmentSourceType; - - @Value("${deploy.environment: open_source}") - private String deployEnvType; - private static final Logger LOGGER = LoggerFactory.getLogger(OperateCiServiceImpl.class); @Override public List getAllSubSystemInfo() 
throws UnExpectedRequestException { -// 仅限开源环境 - if ("open_source".equals(deployEnvType)) { - return Collections.emptyList(); - } - Map response = requestCmdb(OperateEnum.SUB_SYSTEM, "A problem occurred when converting the request body to json.", "{&FAILED_TO_GET_SUB_SYSTEM_INFO}", "Start to get sub_system info from cmdb. url: {}, method: {}, body: {}", "Succeed to get sub_system info from cmdb. response."); List content = checkResponse(response); @@ -69,67 +61,110 @@ public List getAllSubSystemInfo() throws UnExpectedRequestExc for (int i = 0; i < content.size(); i++) { SubSystemResponse tempResponse = new SubSystemResponse(); Object current = content.get(i); + if (!(current instanceof Map)) { + LOGGER.warn("Error data format. original content: {}", CustomObjectMapper.transObjectToJson(current)); + break; + } + Map currentMap = (Map) current; - Integer currentSubsystemId = ((Map) current).get("subsystem_id"); + String currentSubsystemId = null; + try { + currentSubsystemId = currentMap.get("subsystem_id").toString(); + } catch (Exception e) { + LOGGER.warn("Current subsystem ID cannot be number. 
error: {}", e.getMessage()); + } tempResponse.setSubSystemId(currentSubsystemId); - String currentSubSystemName = ((Map) current).get("subsystem_name"); + String currentSubSystemName = MapUtils.getString(currentMap, "subsystem_name"); tempResponse.setSubSystemName(currentSubSystemName); - String currentFullCnmName = ((Map) current).get("full_cn_name"); + String currentFullCnmName = MapUtils.getString(currentMap, "full_cn_name"); tempResponse.setSubSystemFullCnName(currentFullCnmName); - List> opsList = (List) ((Map) current).get("pro_oper_group"); - List> deptList = (List) ((Map) current).get("busiResDept"); - List> devList = (List) ((Map) current).get("devdept"); + List> opsList = (List) currentMap.get("pro_oper_group"); + List> deptList = (List) currentMap.get("busiResDept"); + List> devList = (List) currentMap.get("devdept"); String dept = ""; String opsDept = ""; String devDept = ""; - if (CollectionUtils.isNotEmpty(deptList)) { - dept = (String) (deptList.iterator().next()).get("v"); - tempResponse.setDepartmentName(dept); - } + try { + if (CollectionUtils.isNotEmpty(deptList)) { + dept = (String) (deptList.iterator().next()).getOrDefault("v", ""); + tempResponse.setDepartmentName(dept); + } - if (CollectionUtils.isNotEmpty(opsList)) { - opsDept = (String) (opsList.iterator().next()).get("v"); - if (StringUtils.isEmpty(dept)) { - String[] infos = opsDept.split(SpecCharEnum.MINUS.getValue()); - if (infos.length == 2) { - dept = infos[0]; - tempResponse.setDepartmentName(dept); - tempResponse.setOpsDepartmentName(infos[1]); + if (CollectionUtils.isNotEmpty(opsList)) { + opsDept = (String) (opsList.iterator().next()).getOrDefault("v", ""); + if (StringUtils.isEmpty(dept)) { + String[] infos = opsDept.split(SpecCharEnum.MINUS.getValue()); + if (infos.length == 2) { + dept = infos[0]; + tempResponse.setDepartmentName(dept); + tempResponse.setOpsDepartmentName(infos[1]); + } else { + tempResponse.setOpsDepartmentName(infos[0]); + } } else { - 
tempResponse.setOpsDepartmentName(infos[0]); + tempResponse.setOpsDepartmentName(opsDept.replace(StringUtils.trimToEmpty(dept) + "-", "")); } - } else { - tempResponse.setOpsDepartmentName(opsDept.replace(dept + "-", "")); } - } - if (CollectionUtils.isNotEmpty(devList)) { - devDept = (String) (devList.iterator().next()).get("v"); - if (StringUtils.isEmpty(dept)) { - String[] infos = devDept.split(SpecCharEnum.MINUS.getValue()); - if (infos.length == 2) { - dept = infos[0]; - tempResponse.setDepartmentName(dept); - tempResponse.setDevDepartmentName(infos[1]); + if (CollectionUtils.isNotEmpty(devList)) { + devDept = (String) (devList.iterator().next()).getOrDefault("v", ""); + if (StringUtils.isEmpty(dept)) { + String[] infos = devDept.split(SpecCharEnum.MINUS.getValue()); + if (infos.length == 2) { + dept = infos[0]; + tempResponse.setDepartmentName(dept); + tempResponse.setDevDepartmentName(infos[1]); + } else { + tempResponse.setDevDepartmentName(infos[0]); + } } else { - tempResponse.setDevDepartmentName(infos[0]); + tempResponse.setDevDepartmentName(devDept.replace(StringUtils.trimToEmpty(dept) + "-", "")); } - } else { - tempResponse.setDevDepartmentName(devDept.replace(dept + "-", "")); } + } catch (Exception e) { + LOGGER.error("Failed to format data: {}", CustomObjectMapper.transObjectToJson(current), e ); } - responses.add(tempResponse); } return responses; } + @Override + public String getSubSystemIdByName(String subSystemName) throws UnExpectedRequestException { + Map response = requestCmdb(OperateEnum.SUB_SYSTEM, "A problem occurred when converting the request body to json.", "{&FAILED_TO_GET_SUB_SYSTEM_INFO}", "Start to get sub_system info from cmdb. url: {}, method: {}, body: {}", "Succeed to get sub_system info from cmdb. 
response."); + + List content = checkResponse(response); + for (int i = 0; i < content.size(); i++) { + Object current = content.get(i); + String currentSubSystemName = ((Map) current).get("subsystem_name"); + if (!subSystemName.equals(currentSubSystemName)) { + continue; + } + + String currentSubsystemId; + try { + currentSubsystemId = ((Map) current).get("subsystem_id").toString(); + } catch (Exception e1) { + try { + currentSubsystemId = ((Map) current).get("subsystem_id"); + } catch (Exception e2) { + LOGGER.warn("Current subsystem ID cannot be number."); + continue; + } + } + if (null != currentSubsystemId) { + return currentSubsystemId; + } + } + return null; + } + private Map requestCmdb(OperateEnum subSystem, String problemDescribe, String international, String requestInfo, String successInfo) throws UnExpectedRequestException { String url = UriBuilder.fromUri(operateCiConfig.getHost()).path(operateCiConfig.getUrl()).toString(); @@ -150,7 +185,6 @@ private Map requestCmdb(OperateEnum subSystem, String problemDes } LOGGER.info(requestInfo, url, javax.ws.rs.HttpMethod.POST, entity); Map response = restTemplate.postForObject(url, entity, Map.class); - LOGGER.info(successInfo); return response; } @@ -172,10 +206,6 @@ private List checkResponse(Map response) @Override public List getAllProductInfo() throws UnExpectedRequestException { - if ("open_source".equals(deployEnvType)) { - return Collections.emptyList(); - } - String url = UriBuilder.fromUri(operateCiConfig.getHost()).path(operateCiConfig.getUrl()).toString(); HttpHeaders headers = new HttpHeaders(); @@ -293,7 +323,8 @@ public List getDevAndOpsInfo(Integer deptCode) throws UnE } @Override - public GeneralResponse getDcn(Long subSystemId) throws UnExpectedRequestException { + public GeneralResponse getDcn(String subSystemId, String dcnRangeType, List dcnRangeValues) throws UnExpectedRequestException { + String url = 
UriBuilder.fromUri(operateCiConfig.getHost()).path(operateCiConfig.getIntegrateUrl()).toString(); HttpHeaders headers = new HttpHeaders(); @@ -302,7 +333,7 @@ public GeneralResponse getDcn(Long subSystemId) throws UnExpectedRe // Construct request body. OperateRequest request = new OperateRequest(OperateEnum.SUB_SYSTEM_FIND_DCN.getCode()); request.setUserAuthKey(operateCiConfig.getNewUserAuthKey()); - request.getFilter().put("subsystem_id", subSystemId.toString()); + request.getFilter().put("subsystem_id", subSystemId); HttpEntity entity; try { String jsonRequest = objectMapper.writeValueAsString(request); @@ -315,40 +346,34 @@ public GeneralResponse getDcn(Long subSystemId) throws UnExpectedRe LOGGER.info("Start to get dcn by subsystem.", url, javax.ws.rs.HttpMethod.POST, entity); Map response = restTemplate.postForObject(url, entity, Map.class); LOGGER.info("Finished to get dcn by subsystem."); + List> maps = (List>) ((Map) response.get("data")).get("content"); - DcnResponse dcnResponse = new DcnResponse((List>) ((Map) response.get("data")).get("content")); + filterDcn(maps, dcnRangeType, dcnRangeValues); - // Filter MASTER - if (Boolean.TRUE.equals(operateCiConfig.getOnlySlave())) { - filterDcn(dcnResponse); + if(Arrays.asList(QualitisConstants.CMDB_KEY_DCN_NUM, QualitisConstants.CMDB_KEY_LOGIC_AREA) + .contains(dcnRangeType)) { + Map>> res = maps.stream() + .collect(Collectors.groupingBy( + map -> map.get(dcnRangeType) + )); + return new GeneralResponse(ResponseStatusConstants.OK, "Success to get dcn by subsystem", res); + } else { + return new GeneralResponse(ResponseStatusConstants.OK, "Success to get dcn by subsystem", maps); } - - return new GeneralResponse<>("200", "Success to get dcn by subsystem", dcnResponse); } - private void filterDcn(DcnResponse dcnResponse) { - Map>>> resMap = dcnResponse.getRes(); - Iterator>>>> resIterator = resMap.entrySet().iterator(); - while (resIterator.hasNext()) { - Map.Entry>>> res = resIterator.next(); - Map>> dcnMap = 
res.getValue(); - Iterator>>> dcnIterator = dcnMap.entrySet().iterator(); - while (dcnIterator.hasNext()) { - List> logicDcns = dcnIterator.next().getValue(); - ListIterator> logicDcnIterator = logicDcns.listIterator(); - while (logicDcnIterator.hasNext()) { - Map dcn = logicDcnIterator.next(); - if ("MASTER".equals(dcn.get("set_type"))) { - logicDcnIterator.remove(); - } - } - if (CollectionUtils.isEmpty(logicDcns)) { - dcnIterator.remove(); - } + private void filterDcn(List> maps, String dcnRangeType, List dcnRangeValues) { + ListIterator> dcnIterator = maps.listIterator(); + while (dcnIterator.hasNext()) { + Map dcn = dcnIterator.next(); + if (Boolean.TRUE.equals(operateCiConfig.getOnlySlave()) && "MASTER".equals(dcn.get("set_type"))) { + dcnIterator.remove(); } - if (MapUtils.isEmpty(dcnMap)) { - resIterator.remove(); + if (CollectionUtils.isNotEmpty(dcnRangeValues) && !dcnRangeValues.contains(dcn.get(dcnRangeType))) { + dcnIterator.remove(); } } + } + } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/EmailEntity.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/EmailEntity.java new file mode 100644 index 00000000..fc187cc6 --- /dev/null +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/EmailEntity.java @@ -0,0 +1,91 @@ +package com.webank.wedatasphere.qualitis.bean; + +import com.fasterxml.jackson.annotation.JsonProperty; + +import java.util.List; + +/** + * @author v_gaojiedeng@webank.com + */ +public class EmailEntity { + + /** + * 发件人邮箱 + */ + @JsonProperty("From") + private String from; + + /** + * 收件人邮箱,多人用分号隔开。 + */ + @JsonProperty("To") + private String to; + + @JsonProperty("ToList") + private List toList; + + /** + * 邮件标题 + */ + @JsonProperty("Title") + private String title; + /** + * 邮件内容,如果带图片的话,要配合下面的Attachments参数使用。图片: + */ + @JsonProperty("Content") + private String content; + + /** + * 邮件格式,0 文本、1 Html。默认值为0。 + */ + @JsonProperty("BodyFormat") + private Integer bodyFormat; 
+ + public String getFrom() { + return from; + } + + public void setFrom(String from) { + this.from = from; + } + + public String getTo() { + return to; + } + + public void setTo(String to) { + this.to = to; + } + + public List getToList() { + return toList; + } + + public void setToList(List toList) { + this.toList = toList; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getContent() { + return content; + } + + public void setContent(String content) { + this.content = content; + } + + public Integer getBodyFormat() { + return bodyFormat; + } + + public void setBodyFormat(Integer bodyFormat) { + this.bodyFormat = bodyFormat; + } +} diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/SendMailMakeRequest.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/SendMailMakeRequest.java new file mode 100644 index 00000000..55a7aaeb --- /dev/null +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/bean/SendMailMakeRequest.java @@ -0,0 +1,59 @@ +package com.webank.wedatasphere.qualitis.bean; + +import com.webank.wedatasphere.qualitis.report.entity.SubscriptionRecord; + +import java.util.List; +import java.util.Map; + +/** + * @author v_gaojiedeng@webank.com + */ +public class SendMailMakeRequest { + + private String receiver; + private List> mapLists; + private List subscriptionRecords; + private String createUser; + + public String getReceiver() { + return receiver; + } + + public void setReceiver(String receiver) { + this.receiver = receiver; + } + + public List> getMapLists() { + return mapLists; + } + + public void setMapLists(List> mapLists) { + this.mapLists = mapLists; + } + + public List getSubscriptionRecords() { + return subscriptionRecords; + } + + public void setSubscriptionRecords(List subscriptionRecords) { + this.subscriptionRecords = subscriptionRecords; + } + + public String getCreateUser() { + return 
createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + @Override + public String toString() { + return "SendMailMakeRequest{" + + "receiver='" + receiver + '\'' + + ", mapLists=" + mapLists + + ", subscriptionRecords=" + subscriptionRecords + + ", createUser='" + createUser + '\'' + + '}'; + } +} diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/config/ThreadPoolTaskConfig.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/config/ThreadPoolTaskConfig.java new file mode 100644 index 00000000..af4fe2da --- /dev/null +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/config/ThreadPoolTaskConfig.java @@ -0,0 +1,108 @@ +package com.webank.wedatasphere.qualitis.config; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.scheduling.annotation.EnableAsync; +import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; + +import java.util.concurrent.ThreadPoolExecutor; + +/** + * @author v_gaojiedeng@webank.com + */ +@Configuration +@EnableAsync +public class ThreadPoolTaskConfig { + @Value("${report.cron_enable}") + private Boolean reportCronEnable; + @Value("${report.daily.core_pool_size}") + private int dailyCorePoolSize; + @Value("${report.daily.max_pool_size}") + private int dailyMaxPoolSize; + @Value("${report.daily.queue_capacity}") + private int dailyQueueCapacity; + + @Value("${report.weekly.core_pool_size}") + private int weeklyCorePoolSize; + @Value("${report.weekly.max_pool_size}") + private int weeklyMaxPoolSize; + @Value("${report.weekly.queue_capacity}") + private int weeklyQueueCapacity; + + @Bean("threadPoolTaskDailyExecutor") + public ThreadPoolTaskExecutor threadPoolTaskExecutor() { + ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); + 
//线程池创建的核心线程数,线程池维护线程的最少数量,即使没有任务需要执行,也会一直存活 + //如果设置allowCoreThreadTimeout=true(默认false)时,核心线程会超时关闭 + executor.setCorePoolSize(dailyCorePoolSize); + + //队列容量(队列只存在任务,不存在线程) + executor.setQueueCapacity(dailyQueueCapacity); + + //最大线程数; + //当corePoolSize 以及queueCapacity 满了以后,会在线程中额外创建线程.最大线程数指的是当前存在的最大的线程数。队列中的不属于 + executor.setMaxPoolSize(dailyMaxPoolSize); + + //当线程空闲时间达到keepAliveTime时,线程会退出,直到线程数量=corePoolSize + //如果allowCoreThreadTimeout=true,则会直到线程数量=0 + executor.setKeepAliveSeconds(60); + //优雅关闭 + executor.setWaitForTasksToCompleteOnShutdown(true); + + //线程名前缀 + executor.setThreadNamePrefix("Every-Day-Report-Execution-Thread-"); + + //当线程数满MaxPoolSize时,可采用以下拒绝策略 + + //CallerRunsPolicy():交由调用方线程运行,比如 main 线程;如果添加到线程池失败,那么主线程会自己去执行该任务,不会等待线程池中的线程去执行 + //AbortPolicy():该策略是线程池的默认策略,如果线程池队列满了丢掉这个任务并且抛出RejectedExecutionException异常。 + //DiscardPolicy():如果线程池队列满了,会直接丢掉这个任务并且不会有任何异常 + //DiscardOldestPolicy():丢弃队列中最老的任务,队列满了,会将最早进入队列的任务删掉腾出空间,再尝试加入队列 + executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy()); + executor.initialize(); + return executor; + } + + @Bean("threadPoolTaskWeeklyExecutor") + public ThreadPoolTaskExecutor threadPoolTaskWeeklyExecutor() { + ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); + //线程池创建的核心线程数,线程池维护线程的最少数量,即使没有任务需要执行,也会一直存活 + //如果设置allowCoreThreadTimeout=true(默认false)时,核心线程会超时关闭 + executor.setCorePoolSize(weeklyCorePoolSize); + + //队列容量(队列只存在任务,不存在线程) + executor.setQueueCapacity(weeklyQueueCapacity); + + //最大线程数; + //当corePoolSize 以及queueCapacity 满了以后,会在线程中额外创建线程.最大线程数指的是当前存在的最大的线程数。队列中的不属于 + executor.setMaxPoolSize(weeklyMaxPoolSize); + + //当线程空闲时间达到keepAliveTime时,线程会退出,直到线程数量=corePoolSize + //如果allowCoreThreadTimeout=true,则会直到线程数量=0 + executor.setKeepAliveSeconds(60); + //优雅关闭 + executor.setWaitForTasksToCompleteOnShutdown(true); + + //线程名前缀 + executor.setThreadNamePrefix("Weekly-Report-Execution-Thread-"); + + //当线程数满MaxPoolSize时,可采用以下拒绝策略 + + //CallerRunsPolicy():交由调用方线程运行,比如 main 
线程;如果添加到线程池失败,那么主线程会自己去执行该任务,不会等待线程池中的线程去执行 + //AbortPolicy():该策略是线程池的默认策略,如果线程池队列满了丢掉这个任务并且抛出RejectedExecutionException异常。 + //DiscardPolicy():如果线程池队列满了,会直接丢掉这个任务并且不会有任何异常 + //DiscardOldestPolicy():丢弃队列中最老的任务,队列满了,会将最早进入队列的任务删掉腾出空间,再尝试加入队列 + executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy()); + executor.initialize(); + return executor; + } + + public Boolean getReportCronEnable() { + return reportCronEnable; + } + + public void setReportCronEnable(Boolean reportCronEnable) { + this.reportCronEnable = reportCronEnable; + } +} diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/HaAbstractServiceCoordinator.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/HaAbstractServiceCoordinator.java index 6c3dd239..ddc7ec3f 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/HaAbstractServiceCoordinator.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/ha/HaAbstractServiceCoordinator.java @@ -15,6 +15,8 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; +import javax.annotation.PostConstruct; + /** * @author howeye */ @@ -33,6 +35,7 @@ public class HaAbstractServiceCoordinator extends AbstractServiceCoordinator { private static final Logger LOGGER = LoggerFactory.getLogger(HaAbstractServiceCoordinator.class); + @PostConstruct @Override public void init() { LOGGER.info("Start to create zookeeper client"); diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/impl/MonitorManagerImpl.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/impl/MonitorManagerImpl.java index a6206039..caaae62a 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/impl/MonitorManagerImpl.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/job/impl/MonitorManagerImpl.java @@ -19,13 +19,14 @@ import 
com.webank.wedatasphere.qualitis.bean.LogResult; import com.webank.wedatasphere.qualitis.client.AbstractJobSubmitter; import com.webank.wedatasphere.qualitis.exception.ClusterInfoNotConfigException; -import com.webank.wedatasphere.qualitis.exception.TaskNotExistException; import com.webank.wedatasphere.qualitis.exception.LogPartialException; +import com.webank.wedatasphere.qualitis.exception.TaskNotExistException; import com.webank.wedatasphere.qualitis.job.MonitorManager; -import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; +import java.util.Map; + /** * @author howeye */ @@ -40,7 +41,7 @@ public Map getTaskStatus(Long taskId, String user, String remote } @Override - public LogResult getTaskPartialLog(Long taskId, Integer begin, String user, String remoteAddress, String clusterName) throws LogPartialException, ClusterInfoNotConfigException { + public LogResult getTaskPartialLog(Long taskId, Integer begin, String user, String remoteAddress, String clusterName) throws ClusterInfoNotConfigException, LogPartialException { return abstractJobSubmitter.getJobPartialLog(taskId, begin, user, remoteAddress, clusterName); } } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/PushReportScheduled.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/PushReportScheduled.java new file mode 100644 index 00000000..6a18447b --- /dev/null +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/PushReportScheduled.java @@ -0,0 +1,387 @@ +package com.webank.wedatasphere.qualitis.timer; + +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.webank.wedatasphere.qualitis.bean.EmailEntity; +import com.webank.wedatasphere.qualitis.bean.SendMailMakeRequest; +import com.webank.wedatasphere.qualitis.client.MailClient; +import com.webank.wedatasphere.qualitis.config.EsbSdkConfig; +import 
com.webank.wedatasphere.qualitis.config.ThreadPoolTaskConfig; +import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.constant.TaskStatusEnum; +import com.webank.wedatasphere.qualitis.constants.QualitisConstants; +import com.webank.wedatasphere.qualitis.dao.ApplicationDao; +import com.webank.wedatasphere.qualitis.dao.MailLockRecordDao; +import com.webank.wedatasphere.qualitis.dao.TaskDao; +import com.webank.wedatasphere.qualitis.entity.Application; +import com.webank.wedatasphere.qualitis.entity.MailLockRecord; +import com.webank.wedatasphere.qualitis.entity.Task; +import com.webank.wedatasphere.qualitis.entity.TaskRuleSimple; +import com.webank.wedatasphere.qualitis.exception.UnExpectedRequestException; +import com.webank.wedatasphere.qualitis.ha.AbstractServiceCoordinator; +import com.webank.wedatasphere.qualitis.project.entity.Project; +import com.webank.wedatasphere.qualitis.report.constant.ExecutionFrequencyEnum; +import com.webank.wedatasphere.qualitis.report.dao.SubscribeOperateReportDao; +import com.webank.wedatasphere.qualitis.report.dao.SubscriptionRecordDao; +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReport; +import com.webank.wedatasphere.qualitis.report.entity.SubscriptionRecord; +import com.webank.wedatasphere.qualitis.rule.dao.RuleDao; +import com.webank.wedatasphere.qualitis.rule.entity.Rule; +import com.webank.wedatasphere.qualitis.rule.entity.RuleDataSource; +import com.webank.wedatasphere.qualitis.util.HtmlTableGeneratorUtils; +import com.webank.wedatasphere.qualitis.util.map.CustomObjectMapper; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.scheduling.annotation.Async; +import 
org.springframework.scheduling.annotation.Scheduled; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.TreeSet; +import java.util.stream.Collectors; + +/** + * @author v_gaojiedeng@webank.com + */ +@Configuration +public class PushReportScheduled { + + @Autowired + private SubscribeOperateReportDao subscribeOperateReportDao; + @Autowired + private SubscriptionRecordDao subscriptionRecordDao; + @Autowired + private RuleDao ruleDao; + @Autowired + private ApplicationDao applicationDao; + @Autowired + private TaskDao taskDao; + @Autowired + private MailClient mailClient; + @Autowired + private EsbSdkConfig esbSdkConfig; + @Autowired + private MailLockRecordDao mailLockRecordDao; + @Autowired + private ThreadPoolTaskConfig threadPoolTaskConfig; + + private static final Logger LOGGER = LoggerFactory.getLogger(PushReportScheduled.class); + private static final String DAILY_DATE = "daily"; + private static final String WEEKLY_DATE = "weekly"; + SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + @Autowired + private AbstractServiceCoordinator abstractServiceCoordinator; + + /** + * 每个批次数据条目 + */ + private static final int BATCH_SIZE = 10; + + private static final List TABLE_HEADERS = Arrays.asList("项目ID", "项目名称", "已配置规则数量", "过去周期已配置规则增量", + "已配置规则的表数量", "过去周期已配置规则的表增量", "已配置规则的KPI表数量", "过去周期已配置规则的KPI表增量", "KPI 表已配置规则数量", + "过去周期KPI表已配置规则增量", "在调度监控的规则数(过去周期)", "通过校验规则数(过去周期)", "未通过校验规则数(过去周期)", "失败规则数(过去周期)"); + + /** + * 用户订阅项目的运营指标,多项目汇总,一行统计记录,接入邮件系统,发送报表: + * 已配置规则的表数量; + * 过去周期已配置规则的表增量; + * 已配置规则数量; + * 过去周期已配置规则增量; + * 已配置规则的 KPI 表数量; + * 过去周期已配置规则的 KPI 
表增量; + * KPI 表已配置规则数量; + * 过去周期 KPI 表已配置规则增量; + * 在调度监控的规则数(过去周期); + * 通过校验规则数(过去周期); + * 未通过校验规则数(过去周期); + * 失败规则数(过去周期); + *

+ * 0 0 2 * * 1 每周周一2点 (避免与其他的调度冲突) + * 0 0 1 * * ? 每天1时 + */ + + @Scheduled(cron = "${report.daily.cron}") + @Async("threadPoolTaskDailyExecutor") + public void dailyMonitor() { + if (!threadPoolTaskConfig.getReportCronEnable()) { + return; + } + try { + LOGGER.info(Thread.currentThread().getName() + " >>>>>>>>>> Start operating report scheduling <<<<<<<<<<"); + abstractServiceCoordinator.coordinate(); + handleOperatingReportLogic(ExecutionFrequencyEnum.DAILY.getCode(), DAILY_DATE); + } catch (Exception e) { + LOGGER.error("Failed to operating report scheduling, caused by: {}", e.getMessage(), e); + } finally { + abstractServiceCoordinator.release(); + } + } + + @Scheduled(cron = "${report.weekly.cron}") + @Async("threadPoolTaskWeeklyExecutor") + public void weeklyMonitor() { + if (!threadPoolTaskConfig.getReportCronEnable()) { + return; + } + try { + LOGGER.info(Thread.currentThread().getName() + " >>>>>>>>>> Start operating report scheduling <<<<<<<<<<"); + abstractServiceCoordinator.coordinate(); + handleOperatingReportLogic(ExecutionFrequencyEnum.WEEKLY.getCode(), WEEKLY_DATE); + } catch (Exception e) { + LOGGER.error("Failed to operating report scheduling, caused by: {}", e.getMessage(), e); + } finally { + abstractServiceCoordinator.release(); + } + } + + + @Transactional(propagation = Propagation.REQUIRED, rollbackFor = {RuntimeException.class, UnExpectedRequestException.class}) + public void handleOperatingReportLogic(Integer code, String date) throws ParseException { + LOGGER.info("System Current Time: {}", QualitisConstants.PRINT_TIME_FORMAT.format(new Date())); + // Check another qualitis Services. 
+ try { + InetAddress inetAddress = InetAddress.getLocalHost(); + LOGGER.info("Start to scheduling machine ip:{}" + inetAddress.getHostAddress()); + } catch (UnknownHostException e) { + LOGGER.error("Failed to get host info."); + } + + Date nowDate = new Date(); + MailLockRecord mailLockRecord = mailLockRecordDao.findByUnique(simpleDateFormat.parse(simpleDateFormat.format(new Date())), true, code); + if (mailLockRecord != null) { + LOGGER.info("mail lock in the possession of."); + return; + } else { + LOGGER.info("mail lock is not exist."); + } + + List subscribeOperateReports = subscribeOperateReportDao.selectAllMateFrequency(code); + if (CollectionUtils.isEmpty(subscribeOperateReports)) { + LOGGER.info(">>>>>>>>>> There is no " + date + " execution frequency in the operation report <<<<<<<<<<"); + return; + } + List sendMailMakeRequests = Lists.newArrayList(); + + for (SubscribeOperateReport subscribeOperateReport : subscribeOperateReports) { + List projects = subscribeOperateReport.getSubscribeOperateReportProjectsSet().stream().map(item -> item.getProject()).collect(Collectors.toList()); + List> mapLists = new ArrayList>(); + List subscriptionRecords = Lists.newArrayList(); + + for (Project project : projects) { + // 1.rulesCount 表数量 2.rulesTableCount 已配置规则的表数量 3.rulesKpiCount KPI表数量 4.rulesKpiTableCount KPI已配置规则数量 + // 5.schedulingRulesCount 在调度监控的规则数 6.passRulesCount 通过校验规则数 7.noPassRulesCount 未通过校验规则数 8.failRulesCount 失败规则数 + int rulesCount = 0, rulesTableCount = 0, rulesKpiCount = 0, rulesKpiTableCount = 0, + schedulingRulesCount = 0, passRulesCount = 0, noPassRulesCount = 0, failRulesCount = 0; + + int pastCyclesRulesCount = 0, pastCyclesRulesTableCount = 0, pastCyclesRulesKpiCount = 0, pastCyclesRulesKpiTableCount = 0, + pastCyclesSchedulingRulesCount = 0, pastCyclesPassRulesCount = 0, pastCyclesNoPassRulesCount = 0, pastCyclesFailRulesCount = 0; + List rules = ruleDao.findByProject(project); + rulesCount = rules.size(); + for (Rule rule : rules) { + List 
ruleDataSources = rule.getRuleDataSources().stream().filter(item -> StringUtils.isNotBlank(item.getDbName()) && StringUtils.isNotBlank(item.getTableName())).collect(Collectors.toList()); + List ruleDataSourceKpiTable = rule.getRuleDataSources().stream().filter((item -> StringUtils.isNotBlank(item.getDbName()) && StringUtils.isNotBlank(item.getTableName()))). + filter(os -> StringUtils.isNotBlank(os.getTagName()) && os.getTagName().contains("KPI")).collect(Collectors.toList()); + List ruleDataSourceKpiRules = rule.getRuleDataSources().stream().filter((item -> StringUtils.isNotBlank(item.getDbName()) && StringUtils.isNotBlank(item.getTableName()))). + filter(os -> StringUtils.isNotBlank(os.getTagName()) && os.getTagName().contains("KPI")). + collect(Collectors.collectingAndThen(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(p -> p.getRule().getId()))), ArrayList::new)); + rulesTableCount += ruleDataSources.size(); + rulesKpiCount += ruleDataSourceKpiTable.size(); + rulesKpiTableCount += ruleDataSourceKpiRules.size(); + } + + List applications = applicationDao.findByProject(project); + for (Application application : applications) { + List tasks = taskDao.findByApplication(application); + List usingTasks = tasks.stream().map(Task::getTaskRuleSimples).flatMap( + taskRuleSimpleSet -> taskRuleSimpleSet.stream()).distinct().collect(Collectors.toList()); + List passTasks = tasks.stream().filter(item -> item.getStatus().equals(TaskStatusEnum.PASS_CHECKOUT.getCode())).map(Task::getTaskRuleSimples).flatMap( + taskRuleSimpleSet -> taskRuleSimpleSet.stream()).distinct().collect(Collectors.toList()); + List failTasks = tasks.stream().filter(item -> item.getStatus().equals(TaskStatusEnum.FAIL_CHECKOUT.getCode())).map(Task::getTaskRuleSimples).flatMap( + taskRuleSimpleSet -> taskRuleSimpleSet.stream()).distinct().collect(Collectors.toList()); + List failedTasks = tasks.stream().filter(item -> 
item.getStatus().equals(TaskStatusEnum.FAILED.getCode())).map(Task::getTaskRuleSimples).flatMap( + taskRuleSimpleSet -> taskRuleSimpleSet.stream()).distinct().collect(Collectors.toList()); + schedulingRulesCount += usingTasks.size(); + passRulesCount += passTasks.size(); + noPassRulesCount += failTasks.size(); + failRulesCount += failedTasks.size(); + } + + SubscriptionRecord matchProjectAndFrequency = subscriptionRecordDao.findMatchProjectAndFrequency(project.getId(), code); + if (null == matchProjectAndFrequency) { + subscriptionRecords.add(setBasicInfo(project, rulesCount, rulesTableCount, rulesKpiCount, rulesKpiTableCount, schedulingRulesCount, + passRulesCount, noPassRulesCount, failRulesCount, code, new SubscriptionRecord())); + } else { + pastCyclesRulesCount = rulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getConfiguredRulesNum())) < 0 ? 0 : rulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getConfiguredRulesNum())); + pastCyclesRulesTableCount = rulesTableCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getConfiguredRulesTableNum())) < 0 ? 0 : rulesTableCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getConfiguredRulesTableNum())); + pastCyclesRulesKpiCount = rulesKpiCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getConfiguredRulesKpiNum())) < 0 ? 0 : rulesKpiCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getConfiguredRulesKpiNum())); + pastCyclesRulesKpiTableCount = rulesKpiTableCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getConfiguredRulesKpiTableNum())) < 0 ? 0 : rulesKpiTableCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getConfiguredRulesKpiTableNum())); + pastCyclesSchedulingRulesCount = schedulingRulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getSchedulingRules())) < 0 ? 
0 : schedulingRulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getSchedulingRules())); + pastCyclesPassRulesCount = passRulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getPassRules())) < 0 ? 0 : passRulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getPassRules())); + pastCyclesNoPassRulesCount = noPassRulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getNoPassRules())) < 0 ? 0 : noPassRulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getNoPassRules())); + pastCyclesFailRulesCount = failRulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getFailRules())) < 0 ? 0 : failRulesCount - Integer.parseInt(String.valueOf(matchProjectAndFrequency.getFailRules())); + + subscriptionRecords.add(setBasicInfo(project, rulesCount, rulesTableCount, rulesKpiCount, rulesKpiTableCount, schedulingRulesCount, + passRulesCount, noPassRulesCount, failRulesCount, code, matchProjectAndFrequency)); + } + + //封装报表数据,调用邮件接口,发送信息 + Map map = Maps.newHashMap(); + map.put("project_id", project.getId()); + map.put("project_name", project.getName()); + map.put("rules_count", rulesCount); + map.put("past_cycles_rules_count", pastCyclesRulesCount); + map.put("rules_table_count", rulesTableCount); + map.put("past_cycles_rules_table_count", pastCyclesRulesTableCount); + map.put("rules_kpi_count", rulesKpiCount); + map.put("past_cycles_rules_kpi_count", pastCyclesRulesKpiCount); + map.put("rules_kpi_table_count", rulesKpiTableCount); + map.put("past_cycles_rules_kpi_table_count", pastCyclesRulesKpiTableCount); + map.put("past_cycles_scheduling_rules_count", pastCyclesSchedulingRulesCount); + map.put("past_cycles_pass_rules_count", pastCyclesPassRulesCount); + map.put("past_cycles_no_pass_rules_count", pastCyclesNoPassRulesCount); + map.put("past_cycles_fail_rules_count", pastCyclesFailRulesCount); + mapLists.add(map); + } + + SendMailMakeRequest sendMailMakeRequest = new 
SendMailMakeRequest(); + sendMailMakeRequest.setReceiver(subscribeOperateReport.getReceiver()); + sendMailMakeRequest.setMapLists(mapLists); + sendMailMakeRequest.setSubscriptionRecords(subscriptionRecords); + sendMailMakeRequest.setCreateUser(subscribeOperateReport.getCreateUser()); + sendMailMakeRequests.add(sendMailMakeRequest); + } + + if (CollectionUtils.isNotEmpty(sendMailMakeRequests)) { + //send_date与execution_frequency 组合唯一索引,避免多实例定时调度重复执行导致重复数据出现 + MailLockRecord currentMailLockRecord = mailLockRecordDao.save(new MailLockRecord(sendMailMakeRequests.size(), true, new Date(nowDate.getTime()) + , QualitisConstants.PRINT_TIME_FORMAT.format(nowDate), "", code)); + //只有数据对象入库成功,才能发起邮件 + if (currentMailLockRecord == null) { + return; + } + + LOGGER.info(">>>>>>>>>> SEND EMAIL PACKAGING RESULT SET : <<<<<<<<<<" + sendMailMakeRequests.toString()); + try { + // 获取执行的轮次 + int round = (sendMailMakeRequests.size() - 1) / BATCH_SIZE; + LOGGER.info(">>>>>>>>>> Sending emails in batches round : <<<<<<<<<< " + round); + for (int i = 0; i <= round; i++) { + // 求每个批次起始位置 + int fromIndex = i * BATCH_SIZE; + int toIndex = (i + 1) * BATCH_SIZE; + // 如果是最后一个批次,则不能越界 + if (i == round) { + toIndex = sendMailMakeRequests.size(); + } + // TODO: 对subList执行进一步要做的操作 + List subList = sendMailMakeRequests.subList(fromIndex, toIndex); + + for (SendMailMakeRequest sendMailMakeRequest : subList) { + //发送邮件,请求esb接口 + sendMailMessage(sendMailMakeRequest.getReceiver(), sendMailMakeRequest.getMapLists(), sendMailMakeRequest.getCreateUser()); + //运营报表记录入库 + for (SubscriptionRecord subscriptionRecord : sendMailMakeRequest.getSubscriptionRecords()) { + subscriptionRecordDao.save(subscriptionRecord); + LOGGER.info(">>>>>>>>>> Subscription Record Object : <<<<<<<<<< " + subscriptionRecord.toString()); + } + } + } + } catch (Exception e) { + LOGGER.error("Failed to send emails record . 
Exception: {}", e.getMessage(), e); + currentMailLockRecord.setStatus(false); + currentMailLockRecord.setErrMsg(e.getMessage()); + } + + } + LOGGER.info(">>>>>>>>>> End of operation report scheduling <<<<<<<<<<"); + } + + private SubscriptionRecord setBasicInfo(Project project, Integer rulesCount, Integer rulesTableCount, Integer rulesKpiCount, Integer rulesKpiTableCount, + Integer schedulingRulesCount, Integer passRulesCount, Integer noPassRulesCount, Integer failRulesCount, Integer executionFrequency, SubscriptionRecord subscriptionRecord) { + subscriptionRecord.setProject(project); + subscriptionRecord.setExecutionFrequency(executionFrequency); + subscriptionRecord.setConfiguredRulesNum(Long.valueOf(rulesCount)); + subscriptionRecord.setConfiguredRulesTableNum(Long.valueOf(rulesTableCount)); + subscriptionRecord.setConfiguredRulesKpiNum(Long.valueOf(rulesKpiCount)); + subscriptionRecord.setConfiguredRulesKpiTableNum(Long.valueOf(rulesKpiTableCount)); + subscriptionRecord.setSchedulingRules(Long.valueOf(schedulingRulesCount)); + subscriptionRecord.setPassRules(Long.valueOf(passRulesCount)); + subscriptionRecord.setNoPassRules(Long.valueOf(noPassRulesCount)); + subscriptionRecord.setFailRules(Long.valueOf(failRulesCount)); + return subscriptionRecord; + } + + private void sendMailMessage(String receiver, List> mapLists, String createUser) throws Exception { + String[] specReceiver = receiver.split(SpecCharEnum.COMMA.getValue()); + List resultReceiver = Lists.newArrayList(); + for (String accepter : specReceiver) { + resultReceiver.add(accepter + "@webank.com"); + } + //邮件封装实体 + EmailEntity emailEntity = new EmailEntity(); + //发件人邮箱 + emailEntity.setFrom("wds@webank.com"); + //收件人邮箱,多人用分号隔开。(to,cc,bcc不能全部同时为空) + emailEntity.setToList(resultReceiver); + //邮件标题 + emailEntity.setTitle(esbSdkConfig.getTitle() + "(" + simpleDateFormat.format(new Date()) + ")"); + + List> data = new ArrayList<>(); + for (Map mapList : mapLists) { + 
data.add(Arrays.asList(mapList.get("project_id").toString(), mapList.get("project_name").toString(), mapList.get("rules_count").toString(), + mapList.get("past_cycles_rules_count").toString(), mapList.get("rules_table_count").toString(), mapList.get("past_cycles_rules_table_count").toString(), + mapList.get("rules_kpi_count").toString(), mapList.get("past_cycles_rules_kpi_count").toString(), mapList.get("rules_kpi_table_count").toString(), + mapList.get("past_cycles_rules_kpi_table_count").toString(), mapList.get("past_cycles_scheduling_rules_count").toString(), mapList.get("past_cycles_pass_rules_count").toString(), + mapList.get("past_cycles_no_pass_rules_count").toString(), mapList.get("past_cycles_fail_rules_count").toString() + )); + } + // 创建HtmlTableGenerator对象 + HtmlTableGeneratorUtils tableGenerator = new HtmlTableGeneratorUtils(mapLists.size(), 14, TABLE_HEADERS, data); + // 生成HTML表格 + String htmlTable = tableGenerator.generateTable(); + // 打印生成的HTML表格 + LOGGER.info(">>>>>>>>>> OUT PUT HTML TABLE :<<<<<<<<<< " + htmlTable); + //邮件内容,如果带图片的话 + emailEntity.setContent(htmlTable); + //邮件格式,0 文本、1 Html。默认值为0。 + emailEntity.setBodyFormat(1); + + convertEmail(emailEntity); + mailClient.sendEsbMail(CustomObjectMapper.transObjectToJson(emailEntity), createUser); + } + + + /** + * @param email 邮件对象 + * @return email + * @Description:将EmailEntity里的TO/CC/BC List分别转换为字符串字段To CC BCC,多个时,用分号隔开 + */ + private EmailEntity convertEmail(EmailEntity email) { + if (email.getToList() != null && email.getToList().size() != 0) { + email.setTo(List2String(email.getToList())); + } + return email; + } + + public static String List2String(List list) { + if (list == null || list.size() == 0) { + return ""; + } + return org.apache.commons.lang3.StringUtils.join(list, ";"); + } + +} diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/TaskChecker.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/TaskChecker.java index 6f0b8308..1d064569 
100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/TaskChecker.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/timer/TaskChecker.java @@ -26,6 +26,7 @@ import com.webank.wedatasphere.qualitis.constant.ApplicationCommentEnum; import com.webank.wedatasphere.qualitis.constant.ApplicationStatusEnum; import com.webank.wedatasphere.qualitis.constant.ImsLevelEnum; +import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; import com.webank.wedatasphere.qualitis.constant.TaskStatusEnum; import com.webank.wedatasphere.qualitis.constants.QualitisConstants; import com.webank.wedatasphere.qualitis.dao.AbnormalDataRecordInfoDao; @@ -38,6 +39,7 @@ import com.webank.wedatasphere.qualitis.dao.TaskDataSourceDao; import com.webank.wedatasphere.qualitis.dao.TaskResultDao; import com.webank.wedatasphere.qualitis.dao.TaskResultStatusDao; +import com.webank.wedatasphere.qualitis.dao.TaskRuleAlarmConfigDao; import com.webank.wedatasphere.qualitis.dao.TaskRuleSimpleDao; import com.webank.wedatasphere.qualitis.dao.UploadRecordDao; import com.webank.wedatasphere.qualitis.dao.UserDao; @@ -83,6 +85,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; @@ -92,16 +95,8 @@ import java.math.BigDecimal; import java.net.InetAddress; import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.time.temporal.ChronoUnit; +import java.util.*; import java.util.stream.Collectors; /** @@ -120,6 +115,8 @@ 
public class TaskChecker implements IChecker { @Autowired private TaskDataSourceDao taskDataSourceDao; @Autowired + private TaskRuleAlarmConfigDao taskRuleAlarmConfigDao; + @Autowired private RuleMetricDao ruleMetricDao; @Autowired private RuleGroupDao ruleGroupDao; @@ -152,14 +149,22 @@ public class TaskChecker implements IChecker { @Autowired private ApplicationCommentDao applicationCommentDao; + @Value("${intellect.check.project_name}") + private String intellectCheckProjectName; + + @Value("${alarm.ims.receiver.collect:leoli,dqdong}") + private String collectReceiver; + private static final int BATCH_ABNORMAL_DATA_RECORD = 500; private static final String PRINT_TIME_PATTERN = "yyyy-MM-dd HH:mm:ss"; private static final Logger LOGGER = LoggerFactory.getLogger(TaskChecker.class); private static final DateTimeFormatter PRINT_TIME_FORMAT = DateTimeFormat.forPattern(PRINT_TIME_PATTERN); + private static final java.time.format.DateTimeFormatter FORMATTER = java.time.format.DateTimeFormatter.ofPattern(PRINT_TIME_PATTERN); private static final Map ERR_CODE_TYPE = new HashMap(); private static final List APPLICATION_COMMENT_LIST = Lists.newArrayList(); + private static final String IMS_LOG = "{\"TYPE\": \"QUALITIS\",\"RES_CODE\": %d,\"COST_TIME\": %d,\"RES_MSG\": \"%s\"}"; @PostConstruct public void init() { @@ -298,10 +303,12 @@ private void writeDb(JobChecker jobChecker, String newStatus, Integer errCode) { boolean isPass; boolean finish; if (passCheckOut(jobChecker.getApplicationId(), taskInDb)) { + LOGGER.info("Check passed! Task:[{}]", taskInDb.getId()); modifyJobStatus(taskInDb, TaskStatusEnum.PASS_CHECKOUT.getState()); isPass = true; finish = true; } else { + LOGGER.info("Check not passed! 
Task:[{}]", taskInDb.getId()); if (Boolean.FALSE.equals(checkWhetherBlocked(taskInDb)) && Boolean.TRUE.equals(taskInDb.getAbortOnFailure())) { modifyJobStatus(taskInDb, TaskStatusEnum.FAILED.getState()); List collect = APPLICATION_COMMENT_LIST.stream().filter(item -> item.getCode().toString().equals(ApplicationCommentEnum.DIFF_DATA_ISSUES.getCode().toString())).collect(Collectors.toList()); @@ -385,9 +392,14 @@ private Boolean passCheckOut(String applicationId, Task task) { @Transactional(rollbackFor = {RuntimeException.class, UnExpectedRequestException.class}) private Boolean checkTaskRuleSimplePass(String applicationId, TaskRuleSimple taskRuleSimple) { - Boolean passFlag = true; + Boolean passFlag = Boolean.TRUE; + Application application = applicationDao.findById(applicationId); + if (StringUtils.isNotEmpty(application.getClusterName()) && application.getClusterName().contains(SpecCharEnum.COMMA.getValue()) && (QualitisConstants.MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateEnName()) || QualitisConstants.SINGLE_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateName()))) { + return Boolean.TRUE; + } List taskResults = taskResultDao.findByApplicationAndRule(applicationId, taskRuleSimple.getRuleId()); if (CollectionUtils.isEmpty(taskResults)) { + LOGGER.warn("Has no task result. 
Application:[{}], Rule[{}]", applicationId, taskRuleSimple.getRuleId()); return false; } List taskResultStatusList = Lists.newArrayList(); @@ -399,44 +411,44 @@ private Boolean checkTaskRuleSimplePass(String applicationId, TaskRuleSimple tas taskRuleAlarmConfig.getRuleMetric().getId().equals(ruleMetricId) ).collect(Collectors.toList()); } -// 遍历校验预期 + // 遍历校验预期 for (TaskRuleAlarmConfig taskRuleAlarmConfig : taskRuleAlarmConfigList) { TaskResultStatus taskResultStatus = new TaskResultStatus(); taskResultStatus.setApplicationId(applicationId); taskResultStatus.setRuleId(taskRuleSimple.getRuleId()); - taskResultStatus.setTaskResult(taskResult); taskResultStatus.setTaskRuleAlarmConfigId(taskRuleAlarmConfig.getId()); + taskResultStatus.setTaskResult(taskResult); taskResultStatusList.add(taskResultStatus); - if (AlarmConfigStatusEnum.NOT_PASS.getCode().equals(taskRuleAlarmConfig.getStatus())) { - taskResultStatus.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); - } else if (AlarmConfigStatusEnum.PASS.getCode().equals(taskRuleAlarmConfig.getStatus())) { + + Boolean passReal = PassUtil.notSafe(applicationId, taskRuleSimple.getRuleId(), taskRuleAlarmConfig, taskResult, taskResultDao); + + if (passReal) { + if (! AlarmConfigStatusEnum.NOT_PASS.getCode().equals(taskRuleAlarmConfig.getStatus())) { + taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.PASS.getCode()); + } taskResultStatus.setStatus(AlarmConfigStatusEnum.PASS.getCode()); + LOGGER.info("Current task rule alarm config passed. TaskRuleAlarmConfig:[{}]", taskRuleAlarmConfig.toString()); } else { - Boolean passReal = PassUtil.notSafe(applicationId, taskRuleSimple.getRuleId(), taskRuleAlarmConfig, taskResult, taskResultDao); + passFlag = false; + taskResultStatus.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); + taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); + LOGGER.info("Current task rule alarm config not passed. 
TaskRuleAlarmConfig:[{}]", taskRuleAlarmConfig.toString()); - if (passReal) { - taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.PASS.getCode()); - taskResultStatus.setStatus(AlarmConfigStatusEnum.PASS.getCode()); + if (taskRuleSimple.getRuleType().equals(RuleTemplateTypeEnum.CUSTOM.getCode()) + || taskRuleSimple.getRuleType().equals(RuleTemplateTypeEnum.FILE_COUSTOM.getCode())) { + if (taskRuleAlarmConfig.getDeleteFailCheckResult() != null && true == taskRuleAlarmConfig.getDeleteFailCheckResult().booleanValue()) { + taskResult.setSaveResult(false); + taskResultDao.saveTaskResult(taskResult); + } } else { - taskResultStatus.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); - passFlag = false; - taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); - - if (taskRuleSimple.getRuleType().equals(RuleTemplateTypeEnum.CUSTOM.getCode()) - || taskRuleSimple.getRuleType().equals(RuleTemplateTypeEnum.FILE_COUSTOM.getCode())) { - if (taskRuleAlarmConfig.getDeleteFailCheckResult() != null && true == taskRuleAlarmConfig.getDeleteFailCheckResult().booleanValue()) { - taskResult.setSaveResult(false); - taskResultDao.saveTaskResult(taskResult); - } - } else { - if (taskRuleSimple.getDeleteFailCheckResult() != null && true == taskRuleSimple.getDeleteFailCheckResult().booleanValue()) { - taskResult.setSaveResult(false); - taskResultDao.saveTaskResult(taskResult); - } + if (taskRuleSimple.getDeleteFailCheckResult() != null && true == taskRuleSimple.getDeleteFailCheckResult().booleanValue()) { + taskResult.setSaveResult(false); + taskResultDao.saveTaskResult(taskResult); } } } } + taskRuleAlarmConfigDao.saveAll(taskRuleAlarmConfigList); } taskResultStatusDao.saveBatch(taskResultStatusList); return passFlag; @@ -446,19 +458,116 @@ private void ifLastTaskAndSaveApplication(Application applicationInDb) { if (isLastJob(applicationInDb)) { LOGGER.info("Succeed to execute all task of application. 
Application: {}", applicationInDb); applicationInDb.setFinishTime(new DateTime(new Date()).toString(PRINT_TIME_FORMAT)); + if (StringUtils.isNotEmpty(applicationInDb.getClusterName()) && applicationInDb.getClusterName().contains(SpecCharEnum.COMMA.getValue())) { + List taskResults = taskResultDao.findByApplicationId(applicationInDb.getId()); + Map> ruleTaskResults = new HashMap<>(taskResults.size()); + for (TaskResult taskResult : taskResults) { + Long ruleId = taskResult.getRuleId(); + if (ruleTaskResults.keySet().contains(ruleId)) { + ruleTaskResults.get(ruleId).add(taskResult); + } else { + List tmpTaskResults = new ArrayList<>(); + tmpTaskResults.add(taskResult); + ruleTaskResults.put(ruleId, tmpTaskResults); + } + } + + for (Long ruleId : ruleTaskResults.keySet()) { + List taskRuleSimples = taskRuleSimpleDao.findByApplicationAndRule(applicationInDb.getId(), ruleId); + boolean allMatch = taskRuleSimples.stream().allMatch(taskRuleSimple -> QualitisConstants.MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateEnName()) || QualitisConstants.SINGLE_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateName())); + if (! allMatch) { + continue; + } + boolean allSucc = taskRuleSimples.stream().allMatch(taskRuleSimple -> TaskStatusEnum.PASS_CHECKOUT.getCode().equals(taskRuleSimple.getTask().getStatus())); + if (! 
allSucc) { + continue; + } + boolean pass = true; +// boolean allZero = true; + + List taskResultList = ruleTaskResults.get(ruleId); + TaskResult first = taskResultList.get(0); + String value = first.getValue(); + +// if (0 != Integer.valueOf(value)) { +// allZero = false; +// } + List taskResultStatusList = Lists.newArrayList(); + for (TaskResult taskResult : taskResultList) { + TaskResultStatus taskResultStatus = new TaskResultStatus(); + taskResultStatus.setRuleId(ruleId); + taskResultStatus.setTaskResult(taskResult); + taskResultStatus.setApplicationId(applicationInDb.getId()); + taskResultStatus.setStatus(AlarmConfigStatusEnum.PASS.getCode()); + Long taskRuleAlarmConfigId = taskRuleSimples.stream().filter(taskRuleSimple -> taskRuleSimple.getTask().getId().equals(taskResult.getTaskId())).iterator().next().getTaskRuleAlarmConfigList().stream().iterator().next().getId(); + taskResultStatus.setTaskRuleAlarmConfigId(taskRuleAlarmConfigId); + taskResultStatusList.add(taskResultStatus); + if (! value.equals(taskResult.getValue())) { + pass = false; + break; + } +// if (allZero && 0 != Integer.valueOf(taskResult.getValue())) { +// allZero = false; +// } + } +// if (allZero) { +// pass = false; +// } + + if (! 
pass) { + applicationInDb.reduceSuccessJobNum(); + applicationInDb.reduceSuccessJobNum(); + taskDao.saveAll(taskRuleSimples.stream().map(taskRuleSimple -> { + Task task = taskRuleSimple.getTask(); + if (Boolean.TRUE.equals(task.getAbortOnFailure())) { + applicationInDb.addFailJobNum(); + task.setStatus(TaskStatusEnum.FAILED.getCode()); + } else { + applicationInDb.addNotPassTaskNum(); + task.setStatus(TaskStatusEnum.FAIL_CHECKOUT.getCode()); + } + return task; + }).collect(Collectors.toList())); + taskRuleAlarmConfigDao.saveAll(taskRuleSimples.stream() + .map(taskRuleSimple -> taskRuleSimple.getTaskRuleAlarmConfigList()) + .flatMap(taskRuleAlarmConfigs -> taskRuleAlarmConfigs.stream()) + .map(taskRuleAlarmConfig -> { + taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); + return taskRuleAlarmConfig; + }).collect(Collectors.toList())); + taskResultStatusDao.saveBatch(taskResultStatusList.stream().map(taskResultStatus -> { + taskResultStatus.setStatus(AlarmConfigStatusEnum.NOT_PASS.getCode()); + return taskResultStatus; + }).collect(Collectors.toList())); + } else { + taskRuleAlarmConfigDao.saveAll(taskRuleSimples.stream().map(taskRuleSimple -> taskRuleSimple.getTaskRuleAlarmConfigList()).flatMap(taskRuleAlarmConfigs -> taskRuleAlarmConfigs.stream()).map(taskRuleAlarmConfig -> { + taskRuleAlarmConfig.setStatus(AlarmConfigStatusEnum.PASS.getCode()); + return taskRuleAlarmConfig; + }).collect(Collectors.toList())); + taskResultStatusDao.saveBatch(taskResultStatusList.stream().map(taskResultStatus -> { + taskResultStatus.setStatus(AlarmConfigStatusEnum.PASS.getCode()); + return taskResultStatus; + }).collect(Collectors.toList())); + } + } + } if (applicationInDb.getFinishTaskNum().equals(applicationInDb.getTotalTaskNum())) { applicationInDb.setStatus(ApplicationStatusEnum.FINISHED.getCode()); - List collect = APPLICATION_COMMENT_LIST.stream().filter(item -> 
item.getCode().toString().equals(ApplicationCommentEnum.SAME_ISSUES.getCode().toString())).collect(Collectors.toList()); + List collect = APPLICATION_COMMENT_LIST.stream() + .filter(item -> item.getCode().toString().equals(ApplicationCommentEnum.SAME_ISSUES.getCode().toString())) + .collect(Collectors.toList()); Integer applicationCommentCode = CollectionUtils.isNotEmpty(collect) ? collect.get(0).getCode() : null; - applicationInDb.setApplicationComment(applicationCommentCode); + printImsLog(applicationInDb,ApplicationStatusEnum.FINISHED); } else if (!applicationInDb.getFailTaskNum().equals(0) || !applicationInDb.getAbnormalTaskNum().equals(0)) { applicationInDb.setStatus(ApplicationStatusEnum.FAILED.getCode()); + printImsLog(applicationInDb,ApplicationStatusEnum.FAILED); } else { applicationInDb.setStatus(ApplicationStatusEnum.NOT_PASS.getCode()); List collect = APPLICATION_COMMENT_LIST.stream().filter(item -> item.getCode().toString().equals(ApplicationCommentEnum.DIFF_DATA_ISSUES.getCode().toString())).collect(Collectors.toList()); Integer applicationCommentCode = CollectionUtils.isNotEmpty(collect) ? 
collect.get(0).getCode() : null; applicationInDb.setApplicationComment(applicationCommentCode); + printImsLog(applicationInDb,ApplicationStatusEnum.NOT_PASS); } checkIfSendAlarm(applicationInDb); checkIfReport(applicationInDb, imsConfig); @@ -479,32 +588,82 @@ private boolean isLastJob(Application application) { } private void checkIfSendAlarm(Application application) { + if (StringUtils.isNotBlank(application.getCollectIds())) { + LOGGER.info("Start to alarm collect task."); + List tasks = taskDao.findByApplication(application); + List failedTask = tasks.stream().filter(job -> TaskStatusEnum.FAILED.getCode().equals(job.getStatus()) || TaskStatusEnum.CANCELLED.getCode().equals(job.getStatus())).collect(Collectors.toList()); + if (CollectionUtils.isEmpty(failedTask)) { + LOGGER.info("No failed collect task."); + return; + } + Set dbTableFilters = failedTask.stream().map(task -> task.getTaskDataSources()) + .flatMap(taskDataSources -> taskDataSources.stream()).map(taskDataSource -> taskDataSource.getDatabaseName() + SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + taskDataSource.getTableName() + SpecCharEnum.COLON.getValue() + taskDataSource.getFilter()).collect(Collectors.toSet()); + Set failedTaskRemoteIds = failedTask.stream().map(task -> task.getTaskRemoteId()).collect(Collectors.toSet()); + String alertInfo = linkisConfig.getCollectTemplate(); + alertInfo = alertInfo.replace("dbTableFilters", StringUtils.join(dbTableFilters, SpecCharEnum.COMMA.getValue())).replace("applicationID", application.getId()).replace("failedTaskRemoteIds", Arrays.toString(failedTaskRemoteIds.toArray())); + alarmClient.sendAlarm(imsConfig.getFailReceiver() + SpecCharEnum.COMMA.getValue() + collectReceiver, imsConfig.getTitlePrefix() + "集群 Qualitis 采集任务告警", alertInfo, String.valueOf(ImsLevelEnum.MINOR.getCode()), QualitisConstants.SUB_SYSTEM_ID); + LOGGER.info("Finish to alarm collect task."); + return; + } + LOGGER.info("Start to collect alarm info."); List tasks = 
taskDao.findByApplication(application); +// Previously, tasks that didn't pass and were aborted had been modified as failed tasks, but when sending alarm to receivers, +// their status still appeared as 'not pass' and 'aborted' + List notPassAndAbortTask = new ArrayList<>(); + for (Task task : tasks) { + if (TaskStatusEnum.FAILED.getCode().equals(task.getStatus())) { + boolean hasNo = ifTaskHasNotCheckRuleAlarmConfig(task.getTaskRuleSimples()); + if (hasNo) { + notPassAndAbortTask.add(task); + } + } + } + List notPassTask = tasks.stream().filter(job -> job.getStatus().equals(TaskStatusEnum.FAIL_CHECKOUT.getCode())).collect(Collectors.toList()); + if (CollectionUtils.isNotEmpty(notPassAndAbortTask)) { + notPassTask.addAll(notPassAndAbortTask); + } + LOGGER.info("Succeed to collect failed pass tasks. Task ID: {}", notPassTask.stream().map(Task::getId).collect(Collectors.toList())); List notPassTaskRuleSimples = AlarmUtil.notSafeTaskRuleSimple(notPassTask); - + List checkAlarmAcrossClusters = notPassTaskRuleSimples.stream().filter(taskRuleSimple -> QualitisConstants.MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateEnName()) || QualitisConstants.SINGLE_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateName())).collect(Collectors.toList()); + List alarmedRuleIds = null; + if (CollectionUtils.isNotEmpty(checkAlarmAcrossClusters)) { + alarmedRuleIds = new ArrayList<>(checkAlarmAcrossClusters.size()); + } List failedTask = tasks.stream().filter(job -> job.getStatus().equals(TaskStatusEnum.FAILED.getCode()) || job.getStatus().equals(TaskStatusEnum.CANCELLED.getCode())).collect(Collectors.toList()); + if (CollectionUtils.isNotEmpty(notPassAndAbortTask)) { + failedTask = failedTask.stream().filter(task -> ! notPassAndAbortTask.contains(task)).collect(Collectors.toList()); + } + LOGGER.info("Succeed to collect failed tasks. 
Task ID: {}", failedTask.stream().map(Task::getId).collect(Collectors.toList())); List failedTaskRuleSimples = AlarmUtil.getFailedTaskRule(failedTask); for (Iterator taskRuleSimpleIterator = failedTaskRuleSimples.iterator(); taskRuleSimpleIterator.hasNext(); ) { TaskRuleSimple taskRuleSimple = taskRuleSimpleIterator.next(); List taskRuleAlarmConfigList = taskRuleSimple.getTaskRuleAlarmConfigList(); - int count = (int) taskRuleAlarmConfigList.stream().filter(o -> !AlarmConfigStatusEnum.PASS.getCode().equals(o.getStatus())).count(); + int count = (int) taskRuleAlarmConfigList.stream().filter(o -> ! AlarmConfigStatusEnum.PASS.getCode().equals(o.getStatus())).count(); if (0 == count) { taskRuleSimpleIterator.remove(); } } - // 是否告警都跟配置的告警事件来判断 AlarmEventEnum a. only pass b. task failed, not pass + abort, not pass + not abort) c. pass, not pass + abort, not pass + not abort + // 是否告警都跟配置的告警事件来判断 AlarmEventEnum a. only pass b. task failed, not pass + abort, not pass + not abort c. pass, not pass + abort, not pass + not abort // CHECK_SUCCESS 校验成功、CHECK_FAILURE 校验失败、EXECUTION_COMPLETED 执行完成 List alreadyAlertApp = new ArrayList<>(); for (Task task : tasks) { Set taskRuleSimpleCollect = task.getTaskRuleSimples(); for (TaskRuleSimple taskRuleSimple : taskRuleSimpleCollect) { + if (CollectionUtils.isNotEmpty(alarmedRuleIds) && alarmedRuleIds.contains(taskRuleSimple.getRuleId())) { + continue; + } + boolean isAcrossCluster = false; Rule rule = ruleDao.findById(taskRuleSimple.getRuleId()); + if (checkAlarmAcrossClusters.contains(taskRuleSimple)) { + alarmedRuleIds.add(taskRuleSimple.getRuleId()); + isAcrossCluster = true; + } if (rule != null && StringUtils.isNotBlank(rule.getExecutionParametersName())) { ExecutionParameters executionParameters = executionParametersDao.findByNameAndProjectId(rule.getExecutionParametersName(), rule.getProject().getId()); if (executionParameters != null) { @@ -518,7 +677,7 @@ private void checkIfSendAlarm(Application application) { if 
(executionParameters.getAlertLevel() != null && StringUtils.isNotBlank(executionParameters.getAlertReceiver())) { if (!alreadyAlertApp.contains(application.getId())) { List taskRuleSimples = notPassTaskRuleSimples.stream().filter(taskRuleSimpleTemp -> taskRuleSimpleTemp.getAlertLevel() != null).collect(Collectors.toList()); - handleCheckFailure(alreadyAlertApp, application, taskRuleSimples, null, null, null); + handleCheckFailure(alreadyAlertApp, application, task, taskRuleSimples, null, null, null); } if (!alreadyAlertApp.contains(application.getId())) { List taskRuleSimples = failedTaskRuleSimples.stream().filter(taskRuleSimpleTemp -> taskRuleSimpleTemp.getAlertLevel() != null).collect(Collectors.toList()); @@ -533,17 +692,16 @@ private void checkIfSendAlarm(Application application) { // a. only pass handleCheckSuccess(application, task, taskRuleSimple, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); } else if (QualitisConstants.CHECK_FAILURE.toString().equals(parameters.getAlarmEvent().toString())) { - // b. task failed, not pass + abort - handleTaskFailure(alreadyAlertApp, application, failedTaskRuleSimples, taskRuleSimple, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); - // b. not pass + not abort - handleCheckFailure(alreadyAlertApp, application, notPassTaskRuleSimples, taskRuleSimple, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); + // b. task failed + handleTaskFailure(alreadyAlertApp, application, failedTaskRuleSimples, isAcrossCluster ? null : taskRuleSimple, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); + // b. not pass + not abort, not pass + abort + handleCheckFailure(alreadyAlertApp, application, task, notPassTaskRuleSimples, isAcrossCluster ? null : taskRuleSimple, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); } else if (QualitisConstants.EXECUTION_COMPLETED.toString().equals(parameters.getAlarmEvent().toString())) { // c. 
pass handleCheckSuccess(application, task, taskRuleSimple, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); // c. not pass + abort - handleTaskFailureDueToAbort(application, failedTaskRuleSimples, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); // c. not pass + not abort - handleCheckFailure(alreadyAlertApp, application, notPassTaskRuleSimples, taskRuleSimple, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); + handleCheckFailure(alreadyAlertApp, application, task, notPassTaskRuleSimples, isAcrossCluster ? null : taskRuleSimple, parameters.getAlarmLevel(), parameters.getAlarmReceiver()); } } } @@ -554,7 +712,7 @@ private void checkIfSendAlarm(Application application) { if ((null != rule && Boolean.TRUE.equals(rule.getAlert())) || (null != taskRuleSimple.getAlertLevel() && StringUtils.isNotEmpty(taskRuleSimple.getAlertReceiver()))) { if (!alreadyAlertApp.contains(application.getId())) { List taskRuleSimples = notPassTaskRuleSimples.stream().filter(taskRuleSimpleTemp -> taskRuleSimpleTemp.getAlertLevel() != null).collect(Collectors.toList()); - handleCheckFailure(alreadyAlertApp, application, taskRuleSimples, null, null, null); + handleCheckFailure(alreadyAlertApp, application, task, taskRuleSimples, null, null, null); } if (!alreadyAlertApp.contains(application.getId())) { List taskRuleSimples = failedTaskRuleSimples.stream().filter(taskRuleSimpleTemp -> taskRuleSimpleTemp.getAlertLevel() != null).collect(Collectors.toList()); @@ -567,32 +725,6 @@ private void checkIfSendAlarm(Application application) { handleAbnormalDataRecord(tasks); } - /** - * not pass + abort(校验不通过+阻断) - * - * @param application - * @param failedTaskRuleSimples - * @param alertRank - * @param alertReceiver - */ - private void handleTaskFailureDueToAbort(Application application, List failedTaskRuleSimples, Integer alertRank, String alertReceiver) { - if (!application.getFailTaskNum().equals(0)) { - LOGGER.info("Start to filter not pass + abort task to alarm."); - 
if (CollectionUtils.isNotEmpty(failedTaskRuleSimples)) { - int notCheckNum = failedTaskRuleSimples.stream() - .map(taskRuleSimple -> taskRuleSimple.getTaskRuleAlarmConfigList()) - .flatMap(taskRuleAlarmConfigList -> taskRuleAlarmConfigList.stream()) - .filter(taskRuleAlarmConfig -> AlarmConfigStatusEnum.NOT_CHECK.getCode().equals(taskRuleAlarmConfig.getStatus())) - .collect(Collectors.toList()).size(); - LOGGER.info("Task has not check num is : " + notCheckNum); - if (notCheckNum != 0) { - return; - } - AlarmUtil.sendFailedMessage(application, failedTaskRuleSimples, imsConfig, alarmClient, alarmInfoDao, userDao, alertRank, alertReceiver); - } - } - } - /** * 异常数据告警收集,含指标且已配置告警的规则 * @@ -636,37 +768,56 @@ private void handleCheckSuccess(Application application, Task task, TaskRuleSimp List safes = new ArrayList<>(); safes.add(taskRuleSimple); LOGGER.info("Succeed to collect check success simple rule. Simple rules: {}", safes); - AlarmUtil.sendAlarmMessage(application, safes, imsConfig, alarmClient, alarmInfoDao, userDao, taskResultStatusDao, alert, alertReceiver, true); + AlarmUtil.sendAlarmMessage(application, safes, imsConfig, alarmClient, alarmInfoDao, userDao, taskResultStatusDao, alert, alertReceiver, true, false); } } /** * not pass + not abort(校验不通过+不阻断) - * - * @param alreadyAlertApp + * not pass + abort(校验失败+阻断) + * @param alreadyAlertApp * @param application + * @param task * @param notSafes * @param currentTaskRuleSimple */ - private void handleCheckFailure(List alreadyAlertApp, Application application, List notSafes, TaskRuleSimple currentTaskRuleSimple, Integer alertRank, String alertReceiver) { - if (!application.getNotPassTaskNum().equals(0)) { + private void handleCheckFailure(List alreadyAlertApp, Application application, Task task, List notSafes, TaskRuleSimple currentTaskRuleSimple, Integer alertRank, String alertReceiver) { + boolean isAbort = false; + if (Boolean.TRUE.equals(task.getAbortOnFailure())) { + isAbort = true; + } + if ((isAbort && 
TaskStatusEnum.FAILED.getCode().equals(task.getStatus())) || !application.getNotPassTaskNum().equals(0)) { if (null != currentTaskRuleSimple) { if (notSafes.contains(currentTaskRuleSimple)) { List taskRuleSimples = new ArrayList<>(); taskRuleSimples.add(currentTaskRuleSimple); - AlarmUtil.sendAlarmMessage(application, taskRuleSimples, imsConfig, alarmClient, alarmInfoDao, userDao, taskResultStatusDao, alertRank, alertReceiver, false); + + AlarmUtil.sendAlarmMessage(application, taskRuleSimples, imsConfig, alarmClient, alarmInfoDao, userDao, taskResultStatusDao, alertRank, alertReceiver, false, isAbort); } } else { alreadyAlertApp.add(application.getId()); - AlarmUtil.sendAlarmMessage(application, notSafes, imsConfig, alarmClient, alarmInfoDao, userDao, taskResultStatusDao, alertRank, alertReceiver, false); + AlarmUtil.sendAlarmMessage(application, notSafes, imsConfig, alarmClient, alarmInfoDao, userDao, taskResultStatusDao, alertRank, alertReceiver, false, isAbort); } } } + private boolean ifTaskHasNotCheckRuleAlarmConfig(Collection taskRuleSimples) { + int notCheckNum = taskRuleSimples.stream() + .map(taskRuleSimple -> taskRuleSimple.getTaskRuleAlarmConfigList()) + .flatMap(taskRuleAlarmConfigList -> taskRuleAlarmConfigList.stream()) + .filter(taskRuleAlarmConfig -> AlarmConfigStatusEnum.NOT_CHECK.getCode().equals(taskRuleAlarmConfig.getStatus())) + .collect(Collectors.toList()).size(); + LOGGER.info("Task has not check num is : " + notCheckNum); + if (notCheckNum == 0) { + return true; + } + return false; + } + /** - * task failed, not pass + abort(任务失败、校验失败+阻断) + * task failed(任务失败) * * @param alreadyAlertApp * @param application @@ -693,7 +844,7 @@ private void handleTaskFailure(List alreadyAlertApp, Application applica private void constructAbnormalDataRecordInfo(Task task, TaskRuleSimple taskRuleSimple, List ruleMetricList, List abnormalDataRecordInfoList) { RuleMetric currentRuleMetric = ruleMetricList.iterator().next(); String departmentName = 
currentRuleMetric.getDevDepartmentName(); - Integer subSystemId = currentRuleMetric.getSubSystemId(); + String subSystemId = currentRuleMetric.getSubSystemId(); if (null == subSystemId) { subSystemId = QualitisConstants.SUB_SYSTEM_ID; } @@ -927,6 +1078,9 @@ private Boolean checkWhetherBlocked(Task task) { Set taskRuleSimpleCollect = task.getTaskRuleSimples(); for (TaskRuleSimple taskRuleSimple : taskRuleSimpleCollect) { Rule rule = ruleDao.findById(taskRuleSimple.getRuleId()); + if (null == rule) { + return false; + } if (StringUtils.isNotBlank(rule.getExecutionParametersName())) { ExecutionParameters executionParameters = executionParametersDao .findByNameAndProjectId(rule.getExecutionParametersName(), rule.getProject().getId()); @@ -942,4 +1096,19 @@ private Boolean checkWhetherBlocked(Task task) { return false; } + private void printImsLog(Application applicationInDb, ApplicationStatusEnum applicationStatusEnum){ + try { + if(!intellectCheckProjectName.equals(applicationInDb.getProjectName())){ + return; + } + java.time.LocalDateTime submitTime = java.time.LocalDateTime.parse(applicationInDb.getSubmitTime(), FORMATTER); + java.time.LocalDateTime finishTime = java.time.LocalDateTime.parse(applicationInDb.getFinishTime(), FORMATTER); + long costTime = ChronoUnit.SECONDS.between(submitTime, finishTime); + LOGGER.info(String.format(IMS_LOG, applicationStatusEnum.getCode(), costTime, applicationStatusEnum.getMessage())); + } catch (Exception e) { + LOGGER.error("ims_omnis_prophet collect log printing failure"); + LOGGER.error(e.getMessage(), e); + } + } + } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/AlarmUtil.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/AlarmUtil.java index d3cf0d19..ab0a13d2 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/AlarmUtil.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/AlarmUtil.java @@ -1,33 +1,15 @@ package 
com.webank.wedatasphere.qualitis.util; +import com.google.common.base.Joiner; +import com.google.common.base.Splitter; +import com.google.common.collect.Maps; import com.webank.wedatasphere.qualitis.checkalert.entity.CheckAlert; import com.webank.wedatasphere.qualitis.client.AlarmClient; import com.webank.wedatasphere.qualitis.config.ImsConfig; -import com.webank.wedatasphere.qualitis.constant.AlarmConfigStatusEnum; -import com.webank.wedatasphere.qualitis.constant.AlertTypeEnum; -import com.webank.wedatasphere.qualitis.constant.ApplicationStatusEnum; -import com.webank.wedatasphere.qualitis.constant.ImsLevelEnum; -import com.webank.wedatasphere.qualitis.constant.SpecCharEnum; +import com.webank.wedatasphere.qualitis.constant.*; import com.webank.wedatasphere.qualitis.constants.QualitisConstants; -import com.webank.wedatasphere.qualitis.dao.AlarmInfoDao; -import com.webank.wedatasphere.qualitis.dao.ApplicationCommentDao; -import com.webank.wedatasphere.qualitis.dao.RoleDao; -import com.webank.wedatasphere.qualitis.dao.TaskResultStatusDao; -import com.webank.wedatasphere.qualitis.dao.UserDao; -import com.webank.wedatasphere.qualitis.dao.UserRoleDao; -import com.webank.wedatasphere.qualitis.entity.AlarmInfo; -import com.webank.wedatasphere.qualitis.entity.Application; -import com.webank.wedatasphere.qualitis.entity.ApplicationComment; -import com.webank.wedatasphere.qualitis.entity.Role; -import com.webank.wedatasphere.qualitis.entity.RuleMetric; -import com.webank.wedatasphere.qualitis.entity.Task; -import com.webank.wedatasphere.qualitis.entity.TaskDataSource; -import com.webank.wedatasphere.qualitis.entity.TaskResult; -import com.webank.wedatasphere.qualitis.entity.TaskResultStatus; -import com.webank.wedatasphere.qualitis.entity.TaskRuleAlarmConfig; -import com.webank.wedatasphere.qualitis.entity.TaskRuleSimple; -import com.webank.wedatasphere.qualitis.entity.User; -import com.webank.wedatasphere.qualitis.entity.UserRole; +import 
com.webank.wedatasphere.qualitis.dao.*; +import com.webank.wedatasphere.qualitis.entity.*; import com.webank.wedatasphere.qualitis.rule.constant.CheckTemplateEnum; import com.webank.wedatasphere.qualitis.rule.constant.CompareTypeEnum; import com.webank.wedatasphere.qualitis.rule.dao.ExecutionParametersDao; @@ -39,14 +21,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; /** @@ -96,14 +72,43 @@ public static List notSafeTaskRuleSimple(List tasks) { return taskRuleSimples; } - public static void sendAlarmMessage(Application application, List checkFailedRules, ImsConfig imsConfig, AlarmClient client - , AlarmInfoDao alarmInfoDao, UserDao userDao, TaskResultStatusDao taskResultStatusDao, Integer alert, String alertReceiver, Boolean flag) { + public static void sendAlarmMessage(Application application, List checkAlarmRules, ImsConfig imsConfig, AlarmClient client + , AlarmInfoDao alarmInfoDao, UserDao userDao, TaskResultStatusDao taskResultStatusDao, Integer alert, String alertReceiver, Boolean flag, Boolean isAbort) { boolean bdap = imsConfig.getTitlePrefix().contains("BDAP"); // 获取告警内容 StringBuilder alertInfo = new StringBuilder(); - List> requestList = new ArrayList<>(checkFailedRules.size()); + List> requestList = new ArrayList<>(checkAlarmRules.size()); + + List checkAlarmAcrossClusters = checkAlarmRules.stream().filter(taskRuleSimple -> QualitisConstants.MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateEnName()) || QualitisConstants.SINGLE_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateName())).collect(Collectors.toList()); + Map> taskRuleSimpleMap = null; + List alarmedRuleIds = null; + + if 
(CollectionUtils.isNotEmpty(checkAlarmAcrossClusters)) { + alarmedRuleIds = new ArrayList<>(checkAlarmAcrossClusters.size()); + taskRuleSimpleMap = new HashMap<>(checkAlarmAcrossClusters.size()); + + for (TaskRuleSimple taskRuleSimple : checkAlarmAcrossClusters) { + Long ruleId = taskRuleSimple.getRuleId(); + if (taskRuleSimpleMap.keySet().contains(ruleId)) { + taskRuleSimpleMap.get(ruleId).add(taskRuleSimple); + } else { + List tmpTaskRuleSimples = new ArrayList<>(); + tmpTaskRuleSimples.add(taskRuleSimple); + taskRuleSimpleMap.put(ruleId, tmpTaskRuleSimples); + } + } + } + // 遍历每一个规则 - for (TaskRuleSimple taskRuleSimpleTemp : checkFailedRules) { + for (TaskRuleSimple taskRuleSimpleTemp : checkAlarmRules) { + boolean isAcrossCluster = false; + if (CollectionUtils.isNotEmpty(alarmedRuleIds) && alarmedRuleIds.contains(taskRuleSimpleTemp.getRuleId())) { + continue; + } + if (checkAlarmAcrossClusters.contains(taskRuleSimpleTemp)) { + alarmedRuleIds.add(taskRuleSimpleTemp.getRuleId()); + isAcrossCluster = true; + } Map request = new HashMap<>(6); // 获取告警标题 String cnName = taskRuleSimpleTemp.getCnName(); @@ -118,56 +123,41 @@ public static void sendAlarmMessage(Application application, List taskRuleAlarmConfigList = taskRuleSimpleTemp.getTaskRuleAlarmConfigList(); - Map taskRuleAlarmConfigMap = taskRuleAlarmConfigList.stream().collect(Collectors.toMap(TaskRuleAlarmConfig::getId, t -> t, (oValue, nValue) -> nValue)); List taskResultStatusList = taskResultStatusDao.findByStatus(application.getId(), taskRuleSimpleTemp.getRuleId(), AlarmConfigStatusEnum.NOT_PASS.getCode()); - for (TaskResultStatus taskResultStatus : taskResultStatusList) { - TaskRuleAlarmConfig alarmConfig = taskRuleAlarmConfigMap.get(taskResultStatus.getTaskRuleAlarmConfigId()); - if (null == alarmConfig) { - continue; - } - TaskResult taskResult = taskResultStatus.getTaskResult(); - String value = StringUtils.isBlank(taskResult.getValue()) ? 
"empty value" : taskResult.getValue(); - String compareValue = StringUtils.isBlank(taskResult.getCompareValue()) ? "empty value" : taskResult.getCompareValue(); - if (alarmConfig.getRuleMetric() == null || alarmConfig.getRuleMetric().getId().equals(taskResult.getRuleMetricId())) { - alarmStringAppend(alertInfo, alarmConfig, value, compareValue, realRuleName, realProjectName, taskResult.getEnvName()); + Map taskRuleAlarmConfigMap = taskRuleAlarmConfigList.stream().collect(Collectors.toMap(TaskRuleAlarmConfig::getId, t -> t, (oValue, nValue) -> nValue)); + if (isAcrossCluster) { + alarmAcrossCluster(alertInfo, taskRuleSimpleMap.get(taskRuleSimpleTemp.getRuleId()), taskResultStatusList); + } else { + alertInfo.append("库表信息:").append(retrieveDatasource(taskRuleSimpleTemp)).append("\n"); + for (TaskResultStatus taskResultStatus : taskResultStatusList) { + TaskRuleAlarmConfig alarmConfig = taskRuleAlarmConfigMap.get(taskResultStatus.getTaskRuleAlarmConfigId()); + if (null == alarmConfig) { + continue; + } + TaskResult taskResult = taskResultStatus.getTaskResult(); + String value = StringUtils.isBlank(taskResult.getValue()) ? "empty value" : taskResult.getValue(); + String compareValue = StringUtils.isBlank(taskResult.getCompareValue()) ? 
"empty value" : taskResult.getCompareValue(); + if (alarmConfig.getRuleMetric() == null || alarmConfig.getRuleMetric().getId().equals(taskResult.getRuleMetricId())) { + alarmStringAppend(alertInfo, alarmConfig, value, compareValue, realRuleName, realProjectName, taskResult.getEnvName()); + } } } - alertInfo.append("\n也可进入 Qualitis 系统查看详情。"); List ruleMetrics = taskRuleSimpleTemp.getTaskRuleAlarmConfigList().stream().map(TaskRuleAlarmConfig::getRuleMetric) .filter(ruleMetric -> ruleMetric != null).collect(Collectors.toList()); // 获取告警规则关联子系统 - int subSystemId = QualitisConstants.SUB_SYSTEM_ID; - if (CollectionUtils.isEmpty(ruleMetrics)) { - LOGGER.info("Qualitis find project's subsystem ID or datasource's subsystem ID because there is no rule metric. Rule name: " + realRuleName); - if (null != application.getSubSystemId()) { - subSystemId = application.getSubSystemId().intValue(); - } - if (taskRuleSimpleTemp.getTask() != null && CollectionUtils.isNotEmpty(taskRuleSimpleTemp.getTask().getTaskDataSources())) { - List subSystemIds = taskRuleSimpleTemp.getTask().getTaskDataSources().stream() - .map(taskDataSource -> taskDataSource.getSubSystemId()).filter(ele -> ele != null).collect( - Collectors.toList()); - if (CollectionUtils.isNotEmpty(subSystemIds)) { - Long currentSubSystemId = subSystemIds.iterator().next(); - if (currentSubSystemId != null) { - subSystemId = currentSubSystemId.intValue(); - } - } - } - } else { - // 获取子系统 - if (ruleMetrics.iterator().next().getSubSystemId() != null) { - subSystemId = ruleMetrics.iterator().next().getSubSystemId(); - } - } + String subSystemId = getAlarmSubSystemId(ruleMetrics, application, taskRuleSimpleTemp, realRuleName); + // 获取告警级别 int alertLevel = alert != null ? 
alert.intValue() : taskRuleSimpleTemp.getAlertLevel().intValue(); // 获取告警人 @@ -187,7 +177,7 @@ public static void sendAlarmMessage(Application application, List ruleMetrics, Application application, TaskRuleSimple taskRuleSimpleTemp, String realRuleName) { + String subSystemId = QualitisConstants.SUB_SYSTEM_ID; + if (CollectionUtils.isEmpty(ruleMetrics)) { + LOGGER.info("Qualitis find project's subsystem ID or datasource's subsystem ID because there is no rule metric. Rule name: " + realRuleName); + if (StringUtils.isNotBlank(application.getSubSystemId())) { + subSystemId = application.getSubSystemId(); + } + if (taskRuleSimpleTemp.getTask() != null && CollectionUtils.isNotEmpty(taskRuleSimpleTemp.getTask().getTaskDataSources())) { + List subSystemIds = taskRuleSimpleTemp.getTask().getTaskDataSources().stream() + .map(taskDataSource -> taskDataSource.getSubSystemId()).filter(ele -> ele != null).collect( + Collectors.toList()); + if (CollectionUtils.isNotEmpty(subSystemIds)) { + String currentSubSystemId = subSystemIds.iterator().next(); + if (currentSubSystemId != null) { + subSystemId = currentSubSystemId; + } + } + } + } else { + // 获取子系统 + if (ruleMetrics.iterator().next().getSubSystemId() != null) { + subSystemId = ruleMetrics.iterator().next().getSubSystemId(); + } + } + + return subSystemId; + } + private static String retrieveDatasource(TaskRuleSimple taskRuleSimpleTemp) { if (CollectionUtils.isNotEmpty(taskRuleSimpleTemp.getTask().getTaskDataSources())) { - String dbAndTable = taskRuleSimpleTemp.getTask().getTaskDataSources().stream().filter(taskDataSource -> taskDataSource.getRuleId().equals(taskRuleSimpleTemp.getRuleId())).map(taskDataSource -> - (StringUtils.isNotEmpty(taskDataSource.getDatabaseName()) ? 
taskDataSource.getDatabaseName() : "") + SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + + String dbAndTable = taskRuleSimpleTemp.getTask().getTaskDataSources().stream().filter(taskDataSource -> + taskDataSource.getRuleId().equals(taskRuleSimpleTemp.getRuleId())).map(taskDataSource -> + (StringUtils.isNotEmpty(taskDataSource.getDatabaseName()) ? taskDataSource.getDatabaseName() : "") + SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + (StringUtils.isNotEmpty(taskDataSource.getTableName()) ? taskDataSource.getTableName() : "") + SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + - (StringUtils.isNotEmpty(taskDataSource.getColName()) ? taskDataSource.getColName() : "[]")) - .collect(Collectors.joining(SpecCharEnum.DIVIDER.getValue())); + (StringUtils.isNotEmpty(taskDataSource.getColName()) ? taskDataSource.getColName() : "[]") + SpecCharEnum.PERIOD_NO_ESCAPE.getValue() + + (StringUtils.isNotEmpty(taskDataSource.getFilter()) ? taskDataSource.getFilter() : "")) + .collect(Collectors.joining(SpecCharEnum.DIVIDER.getValue())); return dbAndTable; } return ""; } - private static void packageAlarm(StringBuilder alertInfo, Map request, String alertTitle, int subSystemId, int alertLevel, + private static void packageAlarm(StringBuilder alertInfo, Map request, String alertTitle, String subSystemId, int alertLevel, List receivers, String alertObj, ImsConfig imsConfig) { - request.put("alert_reciver", StringUtils.join(receivers, ",")); +// 企业微信群 + Map advancedAlertReceiverMap = extractAlertReceivers(receivers); + if (advancedAlertReceiverMap.containsKey("erp_group_id")) { + request.put("erp_group_id", advancedAlertReceiverMap.get("erp_group_id")); + } + // 封装告警 + request.put("alert_reciver", advancedAlertReceiverMap.get("alert_reciver")); request.put("alert_info", alertInfo.toString()); request.put("sub_system_id", subSystemId); request.put("alert_title", alertTitle); @@ -232,16 +258,25 @@ private static void packageAlarm(StringBuilder alertInfo, Map re */ public static String 
getDepartAlerters(TaskRuleSimple taskRuleSimple, UserDao userDao, Integer alarmCode, String alertReceiver) { User creator = userDao.findByUsername(taskRuleSimple.getProjectCreator()); + String creatorName = ""; + if (creator != null) { + creatorName = creator.getUsername(); + } StringBuilder alerters = new StringBuilder("请通知:"); - String creatorName = creator.getUsername(); - alerters.append("告警接收人").append(alertReceiver); + + // 从告警人中移除企业微信群ID + String receiverWithoutErpGroupId = removeErpGroupId(alertReceiver); + + alerters.append("告警接收人").append(receiverWithoutErpGroupId); Set departAlerters = new HashSet<>(); if (alarmCode.equals(AlertTypeEnum.TASK_FAILED.getCode())) { Role role = SpringContextHolder.getBean(RoleDao.class).findByRoleName(ADMIN); Set admins = SpringContextHolder.getBean(UserRoleDao.class).findByRole(role).stream().map(UserRole::getUser).map(User::getUsername).collect(Collectors.toSet()); departAlerters.addAll(admins); - departAlerters.remove(creatorName); + if (StringUtils.isNotBlank(creatorName)) { + departAlerters.remove(creatorName); + } if (departAlerters.isEmpty()) { return alerters.toString(); @@ -314,8 +349,8 @@ private static void alarmStringAppend(StringBuilder alertInfo, TaskRuleAlarmConf Integer checkTemplate = alarmConfig.getCheckTemplate(); String checkTemplateName = CheckTemplateEnum.getCheckTemplateName(checkTemplate); alertInfo.append("Qualitis项目: ").append(projectName). - append(" 技术规则: ").append(ruleName) - .append(" 任务运行完成, 不符合数据质量要求。原因: ") + append(",技术规则: ").append(ruleName) + .append(",任务运行完成, 不符合数据质量要求。原因: ") .append(alarmConfig.getOutputName() + " - [").append(StringUtils.isEmpty(value) ? "" : value) .append(alarmConfig.getOutputUnit() == null ? "" : alarmConfig.getOutputUnit()).append("]") .append(", 不符合设定阈值: [").append(alarmConfig.getThreshold()).append(alarmConfig.getOutputUnit() == null ? 
"" : alarmConfig.getOutputUnit()) @@ -345,6 +380,22 @@ private static void alarmStringAppend(StringBuilder alertInfo, TaskRuleAlarmConf alertInfo.append("\n"); } + private static void alarmAcrossCluster(StringBuilder alertInfo, List taskRuleSimples, List taskResultStatusList) { + alertInfo.append("表行数比对不一致,详情如下: \n"); + + for (TaskRuleSimple taskRuleSimple : taskRuleSimples) { + TaskDataSource taskDataSource = taskRuleSimple.getTask().getTaskDataSources().iterator().next(); + alertInfo.append("库名称:").append(taskDataSource.getDatabaseName()).append(",").append("表名称:").append(taskDataSource.getTableName()).append(",").append("分区:").append(taskDataSource.getFilter()); + + for (TaskResultStatus taskResultStatus : taskResultStatusList) { + if (taskResultStatus.getTaskResult().getTaskId().equals(taskRuleSimple.getTask().getId())) { + alertInfo.append("的数据量为:").append(taskResultStatus.getTaskResult().getValue()); + } + } + alertInfo.append("\n"); + } + } + public static List getReceivers(TaskRuleSimple taskRuleSimple, Integer alarmCode, String alertReceiver) { List users = new ArrayList<>(); // 增加规则关注人 @@ -423,8 +474,19 @@ public static void sendFailedMessage(Application application, List> requestList = new ArrayList<>(failedRules.size()); + List failedAcrossClusters = failedRules.stream().filter(taskRuleSimple -> QualitisConstants.MULTI_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateEnName()) || QualitisConstants.SINGLE_SOURCE_ACROSS_TEMPLATE_NAME.equals(taskRuleSimple.getTemplateName())).collect(Collectors.toList()); + List alarmedRuleIds = null; + if (CollectionUtils.isNotEmpty(failedAcrossClusters)) { + alarmedRuleIds = new ArrayList<>(failedAcrossClusters.size()); + } for (TaskRuleSimple taskRuleSimpleTemp : failedRules) { + if (CollectionUtils.isNotEmpty(alarmedRuleIds) && alarmedRuleIds.contains(taskRuleSimpleTemp.getRuleId())) { + continue; + } + if (failedAcrossClusters.contains(taskRuleSimpleTemp)) { + 
alarmedRuleIds.add(taskRuleSimpleTemp.getRuleId()); + } Map request = new HashMap<>(6); // 获取告警标题 String cnName = taskRuleSimpleTemp.getCnName(); @@ -436,7 +498,7 @@ public static void sendFailedMessage(Application application, List ruleMetrics = taskRuleSimpleTemp.getTaskRuleAlarmConfigList().stream().map(TaskRuleAlarmConfig::getRuleMetric) .filter(ruleMetric -> ruleMetric != null).collect(Collectors.toList()); - int subSystemId = QualitisConstants.SUB_SYSTEM_ID; - if (CollectionUtils.isEmpty(ruleMetrics)) { - LOGGER.info("Qualitis find project's subsystem ID or datasource's subsystem ID because there is no rule metric. Rule name: " + realRuleName); - if (null != application.getSubSystemId()) { - subSystemId = application.getSubSystemId().intValue(); - } - if (taskRuleSimpleTemp.getTask() != null && CollectionUtils.isNotEmpty(taskRuleSimpleTemp.getTask().getTaskDataSources())) { - List subSystemIds = taskRuleSimpleTemp.getTask().getTaskDataSources().stream() - .map(taskDataSource -> taskDataSource.getSubSystemId()).filter(ele -> ele != null).collect( - Collectors.toList()); - if (CollectionUtils.isNotEmpty(subSystemIds)) { - Long currentSubSystemId = subSystemIds.iterator().next(); - if (currentSubSystemId != null) { - subSystemId = currentSubSystemId.intValue(); - } - } - } - } else { - // 获取子系统 - if (ruleMetrics.iterator().next().getSubSystemId() != null) { - subSystemId = ruleMetrics.iterator().next().getSubSystemId(); - } - } + String subSystemId = getAlarmSubSystemId(ruleMetrics, application, taskRuleSimpleTemp, realRuleName); // 获取告警人 List receivers = getReceivers(taskRuleSimpleTemp, AlertTypeEnum.TASK_FAILED.getCode(), StringUtils.isNotBlank(alertReceiver) ? 
alertReceiver : taskRuleSimpleTemp.getAlertReceiver()); @@ -476,8 +516,15 @@ public static void sendFailedMessage(Application application, List advancedAlertReceiverMap = extractAlertReceivers(receivers); + if (advancedAlertReceiverMap.containsKey("erp_group_id")) { + request.put("erp_group_id", advancedAlertReceiverMap.get("erp_group_id")); + } // 封装告警 - request.put("alert_reciver", StringUtils.join(receivers, ",")); + request.put("alert_reciver", advancedAlertReceiverMap.get("alert_reciver")); + request.put("alert_info", alertInfo.toString()); request.put("sub_system_id", subSystemId); request.put("alert_title", alertTitle); @@ -497,7 +544,7 @@ public static void sendFailedMessage(Application application, List extractAlertReceivers(List alertReceivers) { + AtomicReference erpGroupId = new AtomicReference<>(); + List defaultReceivers = new ArrayList<>(); + alertReceivers.forEach(item -> { + if (item.startsWith(SpecCharEnum.LEFT_BRACKET.getValue()) && item.endsWith(SpecCharEnum.RIGHT_BRACKET.getValue())) { + erpGroupId.set(item.substring(1, item.length() - 1)); + } else { + defaultReceivers.add(item); + } + }); + Map resultMap = Maps.newHashMapWithExpectedSize(2); + if (erpGroupId.get() != null) { + resultMap.put("erp_group_id", erpGroupId.get()); + } + if (CollectionUtils.isNotEmpty(defaultReceivers)) { + resultMap.put("alert_reciver", Joiner.on(SpecCharEnum.COMMA.getValue()).join(defaultReceivers)); + } + return resultMap; + } + private static String contructAlertObj(Set taskDataSources) { - Map> dbAndTables = new HashMap<>(taskDataSources.size()); + Map> dbAndTables = new HashMap<>(taskDataSources.size()); for (TaskDataSource taskDataSource : taskDataSources) { String databaseName = taskDataSource.getDatabaseName(); String tableName = taskDataSource.getTableName(); @@ -516,7 +583,7 @@ private static String contructAlertObj(Set taskDataSources) { if (dbAndTables.keySet().contains(databaseName)) { dbAndTables.get(databaseName).add(tableName); } else { - List 
tables = new ArrayList<>(); + Set tables = new HashSet<>(); tables.add(tableName); dbAndTables.put(databaseName, tables); @@ -525,9 +592,9 @@ private static String contructAlertObj(Set taskDataSources) { List dbs = new ArrayList<>(dbAndTables.keySet().size()); StringBuilder tempDb = new StringBuilder(); - for (Map.Entry> entry : dbAndTables.entrySet()) { + for (Map.Entry> entry : dbAndTables.entrySet()) { String key = entry.getKey(); - Object value = entry.getValue(); + Set value = entry.getValue(); dbs.add(tempDb.append(key).append("[").append(StringUtils.join(value, ",")).append("]").toString()); tempDb.delete(0, tempDb.length()); } @@ -560,50 +627,65 @@ public static void sendInitFailedMessage(Application application, ApplicationCom if (StringUtils.isNotBlank(rule.getExecutionParametersName())) { ExecutionParameters executionParameters = SpringContextHolder.getBean(ExecutionParametersDao.class).findByNameAndProjectId(rule.getExecutionParametersName(), rule.getProject().getId()); if (executionParameters != null) { - //兼容旧规则数据 + // When saved in execution parameters' alert info if (StringUtils.isNotBlank(executionParameters.getAlertReceiver())) { - if (!alerters.toString().contains(executionParameters.getAlertReceiver())) { - alerters.append( - StringUtils.isNotEmpty(executionParameters.getAlertReceiver()) ? 
executionParameters.getAlertReceiver() : "未设置") - .append("或创建用户") - .append(rule.getCreateUser()) - .append(";"); + // 从告警人中移除企业微信群ID + String alertReceiver = removeErpGroupId(executionParameters.getAlertReceiver()); + if (!alerters.toString().contains(alertReceiver)) { + alerters.append(alertReceiver).append(";"); } maxLevel = getMaxLevelAndReceivers(maxLevel, receivers, executionParameters); continue; } - //0.23.0版本 + // 0.23.0 if (CollectionUtils.isNotEmpty(executionParameters.getAlarmArgumentsExecutionParameters())) { for (AlarmArgumentsExecutionParameters parameters : executionParameters.getAlarmArgumentsExecutionParameters()) { - if (!alerters.toString().contains(parameters.getAlarmReceiver())) { - alerters.append(StringUtils.isNotEmpty(parameters.getAlarmReceiver()) ? parameters.getAlarmReceiver() : "未设置") - .append("或创建用户") - .append(rule.getCreateUser()) - .append(";"); + if (StringUtils.isBlank(parameters.getAlarmReceiver())) { + continue; } - // Fix: add receivers when init failed. - if (StringUtils.isNotEmpty(parameters.getAlarmReceiver())) { - receivers.addAll(Arrays.asList(parameters.getAlarmReceiver().split(SpecCharEnum.COMMA.getValue()))); + // 从告警人中移除企业微信群ID + String alertReceiver = removeErpGroupId(parameters.getAlarmReceiver()); + + if (!alerters.toString().contains(alertReceiver)) { + alerters.append(alertReceiver).append(";"); } + + receivers.addAll(Arrays.asList(alertReceiver.split(SpecCharEnum.COMMA.getValue()))); if (parameters.getAlarmLevel() != null && parameters.getAlarmLevel() < maxLevel) { maxLevel = parameters.getAlarmLevel(); } } - + continue; } + if (StringUtils.isNotEmpty(rule.getCreateUser()) && !alerters.toString().contains(rule.getCreateUser())) { + alerters.append("未设置,").append("创建用户") + .append(rule.getCreateUser()) + .append(";"); + receivers.add(rule.getCreateUser()); + continue; + } } } else { - alerters.append(StringUtils.isNotEmpty(rule.getAlertReceiver()) ? 
rule.getAlertReceiver() : "未设置") - .append("或创建用户") + if (StringUtils.isNotEmpty(rule.getAlertReceiver()) && alerters.toString().contains(rule.getAlertReceiver())) { + continue; + } + if (StringUtils.isNotEmpty(rule.getCreateUser()) && alerters.toString().contains(rule.getCreateUser())) { + continue; + } + + alerters.append(StringUtils.isNotEmpty(rule.getAlertReceiver()) ? rule.getAlertReceiver() : "未设置").append("或创建用户") .append(rule.getCreateUser()) .append(";"); - // Fix: add receivers when init failed. + if (StringUtils.isNotEmpty(rule.getAlertReceiver())) { receivers.addAll(Arrays.asList(rule.getAlertReceiver().split(SpecCharEnum.COMMA.getValue()))); } + if (StringUtils.isNotEmpty(rule.getCreateUser())) { + receivers.add(rule.getCreateUser()); + } } } alerters.append("\n"); @@ -640,10 +722,14 @@ public static void sendInitFailedMessage(Application application, ApplicationCom // 发送告警 if (CollectionUtils.isNotEmpty(receivers)) { - client.sendAlarm(StringUtils.join(receivers, ","), alertTitle, alertContent.toString(), maxLevel + ""); + client.sendAlarm(StringUtils.join(receivers, ","), alertTitle, alertContent.toString(), maxLevel + "", null); } } + private static String removeErpGroupId(String alertReceiver) { + return Arrays.stream(StringUtils.split(alertReceiver, SpecCharEnum.COMMA.getValue())).filter(receiver -> !receiver.startsWith(SpecCharEnum.LEFT_BRACKET.getValue())).collect(Collectors.joining(SpecCharEnum.COMMA.getValue())); + } + public static void sendInitFailedMessage(Application application, CheckAlert checkAlert, ImsConfig imsConfig, AlarmClient client, AlarmInfoDao alarmInfoDao) { // 获取告警标题 String alertTitle = imsConfig.getTitlePrefix() + "集群 Qualitis 任务告警\n"; @@ -675,7 +761,7 @@ public static void sendInitFailedMessage(Application application, CheckAlert che // 发送告警 if (CollectionUtils.isNotEmpty(receivers)) { - client.sendAlarm(StringUtils.join(receivers, ","), alertTitle, alertContent.toString(), ImsLevelEnum.WARNING.getCode()); + 
client.sendAlarm(StringUtils.join(receivers, ","), alertTitle, alertContent.toString(), ImsLevelEnum.WARNING.getCode(), null); } } diff --git a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/PassUtil.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/PassUtil.java index 1dda2b42..9a385fa5 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/PassUtil.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/PassUtil.java @@ -16,6 +16,7 @@ package com.webank.wedatasphere.qualitis.util; +import com.webank.wedatasphere.qualitis.constant.AlarmConfigStatusEnum; import com.webank.wedatasphere.qualitis.constants.QualitisConstants; import com.webank.wedatasphere.qualitis.dao.TaskResultDao; import com.webank.wedatasphere.qualitis.entity.TaskResult; @@ -51,6 +52,9 @@ private PassUtil() { } public static Boolean notSafe(String taskId, Long ruleId, TaskRuleAlarmConfig alarmConfig, TaskResult taskResult, TaskResultDao taskResultDao) { + if (alarmConfig.getCompareType() == null && alarmConfig.getCheckTemplate() == null && AlarmConfigStatusEnum.PASS.getCode().equals(alarmConfig.getStatus())) { + return true; + } Integer checkTemplate = alarmConfig.getCheckTemplate(); Double thresholds = alarmConfig.getThreshold(); if (taskResult == null) { @@ -63,6 +67,7 @@ public static Boolean notSafe(String taskId, Long ruleId, TaskRuleAlarmConfig al result = Double.parseDouble(taskResult.getValue()); } } catch (NumberFormatException e) { + LOGGER.error("When value is not a number format, not pass it."); return false; } Date nowDate = new Date(); @@ -116,7 +121,7 @@ public static Boolean notSafe(String taskId, Long ruleId, TaskRuleAlarmConfig al return true; } - if (result != null && result.equals(Double.NaN)) { + if (result != null && Double.compare(result, Double.NaN) == 0) { result = 0.0; } return moreThanThresholds(result, thresholds, compareType); diff --git 
a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/ReportUtil.java b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/ReportUtil.java index 965b924b..826a7607 100644 --- a/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/ReportUtil.java +++ b/core/monitor/src/main/java/com/webank/wedatasphere/qualitis/util/ReportUtil.java @@ -119,7 +119,7 @@ private static MetricData constructMetaData(RuleMetric ruleMetric, TaskResult ta metricData.setMetricValue(StringUtils.isBlank(taskResult.getValue()) ? "0" : taskResult.getValue()); metricData.setHostIp(QualitisConstants.QUALITIS_SERVER_HOST); if (ruleMetric.getSubSystemId() != null) { - metricData.setSubsystemId(String.valueOf(ruleMetric.getSubSystemId())); + metricData.setSubsystemId(ruleMetric.getSubSystemId()); } else { metricData.setSubsystemId(imsConfig.getSystemId()); } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/OperateTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/OperateTypeEnum.java index a4af580d..d3986a41 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/OperateTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/OperateTypeEnum.java @@ -23,17 +23,21 @@ public enum OperateTypeEnum { /** * Type of project */ - UNAUTHORIZE_PROJECT(12, "Unauthorized Project", "取消授权项目"), - AUTHORIZE_PROJECT(11, "Authorized Project", "授权项目"), - SUBMIT_PROJECT(10, "Submit Project", "提交项目"), CREATE_PROJECT(1, "Create Project", "创建项目"), IMPORT_PROJECT(2, "Import Project", "导入项目"), - EXPORT_PROJECT(3, "Export Project","导出项目"), + EXPORT_PROJECT(3, "Export Project", "导出项目"), DELETE_PROJECT(4, "Create Project", "删除项目"), MODIFY_PROJECT(5, "Modify Project", "修改项目"), - CREATE_RULES(6,"Create Project","创建规则"), - MODIFY_RULES(7,"Modify Project","修改规则"), - DELETE_RULES(8,"Delete Project","删除规则"), + CREATE_RULES(6, "Create 
Project", "创建规则"), + MODIFY_RULES(7, "Modify Project", "修改规则"), + DELETE_RULES(8, "Delete Project", "删除规则"), + SUBMIT_PROJECT(10, "Submit Project", "提交项目"), + AUTHORIZE_PROJECT(11, "Authorized Project", "授权项目"), + UNAUTHORIZE_PROJECT(12, "Unauthorized Project", "取消授权项目"), + SUBSCRIBE_PROJECT(13, "Subscribe Project", "订阅项目"), + MODIFY_SUBSCRIBE_PROJECT(14, "Modify Subscribe Project", "编辑订阅项目"), + DELETE_SUBSCRIBE_PROJECT(15, "Delete Subscribe Project", "删除订阅项目"), + ENABLE_OR_DISABLE_RULES(16, "Enable Or Disable Rules", "启用或禁用规则") ; private Integer code; @@ -59,23 +63,12 @@ public Integer getCode() { return code; } - public void setCode(Integer code) { - this.code = code; - } - public String getName() { return name; } - public void setName(String name) { - this.name = name; - } - public String getMessage() { return message; } - public void setMessage(String message) { - this.message = message; - } } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectOperateEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectOperateEnum.java index 31201716..651b67b1 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectOperateEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectOperateEnum.java @@ -44,23 +44,11 @@ public Integer getCode() { return code; } - public void setCode(Integer code) { - this.code = code; - } - public String getMessage() { return message; } - public void setMessage(String message) { - this.message = message; - } - public String getZhMessage() { return zhMessage; } - - public void setZhMessage(String zhMessage) { - this.zhMessage = zhMessage; - } } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectStatusEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectStatusEnum.java new file mode 100644 index 
00000000..99d7601e --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectStatusEnum.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.qualitis.project.constant; + +/** + * @author v_gaojiedeng@webank.com + */ +public enum ProjectStatusEnum { + + /** + * Type of project status 可操作、不可操作状态 + */ + OPERABLE_STATUS(0, "可操作状态"), + INOPERABLE_STATUS(1, "不可操作状态"); + + private Integer code; + private String message; + + ProjectStatusEnum(Integer code, String message) { + this.code = code; + this.message = message; + } + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectTypeEnum.java index c505dfbc..6b2e71e0 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/constant/ProjectTypeEnum.java @@ -38,15 +38,7 @@ public Integer getCode() { return code; } - public void setCode(Integer code) { - this.code = code; - } - public String getMessage() { return message; } - - public void setMessage(String message) { - this.message = message; - } } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/ProjectDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/ProjectDao.java index a8bf3cb2..d68211eb 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/ProjectDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/ProjectDao.java @@ -93,4 +93,12 @@ public interface ProjectDao { * @return */ List findAllById(List projectIds); + + /** + * Save and flush + * @param project + * @return + */ + Project saveAndFlush(Project project); + } diff 
--git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/ProjectUserDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/ProjectUserDao.java index 385d9e4b..c268666c 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/ProjectUserDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/ProjectUserDao.java @@ -64,7 +64,7 @@ public interface ProjectUserDao { * @param username * @param projectType * @param projectName - * @param subsystemName + * @param subsystemId * @param createUser * @param db * @param table @@ -74,7 +74,7 @@ public interface ProjectUserDao { * @param size * @return */ - Page findByAdvanceConditions(String username, Integer projectType, String projectName, String subsystemName, String createUser, String db, String table, Long startTime, Long endTime, int page, int size); + Page findByAdvanceConditions(String username, Integer projectType, String projectName, Integer subsystemId, String createUser, String db, String table, Long startTime, Long endTime, int page, int size); /** * Find project by user and permissions @@ -87,9 +87,10 @@ public interface ProjectUserDao { /** * Find project by user * @param username + * @param projectType * @return */ - List> findProjectByUserName(String username); + List> findProjectByUserName(String username,Integer projectType); /** * Count project user by userId with project type @@ -111,9 +112,10 @@ public interface ProjectUserDao { /** * Count project user by user name * @param username + * @param projectType * @return */ - Long countProjectByUserName(String username); + Long countProjectByUserName(String username,Integer projectType); /** * Find by project user by project diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/impl/ProjectDaoImpl.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/impl/ProjectDaoImpl.java index 26af2261..48ca7bcd 
100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/impl/ProjectDaoImpl.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/impl/ProjectDaoImpl.java @@ -87,4 +87,10 @@ public List findByCreateUser(String createUser) { public List findAllById(List projectIds) { return projectRepository.findAllById(projectIds); } + + @Override + public Project saveAndFlush(Project project) { + return projectRepository.saveAndFlush(project); + } + } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/impl/ProjectUserDaoImpl.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/impl/ProjectUserDaoImpl.java index ecc3ff29..7fde1d2e 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/impl/ProjectUserDaoImpl.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/impl/ProjectUserDaoImpl.java @@ -77,13 +77,10 @@ public List findByUserNameAndProjectTypeWithOutPage(String username } @Override - public Page findByAdvanceConditions(String username, Integer projectType, String projectName, String subsystemName, String createUser, String db, String table, Long startTime, Long endTime, int page, int size) { + public Page findByAdvanceConditions(String username, Integer projectType, String projectName, Integer subsystemId, String createUser, String db, String table, Long startTime, Long endTime, int page, int size) { Sort sort = Sort.by(Direction.DESC, "id"); Pageable pageable = PageRequest.of(page, size, sort); - if (StringUtils.isNotEmpty(subsystemName)) { - subsystemName = "%" + subsystemName + "%"; - } - return projectUserRepository.findByAdvanceConditions(username, projectType, projectName, subsystemName, createUser, db, table, startTime, endTime, pageable); + return projectUserRepository.findByAdvanceConditions(username, projectType, projectName, subsystemId, createUser, db, table, startTime, endTime, pageable); 
} @Override @@ -92,8 +89,8 @@ public List findByUsernameAndPermissions(String username, List } @Override - public List> findProjectByUserName(String username) { - return projectUserRepository.findProjectByUserName(username); + public List> findProjectByUserName(String username, Integer projectType) { + return projectUserRepository.findProjectByUserName(username, projectType); } @Override @@ -107,8 +104,8 @@ public Long countByUserNameAndProjectType(String username, Integer projectType) } @Override - public Long countProjectByUserName(String username) { - return projectUserRepository.countProjectByUserName(username); + public Long countProjectByUserName(String username, Integer projectType) { + return projectUserRepository.countProjectByUserName(username, projectType); } @Override diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/repository/ProjectUserRepository.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/repository/ProjectUserRepository.java index 398ed7cc..5a8bfca1 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/repository/ProjectUserRepository.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/dao/repository/ProjectUserRepository.java @@ -68,7 +68,7 @@ public interface ProjectUserRepository extends JpaRepository, * @param username * @param projectType * @param projectName - * @param subSystemName + * @param subsystemId * @param createUser * @param db * @param table @@ -78,14 +78,14 @@ public interface ProjectUserRepository extends JpaRepository, * @return */ @Query(value = "select pu from Project p inner join ProjectUser pu on pu.project = p left join RuleDataSource ds on ds.projectId = p.id " + - "where pu.userName = ?1 and p.projectType = ?2 and p.name like ?3 and (?4 is null or p.subSystemName like ?4) " + + "where pu.userName = ?1 and p.projectType = ?2 and p.name like ?3 and (?4 is null or p.subSystemId = ?4) " + "and (?5 is 
null or p.createUser = ?5) and (?6 is null or ds.dbName = ?6) and (?7 is null or ds.tableName = ?7) " + "and (?8 is null or UNIX_TIMESTAMP(p.createTime) >= ?8) and (?9 is null or UNIX_TIMESTAMP(p.createTime) < ?9) group by pu.project" , countQuery = "select count(DISTINCT pu.project) from ProjectUser pu inner join Project p on pu.project = p left join RuleDataSource ds on ds.projectId = p.id " + "where pu.userName = ?1 and p.projectType = ?2 and p.name like ?3 and (?4 is null or p.subSystemId = ?4) " + "and (?5 is null or p.createUser = ?5) and (?6 is null or ds.dbName = ?6) and (?7 is null or ds.tableName = ?7) " + "and (?8 is null or UNIX_TIMESTAMP(p.createTime) >= ?8) and (?9 is null or UNIX_TIMESTAMP(p.createTime) < ?9)") - Page findByAdvanceConditions(String username, Integer projectType, String projectName, String subSystemName, String createUser, String db, String table, Long startTime, Long endTime, Pageable pageable); + Page findByAdvanceConditions(String username, Integer projectType, String projectName, Integer subsystemId, String createUser, String db, String table, Long startTime, Long endTime, Pageable pageable); /** * Count by user name and permission and project type @@ -127,18 +127,20 @@ public interface ProjectUserRepository extends JpaRepository, /** * Find project user by user name * @param userName + * @param projectType * @return */ - @Query("select DISTINCT new map(p.name as project_name, p.id as project_id) from ProjectUser pu inner join pu.project p where pu.userName = ?1") - List> findProjectByUserName(String userName); + @Query("select DISTINCT new map(p.name as project_name, p.id as project_id) from ProjectUser pu inner join pu.project p where pu.userName = ?1 and (?2 is null or p.projectType = ?2)") + List> findProjectByUserName(String userName, Integer projectType); /** * Count by username * @param userName + * @param projectType * @return */ - @Query("select count(DISTINCT pu.project) from ProjectUser pu inner join pu.project p where 
pu.userName = ?1") - Long countProjectByUserName(String userName); + @Query("select count(DISTINCT pu.project) from ProjectUser pu inner join pu.project p where pu.userName = ?1 and (?2 is null or p.projectType = ?2)") + Long countProjectByUserName(String userName, Integer projectType); /** * Find project user by userId diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/Project.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/Project.java index 4e4bdad4..01e9f799 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/Project.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/Project.java @@ -18,12 +18,20 @@ import com.fasterxml.jackson.annotation.JsonIdentityInfo; import com.fasterxml.jackson.annotation.ObjectIdGenerators; - -import javax.persistence.*; -import java.util.Set; import org.apache.commons.lang.StringUtils; import org.codehaus.jackson.annotate.JsonIgnore; +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import java.util.Set; + /** * @author howeye */ @@ -62,7 +70,7 @@ public class Project { private String department; @Column(name = "sub_system_id") - private Long subSystemId; + private String subSystemId; @Column(name = "sub_system_name") private String subSystemName; @@ -84,6 +92,21 @@ public class Project { @Column(name = "project_type") private Integer projectType; + @Column(name = "git_repo") + private String gitRepo; + + @Column(name = "git_type") + private Integer gitType; + + @Column(name = "git_branch") + private String gitBranch; + + @Column(name = "git_root_dir") + private String gitRootDir; + + @Column(name = "run_status") + private 
Integer runStatus; + public Project() { } @@ -209,11 +232,11 @@ public void setModifyTime(String modifyTime) { this.modifyTime = modifyTime; } - public Long getSubSystemId() { + public String getSubSystemId() { return subSystemId; } - public void setSubSystemId(Long subSystemId) { + public void setSubSystemId(String subSystemId) { this.subSystemId = subSystemId; } @@ -225,6 +248,46 @@ public void setSubSystemName(String subSystemName) { this.subSystemName = subSystemName; } + public String getGitRepo() { + return gitRepo; + } + + public void setGitRepo(String gitRepo) { + this.gitRepo = gitRepo; + } + + public Integer getGitType() { + return gitType; + } + + public void setGitType(Integer gitType) { + this.gitType = gitType; + } + + public String getGitBranch() { + return gitBranch; + } + + public void setGitBranch(String gitBranch) { + this.gitBranch = gitBranch; + } + + public String getGitRootDir() { + return gitRootDir; + } + + public void setGitRootDir(String gitRootDir) { + this.gitRootDir = gitRootDir; + } + + public Integer getRunStatus() { + return runStatus; + } + + public void setRunStatus(Integer runStatus) { + this.runStatus = runStatus; + } + @Override public String toString() { return "Project{" + @@ -241,6 +304,10 @@ public String toString() { ", modifyUser='" + modifyUser + '\'' + ", modifyTime='" + modifyTime + '\'' + ", projectType=" + projectType + + ", gitRepo='" + gitRepo + '\'' + + ", gitType=" + gitType + + ", gitBranch='" + gitBranch + '\'' + + ", gitRootDir='" + gitRootDir + '\'' + '}'; } } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectUser.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectUser.java index 690d067d..e7a9caff 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectUser.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/entity/ProjectUser.java @@ -20,7 +20,10 @@ import 
com.fasterxml.jackson.annotation.ObjectIdGenerators; import javax.persistence.*; +import java.nio.charset.StandardCharsets; import java.util.Objects; + +import org.apache.commons.lang3.StringUtils; import org.codehaus.jackson.annotate.JsonIgnore; /** @@ -64,7 +67,7 @@ public ProjectUser(Integer permission, Project project, String userName, String this.permission = permission; this.project = project; this.userName = userName; - this.userFullName = userFullName; + this.userFullName = StringUtils.isBlank(userFullName) ? userFullName : new String(userFullName.getBytes(StandardCharsets.UTF_8), StandardCharsets.UTF_8); this.automaticSwitch = flag; } @@ -125,7 +128,7 @@ public boolean equals(Object o) { return false; } ProjectUser that = (ProjectUser) o; - return Objects.equals(id, that.id) && Objects.equals(userName, that.userName); + return Objects.equals(id, that.id) && Objects.equals(userName, that.userName) && Objects.equals(permission, that.permission); } @Override diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/request/RuleQueryRequest.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/request/RuleQueryRequest.java index 5ffb9d5c..0985330b 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/request/RuleQueryRequest.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/project/request/RuleQueryRequest.java @@ -43,7 +43,7 @@ public class RuleQueryRequest { @JsonProperty("relation_object_type") private Integer relationObjectType; @JsonProperty("sub_system_id") - private Long subSystemId; + private String subSystemId; @JsonProperty("department_name") private String departmentName; @JsonProperty("dev_department_name") @@ -193,11 +193,11 @@ public void setPartitionField(Boolean partitionField) { isPartitionField = partitionField; } - public Long getSubSystemId() { + public String getSubSystemId() { return subSystemId; } - public void setSubSystemId(Long subSystemId) { + 
public void setSubSystemId(String subSystemId) { this.subSystemId = subSystemId; } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/constant/ExecutionFrequencyEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/constant/ExecutionFrequencyEnum.java new file mode 100644 index 00000000..6142aabe --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/constant/ExecutionFrequencyEnum.java @@ -0,0 +1,63 @@ +package com.webank.wedatasphere.qualitis.report.constant; + +import com.google.common.collect.Maps; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * @author v_gaojiedeng@webank.com + */ +public enum ExecutionFrequencyEnum { + /** + * DAILY,WEEKLY + */ + DAILY(1, "每天","0 0 * * *"), + WEEKLY(2, "每周","0 0 * * 0"); + + private Integer code; + private String message; + private String cronExpressions; + + public Integer getCode() { + return code; + } + + public String getMessage() { + return message; + } + + public String getCronExpressions() { + return cronExpressions; + } + + ExecutionFrequencyEnum(Integer code, String message, String cronExpressions) { + this.code = code; + this.message = message; + this.cronExpressions = cronExpressions; + } + + public static String getExecutionFrequencyName(Integer code) { + for (ExecutionFrequencyEnum c : ExecutionFrequencyEnum.values()) { + if (c.getCode().equals(code)) { + return c.getMessage(); + } + } + return null; + } + + public static List> getExecutionFrequencyEnumList() { + List> list = new ArrayList>(); + for (ExecutionFrequencyEnum executionFrequencyEnum : ExecutionFrequencyEnum.values()) { + Map item = Maps.newHashMap(); + item.put("code", executionFrequencyEnum.code); + item.put("message", executionFrequencyEnum.message); + list.add(item); + + } + return list; + } + + +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscribeOperateReportDao.java 
b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscribeOperateReportDao.java new file mode 100644 index 00000000..4ae9339f --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscribeOperateReportDao.java @@ -0,0 +1,65 @@ +package com.webank.wedatasphere.qualitis.report.dao; + +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReport; +import org.springframework.data.domain.Page; + +import java.util.List; + +/** + * @author v_gaojiedeng@webank.com + */ +public interface SubscribeOperateReportDao { + + /** + * find match operate report + * + * @param projectId + * @param executionFrequency + * @return + */ + SubscribeOperateReport findMatchOperateReport(Long projectId, Integer executionFrequency); + + /** + * Save + * + * @param subscribeOperateReport + * @return + */ + SubscribeOperateReport save(SubscribeOperateReport subscribeOperateReport); + + /** + * find by id + * + * @param subscribeOperateReportId + * @return + */ + SubscribeOperateReport findById(Long subscribeOperateReportId); + + /** + * delete + * + * @param subscribeOperateReport + */ + void delete(SubscribeOperateReport subscribeOperateReport); + + /** + * subscribe Operate Report Query + * + * @param projectName + * @param receiver + * @param projectType + * @param loginUser + * @param page + * @param size + * @return + */ + Page subscribeOperateReportQuery(String projectName, String receiver, Integer projectType, String loginUser, int page, int size); + + /** + * select All Mate Frequency + * + * @param executionFrequency + * @return + */ + List selectAllMateFrequency(Integer executionFrequency); +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscribeOperateReportProjectsDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscribeOperateReportProjectsDao.java new file mode 100644 index 00000000..bc060a03 --- /dev/null +++ 
b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscribeOperateReportProjectsDao.java @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.qualitis.report.dao; + +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReport; +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReportProjects; + +import java.util.List; + +/** + * @author v_gaojiedeng@webank.com + */ +public interface SubscribeOperateReportProjectsDao { + + /** + * Save + * @param subscribeOperateReportProjects + * @return + */ + SubscribeOperateReportProjects save(SubscribeOperateReportProjects subscribeOperateReportProjects); + + /** + * 批量保存 + * @param subscribeOperateReportProjects 要保存的参数 + * @return 保存成功的返回值 + */ + List saveAll(List subscribeOperateReportProjects); + + /** + * delete + * @param subscribeOperateReport + */ + void deleteBySubscribeOperateReport(SubscribeOperateReport subscribeOperateReport); + + /** + * find By ProjectId + * @param projectId + * @return + */ + List findByProjectId(Long projectId); +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscriptionRecordDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscriptionRecordDao.java new file mode 100644 index 00000000..31fee2ab --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/SubscriptionRecordDao.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.qualitis.report.dao; + +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReport; +import com.webank.wedatasphere.qualitis.report.entity.SubscriptionRecord; + +/** + * @author v_gaojiedeng@webank.com + */ +public interface SubscriptionRecordDao { + + /** + * Save + * + * @param subscriptionRecord + * @return + */ + SubscriptionRecord save(SubscriptionRecord subscriptionRecord); + + /** + * find by id + * + * @param subscriptionRecordId + * @return + */ + SubscriptionRecord findById(Long 
subscriptionRecordId); + + /** + * find Match Project And Frequency + * + * @param projectId + * @param executionFrequency + * @return + */ + SubscriptionRecord findMatchProjectAndFrequency(Long projectId, Integer executionFrequency); + + /** + * delete + * + * @param subscriptionRecord + */ + void delete(SubscriptionRecord subscriptionRecord); + +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscribeOperateReportDaoImpl.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscribeOperateReportDaoImpl.java new file mode 100644 index 00000000..ea228f47 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscribeOperateReportDaoImpl.java @@ -0,0 +1,59 @@ +package com.webank.wedatasphere.qualitis.report.dao.impl; + +import com.webank.wedatasphere.qualitis.report.dao.SubscribeOperateReportDao; +import com.webank.wedatasphere.qualitis.report.dao.repository.SubscribeOperateReportRepository; +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReport; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; +import org.springframework.stereotype.Repository; + +import java.util.List; + +/** + * @author v_gaojiedeng@webank.com + */ +@Repository +public class SubscribeOperateReportDaoImpl implements SubscribeOperateReportDao { + + @Autowired + private SubscribeOperateReportRepository subscribeOperateReportRepository; + + @Override + public SubscribeOperateReport findMatchOperateReport(Long projectId, Integer executionFrequency) { + return subscribeOperateReportRepository.findMatchOperateReport(projectId, executionFrequency); + } + + @Override + public SubscribeOperateReport save(SubscribeOperateReport subscribeOperateReport) { + return 
subscribeOperateReportRepository.save(subscribeOperateReport); + } + + @Override + public SubscribeOperateReport findById(Long subscribeOperateReportId) { + if (subscribeOperateReportRepository.findById(subscribeOperateReportId).isPresent()) { + return subscribeOperateReportRepository.findById(subscribeOperateReportId).get(); + } else { + return null; + } + } + + @Override + public void delete(SubscribeOperateReport subscribeOperateReport) { + subscribeOperateReportRepository.delete(subscribeOperateReport); + } + + @Override + public Page subscribeOperateReportQuery(String projectName, String receiver, Integer projectType, String loginUser, int page, int size) { + Sort sort = Sort.by(Sort.Direction.DESC, "id"); + Pageable pageable = PageRequest.of(page, size, sort); + return subscribeOperateReportRepository.subscribeOperateReportQuery(projectName, receiver, projectType, loginUser, pageable); + } + + @Override + public List selectAllMateFrequency(Integer executionFrequency) { + return subscribeOperateReportRepository.selectAllMateFrequency(executionFrequency); + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscribeOperateReportProjectsDaoImpl.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscribeOperateReportProjectsDaoImpl.java new file mode 100644 index 00000000..5e67566d --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscribeOperateReportProjectsDaoImpl.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.qualitis.report.dao.impl; + +import com.webank.wedatasphere.qualitis.report.dao.SubscribeOperateReportProjectsDao; +import com.webank.wedatasphere.qualitis.report.dao.repository.SubscribeOperateReportProjectsRepository; +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReport; +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReportProjects; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +import java.util.List; + +/** + * @author v_gaojiedeng@webank.com + */ +@Repository +public class SubscribeOperateReportProjectsDaoImpl implements SubscribeOperateReportProjectsDao { + + @Autowired + private SubscribeOperateReportProjectsRepository subscribeOperateReportProjectsRepository; + + @Override + public SubscribeOperateReportProjects save(SubscribeOperateReportProjects subscribeOperateReportProjects) { + return subscribeOperateReportProjectsRepository.save(subscribeOperateReportProjects); + } + + @Override + public List saveAll(List subscribeOperateReportProjects) { + return subscribeOperateReportProjectsRepository.saveAll(subscribeOperateReportProjects); + } + + @Override + public void deleteBySubscribeOperateReport(SubscribeOperateReport subscribeOperateReport) { + subscribeOperateReportProjectsRepository.deleteBySubscribeOperateReport(subscribeOperateReport); + } + + @Override + public List findByProjectId(Long projectId) { + return subscribeOperateReportProjectsRepository.findByProjectId(projectId); + } + + +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscriptionRecordDaoImpl.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscriptionRecordDaoImpl.java new file mode 100644 index 00000000..ef902e26 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/impl/SubscriptionRecordDaoImpl.java @@ -0,0 +1,41 @@ +package com.webank.wedatasphere.qualitis.report.dao.impl; + +import com.webank.wedatasphere.qualitis.report.dao.SubscriptionRecordDao; +import com.webank.wedatasphere.qualitis.report.dao.repository.SubscriptionRecordRepository; +import com.webank.wedatasphere.qualitis.report.entity.SubscriptionRecord; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +/** + * 
@author v_gaojiedeng@webank.com + */ +@Repository +public class SubscriptionRecordDaoImpl implements SubscriptionRecordDao { + + @Autowired + private SubscriptionRecordRepository subscriptionRecordRepository; + + @Override + public SubscriptionRecord save(SubscriptionRecord subscriptionRecord) { + return subscriptionRecordRepository.save(subscriptionRecord); + } + + @Override + public SubscriptionRecord findById(Long subscriptionRecordId) { + if (subscriptionRecordRepository.findById(subscriptionRecordId).isPresent()) { + return subscriptionRecordRepository.findById(subscriptionRecordId).get(); + } else { + return null; + } + } + + @Override + public SubscriptionRecord findMatchProjectAndFrequency(Long projectId, Integer executionFrequency) { + return subscriptionRecordRepository.findMatchProjectAndFrequency(projectId, executionFrequency); + } + + @Override + public void delete(SubscriptionRecord subscriptionRecord) { + subscriptionRecordRepository.delete(subscriptionRecord); + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscribeOperateReportProjectsRepository.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscribeOperateReportProjectsRepository.java new file mode 100644 index 00000000..9aeaaf9b --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscribeOperateReportProjectsRepository.java @@ -0,0 +1,28 @@ +package com.webank.wedatasphere.qualitis.report.dao.repository; + +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReport; +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReportProjects; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; + +import java.util.List; + +/** + * @author v_gaojiedeng@webank.com + */ +public interface SubscribeOperateReportProjectsRepository extends JpaRepository { + + /** + * 
delete By Subscribe Operate Report + * @param subscribeOperateReport + */ + void deleteBySubscribeOperateReport(SubscribeOperateReport subscribeOperateReport); + + /** + * find By ProjectId + * @param projectId + * @return + */ + @Query(value = "select * from qualitis_subscribe_operate_report_associated_projects where project_id=?1", nativeQuery = true) + List findByProjectId(Long projectId); +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscribeOperateReportRepository.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscribeOperateReportRepository.java new file mode 100644 index 00000000..cd20eb95 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscribeOperateReportRepository.java @@ -0,0 +1,60 @@ +package com.webank.wedatasphere.qualitis.report.dao.repository; + +import com.webank.wedatasphere.qualitis.report.entity.SubscribeOperateReport; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; + +import java.util.List; + +/** + * @author v_gaojiedeng@webank.com + */ +public interface SubscribeOperateReportRepository extends JpaRepository { + + /** + * find match operate report + * + * @param executionFrequency + * @param projectId + * @return + */ + @Query(value = "SELECT qrm.* FROM qualitis_subscribe_operate_report as qrm,qualitis_subscribe_operate_report_associated_projects as qrmb where qrm.id = qrmb.operate_report_id AND qrm.execution_frequency= ?2 AND qrmb.project_id=?1 group by qrm.id", nativeQuery = true) + SubscribeOperateReport findMatchOperateReport(Long projectId, Integer executionFrequency); + + + /** + * subscribe Operate Report Query + * + * @param projectName + * @param receiver + * @param projectType + * @param loginUser + * @param pageable + * 
@return + */ + @Query(value = "select qrm.* from qualitis_subscribe_operate_report as qrm,qualitis_subscribe_operate_report_associated_projects as qrmb,qualitis_project as qp,qualitis_project_user as pu where qrm.id = qrmb.operate_report_id and qrmb.project_id= qp.id and qrmb.project_id= pu.project_id " + + " and pu.user_name = ?4 " + + " and if(?3 is null, 1=1, qp.project_type = ?3) " + + " and if(?1 is null, 1=1, qp.name like ?1) " + + " and if(?2 is null,1=1, find_in_set(?2,qrm.receiver)) group by qrm.id" + , countQuery = "SELECT COUNT(0) FROM (select qrm.* from qualitis_subscribe_operate_report as qrm,qualitis_subscribe_operate_report_associated_projects as qrmb,qualitis_project as qp,qualitis_project_user as pu where qrm.id = qrmb.operate_report_id and qrmb.project_id= qp.id and qrmb.project_id= pu.project_id " + + " and pu.user_name = ?4 " + + " and if(?3 is null, 1=1, qp.project_type = ?3) " + + " and if(?1 is null, 1=1, qp.name like ?1) " + + " and if(?2 is null,1=1, find_in_set(?2,qrm.receiver)) group by qrm.id" + + ") as a" + , nativeQuery = true) + Page subscribeOperateReportQuery(String projectName, String receiver, Integer projectType, String loginUser, Pageable pageable); + + + /** + * find match operate report + * + * @param executionFrequency + * @return + */ + @Query(value = "select * from qualitis_subscribe_operate_report where execution_frequency =?1", nativeQuery = true) + List selectAllMateFrequency(Integer executionFrequency); +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscriptionRecordRepository.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscriptionRecordRepository.java new file mode 100644 index 00000000..c713d635 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/dao/repository/SubscriptionRecordRepository.java @@ -0,0 +1,22 @@ +package com.webank.wedatasphere.qualitis.report.dao.repository; + +import 
com.webank.wedatasphere.qualitis.report.entity.SubscriptionRecord; +import org.springframework.data.jpa.repository.JpaRepository; +import org.springframework.data.jpa.repository.Query; + +/** + * @author v_gaojiedeng@webank.com + */ +public interface SubscriptionRecordRepository extends JpaRepository { + + + /** + * find Match Project And Frequency + * + * @param projectId + * @param executionFrequency + * @return + */ + @Query(value = "select * from qualitis_subscription_record where project_id=?1 and execution_frequency=?2", nativeQuery = true) + SubscriptionRecord findMatchProjectAndFrequency(Long projectId, Integer executionFrequency); +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscribeOperateReport.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscribeOperateReport.java new file mode 100644 index 00000000..a5ebd572 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscribeOperateReport.java @@ -0,0 +1,122 @@ +package com.webank.wedatasphere.qualitis.report.entity; + +import com.fasterxml.jackson.annotation.JsonIdentityInfo; +import com.fasterxml.jackson.annotation.ObjectIdGenerators; + +import javax.persistence.CascadeType; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.FetchType; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.OneToMany; +import javax.persistence.Table; +import java.util.Set; + +/** + * @author v_gaojiedeng@webank.com + */ +@Entity +@JsonIdentityInfo(generator = ObjectIdGenerators.IntSequenceGenerator.class, property = "@id") +@Table(name = "qualitis_subscribe_operate_report") +public class SubscribeOperateReport { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @Column(length = 255) + private String receiver; + + @Column(name = 
"execution_frequency") + private Integer executionFrequency; + @Column(name = "create_user", length = 50) + private String createUser; + @Column(name = "create_time", length = 25) + private String createTime; + @Column(name = "modify_user", length = 50) + private String modifyUser; + @Column(name = "modify_time", length = 25) + private String modifyTime; + + @OneToMany(mappedBy = "subscribeOperateReport", fetch = FetchType.EAGER, cascade = CascadeType.REMOVE) + private Set subscribeOperateReportProjectsSet; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getReceiver() { + return receiver; + } + + public void setReceiver(String receiver) { + this.receiver = receiver; + } + + public Integer getExecutionFrequency() { + return executionFrequency; + } + + public void setExecutionFrequency(Integer executionFrequency) { + this.executionFrequency = executionFrequency; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getCreateTime() { + return createTime; + } + + public void setCreateTime(String createTime) { + this.createTime = createTime; + } + + public String getModifyUser() { + return modifyUser; + } + + public void setModifyUser(String modifyUser) { + this.modifyUser = modifyUser; + } + + public String getModifyTime() { + return modifyTime; + } + + public void setModifyTime(String modifyTime) { + this.modifyTime = modifyTime; + } + + public Set getSubscribeOperateReportProjectsSet() { + return subscribeOperateReportProjectsSet; + } + + public void setSubscribeOperateReportProjectsSet(Set subscribeOperateReportProjectsSet) { + this.subscribeOperateReportProjectsSet = subscribeOperateReportProjectsSet; + } + + @Override + public String toString() { + return "SubscribeOperateReport{" + + "id=" + id + + ", receiver='" + receiver + '\'' + + ", executionFrequency='" + executionFrequency + 
'\'' + + ", createUser='" + createUser + '\'' + + ", createTime='" + createTime + '\'' + + ", modifyUser='" + modifyUser + '\'' + + ", modifyTime='" + modifyTime + '\'' + + '}'; + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscribeOperateReportProjects.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscribeOperateReportProjects.java new file mode 100644 index 00000000..7c3c42a2 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscribeOperateReportProjects.java @@ -0,0 +1,68 @@ +package com.webank.wedatasphere.qualitis.report.entity; + +import com.fasterxml.jackson.annotation.JsonIdentityInfo; +import com.fasterxml.jackson.annotation.ObjectIdGenerators; +import com.webank.wedatasphere.qualitis.project.entity.Project; +import org.codehaus.jackson.annotate.JsonIgnore; + +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.JoinColumn; +import javax.persistence.ManyToOne; +import javax.persistence.Table; + +/** + * @author v_gaojiedeng@webank.com + */ +@Entity +@JsonIdentityInfo(generator = ObjectIdGenerators.IntSequenceGenerator.class, property = "@id") +@Table(name = "qualitis_subscribe_operate_report_associated_projects") +public class SubscribeOperateReportProjects { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @ManyToOne + @JsonIgnore + private Project project; + + @ManyToOne + @JoinColumn(name = "operate_report_id") + private SubscribeOperateReport subscribeOperateReport; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Project getProject() { + return project; + } + + public void setProject(Project project) { + this.project = project; + } + + public SubscribeOperateReport getSubscribeOperateReport() { + return 
subscribeOperateReport; + } + + public void setSubscribeOperateReport(SubscribeOperateReport subscribeOperateReport) { + this.subscribeOperateReport = subscribeOperateReport; + } + + @Override + public String toString() { + return "SubscribeOperateReportProjects{" + + "id=" + id + + ", project=" + project + + ", subscribeOperateReport=" + subscribeOperateReport + + '}'; + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscriptionRecord.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscriptionRecord.java new file mode 100644 index 00000000..c2a70a3b --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/report/entity/SubscriptionRecord.java @@ -0,0 +1,157 @@ +package com.webank.wedatasphere.qualitis.report.entity; + +import com.fasterxml.jackson.annotation.JsonIdentityInfo; +import com.fasterxml.jackson.annotation.ObjectIdGenerators; +import com.webank.wedatasphere.qualitis.project.entity.Project; +import org.codehaus.jackson.annotate.JsonIgnore; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.ManyToOne; +import javax.persistence.Table; + +/** + * @author v_gaojiedeng@webank.com + */ +@Entity +@JsonIdentityInfo(generator = ObjectIdGenerators.IntSequenceGenerator.class, property = "@id") +@Table(name = "qualitis_subscription_record") +public class SubscriptionRecord { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + private Long id; + + @ManyToOne + @JsonIgnore + private Project project; + + @Column(name = "execution_frequency") + private Integer executionFrequency; + + @Column(name = "configured_rules_table_num") + private Long configuredRulesTableNum; + @Column(name = "configured_rules_num") + private Long configuredRulesNum; + @Column(name = "configured_rules_kpi_table_num") + private 
Long configuredRulesKpiTableNum; + @Column(name = "configured_rules_kpi_num") + private Long configuredRulesKpiNum; + + @Column(name = "scheduling_rules") + private Long schedulingRules; + @Column(name = "pass_rules") + private Long passRules; + @Column(name = "no_pass_rules") + private Long noPassRules; + @Column(name = "fail_rules") + private Long failRules; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Project getProject() { + return project; + } + + public void setProject(Project project) { + this.project = project; + } + + public Integer getExecutionFrequency() { + return executionFrequency; + } + + public void setExecutionFrequency(Integer executionFrequency) { + this.executionFrequency = executionFrequency; + } + + public Long getConfiguredRulesTableNum() { + return configuredRulesTableNum; + } + + public void setConfiguredRulesTableNum(Long configuredRulesTableNum) { + this.configuredRulesTableNum = configuredRulesTableNum; + } + + public Long getConfiguredRulesNum() { + return configuredRulesNum; + } + + public void setConfiguredRulesNum(Long configuredRulesNum) { + this.configuredRulesNum = configuredRulesNum; + } + + public Long getConfiguredRulesKpiTableNum() { + return configuredRulesKpiTableNum; + } + + public void setConfiguredRulesKpiTableNum(Long configuredRulesKpiTableNum) { + this.configuredRulesKpiTableNum = configuredRulesKpiTableNum; + } + + public Long getConfiguredRulesKpiNum() { + return configuredRulesKpiNum; + } + + public void setConfiguredRulesKpiNum(Long configuredRulesKpiNum) { + this.configuredRulesKpiNum = configuredRulesKpiNum; + } + + public Long getSchedulingRules() { + return schedulingRules; + } + + public void setSchedulingRules(Long schedulingRules) { + this.schedulingRules = schedulingRules; + } + + public Long getPassRules() { + return passRules; + } + + public void setPassRules(Long passRules) { + this.passRules = passRules; + } + + public Long getNoPassRules() { 
+ return noPassRules; + } + + public void setNoPassRules(Long noPassRules) { + this.noPassRules = noPassRules; + } + + public Long getFailRules() { + return failRules; + } + + public void setFailRules(Long failRules) { + this.failRules = failRules; + } + + @Override + public String toString() { + return "SubscriptionRecord{" + + "id=" + id + + ", project=" + project + + ", executionFrequency=" + executionFrequency + + ", configuredRulesTableNum=" + configuredRulesTableNum + + ", configuredRulesNum=" + configuredRulesNum + + ", configuredRulesKpiTableNum=" + configuredRulesKpiTableNum + + ", configuredRulesKpiNum=" + configuredRulesKpiNum + + ", schedulingRules=" + schedulingRules + + ", passRules=" + passRules + + ", noPassRules=" + noPassRules + + ", failRules=" + failRules + + '}'; + } +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateTypeEnum.java index 42ccd320..95e7bf6b 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/RuleTemplateTypeEnum.java @@ -29,7 +29,8 @@ public enum RuleTemplateTypeEnum { SINGLE_SOURCE_TEMPLATE(1, "单表模版"), CUSTOM(2, "自定义模版"), MULTI_SOURCE_TEMPLATE(3, "跨表模版"), - FILE_COUSTOM(4, "文件自定义模版"); + FILE_COUSTOM(4, "文件自定义模版"), + METRIC_COLLECT(5, "指标采集模版"); private Integer code; private String message; diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TableDataTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TableDataTypeEnum.java index 04a2ea32..8218d372 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TableDataTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TableDataTypeEnum.java @@ -14,7 
+14,9 @@ public enum TableDataTypeEnum { * linkisDataSource Linkis数据源 */ RULE_METRIC("ruleMetric", "指标管理"), + STANDARD_VALUE("standardValue", "标准值"), RULE_TEMPLATE("ruleTemplate", "规则模板"), + LINKIS_UDF("linkisUdf", "Linkis UDF"), LINKIS_DATA_SOURCE("linkisDataSource", "Linkis数据源"); private String code; diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java index bc02754e..37353d7a 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateDataSourceTypeEnum.java @@ -53,7 +53,7 @@ public static String getMessage(Integer code) { return templateDataSourceTypeEnum.getMessage(); } } - return null; + return ""; } public static Integer getCode(String message) { diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateFileTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateFileTypeEnum.java index 611b3296..71bf19e3 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateFileTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateFileTypeEnum.java @@ -59,15 +59,7 @@ public Integer getCode() { return code; } - public void setCode(Integer code) { - this.code = code; - } - public String getMessage() { return message; } - - public void setMessage(String message) { - this.message = message; - } } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateInputTypeEnum.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateInputTypeEnum.java index 4a6555fa..91516084 100644 --- 
a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateInputTypeEnum.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/constant/TemplateInputTypeEnum.java @@ -17,6 +17,7 @@ package com.webank.wedatasphere.qualitis.rule.constant; import com.google.common.collect.Maps; + import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -24,62 +25,65 @@ /** * Enum in TemplateInputType of RuleTemplate + * * @author howeye */ public enum TemplateInputTypeEnum { /** * Enum in TemplateInputType of RuleTemplate */ - FIXED_VALUE(1, "固定值","Fixed value"), - TABLE(3, "数据表","Data table"), - FIELD(4, "校验字段","Field"), - DATABASE(5, "数据库","Database"), - FIELD_CONCAT(6, "字段拼接","Field concat"), - REGEXP(7, "正则","Regexp"), - LIST(8, "枚举值","List"), - CONDITION(9, "基础过滤条件","Condition"), + FIXED_VALUE(1, "固定值", "Fixed value"), + TABLE(3, "数据表", "Data table"), + FIELD(4, "校验字段", "Field"), + DATABASE(5, "数据库", "Database"), + FIELD_CONCAT(6, "字段拼接", "Field concat"), + REGEXP(7, "正则", "Regexp"), + LIST(8, "枚举值", "List"), + CONDITION(9, "基础过滤条件", "Condition"), /** * Provided for multi-table verification template */ - AND_CONCAT(10, "AND拼接","And concat"), - SOURCE_DB(11, "源数据库","Source db"), - SOURCE_TABLE(12, "源数据表","Source table"), - TARGET_DB(13, "目标数据库","Target db"), - TARGET_TABLE(14, "目标数据表","Target table"), - LEFT_STATEMENT(15, "join左表达式","Left statement"), - OPERATION(16, "join操作符","Operation"), - RIGHT_STATEMENT(17, "join右表达式","Right statement"), - SOURCE_FIELD(18, "join左字段","Source field"), - TARGET_FIELD(19, "join右字段","Target field"), - FRONT_CONDITION(20, "前置条件","Front condition"), - BEHIND_CONDITION(21, "后置条件","Behind condition"), - SOURCE_FIELDS(22, "来源字段","Source fields"), - TARGET_FIELDS(23, "目标字段","Target fields"), + AND_CONCAT(10, "AND拼接", "And concat"), + SOURCE_DB(11, "源数据库", "Source db"), + SOURCE_TABLE(12, "源数据表", "Source table"), + TARGET_DB(13, "目标数据库", "Target db"), + TARGET_TABLE(14, "目标数据表", 
"Target table"), + LEFT_STATEMENT(15, "join左表达式", "Left statement"), + OPERATION(16, "join操作符", "Operation"), + RIGHT_STATEMENT(17, "join右表达式", "Right statement"), + SOURCE_FIELD(18, "join左字段", "Source field"), + TARGET_FIELD(19, "join右字段", "Target field"), + FRONT_CONDITION(20, "前置条件", "Front condition"), + BEHIND_CONDITION(21, "后置条件", "Behind condition"), + SOURCE_FIELDS(22, "来源字段", "Source fields"), + TARGET_FIELDS(23, "目标字段", "Target fields"), /** * Provided for primary line repeat */ - FIELD_REPLACE_NULL_CONCAT(24, "替换空字段拼接","Field replace null concat"), - CONTRAST_TYPE(25, "比对方向","Contrast type"), + FIELD_REPLACE_NULL_CONCAT(24, "替换空字段拼接", "Field replace null concat"), + CONTRAST_TYPE(25, "比对方向", "Contrast type"), - VALUE_RANGE(28, "数值范围","Value range"), - EXPRESSION(29, "表达式","Expression"), - SOURCE_BASIC_FILTER_CONDITIONS(30, "源基础过滤条件","Source basic filter conditions"), - TARGET_BASIC_FILTER_CONDITIONS(31, "目标基础过滤条件","Target basic filter conditions"), - CONNECT_FIELDS(32, "连接字段设置","Connect fields"), - COMPARISON_FIELD_SETTINGS(33, "比对字段设置","Comparison field settings"), - COMPARISON_RESULTS_FOR_FILTER(34, "比对结果过滤条件","comparison results for filter"), + VALUE_RANGE(28, "数值范围", "Value range"), + EXPRESSION(29, "表达式", "Expression"), + SOURCE_BASIC_FILTER_CONDITIONS(30, "源基础过滤条件", "Source basic filter conditions"), + TARGET_BASIC_FILTER_CONDITIONS(31, "目标基础过滤条件", "Target basic filter conditions"), + CONNECT_FIELDS(32, "连接字段设置", "Connect fields"), + COMPARISON_FIELD_SETTINGS(33, "比对字段设置", "Comparison field settings"), + COMPARISON_RESULTS_FOR_FILTER(34, "比对结果过滤条件", "comparison results for filter"), - FILTER_BY(35, "筛选方式","Filter by"), + FILTER_BY(35, "筛选方式", "Filter by"), //最大值、最小值 表达式 - MAXIMUM(36, "最大值","Maximum"), - INTERMEDIATE_EXPRESSION(37, "中间表达式","Intermediate expression"), - MINIMUM(38, "最小值","Minimum"), - STANDARD_VALUE_EXPRESSION(39, "标准值表达式","standard value expression") - ; + MAXIMUM(36, "最大值", "Maximum"), + INTERMEDIATE_EXPRESSION(37, "中间表达式", 
"Intermediate expression"), + MINIMUM(38, "最小值", "Minimum"), + STANDARD_VALUE_EXPRESSION(39, "标准值表达式", "standard value expression"), + + LEFT_COLLECT_SQL(40, "左表指标计算采集SQL", "left collector SQL for metric calculation"), + RIGHT_COLLECT_SQL(41, "右表指标计算采集SQL", "right collector SQL for metric calculation"); private Integer code; private String cnMessage; @@ -123,23 +127,11 @@ public Integer getCode() { return code; } - public void setCode(Integer code) { - this.code = code; - } - public String getCnMessage() { return cnMessage; } - public void setCnMessage(String cnMessage) { - this.cnMessage = cnMessage; - } - public String getEnMessage() { return enMessage; } - - public void setEnMessage(String enMessage) { - this.enMessage = enMessage; - } } \ No newline at end of file diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/ExecutionParametersDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/ExecutionParametersDao.java index f39d1173..c4e653a8 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/ExecutionParametersDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/ExecutionParametersDao.java @@ -71,6 +71,14 @@ public interface ExecutionParametersDao { */ ExecutionParameters findByNameAndProjectId(String name, Long projectId); + /** + * find By Name List And ProjectId + * @param projectId + * @param names + * @return + */ + List findByProjectIdAndNames(Long projectId, List names); + /** * get All ExecutionParameters * @param projectId diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/LinkisDataSourceDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/LinkisDataSourceDao.java index 1b001c02..4570e4e5 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/LinkisDataSourceDao.java +++ 
b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/LinkisDataSourceDao.java @@ -26,12 +26,19 @@ public interface LinkisDataSourceDao { LinkisDataSource getByLinkisDataSourceId(Long dataSourceId); /** - * get By Linkis Data Source Id + * get By Linkis Data Source Name * @param dataSourceName * @return */ LinkisDataSource getByLinkisDataSourceName(String dataSourceName); + /** + * get By Linkis Data Source Name List + * @param dataSourceNameList + * @return + */ + List getByLinkisDataSourceNameList(List dataSourceNameList); + /** * get By Linkis Data Source Ids * @param dataSourceIds @@ -58,7 +65,7 @@ public interface LinkisDataSourceDao { */ Page filterWithPage(String dataSourceName, Long dataSourceTypeId, List dataVisibilityDeptList, String createUser , String searchCreateUser, String searchModifyUser, String subSystemName, Long devDepartmentId, Long opsDepartmentId - , Boolean ignoreDataAuthorityCondition, List searchDataVisibilityDeptList, int page, int size); + , boolean ignoreDataAuthorityCondition, List searchDataVisibilityDeptList, int page, int size); /** * get all datasource name @@ -66,4 +73,10 @@ Page filterWithPage(String dataSourceName, Long dataSourceType */ List getAllDataSourceNameList(); + /** + * getAllDataSourceEnvsIsNotNull + * @return + */ + List getAllDataSourceEnvsIsNotNull(); + } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/LinkisDataSourceEnvDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/LinkisDataSourceEnvDao.java new file mode 100644 index 00000000..eec6cdb6 --- /dev/null +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/LinkisDataSourceEnvDao.java @@ -0,0 +1,42 @@ +package com.webank.wedatasphere.qualitis.rule.dao; + +import com.webank.wedatasphere.qualitis.rule.entity.LinkisDataSourceEnv; + +import java.util.List; + +/** + * @author v_minminghe@webank.com + * @date 2023-12-01 16:42 + * @description + */ +public interface 
LinkisDataSourceEnvDao { + + /** + * delete By Env Ids + * @param envIds + */ + void deleteByEnvIds(List envIds); + + /** + * saveAll + * @param linkisDataSourceEnvList + */ + void saveAll(List linkisDataSourceEnvList); + + /** + * query By Linkis Data Source Id + * @param linkisDataSourceId + * @return + */ + List queryByLinkisDataSourceId(Long linkisDataSourceId); + + /** + * query env by multi-conditions + * @param linkisDataSourceId + * @param envIds + * @param dcnNums + * @param logicAreas + * @return + */ + List query(Long linkisDataSourceId, List envIds, List dcnNums, List logicAreas); +} diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDao.java index 973c7f5e..7734f1de 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDao.java @@ -60,9 +60,10 @@ public interface RuleDao { * Find all rule id, name by project * * @param project + * @param ruleName * @return */ - List> findSpecialInfoByProject(Project project); + List> findSpecialInfoByProject(Project project, String ruleName); /** * find rules by project and some conditions @@ -189,20 +190,7 @@ Page findByConditionWithPage(Project project, String ruleName, String rule * @param ruleType * @return */ - List findByRuleGroupWithPage(int page, int size, RuleGroup ruleGroup, Long templateId, String name, String cnName, List cols, Integer ruleType); - - /** - * count by rule group with page - * - * @param ruleGroup - * @param templateId - * @param name - * @param cnName - * @param cols - * @param ruleType - * @return - */ - Long countByRuleGroupWithPage(RuleGroup ruleGroup, Long templateId, String name, String cnName, List cols, Integer ruleType); + Page findByRuleGroupWithPage(int page, int size, RuleGroup ruleGroup, Long templateId, String name, String cnName, List cols, 
Integer ruleType); /** * find By Rule Group And File Out Name With Page @@ -240,6 +228,22 @@ Page findByConditionWithPage(Project project, String ruleName, String rule */ List findByTemplate(Template templateInDb); + /** + * Find rule by idList + * + * @param idList + * @return + */ + List getDeployStandardVersionIdList(List idList); + + /** + * Find rule by StandardVersionId + * + * @param standardVersionId + * @return + */ + List getDeployStandardVersionId(long standardVersionId); + /** * Find rule by projectId,name * @@ -247,7 +251,7 @@ Page findByConditionWithPage(Project project, String ruleName, String rule * @param name * @return */ - List getDeployExecutionParameters(Long projectId, String name); + Long countDeployExecutionParameters(Long projectId, String name); /** * Paging Rule by datasource @@ -313,11 +317,10 @@ Page findByConditionWithPage(Project project, String ruleName, String rule /** * find Exist Standard Vaule * - * @param templateId * @param projectId * @return */ - List findExistStandardVaule(Long templateId, Long projectId); + List findExistStandardVaule(Long projectId); /** * find Custom Rule Type By Project @@ -336,4 +339,12 @@ Page findByConditionWithPage(Project project, String ruleName, String rule */ List> findWorkFlowFiled(Long projectId); + /** + * find ByIds And Project + * + * @param ruleIds + * @param projectId + * @return + */ + List findByIdsAndProject(List ruleIds, Long projectId); } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceDao.java index 80ee6afb..ca04b085 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDataSourceDao.java @@ -29,6 +29,13 @@ */ public interface RuleDataSourceDao { + /** + * Find RuleDataSource by linkisDataSourceId + * @param 
linkisDataSourceId + * @return + */ + List findByLinkisDataSourceId(Long linkisDataSourceId); + /** * Save all rule datasource. * @param ruleDataSources @@ -127,7 +134,7 @@ public interface RuleDataSourceDao { * @param envName * @return */ - long countProjectDsByUser(String user, String clusterName, String dbName, String tableName, Integer datasourceType, Long subSystemId, String departmentCode, String devDepartmentName, String tagCode, String envName); + long countProjectDsByUser(String user, String clusterName, String dbName, String tableName, Integer datasourceType, String subSystemId, String departmentCode, String devDepartmentName, String tagCode, String envName); /** * Filter rule datasource pageable. @@ -146,7 +153,7 @@ public interface RuleDataSourceDao { * @return */ List> filterProjectDsByUserPage(String user, String clusterName, String dbName, String tableName, - Integer datasourceType, Long subSystemId, String departmentName, String devDepartmentName, String tagCode, String envName, int page, int size); + Integer datasourceType, String subSystemId, String departmentName, String devDepartmentName, String tagCode, String envName, int page, int size); /** * Save rule datasource @@ -248,5 +255,5 @@ List> filterProjectDsByUserPage(String user, String clusterN * @param tagCode * @param tagName */ - void updateMetadataFields(Long id, Long subSystemId, String subSystemName, String departmentCode, String departmentName, String devDepartmentName, String tagCode, String tagName); + void updateMetadataFields(Long id, String subSystemId, String subSystemName, String departmentCode, String departmentName, String devDepartmentName, String tagCode, String tagName); } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDatasourceEnvDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDatasourceEnvDao.java index 557108c1..41e36543 100644 --- 
a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDatasourceEnvDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleDatasourceEnvDao.java @@ -19,6 +19,12 @@ public interface RuleDatasourceEnvDao { */ RuleDataSourceEnv findByEnvId(Long envId); + /** + * delete env by id + * @param envId + */ + void deleteByEnvId(Long envId); + /** * Save all * @param datasourceEnvList @@ -49,4 +55,11 @@ public interface RuleDatasourceEnvDao { * @return */ List findByRuleDataSourceList(List ruleDataSourceList); + + /** + * find By DataSourceId + * @param datasourceId + * @return + */ + List findByDataSourceId(Long datasourceId); } diff --git a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleTemplateDao.java b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleTemplateDao.java index 70f6b1d4..0d0158db 100644 --- a/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleTemplateDao.java +++ b/core/project/src/main/java/com/webank/wedatasphere/qualitis/rule/dao/RuleTemplateDao.java @@ -37,6 +37,13 @@ public interface RuleTemplateDao { */ Template findById(Long ruleTemplateId); + /** + * find by ids + * @param templateIds + * @return + */ + List