
Commit 38d6cf0

Merge branch 'dev-1.2.0_k8s' of https://10.30.90.89:10080/luban/dataspherestudio into dev-2022-sprint2

# Conflicts:
#	db/dss_ddl.sql

2 parents f4b1749 + 0c1ebaa

File tree: 138 files changed, +18459 −1766 lines


conf/dss-guide-server.properties

Lines changed: 6 additions & 1 deletion
@@ -19,6 +19,10 @@
 # Spring configurations
 spring.server.port=9210
 spring.spring.application.name=dss-guide-server
+
+spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
+spring.jackson.time-zone=GMT+8
+
 wds.linkis.server.version=v1
 
 wds.linkis.log.clear=true
@@ -38,4 +42,5 @@ wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.guide.server.d
 #wds.linkis.gateway.url=http://127.0.0.1:9001/
 
 ## guide_images_path
-guide.content.images.path=/opt/dss/dss-guide-server/guide_images/
+guide.content.images.path=/opt/dss/dss-guide-server/guide_images/
+guide.chapter.images.path=/opt/dss/dss-guide-server/guide_images/
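The two new spring.jackson.* keys are standard Spring Boot properties that change how the auto-configured ObjectMapper renders java.util.Date fields. A minimal sketch of the equivalent programmatic setup, using plain Jackson outside Spring (the class name and printed value are illustrative only):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class GuideServerDateFormatSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // Equivalent of spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
        mapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
        // Equivalent of spring.jackson.time-zone=GMT+8; without this Jackson
        // applies its own default time zone (UTC) even when a format is set.
        mapper.setTimeZone(TimeZone.getTimeZone("GMT+8"));
        // Date fields now serialize as a formatted string instead of a
        // numeric epoch timestamp.
        System.out.println(mapper.writeValueAsString(new Date()));
    }
}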

db/dss_ddl.sql

Lines changed: 30 additions & 0 deletions
@@ -1338,3 +1338,33 @@ CREATE TABLE `dss_download_audit` (
   `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
   PRIMARY KEY (`id`)
 ) ENGINE = INNODB DEFAULT CHARSET = utf8 COMMENT = 'file download audit';
+
+DROP TABLE IF EXISTS `dss_guide_catalog`;
+CREATE TABLE IF NOT EXISTS `dss_guide_catalog` (
+  `id` BIGINT(13) NOT NULL AUTO_INCREMENT,
+  `parent_id` BIGINT(13) NOT NULL COMMENT 'parent catalog ID; -1 means top-level catalog',
+  `title` VARCHAR(50) DEFAULT NULL COMMENT 'title',
+  `description` VARCHAR(200) DEFAULT NULL COMMENT 'description',
+  `create_by` VARCHAR(255) DEFAULT NULL COMMENT 'creator',
+  `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'creation time',
+  `update_by` VARCHAR(255) DEFAULT NULL COMMENT 'updater',
+  `update_time` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
+  `is_delete` TINYINT(1) DEFAULT '0' COMMENT '0: not deleted (default), 1: deleted',
+  PRIMARY KEY (`id`)
+) ENGINE=INNODB DEFAULT CHARSET=utf8 COMMENT='user guide knowledge base catalog';
+
+DROP TABLE IF EXISTS `dss_guide_chapter`;
+CREATE TABLE IF NOT EXISTS `dss_guide_chapter` (
+  `id` BIGINT(13) NOT NULL AUTO_INCREMENT,
+  `catalog_id` BIGINT(13) NOT NULL COMMENT 'catalog ID',
+  `title` VARCHAR(50) DEFAULT NULL COMMENT 'title',
+  `title_alias` VARCHAR(50) DEFAULT NULL COMMENT 'title alias',
+  `content` TEXT DEFAULT NULL COMMENT 'content in Markdown format',
+  `content_html` TEXT DEFAULT NULL COMMENT 'content converted from Markdown to HTML',
+  `create_by` varchar(255) DEFAULT NULL COMMENT 'creator',
+  `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'creation time',
+  `update_by` varchar(255) DEFAULT NULL COMMENT 'updater',
+  `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
+  `is_delete` tinyint(1) DEFAULT '0' COMMENT '0: not deleted (default), 1: deleted',
+  PRIMARY KEY (`id`)
+) ENGINE=INNODB DEFAULT CHARSET=utf8 COMMENT='user guide knowledge base article';
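Both tables rely on a soft-delete flag (is_delete), and the catalog table encodes its hierarchy through parent_id with -1 as the root sentinel. A hypothetical JDBC sketch of listing root catalogs under those conventions (the connection URL and credentials are placeholders, not from the commit):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class GuideCatalogSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder JDBC URL and credentials; substitute the real DSS database.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://127.0.0.1:3306/dss", "dss", "dss")) {
            // Root catalogs carry parent_id = -1 per the column comment;
            // soft-deleted rows (is_delete = 1) are skipped.
            PreparedStatement ps = conn.prepareStatement(
                    "SELECT id, title, description FROM dss_guide_catalog "
                            + "WHERE parent_id = -1 AND is_delete = 0");
            try (ResultSet rs = ps.executeQuery()) {
                while (rs.next()) {
                    System.out.println(rs.getLong("id") + " " + rs.getString("title"));
                }
            }
        }
    }
}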

db/dss_dml.sql

Lines changed: 4 additions & 1 deletion
@@ -110,7 +110,7 @@ INSERT INTO dss_application (id, name, url, is_user_need_init, level, user_init_
 INSERT INTO dss_application (id, name, url, is_user_need_init, level, user_init_url, exists_project_service, project_url, enhance_json, if_iframe, homepage_url, redirect_url) VALUES (22, 'dataGovernance', '/dataGovernance', null, null, null, null, '/dataGovernance', null, 0, '/dataGovernance', '/dataGovernance');
 
 
-UPDATE `dss_application` SET url = 'http://GATEWAY_INSTALL_IP:GATEWAY_PORT' WHERE `name` in('linkis','workflow');
+
 
 DELETE FROM dss_project_taxonomy;
 INSERT INTO `dss_project_taxonomy` (`id`, `name`, `description`, `creator`, `create_time`, `update_time`) VALUES (NULL, 'My project', NULL, '-1', NULL, NULL);
@@ -470,6 +470,7 @@ INSERT INTO `dss_workflow_node_ui_validate` (`id`, `validate_type`, `validate_ra
 
 DELETE FROM dss_appconn;
 INSERT INTO `dss_appconn` VALUES (2,'orchestrator-framework',0,1,NULL,0,NULL,'com.webank.wedatasphere.dss.appconn.orchestrator.DefaultOrchestratorFrameworkAppConn','/opt/dss/dss-appconns/orchestrator-framework','{\"resource\":{\"fileName\":\"orchestrator-framework.zip\",\"resourceId\":\"95a6b56c-bb16-41c7-bb73-6c65fd29ac78\",\"version\":\"v000073\"},\"lastModifiedTime\":1637154970000,\"size\":4212166}'),(3,'workflow',0,1,NULL,0,NULL,'com.webank.wedatasphere.dss.appconn.workflow.DefaultWorkflowAppConn','/opt/dss/dss-appconns/workflow','{\"resource\":{\"fileName\":\"workflow.zip\",\"resourceId\":\"f9ac5b6c-76cb-4e5e-97b9-a6820c903160\",\"version\":\"v000022\"},\"lastModifiedTime\":1637154970000,\"size\":26744}'),(5,'eventchecker',0,1,NULL,0,NULL,'com.webank.wedatasphere.dss.appconn.eventchecker.EventCheckerAppConn','/opt/dss/dss-appconns/eventchecker','{\"resource\":{\"fileName\":\"eventchecker.zip\",\"resourceId\":\"8719b185-15db-45d5-8b6d-2b7070b2c1b9\",\"version\":\"v000022\"},\"lastModifiedTime\":1637154970000,\"size\":10722445}'),(6,'datachecker',0,1,NULL,0,NULL,'com.webank.wedatapshere.dss.appconn.datachecker.DataCheckerAppConn','/opt/dss/dss-appconns/datachecker','{\"resource\":{\"fileName\":\"datachecker.zip\",\"resourceId\":\"6ae4329b-65ca-47b3-99cf-1e30b75a5cba\",\"version\":\"v000022\"},\"lastModifiedTime\":1637154969000,\"size\":11772154}'),(7,'dolphinscheduler',0,1,NULL,0,NULL,'com.webank.wedatasphere.dss.appconn.dolphinscheduler.DolphinSchedulerAppConn','/opt/dss/dss-appconns/dolphinscheduler','{\"resource\":{\"fileName\":\"dolphinscheduler.zip\",\"resourceId\":\"2325c2ee-e68f-4fee-9dfc-d628f2b7262d\",\"version\":\"v000008\"},\"lastModifiedTime\":1637154972000,\"size\":15251697}');
+update dss_appconn set resource=NULL ;
 
 DELETE FROM dss_appconn_instance;
 INSERT INTO `dss_appconn_instance` VALUES (1,1,'PROD','http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT/','','http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT/','http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT/'),(2,2,'DEV','http://ORCHESTRATOR_IP:ORCHESTRATOR_PORT/#/workspaceHome?workspaceId=104','','http://ORCHESTRATOR_IP:ORCHESTRATOR_PORT/#/workspaceHome?workspaceId=104','http://ORCHESTRATOR_IP:ORCHESTRATOR_PORT/#/workspaceHome?workspaceId=104'),(3,3,'DEV','http://WORKFLOW_IP:WORKFLOW_PORT/#/workspaceHome?workspaceId=104','','http://WORKFLOW_IP:WORKFLOW_PORT/#/workspaceHome?workspaceId=104','http://WORKFLOW_IP:WORKFLOW_PORT/#/workspaceHome?workspaceId=104'),(4,4,'DEV','http://VISUALIS_IP:VISUALIS_PORT/','','http://VISUALIS_NGINX_IP_2:VISUALIS_NGINX_PORT/dss/visualis/#/projects','http://VISUALIS_NGINX_IP_2:VISUALIS_NGINX_PORT/'),(5,5,'DEV','eventchecker','{\"msg.eventchecker.jdo.option.name\": \"msg\",\"msg.eventchecker.jdo.option.url\": \"EVENTCHECKER_JDBC_URL\",\"msg.eventchecker.jdo.option.username\": \"EVENTCHECKER_JDBC_USERNAME\",\"msg.eventchecker.jdo.option.password\": \"EVENTCHECKER_JDBC_PASSWORD\"}',NULL,NULL),(6,6,'DEV','datachecker','{\"job.datachecker.jdo.option.name\":\"job\",\"job.datachecker.jdo.option.url\":\"DATACHECKER_JOB_JDBC_URL\",\"job.datachecker.jdo.option.username\":\"DATACHECKER_JOB_JDBC_USERNAME\",\"job.datachecker.jdo.option.password\":\"DATACHECKER_JOB_JDBC_PASSWORD\",\"bdp.datachecker.jdo.option.name\":\"bdp\",\"bdp.datachecker.jdo.option.url\":\"DATACHECKER_BDP_JDBC_URL\",\"bdp.datachecker.jdo.option.username\":\"DATACHECKER_BDP_JDBC_USERNAME\",\"bdp.datachecker.jdo.option.password\":\"DATACHECKER_BDP_JDBC_PASSWORD\",\"bdp.datachecker.jdo.option.login.type\":\"base64\",\"bdp.mask.url\":\"http://BDP_MASK_IP:BDP_MASK_PORT/api/v1/mask-status?\",\"bdp.mask.app.id\":\"wtss\",\"bdp.mask.app.token\":\"20a0ccdfc0\"}',NULL,NULL),(7,7,'DEV','http://dolphinscheduler-api.ns-luban.svc.cluster.local:12345/dolphinscheduler',NULL,'http://dolphinscheduler-api.ns-luban.svc.cluster.local:12345/dolphinscheduler','http://dolphinscheduler-api.ns-luban.svc.cluster.local:12345/dolphinscheduler');
@@ -531,3 +532,5 @@ INSERT INTO `dss_component_role` (`workspace_id`, `component_id`, `role_id`, `pr
 INSERT INTO `dss_component_role` (`workspace_id`, `component_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',17,'4','0',now(),'system');
 INSERT INTO `dss_component_role` (`workspace_id`, `component_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',17,'5','0',now(),'system');
 INSERT INTO `dss_component_role` (`workspace_id`, `component_id`, `role_id`, `priv`, `update_time`, `updateby`) VALUES('-1',17,'6','0',now(),'system');
+
+INSERT INTO `dss_admin_dept` (`id`, `parent_id`, `ancestors`, `dept_name`, `order_num`, `leader`, `phone`, `email`, `status`, `del_flag`, `create_by`, `create_time`, `update_by`, `update_time`) VALUES('100','0','0','天翼云科技','0','ctyunuser','1888888888','ctyun@ctyun.com','0','0','admin',now(),'admin',now());

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblDetailInfo.java

Lines changed: 4 additions & 4 deletions
@@ -21,10 +21,10 @@ public class HiveTblDetailInfo implements Serializable {
 
     @Data
     public static class HiveTblBasicInfo extends HiveTblSimpleInfo {
-        private String store;
-        private String comment;
-        private Set<String> labels;
-        private Boolean isParTbl;
+        private String store;     // storage size
+        private Boolean isParTbl; // whether the table is partitioned
+        private String tableType; // Hive table type: EXTERNAL_TABLE or MANAGED_TABLE
+        private String location;  // Hive table storage path
     }
 
     @Data

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblSimpleInfo.java

Lines changed: 6 additions & 2 deletions
@@ -3,8 +3,10 @@
 import lombok.Data;
 
 import java.util.List;
+import java.util.Set;
 
 /**
+ * @author suyc
  * @Classname HiveTblSimpleInfo
 * @Description TODO
 * @Date 2021/8/24 10:17
@@ -14,10 +16,12 @@
 public class HiveTblSimpleInfo {
     private String guid;
     private String name;
+    private String dbName;
     private String qualifiedName;
+    private List<String> columns;
     private String createTime;
     private String owner;
-    private String dbName;
+    private String comment;
+    private Set<String> labels;
     private List<HiveTblDetailInfo.HiveClassificationInfo> classifications;
-
 }
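Because the class carries Lombok's @Data annotation, each new field (dbName, columns, comment, labels) automatically gains the getter/setter pair that AssetServiceImpl calls in the next file. For readers unfamiliar with Lombok, a hand-written sketch of what gets generated for one field:

import java.util.Set;

// Rough equivalent of what @Data generates for the new `labels` field;
// Lombok emits the same pattern for every field, plus equals/hashCode/toString.
public class HiveTblSimpleInfoPlain {
    private Set<String> labels;

    public Set<String> getLabels() {
        return labels;
    }

    public void setLabels(Set<String> labels) {
        this.labels = labels;
    }
}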

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetTblRestful.java

Lines changed: 4 additions & 4 deletions
@@ -69,13 +69,13 @@ public Message searchHiveTbl(@RequestParam(required = false) String classificati
                              @RequestParam(defaultValue = "") String keyword,
                              @RequestParam(defaultValue = DEFAULT_LIMIT) int limit,
                              @RequestParam(defaultValue = DEFAULT_OFFSET) int offset) throws Exception {
-        List<HiveTblSimpleInfo> hiveTblBasicList = assetService.searchHiveTable(classification,'*'+query+'*',limit,offset);
-        if(hiveTblBasicList ==null || keyword ==null || keyword.trim().equals("")) {
-            return Message.ok().data("result",hiveTblBasicList);
+        List<HiveTblSimpleInfo> hiveTblSimpleInfoList = assetService.searchHiveTable(classification,query.trim(),limit,offset);
+        if(hiveTblSimpleInfoList ==null || keyword ==null || keyword.trim().equals("")) {
+            return Message.ok().data("result",hiveTblSimpleInfoList);
         }
         else {
             Pattern regex = Pattern.compile(keyword);
-            return Message.ok().data("result",hiveTblBasicList.stream().filter(ele -> regex.matcher(ele.getOwner()).find()).collect(Collectors.toList()));
+            return Message.ok().data("result",hiveTblSimpleInfoList.stream().filter(ele -> regex.matcher(ele.getOwner()).find()).collect(Collectors.toList()));
         }
     }

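One behavioral detail worth noting: keyword is compiled as a regular expression and applied with find(), so it matches anywhere inside the owner string rather than requiring an exact match. A standalone sketch of that filtering step (the owner values are made up):

import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class OwnerFilterSketch {
    public static void main(String[] args) {
        List<String> owners = List.of("hadoop", "hive-admin", "adm_user"); // sample data
        Pattern regex = Pattern.compile("adm"); // the keyword, treated as a regex
        // find() matches anywhere in the string, so both "hive-admin" and
        // "adm_user" survive the filter.
        List<String> hits = owners.stream()
                .filter(owner -> regex.matcher(owner).find())
                .collect(Collectors.toList());
        System.out.println(hits); // [hive-admin, adm_user]
    }
}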
dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/impl/AssetServiceImpl.java

Lines changed: 34 additions & 18 deletions
@@ -18,6 +18,7 @@
 import org.apache.atlas.model.instance.AtlasClassification;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.model.instance.AtlasEntityHeader;
+import org.apache.atlas.model.instance.AtlasRelatedObjectId;
 import org.apache.atlas.model.lineage.AtlasLineageInfo;
 import org.apache.atlas.model.typedef.AtlasClassificationDef;
 import org.slf4j.Logger;
@@ -31,6 +32,7 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
 /**
@@ -72,37 +74,49 @@ public List<HiveTblSimpleInfo> searchHiveTable(String classification, String que
                                                    int limit, int offset) throws DataGovernanceException {
         List<AtlasEntityHeader> atlasEntityHeaders = null;
         try {
-            atlasEntityHeaders = atlasService.searchHiveTable(classification, query, true, limit, offset);
+            atlasEntityHeaders = atlasService.searchHiveTable(classification, "*" + query + "*", true, limit, offset);
         } catch (AtlasServiceException ex) {
             throw new DataGovernanceException(ex.getMessage());
         }
 
         if (atlasEntityHeaders != null) {
+            // columns are regex-filtered against the keyword
+            Pattern regex = Pattern.compile(query);
             return atlasEntityHeaders.parallelStream().filter(Objects::nonNull).map(atlasEntityHeader -> {
-                HiveTblSimpleInfo hiveTblBasic = new HiveTblSimpleInfo();
-                hiveTblBasic.setGuid(atlasEntityHeader.getGuid());
-                hiveTblBasic.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
+                HiveTblSimpleInfo hiveTblSimpleInfo = new HiveTblSimpleInfo();
+                hiveTblSimpleInfo.setGuid(atlasEntityHeader.getGuid());
+                hiveTblSimpleInfo.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
                 String qualifiedName = stringValueOfObject(atlasEntityHeader.getAttribute("qualifiedName"));
-                hiveTblBasic.setQualifiedName(qualifiedName);
-                hiveTblBasic.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
+                hiveTblSimpleInfo.setQualifiedName(qualifiedName);
+                hiveTblSimpleInfo.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
                 Object createTime = atlasEntityHeader.getAttribute("createTime");
                 if (createTime != null) {
-                    hiveTblBasic.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
+                    hiveTblSimpleInfo.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
                 }
                 if(null != qualifiedName && qualifiedName.split("\\.").length >0){
                     String dbName = qualifiedName.split("\\.")[0];
-                    hiveTblBasic.setDbName(dbName);
+                    hiveTblSimpleInfo.setDbName(dbName);
                 }
+                hiveTblSimpleInfo.setLabels(atlasEntityHeader.getLabels());
 
                 try {
                     AtlasEntity atlasEntity = atlasService.getHiveTblByGuid(atlasEntityHeader.getGuid());
+
+                    // comment
+                    hiveTblSimpleInfo.setComment(stringValueOfObject(atlasEntity.getAttribute("comment")));
+                    List<Map<String,Object>> atlasRelatedObjectIdListForColumns = (List<Map<String,Object>>) atlasEntity.getRelationshipAttribute("columns");
+                    if(null != query && !query.trim().equalsIgnoreCase("")) {
+                        hiveTblSimpleInfo.setColumns(atlasRelatedObjectIdListForColumns.stream().map(columnMap -> columnMap.getOrDefault("displayText","").toString())
+                                .filter(columnName -> regex.matcher(columnName).find()).collect(Collectors.toList()));
+                    }
+                    // classifications
                     List<HiveTblDetailInfo.HiveClassificationInfo> classificationInfoList = getClassificationInfoList(atlasEntity);
-                    hiveTblBasic.setClassifications(classificationInfoList);
+                    hiveTblSimpleInfo.setClassifications(classificationInfoList);
                 } catch (AtlasServiceException ex) {
                     logger.error(ex.getMessage());
                 }
 
-                return hiveTblBasic;
+                return hiveTblSimpleInfo;
             }).collect(Collectors.toList());
         }
         return null;
@@ -155,8 +169,8 @@ public HiveTblDetailInfo getHiveTblDetail(String guid) throws DataGovernanceExce
     }
 
     private HiveTblDetailInfo.HiveTblBasicInfo getBasicInfo(String guid, AtlasEntity atlasEntity) throws AtlasServiceException {
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         int storage = 0;
         String db_name = String.valueOf(atlasEntity.getAttributes().get("qualifiedName")).split("@")[0];
         String tableName = db_name.split("\\.")[1];
@@ -169,14 +183,16 @@ private HiveTblDetailInfo.HiveTblBasicInfo getBasicInfo(String guid, AtlasEntity
 
         HiveTblDetailInfo.HiveTblBasicInfo basic = new HiveTblDetailInfo.HiveTblBasicInfo();
         basic.setName(tableName);
-        basic.setOwner(String.valueOf(atlasEntity.getAttributes().get("owner")));
+        basic.setOwner(String.valueOf(atlasEntity.getAttributes().getOrDefault("owner","NULL")));
         basic.setCreateTime(new java.text.SimpleDateFormat("yyyy MM-dd HH:mm:ss").format(atlasEntity.getCreateTime()));
         basic.setStore(String.valueOf(storage));
-        basic.setComment(String.valueOf(atlasEntity.getAttributes().get("comment")));
+        basic.setComment(String.valueOf(atlasEntity.getAttributes().getOrDefault("comment","NULL")));
         Set<String> labels = atlasEntity.getLabels();
         basic.setLabels(labels);
         basic.setIsParTbl(isPartTable);
         basic.setGuid(guid);
+        basic.setTableType(hiveTblAttributesMap.getOrDefault("tableType","NULL").toString());
+        basic.setLocation(hiveTblAttributesMap.getOrDefault("location","NULL").toString());
 
         return basic;
     }
@@ -288,8 +304,8 @@ public String getTbSelect(String guid) throws DataGovernanceException {
         for (AtlasEntity hiveColumnsByGuid : hiveColumnsByGuids) {
             fields.add((String) hiveColumnsByGuid.getAttributes().get("name"));
         }
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         if (isPartTable == true) {
             List<String> partguids = new ArrayList<>();
             List<LinkedTreeMap<String, String>> partitionKeys = (List<LinkedTreeMap<String, String>>) atlasEntity.getAttributes().get("partitionKeys");
@@ -355,8 +371,8 @@ public String getTbCreate(String guid) throws DataGovernanceException {
             sql.append(field);
         }
         sql.append(") @$ ");
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         if (isPartTable == true) {
             sql.append("PARTITIONED BY @$ ( @$ ");
             List<String> partguids = new ArrayList<>();

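The reworked search combines a server-side Atlas wildcard query ("*" + query + "*") with a client-side regex pass over each table's "columns" relationship attribute, keeping only the column names that match the query. A standalone sketch of that second pass, with a made-up column list standing in for the Atlas response (the service itself casts the response to List<Map<String,Object>>):

import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class ColumnFilterSketch {
    public static void main(String[] args) {
        // Stand-in for atlasEntity.getRelationshipAttribute("columns"):
        // Atlas returns related-object maps whose "displayText" is the column name.
        List<Map<String, String>> columns = List.of(
                Map.of("displayText", "user_id"),
                Map.of("displayText", "order_id"),
                Map.of("displayText", "create_time"));
        Pattern regex = Pattern.compile("id"); // the search query, reused as a regex
        List<String> matched = columns.stream()
                .map(columnMap -> columnMap.getOrDefault("displayText", ""))
                .filter(columnName -> regex.matcher(columnName).find())
                .collect(Collectors.toList());
        System.out.println(matched); // [user_id, order_id]
    }
}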