Skip to content

Commit bf7b94b

Browse files
committed
Merge branch 'dev-1.2.0_k8s' into dev-1.1.0-ctyun
2 parents 30a8a9f + cb4c032 commit bf7b94b

File tree

185 files changed

+18377
-2072
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

185 files changed

+18377
-2072
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -46,6 +46,7 @@ dss-framework/framework-plugins/dss-framework-release-server/target
4646
dss-framework/dss-framework-admin/target
4747
dss-framework/dss-framework-sql-template/target
4848
dss-framework/dss-framework-dbapi-server/target
49+
dss-framework/dss-framework-ctyun-service/target
4950

5051

5152
# dss-orchestrator

conf/dss-flow-execution-server.properties

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -51,5 +51,5 @@ wds.linkis.server.component.exclude.packages=org.apache.linkis.entrance.restful.
5151
spring.spring.main.allow-bean-definition-overriding=true
5252

5353
wds.linkis.entrance.config.log.path=file:///appcom/tmp/dss/
54-
wds.linkis.spark.engine.version=3.1.2
54+
wds.linkis.spark.engine.version=3.0.3
5555
wds.linkis.hive.engine.version=3.1.2

conf/dss-guide-server.properties

Lines changed: 6 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -19,6 +19,10 @@
1919
# Spring configurations
2020
spring.server.port=9210
2121
spring.spring.application.name=dss-guide-server
22+
23+
spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
24+
spring.jackson.time-zone=GMT+8
25+
2226
wds.linkis.server.version=v1
2327

2428
wds.linkis.log.clear=true
@@ -38,4 +42,5 @@ wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.guide.server.d
3842
#wds.linkis.gateway.url=http://127.0.0.1:9001/
3943

4044
## guide_images_path
41-
guide.content.images.path=/opt/dss/dss-guide-server/guide_images/
45+
guide.content.images.path=/opt/dss/dss-guide-server/guide_images/
46+
guide.chapter.images.path=/opt/dss/dss-guide-server/guide_images/

conf/dss.properties

Lines changed: 3 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -28,10 +28,11 @@ wds.dss.esb.appid=
2828
wds.dss.esb.token=
2929

3030
wds.dss.appconn.scheduler.job.label=dev
31-
wds.dss.ds.token=c755550dacfd980bca1fc9ec30449b18
31+
wds.dss.ds.admin.token=c755550dacfd980bca1fc9ec30449b18
3232

3333

3434
wds.linkis.reflect.scan.package=org.apache.linkis,com.webank.wedatasphere.dss
3535
spring.spring.mvc.servlet.path=/api/rest_j/v1
3636
spring.spring.servlet.multipart.max-file-size=200MB
37-
spring.spring.servlet.multipart.max-request-size=200MB
37+
spring.spring.servlet.multipart.max-request-size=200MB
38+
wds.dss.project.strict.mode=true

db/dss_ddl.sql

Lines changed: 41 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1327,3 +1327,44 @@ CREATE TABLE IF NOT EXISTS `dss_guide_content` (
13271327
PRIMARY KEY (`id`)
13281328
) ENGINE=INNODB DEFAULT CHARSET=utf8 COMMENT='用户向导页面内容详情';
13291329

1330+
DROP TABLE IF EXISTS `dss_download_audit`;
1331+
CREATE TABLE `dss_download_audit` (
1332+
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
1333+
`creator` varchar(255) COMMENT '创建者',
1334+
`tenant` varchar(255) COMMENT '租户',
1335+
`path` varchar(255) COMMENT '文件路径',
1336+
`sql` varchar(3000) COMMENT '执行sql脚本',
1337+
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
1338+
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间',
1339+
PRIMARY KEY (`id`)
1340+
) ENGINE = INNODB DEFAULT CHARSET = utf8 COMMENT = '文件下载审计';
1341+
1342+
DROP TABLE IF EXISTS `dss_guide_catalog`;
1343+
CREATE TABLE IF NOT EXISTS `dss_guide_catalog` (
1344+
`id` BIGINT(13) NOT NULL AUTO_INCREMENT,
1345+
`parent_id` BIGINT(13) NOT NULL COMMENT '父级目录ID,-1代表最顶级目录',
1346+
`title` VARCHAR(50) DEFAULT NULL COMMENT '标题',
1347+
`description` VARCHAR(200) DEFAULT NULL COMMENT '描述',
1348+
`create_by` VARCHAR(255) DEFAULT NULL COMMENT '创建者',
1349+
`create_time` DATETIME DEFAULT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
1350+
`update_by` VARCHAR(255) DEFAULT NULL COMMENT '更新者',
1351+
`update_time` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间',
1352+
`is_delete` TINYINT(1) DEFAULT '0' COMMENT '0:未删除(默认), 1已删除',
1353+
PRIMARY KEY (`id`)
1354+
) ENGINE=INNODB DEFAULT CHARSET=utf8 COMMENT='用户向导知识库目录';
1355+
1356+
DROP TABLE IF EXISTS `dss_guide_chapter`;
1357+
CREATE TABLE IF NOT EXISTS `dss_guide_chapter` (
1358+
`id` BIGINT(13) NOT NULL AUTO_INCREMENT,
1359+
`catalog_id` BIGINT(13) NOT NULL COMMENT '目录ID',
1360+
`title` VARCHAR(50) DEFAULT NULL COMMENT '标题',
1361+
`title_alias` VARCHAR(50) DEFAULT NULL COMMENT '标题简称',
1362+
`content` TEXT DEFAULT NULL COMMENT 'Markdown格式的内容',
1363+
`content_html` TEXT DEFAULT NULL COMMENT 'Markdown转换为html内容',
1364+
`create_by` varchar(255) DEFAULT NULL COMMENT '创建者',
1365+
`create_time` datetime DEFAULT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
1366+
`update_by` varchar(255) DEFAULT NULL COMMENT '更新者',
1367+
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间',
1368+
`is_delete` tinyint(1) DEFAULT '0' COMMENT '0:未删除(默认), 1已删除',
1369+
PRIMARY KEY (`id`)
1370+
) ENGINE=INNODB DEFAULT CHARSET=utf8 COMMENT='用户向导知识库文章';

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblDetailInfo.java

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -21,10 +21,10 @@ public class HiveTblDetailInfo implements Serializable {
2121

2222
@Data
2323
public static class HiveTblBasicInfo extends HiveTblSimpleInfo {
24-
private String store;
25-
private String comment;
26-
private Set<String> labels;
27-
private Boolean isParTbl;
24+
private String store; //存储量
25+
private Boolean isParTbl; //是否分区表
26+
private String tableType; //Hive表类型 tableType: EXTERNAL_TABLE, MANAGED_TABLE
27+
private String location; //Hive表存储路径
2828
}
2929

3030
@Data

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblSimpleInfo.java

Lines changed: 6 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -3,8 +3,10 @@
33
import lombok.Data;
44

55
import java.util.List;
6+
import java.util.Set;
67

78
/**
9+
* @author suyc
810
* @Classname HiveTblSimpleInfo
911
* @Description TODO
1012
* @Date 2021/8/24 10:17
@@ -14,10 +16,12 @@
1416
public class HiveTblSimpleInfo {
1517
private String guid;
1618
private String name;
19+
private String dbName;
1720
private String qualifiedName;
21+
private List<String> columns;
1822
private String createTime;
1923
private String owner;
20-
private String dbName;
24+
private String comment;
25+
private Set<String> labels;
2126
private List<HiveTblDetailInfo.HiveClassificationInfo> classifications;
22-
2327
}

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetTblRestful.java

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -69,13 +69,13 @@ public Message searchHiveTbl(@RequestParam(required = false) String classificati
6969
@RequestParam(defaultValue = "") String keyword,
7070
@RequestParam(defaultValue = DEFAULT_LIMIT) int limit,
7171
@RequestParam(defaultValue = DEFAULT_OFFSET) int offset) throws Exception {
72-
List<HiveTblSimpleInfo> hiveTblBasicList = assetService.searchHiveTable(classification,'*'+query+'*',limit,offset);
73-
if(hiveTblBasicList ==null || keyword ==null || keyword.trim().equals("")) {
74-
return Message.ok().data("result",hiveTblBasicList);
72+
List<HiveTblSimpleInfo> hiveTblSimpleInfoList = assetService.searchHiveTable(classification,query.trim(),limit,offset);
73+
if(hiveTblSimpleInfoList ==null || keyword ==null || keyword.trim().equals("")) {
74+
return Message.ok().data("result",hiveTblSimpleInfoList);
7575
}
7676
else {
7777
Pattern regex = Pattern.compile(keyword);
78-
return Message.ok().data("result",hiveTblBasicList.stream().filter(ele -> regex.matcher(ele.getOwner()).find()).collect(Collectors.toList()));
78+
return Message.ok().data("result",hiveTblSimpleInfoList.stream().filter(ele -> regex.matcher(ele.getOwner()).find()).collect(Collectors.toList()));
7979
}
8080
}
8181

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/impl/AssetServiceImpl.java

Lines changed: 34 additions & 18 deletions
Original file line number | Diff line number | Diff line change
@@ -18,6 +18,7 @@
1818
import org.apache.atlas.model.instance.AtlasClassification;
1919
import org.apache.atlas.model.instance.AtlasEntity;
2020
import org.apache.atlas.model.instance.AtlasEntityHeader;
21+
import org.apache.atlas.model.instance.AtlasRelatedObjectId;
2122
import org.apache.atlas.model.lineage.AtlasLineageInfo;
2223
import org.apache.atlas.model.typedef.AtlasClassificationDef;
2324
import org.slf4j.Logger;
@@ -31,6 +32,7 @@
3132
import java.util.Map;
3233
import java.util.Objects;
3334
import java.util.Set;
35+
import java.util.regex.Pattern;
3436
import java.util.stream.Collectors;
3537

3638
/**
@@ -72,37 +74,49 @@ public List<HiveTblSimpleInfo> searchHiveTable(String classification, String que
7274
int limit, int offset) throws DataGovernanceException {
7375
List<AtlasEntityHeader> atlasEntityHeaders = null;
7476
try {
75-
atlasEntityHeaders = atlasService.searchHiveTable(classification, query, true, limit, offset);
77+
atlasEntityHeaders = atlasService.searchHiveTable(classification, "*" + query + "*", true, limit, offset);
7678
} catch (AtlasServiceException ex) {
7779
throw new DataGovernanceException(ex.getMessage());
7880
}
7981

8082
if (atlasEntityHeaders != null) {
83+
//columns 根据keyword来正则匹配过滤
84+
Pattern regex = Pattern.compile(query);
8185
return atlasEntityHeaders.parallelStream().filter(Objects::nonNull).map(atlasEntityHeader -> {
82-
HiveTblSimpleInfo hiveTblBasic = new HiveTblSimpleInfo();
83-
hiveTblBasic.setGuid(atlasEntityHeader.getGuid());
84-
hiveTblBasic.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
86+
HiveTblSimpleInfo hiveTblSimpleInfo = new HiveTblSimpleInfo();
87+
hiveTblSimpleInfo.setGuid(atlasEntityHeader.getGuid());
88+
hiveTblSimpleInfo.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
8589
String qualifiedName =stringValueOfObject(atlasEntityHeader.getAttribute("qualifiedName"));
86-
hiveTblBasic.setQualifiedName(qualifiedName);
87-
hiveTblBasic.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
90+
hiveTblSimpleInfo.setQualifiedName(qualifiedName);
91+
hiveTblSimpleInfo.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
8892
Object createTime = atlasEntityHeader.getAttribute("createTime");
8993
if (createTime != null) {
90-
hiveTblBasic.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
94+
hiveTblSimpleInfo.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
9195
}
9296
if(null != qualifiedName && qualifiedName.split("\\.").length >0){
9397
String dbName = qualifiedName.split("\\.")[0];
94-
hiveTblBasic.setDbName(dbName);
98+
hiveTblSimpleInfo.setDbName(dbName);
9599
}
100+
hiveTblSimpleInfo.setLabels(atlasEntityHeader.getLabels());
96101

97102
try {
98103
AtlasEntity atlasEntity = atlasService.getHiveTblByGuid(atlasEntityHeader.getGuid());
104+
105+
//comment
106+
hiveTblSimpleInfo.setComment(stringValueOfObject(atlasEntity.getAttribute("comment")));
107+
List<Map<String,Object>> atlasRelatedObjectIdListForColumns = (List<Map<String,Object>>)atlasEntity.getRelationshipAttribute("columns");
108+
if(null != query && !query.trim().equalsIgnoreCase("")) {
109+
hiveTblSimpleInfo.setColumns(atlasRelatedObjectIdListForColumns.stream().map(columnMap -> columnMap.getOrDefault("displayText","").toString())
110+
.filter(columnName -> regex.matcher(columnName).find()).collect(Collectors.toList()));
111+
}
112+
//classifications
99113
List<HiveTblDetailInfo.HiveClassificationInfo> classificationInfoList = getClassificationInfoList(atlasEntity);
100-
hiveTblBasic.setClassifications(classificationInfoList);
114+
hiveTblSimpleInfo.setClassifications(classificationInfoList);
101115
} catch (AtlasServiceException ex) {
102116
logger.error(ex.getMessage());
103117
}
104118

105-
return hiveTblBasic;
119+
return hiveTblSimpleInfo;
106120
}).collect(Collectors.toList());
107121
}
108122
return null;
@@ -155,8 +169,8 @@ public HiveTblDetailInfo getHiveTblDetail(String guid) throws DataGovernanceExce
155169
}
156170

157171
private HiveTblDetailInfo.HiveTblBasicInfo getBasicInfo(String guid, AtlasEntity atlasEntity) throws AtlasServiceException {
158-
Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
159-
Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
172+
Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
173+
Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
160174
int storage = 0;
161175
String db_name = String.valueOf(atlasEntity.getAttributes().get("qualifiedName")).split("@")[0];
162176
String tableName = db_name.split("\\.")[1];
@@ -169,14 +183,16 @@ private HiveTblDetailInfo.HiveTblBasicInfo getBasicInfo(String guid, AtlasEntity
169183

170184
HiveTblDetailInfo.HiveTblBasicInfo basic = new HiveTblDetailInfo.HiveTblBasicInfo();
171185
basic.setName(tableName);
172-
basic.setOwner(String.valueOf(atlasEntity.getAttributes().get("owner")));
186+
basic.setOwner(String.valueOf(atlasEntity.getAttributes().getOrDefault("owner","NULL")));
173187
basic.setCreateTime(new java.text.SimpleDateFormat("yyyy MM-dd HH:mm:ss").format(atlasEntity.getCreateTime()));
174188
basic.setStore(String.valueOf(storage));
175-
basic.setComment(String.valueOf(atlasEntity.getAttributes().get("comment")));
189+
basic.setComment(String.valueOf(atlasEntity.getAttributes().getOrDefault("comment","NULL")));
176190
Set<String> labels = atlasEntity.getLabels();
177191
basic.setLabels(labels);
178192
basic.setIsParTbl(isPartTable);
179193
basic.setGuid(guid);
194+
basic.setTableType(hiveTblAttributesMap.getOrDefault("tableType","NULL").toString());
195+
basic.setLocation(hiveTblAttributesMap.getOrDefault("location","NULL").toString());
180196

181197
return basic;
182198
}
@@ -288,8 +304,8 @@ public String getTbSelect(String guid) throws DataGovernanceException {
288304
for (AtlasEntity hiveColumnsByGuid : hiveColumnsByGuids) {
289305
fields.add((String) hiveColumnsByGuid.getAttributes().get("name"));
290306
}
291-
Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
292-
Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
307+
Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
308+
Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
293309
if (isPartTable == true) {
294310
List<String> partguids = new ArrayList<>();
295311
List<LinkedTreeMap<String, String>> partitionKeys = (List<LinkedTreeMap<String, String>>) atlasEntity.getAttributes().get("partitionKeys");
@@ -355,8 +371,8 @@ public String getTbCreate(String guid) throws DataGovernanceException {
355371
sql.append(field);
356372
}
357373
sql.append(") @$ ");
358-
Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
359-
Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
374+
Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
375+
Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
360376
if (isPartTable == true) {
361377
sql.append("PARTITIONED BY @$ ( @$ ");
362378
List<String> partguids = new ArrayList<>();

dss-data-governance/dss-data-governance-common/src/main/java/com/webank/wedatasphere/dss/data/common/atlas/AtlasService.java

Lines changed: 11 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -7,12 +7,14 @@
77
import com.google.gson.JsonElement;
88
import com.google.gson.JsonObject;
99
import com.google.gson.JsonParseException;
10+
import com.google.gson.internal.LinkedTreeMap;
1011
import com.webank.wedatasphere.dss.data.common.conf.AtlasConf;
1112
import org.apache.atlas.AtlasServiceException;
1213
import org.apache.atlas.model.discovery.AtlasSearchResult;
1314
import org.apache.atlas.model.instance.AtlasClassification;
1415
import org.apache.atlas.model.instance.AtlasEntity;
1516
import org.apache.atlas.model.instance.AtlasEntityHeader;
17+
import org.apache.atlas.model.instance.AtlasRelatedObjectId;
1618
import org.apache.atlas.model.lineage.AtlasLineageInfo;
1719
import org.apache.atlas.model.typedef.AtlasClassificationDef;
1820
import org.apache.atlas.model.typedef.AtlasTypesDef;
@@ -189,17 +191,21 @@ public String getHiveTblNameById(String guid) throws AtlasServiceException {
189191
}
190192

191193
/**
192-
* 根据guid来获取hive tbl名称 和 是否分区表
194+
* 根据guid来获取hive tbl名称 和 是否分区表、是否外部表、外部表路径
193195
*/
194-
public Map<String, Object> getHiveTblNameAndIsPartById(String guid) throws AtlasServiceException {
195-
Map<String, Object> result = new HashMap<>(2);
196+
public Map<String, Object> getHiveTblAttributesByGuid(String guid) throws AtlasServiceException {
197+
Map<String, Object> result = new HashMap<>(4);
196198

197-
String jsonStr = atlasClient.getEntityByGuidForString(guid, true, false);
199+
String jsonStr = atlasClient.getEntityByGuidForString(guid, false, false);
198200
AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = gson.fromJson(jsonStr, AtlasEntity.AtlasEntityWithExtInfo.class);
199201

200202
result.put("tblName", atlasEntityWithExtInfo.getEntity().getAttribute("qualifiedName").toString().split("@")[0]);
201203
result.put("isPartition", ((List) atlasEntityWithExtInfo.getEntity().getAttribute("partitionKeys")).size() > 0);
202-
204+
result.put("tableType",atlasEntityWithExtInfo.getEntity().getAttribute("tableType"));
205+
Map<String,Object> sdMap = (LinkedTreeMap)atlasEntityWithExtInfo.getEntity().getRelationshipAttribute("sd");
206+
if(null != sdMap) {
207+
result.put("location",atlasEntityWithExtInfo.getReferredEntities().get(sdMap.get("guid")).getAttribute("location"));
208+
}
203209
return result;
204210
}
205211

0 commit comments

Comments (0)