Commit 20f208b

Merge branch 'feature-sprint2' into dev-1.2.0_k8s
2 parents: ca07f1d + cd99d8c

125 files changed: +5748 additions, -1635 deletions

conf/dss-guide-server.properties

Lines changed: 6 additions & 1 deletion
@@ -19,6 +19,10 @@
 # Spring configurations
 spring.server.port=9210
 spring.spring.application.name=dss-guide-server
+
+spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
+spring.jackson.time-zone=GMT+8
+
 wds.linkis.server.version=v1
 
 wds.linkis.log.clear=true
@@ -38,4 +42,5 @@ wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.guide.server.d
 #wds.linkis.gateway.url=http://127.0.0.1:9001/
 
 ## guide_images_path
-guide.content.images.path=/opt/dss/dss-guide-server/guide_images/
+guide.content.images.path=/opt/dss/dss-guide-server/guide_images/
+guide.chapter.images.path=/opt/dss/dss-guide-server/guide_images/
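
The two new spring.jackson.* keys control how the Spring-managed Jackson ObjectMapper in dss-guide-server renders java.util.Date fields in JSON responses. Below is a minimal stand-alone sketch of the equivalent ObjectMapper configuration; it is not code from this commit, and the class name and main method are illustrative only.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

// Sketch (not part of the commit): mirrors what spring.jackson.date-format and
// spring.jackson.time-zone do to the auto-configured ObjectMapper used for REST responses.
public class JacksonDateFormatSketch {
    public static void main(String[] args) throws Exception {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); // spring.jackson.date-format
        fmt.setTimeZone(TimeZone.getTimeZone("GMT+8"));                     // spring.jackson.time-zone
        ObjectMapper mapper = new ObjectMapper().setDateFormat(fmt);
        // A java.util.Date field now serializes as e.g. "2022-01-13 20:30:00"
        System.out.println(mapper.writeValueAsString(new Date()));
    }
}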

db/dss_ddl.sql

Lines changed: 31 additions & 0 deletions
@@ -1291,6 +1291,7 @@ alter table dss_orchestrator_version_info add context_id varchar(200) DEFAULT
 
 ALTER TABLE dss_onestop_user_favorites ADD COLUMN `type` varchar(20) comment 'type: distinguishes favorites from watched items';
 
+
 /**
 * Luban product and documentation dss-guide
 */
@@ -1327,3 +1328,33 @@ CREATE TABLE IF NOT EXISTS `dss_guide_content` (
   PRIMARY KEY (`id`)
 ) ENGINE=INNODB DEFAULT CHARSET=utf8 COMMENT='user guide page content details';
 
+
+DROP TABLE IF EXISTS `dss_guide_catalog`;
+CREATE TABLE IF NOT EXISTS `dss_guide_catalog` (
+  `id` BIGINT(13) NOT NULL AUTO_INCREMENT,
+  `parent_id` BIGINT(13) NOT NULL COMMENT 'parent catalog ID; -1 means top-level catalog',
+  `title` VARCHAR(50) DEFAULT NULL COMMENT 'title',
+  `description` VARCHAR(200) DEFAULT NULL COMMENT 'description',
+  `create_by` VARCHAR(255) DEFAULT NULL COMMENT 'creator',
+  `create_time` DATETIME DEFAULT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'creation time',
+  `update_by` VARCHAR(255) DEFAULT NULL COMMENT 'updater',
+  `update_time` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
+  `is_delete` TINYINT(1) DEFAULT '0' COMMENT '0: not deleted (default), 1: deleted',
+  PRIMARY KEY (`id`)
+) ENGINE=INNODB DEFAULT CHARSET=utf8 COMMENT='user guide knowledge base catalog';
+
+DROP TABLE IF EXISTS `dss_guide_chapter`;
+CREATE TABLE IF NOT EXISTS `dss_guide_chapter` (
+  `id` BIGINT(13) NOT NULL AUTO_INCREMENT,
+  `catalog_id` BIGINT(13) NOT NULL COMMENT 'catalog ID',
+  `title` VARCHAR(50) DEFAULT NULL COMMENT 'title',
+  `title_alias` VARCHAR(50) DEFAULT NULL COMMENT 'short title',
+  `content` TEXT DEFAULT NULL COMMENT 'content in Markdown format',
+  `content_html` TEXT DEFAULT NULL COMMENT 'content converted from Markdown to HTML',
+  `create_by` varchar(255) DEFAULT NULL COMMENT 'creator',
+  `create_time` datetime DEFAULT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'creation time',
+  `update_by` varchar(255) DEFAULT NULL COMMENT 'updater',
+  `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
+  `is_delete` tinyint(1) DEFAULT '0' COMMENT '0: not deleted (default), 1: deleted',
+  PRIMARY KEY (`id`)
+) ENGINE=INNODB DEFAULT CHARSET=utf8 COMMENT='user guide knowledge base article';
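
These two tables back the new dss-guide knowledge base; the GuideCatalogMapper and GuideChapterMapper added later in this commit query them through MyBatis-Plus. The corresponding entity classes are not visible in this view, so the following is only a plausible sketch of a GuideCatalog entity inferred from the dss_guide_catalog columns; the annotations and field names are assumptions, not the repository's actual class.

package com.webank.wedatasphere.dss.guide.server.entity;

import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import java.util.Date;

/**
 * Hypothetical sketch of a GuideCatalog entity inferred from the dss_guide_catalog DDL above;
 * the real class in the repository may differ.
 */
@Data
@TableName("dss_guide_catalog")
public class GuideCatalog {
    @TableId(type = IdType.AUTO)
    private Long id;
    private Long parentId;      // -1 means top-level catalog
    private String title;
    private String description;
    private String createBy;
    private Date createTime;
    private String updateBy;
    private Date updateTime;
    private Integer isDelete;   // 0: not deleted (default), 1: deleted
}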

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblDetailInfo.java

Lines changed: 4 additions & 4 deletions
@@ -21,10 +21,10 @@ public class HiveTblDetailInfo implements Serializable {
 
     @Data
     public static class HiveTblBasicInfo extends HiveTblSimpleInfo {
-        private String store;
-        private String comment;
-        private Set<String> labels;
-        private Boolean isParTbl;
+        private String store;     // storage size
+        private Boolean isParTbl; // whether the table is partitioned
+        private String tableType; // Hive table type: EXTERNAL_TABLE, MANAGED_TABLE
+        private String location;  // Hive table storage path
     }
 
     @Data

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/entity/HiveTblSimpleInfo.java

Lines changed: 6 additions & 2 deletions
@@ -3,8 +3,10 @@
 import lombok.Data;
 
 import java.util.List;
+import java.util.Set;
 
 /**
+ * @author suyc
  * @Classname HiveTblSimpleInfo
 * @Description TODO
 * @Date 2021/8/24 10:17
@@ -14,10 +16,12 @@
 public class HiveTblSimpleInfo {
     private String guid;
     private String name;
+    private String dbName;
     private String qualifiedName;
+    private List<String> columns;
     private String createTime;
     private String owner;
-    private String dbName;
+    private String comment;
+    private Set<String> labels;
     private List<HiveTblDetailInfo.HiveClassificationInfo> classifications;
-
 }

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/restful/AssetTblRestful.java

Lines changed: 4 additions & 4 deletions
@@ -69,13 +69,13 @@ public Message searchHiveTbl(@RequestParam(required = false) String classificati
                              @RequestParam(defaultValue = "") String keyword,
                              @RequestParam(defaultValue = DEFAULT_LIMIT) int limit,
                              @RequestParam(defaultValue = DEFAULT_OFFSET) int offset) throws Exception {
-        List<HiveTblSimpleInfo> hiveTblBasicList = assetService.searchHiveTable(classification,'*'+query+'*',limit,offset);
-        if(hiveTblBasicList ==null || keyword ==null || keyword.trim().equals("")) {
-            return Message.ok().data("result",hiveTblBasicList);
+        List<HiveTblSimpleInfo> hiveTblSimpleInfoList = assetService.searchHiveTable(classification,query.trim(),limit,offset);
+        if(hiveTblSimpleInfoList ==null || keyword ==null || keyword.trim().equals("")) {
+            return Message.ok().data("result",hiveTblSimpleInfoList);
         }
         else {
             Pattern regex = Pattern.compile(keyword);
-            return Message.ok().data("result",hiveTblBasicList.stream().filter(ele -> regex.matcher(ele.getOwner()).find()).collect(Collectors.toList()));
+            return Message.ok().data("result",hiveTblSimpleInfoList.stream().filter(ele -> regex.matcher(ele.getOwner()).find()).collect(Collectors.toList()));
         }
     }

dss-data-governance/dss-data-asset-server/src/main/java/com/webank/wedatasphere/dss/data/asset/service/impl/AssetServiceImpl.java

Lines changed: 34 additions & 18 deletions
@@ -18,6 +18,7 @@
 import org.apache.atlas.model.instance.AtlasClassification;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.model.instance.AtlasEntityHeader;
+import org.apache.atlas.model.instance.AtlasRelatedObjectId;
 import org.apache.atlas.model.lineage.AtlasLineageInfo;
 import org.apache.atlas.model.typedef.AtlasClassificationDef;
 import org.slf4j.Logger;
@@ -31,6 +32,7 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
 /**
@@ -72,37 +74,49 @@ public List<HiveTblSimpleInfo> searchHiveTable(String classification, String que
                                                int limit, int offset) throws DataGovernanceException {
         List<AtlasEntityHeader> atlasEntityHeaders = null;
         try {
-            atlasEntityHeaders = atlasService.searchHiveTable(classification, query, true, limit, offset);
+            atlasEntityHeaders = atlasService.searchHiveTable(classification, "*" + query + "*", true, limit, offset);
         } catch (AtlasServiceException ex) {
             throw new DataGovernanceException(ex.getMessage());
         }
 
         if (atlasEntityHeaders != null) {
+            // filter columns by regex match against the query keyword
+            Pattern regex = Pattern.compile(query);
             return atlasEntityHeaders.parallelStream().filter(Objects::nonNull).map(atlasEntityHeader -> {
-                HiveTblSimpleInfo hiveTblBasic = new HiveTblSimpleInfo();
-                hiveTblBasic.setGuid(atlasEntityHeader.getGuid());
-                hiveTblBasic.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
+                HiveTblSimpleInfo hiveTblSimpleInfo = new HiveTblSimpleInfo();
+                hiveTblSimpleInfo.setGuid(atlasEntityHeader.getGuid());
+                hiveTblSimpleInfo.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
                 String qualifiedName =stringValueOfObject(atlasEntityHeader.getAttribute("qualifiedName"));
-                hiveTblBasic.setQualifiedName(qualifiedName);
-                hiveTblBasic.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
+                hiveTblSimpleInfo.setQualifiedName(qualifiedName);
+                hiveTblSimpleInfo.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
                 Object createTime = atlasEntityHeader.getAttribute("createTime");
                 if (createTime != null) {
-                    hiveTblBasic.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
+                    hiveTblSimpleInfo.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
                 }
                 if(null != qualifiedName && qualifiedName.split("\\.").length >0){
                     String dbName = qualifiedName.split("\\.")[0];
-                    hiveTblBasic.setDbName(dbName);
+                    hiveTblSimpleInfo.setDbName(dbName);
                 }
+                hiveTblSimpleInfo.setLabels(atlasEntityHeader.getLabels());
 
                 try {
                     AtlasEntity atlasEntity = atlasService.getHiveTblByGuid(atlasEntityHeader.getGuid());
+
+                    // comment
+                    hiveTblSimpleInfo.setComment(stringValueOfObject(atlasEntity.getAttribute("comment")));
+                    List<Map<String,Object>> atlasRelatedObjectIdListForColumns = (List<Map<String,Object>>)atlasEntity.getRelationshipAttribute("columns");
+                    if(null != query && !query.trim().equalsIgnoreCase("")) {
+                        hiveTblSimpleInfo.setColumns(atlasRelatedObjectIdListForColumns.stream().map(columnMap -> columnMap.getOrDefault("displayText","").toString())
+                                .filter(columnName -> regex.matcher(columnName).find()).collect(Collectors.toList()));
+                    }
+                    // classifications
                     List<HiveTblDetailInfo.HiveClassificationInfo> classificationInfoList = getClassificationInfoList(atlasEntity);
-                    hiveTblBasic.setClassifications(classificationInfoList);
+                    hiveTblSimpleInfo.setClassifications(classificationInfoList);
                 } catch (AtlasServiceException ex) {
                     logger.error(ex.getMessage());
                 }
 
-                return hiveTblBasic;
+                return hiveTblSimpleInfo;
             }).collect(Collectors.toList());
         }
         return null;
@@ -155,8 +169,8 @@ public HiveTblDetailInfo getHiveTblDetail(String guid) throws DataGovernanceExce
     }
 
     private HiveTblDetailInfo.HiveTblBasicInfo getBasicInfo(String guid, AtlasEntity atlasEntity) throws AtlasServiceException {
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         int storage = 0;
         String db_name = String.valueOf(atlasEntity.getAttributes().get("qualifiedName")).split("@")[0];
         String tableName = db_name.split("\\.")[1];
@@ -169,14 +183,16 @@ private HiveTblDetailInfo.HiveTblBasicInfo getBasicInfo(String guid, AtlasEntity
 
         HiveTblDetailInfo.HiveTblBasicInfo basic = new HiveTblDetailInfo.HiveTblBasicInfo();
         basic.setName(tableName);
-        basic.setOwner(String.valueOf(atlasEntity.getAttributes().get("owner")));
+        basic.setOwner(String.valueOf(atlasEntity.getAttributes().getOrDefault("owner","NULL")));
         basic.setCreateTime(new java.text.SimpleDateFormat("yyyy MM-dd HH:mm:ss").format(atlasEntity.getCreateTime()));
         basic.setStore(String.valueOf(storage));
-        basic.setComment(String.valueOf(atlasEntity.getAttributes().get("comment")));
+        basic.setComment(String.valueOf(atlasEntity.getAttributes().getOrDefault("comment","NULL")));
         Set<String> labels = atlasEntity.getLabels();
         basic.setLabels(labels);
         basic.setIsParTbl(isPartTable);
         basic.setGuid(guid);
+        basic.setTableType(hiveTblAttributesMap.getOrDefault("tableType","NULL").toString());
+        basic.setLocation(hiveTblAttributesMap.getOrDefault("location","NULL").toString());
 
         return basic;
     }
@@ -288,8 +304,8 @@ public String getTbSelect(String guid) throws DataGovernanceException {
         for (AtlasEntity hiveColumnsByGuid : hiveColumnsByGuids) {
             fields.add((String) hiveColumnsByGuid.getAttributes().get("name"));
         }
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         if (isPartTable == true) {
             List<String> partguids = new ArrayList<>();
             List<LinkedTreeMap<String, String>> partitionKeys = (List<LinkedTreeMap<String, String>>) atlasEntity.getAttributes().get("partitionKeys");
@@ -355,8 +371,8 @@ public String getTbCreate(String guid) throws DataGovernanceException {
             sql.append(field);
         }
         sql.append(") @$ ");
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         if (isPartTable == true) {
             sql.append("PARTITIONED BY @$ ( @$ ");
             List<String> partguids = new ArrayList<>();
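
Two behavioural changes stand out in searchHiveTable: the wildcard wrapping ("*" + query + "*") now happens in the service instead of the REST layer, and the raw query string doubles as a regular expression used to filter the table's related column names. A stand-alone illustration of that column filter follows; it is not code from the commit and the values are made up.

import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

// Sketch of the column filtering added in searchHiveTable: the query string is
// compiled as a regex and matched against each column's displayText with find(),
// i.e. a substring / regex match rather than an exact match.
public class ColumnFilterSketch {
    public static void main(String[] args) {
        String query = "id"; // hypothetical search term
        Pattern regex = Pattern.compile(query);
        List<String> columns = Arrays.asList("user_id", "order_id", "name");
        List<String> matched = columns.stream()
                .filter(c -> regex.matcher(c).find())
                .collect(Collectors.toList());
        System.out.println(matched); // prints [user_id, order_id]
    }
}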

dss-data-governance/dss-data-governance-common/src/main/java/com/webank/wedatasphere/dss/data/common/atlas/AtlasService.java

Lines changed: 11 additions & 5 deletions
@@ -7,12 +7,14 @@
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonParseException;
+import com.google.gson.internal.LinkedTreeMap;
 import com.webank.wedatasphere.dss.data.common.conf.AtlasConf;
 import org.apache.atlas.AtlasServiceException;
 import org.apache.atlas.model.discovery.AtlasSearchResult;
 import org.apache.atlas.model.instance.AtlasClassification;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.model.instance.AtlasEntityHeader;
+import org.apache.atlas.model.instance.AtlasRelatedObjectId;
 import org.apache.atlas.model.lineage.AtlasLineageInfo;
 import org.apache.atlas.model.typedef.AtlasClassificationDef;
 import org.apache.atlas.model.typedef.AtlasTypesDef;
@@ -189,17 +191,21 @@ public String getHiveTblNameById(String guid) throws AtlasServiceException {
     }
 
     /**
-     * Get the hive tbl name and whether it is a partitioned table by guid
+     * Get the hive tbl name, whether it is a partitioned table, whether it is an external table, and the external table path by guid
     */
-    public Map<String, Object> getHiveTblNameAndIsPartById(String guid) throws AtlasServiceException {
-        Map<String, Object> result = new HashMap<>(2);
+    public Map<String, Object> getHiveTblAttributesByGuid(String guid) throws AtlasServiceException {
+        Map<String, Object> result = new HashMap<>(4);
 
-        String jsonStr = atlasClient.getEntityByGuidForString(guid, true, false);
+        String jsonStr = atlasClient.getEntityByGuidForString(guid, false, false);
         AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = gson.fromJson(jsonStr, AtlasEntity.AtlasEntityWithExtInfo.class);
 
         result.put("tblName", atlasEntityWithExtInfo.getEntity().getAttribute("qualifiedName").toString().split("@")[0]);
         result.put("isPartition", ((List) atlasEntityWithExtInfo.getEntity().getAttribute("partitionKeys")).size() > 0);
-
+        result.put("tableType",atlasEntityWithExtInfo.getEntity().getAttribute("tableType"));
+        Map<String,Object> sdMap = (LinkedTreeMap)atlasEntityWithExtInfo.getEntity().getRelationshipAttribute("sd");
+        if(null != sdMap) {
+            result.put("location",atlasEntityWithExtInfo.getReferredEntities().get(sdMap.get("guid")).getAttribute("location"));
+        }
         return result;
     }

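getHiveTblNameAndIsPartById is renamed to getHiveTblAttributesByGuid and now also reports tableType and, when an "sd" (storage descriptor) relationship is present, the table's location. A rough sketch of the resulting map contract as callers such as AssetServiceImpl.getBasicInfo consume it; the sample values are hypothetical and the class is not part of the commit.

import java.util.HashMap;
import java.util.Map;

// Sketch of the map returned by getHiveTblAttributesByGuid, based on the keys
// populated in this diff: tblName, isPartition, tableType, and (optionally) location.
public class HiveTblAttributesSketch {
    public static void main(String[] args) {
        Map<String, Object> attrs = new HashMap<>(4);
        attrs.put("tblName", "default.demo_tbl");                 // hypothetical value
        attrs.put("isPartition", Boolean.TRUE);
        attrs.put("tableType", "MANAGED_TABLE");                  // or EXTERNAL_TABLE
        attrs.put("location", "hdfs:///warehouse/demo_tbl");      // hypothetical path; absent if no "sd" relationship

        // Consumers read it defensively, mirroring getBasicInfo's getOrDefault calls:
        Boolean isPartTable = (Boolean) attrs.get("isPartition");
        String tableType = attrs.getOrDefault("tableType", "NULL").toString();
        String location = attrs.getOrDefault("location", "NULL").toString();
        System.out.println(isPartTable + " " + tableType + " " + location);
    }
}
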
dss-guide/dss-guide-server/src/main/java/com/webank/wedatasphere/dss/guide/server/conf/GuideConf.java

Lines changed: 1 addition & 0 deletions
@@ -12,4 +12,5 @@
 public interface GuideConf {
     CommonVars<String> GUIDE_CONTENT_IMAGES_PATH = CommonVars.apply("guide.content.images.path", "/usr/local/anlexander/all_bak/dss_linkis/dss-linkis-1.0.2/images");
 
+    CommonVars<String> GUIDE_CHAPTER_IMAGES_PATH = CommonVars.apply("guide.chapter.images.path", "/usr/local/anlexander/all_bak/dss_linkis/dss-linkis-1.0.2/images");
 }

dss-guide/dss-guide-server/src/main/java/com/webank/wedatasphere/dss/guide/server/dao/GuideCatalogMapper.java

Lines changed: 28 additions & 0 deletions

@@ -0,0 +1,28 @@
+package com.webank.wedatasphere.dss.guide.server.dao;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.webank.wedatasphere.dss.guide.server.entity.GuideCatalog;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+import org.apache.ibatis.annotations.Select;
+
+import java.util.List;
+
+/**
+ * @author suyc
+ * @Classname GuideCatalogMapper
+ * @Description TODO
+ * @Date 2022/1/13 20:30
+ * @Created by suyc
+ */
+@Mapper
+public interface GuideCatalogMapper extends BaseMapper<GuideCatalog> {
+    /**
+     * parent_id = -1 indicates that the catalog is a top-level catalog
+     */
+    @Select("SELECT * FROM dss_guide_catalog WHERE is_delete =0 AND parent_id =-1 ORDER BY id ASC")
+    List<GuideCatalog> queryGuideCatalogListForTop();
+
+    @Select("SELECT * FROM dss_guide_catalog WHERE is_delete =0 AND parent_id =#{id} ORDER BY id ASC")
+    List<GuideCatalog> queryGuideCatalogChildrenById(@Param("id") Long id);
+}
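
The two queries return the top-level catalogs (parent_id = -1) and the children of a given catalog. The service layer that assembles them into a tree is not part of this view, so the following is only a hypothetical usage sketch; the class name, the getId() accessor, and the Spring wiring are assumptions.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.springframework.stereotype.Service;

import com.webank.wedatasphere.dss.guide.server.dao.GuideCatalogMapper;
import com.webank.wedatasphere.dss.guide.server.entity.GuideCatalog;

// Hypothetical service-side usage of GuideCatalogMapper: load the top-level
// catalogs, then fetch each root's direct children.
@Service
public class GuideCatalogTreeSketch {
    private final GuideCatalogMapper guideCatalogMapper;

    public GuideCatalogTreeSketch(GuideCatalogMapper guideCatalogMapper) {
        this.guideCatalogMapper = guideCatalogMapper;
    }

    public Map<GuideCatalog, List<GuideCatalog>> loadTwoLevelTree() {
        Map<GuideCatalog, List<GuideCatalog>> tree = new LinkedHashMap<>();
        for (GuideCatalog root : guideCatalogMapper.queryGuideCatalogListForTop()) {
            // getId() assumes the entity exposes the primary key via Lombok/getters
            tree.put(root, guideCatalogMapper.queryGuideCatalogChildrenById(root.getId()));
        }
        return tree;
    }
}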

dss-guide/dss-guide-server/src/main/java/com/webank/wedatasphere/dss/guide/server/dao/GuideChapterMapper.java

Lines changed: 25 additions & 0 deletions

@@ -0,0 +1,25 @@
+package com.webank.wedatasphere.dss.guide.server.dao;
+
+import com.baomidou.mybatisplus.core.mapper.BaseMapper;
+import com.webank.wedatasphere.dss.guide.server.entity.GuideChapter;
+import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
+import org.apache.ibatis.annotations.Select;
+
+import java.util.List;
+
+/**
+ * @author suyc
+ * @Classname GuideChapterMapper
+ * @Description TODO
+ * @Date 2022/1/13 20:31
+ * @Created by suyc
+ */
+@Mapper
+public interface GuideChapterMapper extends BaseMapper<GuideChapter> {
+    @Select("SELECT * FROM dss_guide_chapter WHERE is_delete =0 AND catalog_id =#{catalogId} ORDER BY id ASC")
+    List<GuideChapter> queryGuideChapterListByCatalogId(@Param("catalogId") Long catalogId);
+
+    @Select("SELECT * FROM dss_guide_chapter WHERE is_delete =0 AND (content LIKE CONCAT('%', #{keyword}, '%') OR title LIKE CONCAT('%', #{keyword}, '%')) ORDER BY id ASC")
+    List<GuideChapter> searchGuideChapterListByKeyword(@Param("keyword") String keyword);
+}
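
searchGuideChapterListByKeyword is a plain LIKE search over title and content. Below is a hypothetical caller that guards against an empty keyword, which would otherwise become LIKE '%%' and match every row; only the mapper method and entity names come from this commit, the rest is illustrative.

import java.util.Collections;
import java.util.List;

import com.webank.wedatasphere.dss.guide.server.dao.GuideChapterMapper;
import com.webank.wedatasphere.dss.guide.server.entity.GuideChapter;

// Hypothetical helper around GuideChapterMapper.searchGuideChapterListByKeyword.
public class GuideChapterSearchSketch {
    public static List<GuideChapter> searchChapters(GuideChapterMapper mapper, String keyword) {
        if (keyword == null || keyword.trim().isEmpty()) {
            // an empty keyword would match every chapter via LIKE '%%'
            return Collections.emptyList();
        }
        return mapper.searchGuideChapterListByKeyword(keyword.trim());
    }
}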
