 import org.apache.atlas.model.instance.AtlasClassification;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.model.instance.AtlasEntityHeader;
+import org.apache.atlas.model.instance.AtlasRelatedObjectId;
 import org.apache.atlas.model.lineage.AtlasLineageInfo;
 import org.apache.atlas.model.typedef.AtlasClassificationDef;
 import org.slf4j.Logger;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;

 /**
@@ -72,37 +74,49 @@ public List<HiveTblSimpleInfo> searchHiveTable(String classification, String que
                                            int limit, int offset) throws DataGovernanceException {
         List<AtlasEntityHeader> atlasEntityHeaders = null;
         try {
-            atlasEntityHeaders = atlasService.searchHiveTable(classification, query, true, limit, offset);
+            atlasEntityHeaders = atlasService.searchHiveTable(classification, "*" + query + "*", true, limit, offset);
         } catch (AtlasServiceException ex) {
             throw new DataGovernanceException(ex.getMessage());
         }

         if (atlasEntityHeaders != null) {
+            // filter each table's columns by regex-matching them against the keyword
+            Pattern regex = Pattern.compile(query);
             return atlasEntityHeaders.parallelStream().filter(Objects::nonNull).map(atlasEntityHeader -> {
-                HiveTblSimpleInfo hiveTblBasic = new HiveTblSimpleInfo();
-                hiveTblBasic.setGuid(atlasEntityHeader.getGuid());
-                hiveTblBasic.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
+                HiveTblSimpleInfo hiveTblSimpleInfo = new HiveTblSimpleInfo();
+                hiveTblSimpleInfo.setGuid(atlasEntityHeader.getGuid());
+                hiveTblSimpleInfo.setName(stringValueOfObject(atlasEntityHeader.getAttribute("name")));
                 String qualifiedName = stringValueOfObject(atlasEntityHeader.getAttribute("qualifiedName"));
-                hiveTblBasic.setQualifiedName(qualifiedName);
-                hiveTblBasic.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
+                hiveTblSimpleInfo.setQualifiedName(qualifiedName);
+                hiveTblSimpleInfo.setOwner(stringValueOfObject(atlasEntityHeader.getAttribute("owner")));
                 Object createTime = atlasEntityHeader.getAttribute("createTime");
                 if (createTime != null) {
-                    hiveTblBasic.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
+                    hiveTblSimpleInfo.setCreateTime(DateUtil.unixToTimeStr((Double) createTime));
                 }
                 if (null != qualifiedName && qualifiedName.split("\\.").length > 0) {
                     String dbName = qualifiedName.split("\\.")[0];
-                    hiveTblBasic.setDbName(dbName);
+                    hiveTblSimpleInfo.setDbName(dbName);
                 }
+                hiveTblSimpleInfo.setLabels(atlasEntityHeader.getLabels());

                 try {
                     AtlasEntity atlasEntity = atlasService.getHiveTblByGuid(atlasEntityHeader.getGuid());
+
+                    //comment
+                    hiveTblSimpleInfo.setComment(stringValueOfObject(atlasEntity.getAttribute("comment")));
+                    List<Map<String, Object>> atlasRelatedObjectIdListForColumns = (List<Map<String, Object>>) atlasEntity.getRelationshipAttribute("columns");
+                    if (null != query && !query.trim().equalsIgnoreCase("")) {
+                        hiveTblSimpleInfo.setColumns(atlasRelatedObjectIdListForColumns.stream().map(columnMap -> columnMap.getOrDefault("displayText", "").toString())
+                                .filter(columnName -> regex.matcher(columnName).find()).collect(Collectors.toList()));
+                    }
+                    //classifications
                     List<HiveTblDetailInfo.HiveClassificationInfo> classificationInfoList = getClassificationInfoList(atlasEntity);
-                    hiveTblBasic.setClassifications(classificationInfoList);
+                    hiveTblSimpleInfo.setClassifications(classificationInfoList);
                 } catch (AtlasServiceException ex) {
                     logger.error(ex.getMessage());
                 }

-                return hiveTblBasic;
+                return hiveTblSimpleInfo;
             }).collect(Collectors.toList());
         }
         return null;
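
Note on the keyword handling in this hunk: the same user keyword is wrapped in "*...*" for the Atlas table search and separately compiled as a Java regex to filter the column display names returned for each table. A minimal, self-contained sketch of that column-filtering step, assuming the keyword is plain text (class and column names below are made up for illustration):

import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

public class ColumnFilterSketch {
    public static void main(String[] args) {
        String query = "id";                    // user keyword, assumed to contain no regex metacharacters
        Pattern regex = Pattern.compile(query); // as in the patch; Pattern.quote(query) would treat it literally
        List<String> displayTexts = Arrays.asList("order_id", "user_id", "amount");
        // keep only the column names the keyword matches somewhere inside
        List<String> matched = displayTexts.stream()
                .filter(name -> regex.matcher(name).find())
                .collect(Collectors.toList());
        System.out.println(matched);            // prints [order_id, user_id]
    }
}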
@@ -155,8 +169,8 @@ public HiveTblDetailInfo getHiveTblDetail(String guid) throws DataGovernanceExce
     }

     private HiveTblDetailInfo.HiveTblBasicInfo getBasicInfo(String guid, AtlasEntity atlasEntity) throws AtlasServiceException {
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         int storage = 0;
         String db_name = String.valueOf(atlasEntity.getAttributes().get("qualifiedName")).split("@")[0];
         String tableName = db_name.split("\\.")[1];
@@ -169,14 +183,16 @@ private HiveTblDetailInfo.HiveTblBasicInfo getBasicInfo(String guid, AtlasEntity
         HiveTblDetailInfo.HiveTblBasicInfo basic = new HiveTblDetailInfo.HiveTblBasicInfo();
         basic.setName(tableName);
-        basic.setOwner(String.valueOf(atlasEntity.getAttributes().get("owner")));
+        basic.setOwner(String.valueOf(atlasEntity.getAttributes().getOrDefault("owner", "NULL")));
         basic.setCreateTime(new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(atlasEntity.getCreateTime()));
         basic.setStore(String.valueOf(storage));
-        basic.setComment(String.valueOf(atlasEntity.getAttributes().get("comment")));
+        basic.setComment(String.valueOf(atlasEntity.getAttributes().getOrDefault("comment", "NULL")));
         Set<String> labels = atlasEntity.getLabels();
         basic.setLabels(labels);
         basic.setIsParTbl(isPartTable);
         basic.setGuid(guid);
+        basic.setTableType(hiveTblAttributesMap.getOrDefault("tableType", "NULL").toString());
+        basic.setLocation(hiveTblAttributesMap.getOrDefault("location", "NULL").toString());

         return basic;
     }
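
A quick illustration of why getOrDefault is used in this hunk: String.valueOf(map.get(key)) turns a missing attribute into the literal string "null", while getOrDefault lets the caller choose the placeholder. A standalone sketch under that assumption (the attribute name is just an example):

import java.util.HashMap;
import java.util.Map;

public class GetOrDefaultSketch {
    public static void main(String[] args) {
        Map<String, Object> attributes = new HashMap<>();   // no "comment" entry present
        // old style: String.valueOf(null) produces the string "null"
        System.out.println(String.valueOf(attributes.get("comment")));   // null
        // patched style: an explicit placeholder is returned instead
        System.out.println(attributes.getOrDefault("comment", "NULL"));  // NULL
    }
}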
@@ -288,8 +304,8 @@ public String getTbSelect(String guid) throws DataGovernanceException {
         for (AtlasEntity hiveColumnsByGuid : hiveColumnsByGuids) {
             fields.add((String) hiveColumnsByGuid.getAttributes().get("name"));
         }
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         if (isPartTable == true) {
             List<String> partguids = new ArrayList<>();
             List<LinkedTreeMap<String, String>> partitionKeys = (List<LinkedTreeMap<String, String>>) atlasEntity.getAttributes().get("partitionKeys");
@@ -355,8 +371,8 @@ public String getTbCreate(String guid) throws DataGovernanceException {
             sql.append(field);
         }
         sql.append(") @$ ");
-        Map<String, Object> hiveTblNameAndIsPartById = atlasService.getHiveTblNameAndIsPartById(guid);
-        Boolean isPartTable = (Boolean) hiveTblNameAndIsPartById.get("isPartition");
+        Map<String, Object> hiveTblAttributesMap = atlasService.getHiveTblAttributesByGuid(guid);
+        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
         if (isPartTable == true) {
             sql.append("PARTITIONED BY @$ ( @$ ");
             List<String> partguids = new ArrayList<>();
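
One defensive note on the isPartition flag read in the last two hunks: unboxing a Boolean pulled from a map (isPartTable == true) throws a NullPointerException if the key is absent. A small sketch of a null-safe variant, assuming the map may lack the flag (map contents below are hypothetical):

import java.util.HashMap;
import java.util.Map;

public class IsPartitionCheckSketch {
    public static void main(String[] args) {
        Map<String, Object> hiveTblAttributesMap = new HashMap<>();  // "isPartition" may be missing
        Boolean isPartTable = (Boolean) hiveTblAttributesMap.get("isPartition");
        // if (isPartTable == true) { ... } would NPE here because isPartTable is null
        if (Boolean.TRUE.equals(isPartTable)) {
            System.out.println("partitioned table");
        } else {
            System.out.println("not partitioned (or flag missing)");
        }
    }
}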