@@ -167,15 +167,6 @@ impl Catalog {
         Ok(db)
     }
 
-    pub fn add_table_to_lookup(&self, db_id: DbId, table_id: TableId, name: Arc<str>) {
-        self.inner
-            .write()
-            .table_map
-            .entry(db_id)
-            .or_default()
-            .insert(table_id, name);
-    }
-
     pub fn db_name_to_id(&self, db_name: Arc<str>) -> Option<DbId> {
         self.inner.read().db_map.get_by_right(&db_name).copied()
     }
@@ -184,23 +175,6 @@ impl Catalog {
         self.inner.read().db_map.get_by_left(&db_id).map(Arc::clone)
     }
 
-    pub fn table_name_to_id(&self, db_id: DbId, table_name: Arc<str>) -> Option<TableId> {
-        self.inner
-            .read()
-            .table_map
-            .get(&db_id)
-            .and_then(|map| map.get_by_right(&table_name).copied())
-    }
-
-    pub fn table_id_to_name(&self, db_id: DbId, table_id: TableId) -> Option<Arc<str>> {
-        self.inner
-            .read()
-            .table_map
-            .get(&db_id)
-            .and_then(|map| map.get_by_left(&table_id))
-            .map(Arc::clone)
-    }
-
     pub fn db_schema(&self, id: &DbId) -> Option<Arc<DatabaseSchema>> {
         self.inner.read().databases.get(id).cloned()
     }
@@ -267,19 +241,6 @@ impl Catalog {
 
     pub fn insert_database(&mut self, db: DatabaseSchema) {
         let mut inner = self.inner.write();
-        for (table_id, table_def) in db.tables.iter() {
-            inner
-                .table_map
-                .entry(db.id)
-                .and_modify(|map: &mut BiHashMap<TableId, Arc<str>>| {
-                    map.insert(*table_id, Arc::clone(&table_def.table_name));
-                })
-                .or_insert_with(|| {
-                    let mut map = BiHashMap::new();
-                    map.insert(*table_id, Arc::clone(&table_def.table_name));
-                    map
-                });
-        }
         inner.db_map.insert(db.id, Arc::clone(&db.name));
         inner.databases.insert(db.id, Arc::new(db));
         inner.sequence = inner.sequence.next();
@@ -321,8 +282,6 @@ pub struct InnerCatalog {
     updated: bool,
     #[serde_as(as = "DbMapAsArray")]
     db_map: BiHashMap<DbId, Arc<str>>,
-    #[serde_as(as = "TableMapAsArray")]
-    pub table_map: HashMap<DbId, BiHashMap<TableId, Arc<str>>>,
 }
 
 serde_with::serde_conv!(
@@ -351,45 +310,33 @@ struct DbMap {
     name: Arc<str>,
 }
 
+#[derive(Debug, Serialize, Deserialize)]
+struct TableMap {
+    table_id: TableId,
+    name: Arc<str>,
+}
+
 serde_with::serde_conv!(
     TableMapAsArray,
-    HashMap<DbId, BiHashMap<TableId, Arc<str>>>,
-    |map: &HashMap<DbId, BiHashMap<TableId, Arc<str>>>| {
-        map.iter().fold(Vec::new(), |mut acc, (db_id, table_map)| {
-            for (table_id, name) in table_map.iter() {
-                acc.push(TableMap {
-                    db_id: *db_id,
-                    table_id: *table_id,
-                    name: Arc::clone(&name)
-                });
-            }
+    BiHashMap<TableId, Arc<str>>,
+    |map: &BiHashMap<TableId, Arc<str>>| {
+        map.iter().fold(Vec::new(), |mut acc, (table_id, name)| {
+            acc.push(TableMap {
+                table_id: *table_id,
+                name: Arc::clone(&name)
+            });
             acc
         })
     },
     |vec: Vec<TableMap>| -> Result<_, std::convert::Infallible> {
-        let mut map = HashMap::new();
+        let mut map = BiHashMap::new();
         for item in vec {
-            map.entry(item.db_id)
-                .and_modify(|entry: &mut BiHashMap<TableId, Arc<str>>| {
-                    entry.insert(item.table_id, Arc::clone(&item.name));
-                })
-                .or_insert_with(|| {
-                    let mut inner_map = BiHashMap::new();
-                    inner_map.insert(item.table_id, Arc::clone(&item.name));
-                    inner_map
-                });
+            map.insert(item.table_id, item.name);
         }
         Ok(map)
     }
 );
 
-#[derive(Debug, Serialize, Deserialize)]
-struct TableMap {
-    db_id: DbId,
-    table_id: TableId,
-    name: Arc<str>,
-}
-
 serde_with::serde_conv!(
     DatabasesAsArray,
     HashMap<DbId, Arc<DatabaseSchema>>,
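For reference, a minimal, self-contained sketch of how this `serde_conv!` adapter behaves: the in-memory `BiHashMap<TableId, Arc<str>>` is written out as a flat array of `{table_id, name}` entries and rebuilt on deserialization. Everything below is illustrative rather than part of the change: `TableId` is a plain `u32` stand-in for the catalog's id newtype, `Wrapper` stands in for the struct carrying the map, and the demo assumes the `bimap`, `serde_with`, and `serde_json` crates plus serde's `rc` feature (needed to serialize `Arc<str>`).

use std::sync::Arc;

use bimap::BiHashMap;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;

type TableId = u32; // illustrative stand-in for the catalog's TableId newtype

#[derive(Debug, Serialize, Deserialize)]
struct TableMap {
    table_id: TableId,
    name: Arc<str>, // Arc<str> (de)serialization requires serde's "rc" feature
}

serde_with::serde_conv!(
    TableMapAsArray,
    BiHashMap<TableId, Arc<str>>,
    |map: &BiHashMap<TableId, Arc<str>>| {
        // Serialize: flatten the bidirectional map into a Vec of entries.
        map.iter()
            .map(|(table_id, name)| TableMap {
                table_id: *table_id,
                name: Arc::clone(name),
            })
            .collect::<Vec<_>>()
    },
    |vec: Vec<TableMap>| -> Result<_, std::convert::Infallible> {
        // Deserialize: rebuild the bidirectional map from the entries.
        let mut map = BiHashMap::new();
        for item in vec {
            map.insert(item.table_id, item.name);
        }
        Ok(map)
    }
);

#[serde_as]
#[derive(Debug, Serialize, Deserialize)]
struct Wrapper {
    #[serde_as(as = "TableMapAsArray")]
    table_map: BiHashMap<TableId, Arc<str>>,
}

fn main() {
    let mut table_map = BiHashMap::new();
    table_map.insert(0, Arc::from("cpu"));
    table_map.insert(1, Arc::from("mem"));

    let json = serde_json::to_string(&Wrapper { table_map }).unwrap();
    // e.g. {"table_map":[{"table_id":0,"name":"cpu"},{"table_id":1,"name":"mem"}]}
    let back: Wrapper = serde_json::from_str(&json).unwrap();
    assert_eq!(back.table_map.get_by_left(&0).map(Arc::clone), Some(Arc::from("cpu")));
}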
@@ -406,17 +353,20 @@ serde_with::serde_conv!(
     |vec: Vec<DatabasesSerialized>| -> Result<_, String> {
         vec.into_iter().fold(Ok(HashMap::new()), |acc, db| {
             let mut acc = acc?;
+            let mut table_map = BiHashMap::new();
             if let Some(_) = acc.insert(db.id, Arc::new(DatabaseSchema {
                 id: db.id,
                 name: Arc::clone(&db.name),
                 tables: db.tables.into_iter().fold(Ok(BTreeMap::new()), |acc, table| {
                     let mut acc = acc?;
                     let table_name = Arc::clone(&table.table_name);
+                    table_map.insert(table.table_id, Arc::clone(&table_name));
                     if let Some(_) = acc.insert(table.table_id, table) {
                         return Err(format!("found duplicate table: {}", table_name));
                     }
                     Ok(acc)
-                })?
+                })?,
+                table_map
             })) {
                 return Err(format!("found duplicate db: {}", db.name));
             }
@@ -441,7 +391,6 @@ impl InnerCatalog {
             instance_id,
             updated: false,
             db_map: BiHashMap::new(),
-            table_map: HashMap::new(),
         }
     }
 
@@ -471,18 +420,6 @@ impl InnerCatalog {
                 self.sequence = self.sequence.next();
                 self.updated = true;
                 self.db_map.insert(new_db.id, Arc::clone(&new_db.name));
-                for (table_id, table_def) in new_db.tables.iter() {
-                    self.table_map
-                        .entry(new_db.id)
-                        .and_modify(|map| {
-                            map.insert(*table_id, Arc::clone(&table_def.table_name));
-                        })
-                        .or_insert_with(|| {
-                            let mut map = BiHashMap::new();
-                            map.insert(*table_id, Arc::clone(&table_def.table_name));
-                            map
-                        });
-                }
             }
         } else {
             if self.databases.len() >= Catalog::NUM_DBS_LIMIT {
@@ -499,18 +436,6 @@ impl InnerCatalog {
             self.sequence = self.sequence.next();
             self.updated = true;
             self.db_map.insert(new_db.id, Arc::clone(&new_db.name));
-            for (table_id, table_def) in new_db.tables.iter() {
-                self.table_map
-                    .entry(new_db.id)
-                    .and_modify(|map| {
-                        map.insert(*table_id, Arc::clone(&table_def.table_name));
-                    })
-                    .or_insert_with(|| {
-                        let mut map = BiHashMap::new();
-                        map.insert(*table_id, Arc::clone(&table_def.table_name));
-                        map
-                    });
-            }
         }
 
         Ok(())
@@ -532,6 +457,8 @@ pub struct DatabaseSchema {
     pub name: Arc<str>,
     /// The database is a map of tables
     pub tables: BTreeMap<TableId, TableDefinition>,
+    #[serde_as(as = "TableMapAsArray")]
+    pub table_map: BiHashMap<TableId, Arc<str>>,
 }
 
 impl DatabaseSchema {
@@ -540,6 +467,7 @@ impl DatabaseSchema {
             id,
             name,
             tables: BTreeMap::new(),
+            table_map: BiHashMap::new(),
         }
     }
 
@@ -636,10 +564,17 @@ impl DatabaseSchema {
             }
         }
 
+        // With the final list of updated/new tables update the current mapping
+        let new_table_maps = updated_or_new_tables
+            .iter()
+            .map(|(table_id, table_def)| (*table_id, Arc::clone(&table_def.table_name)))
+            .collect();
+
         Ok(Some(Self {
             id: self.id,
             name: Arc::clone(&self.name),
             tables: updated_or_new_tables,
+            table_map: new_table_maps,
         }))
     }
 }
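A side note on the `.collect()` in this hunk: it relies on `bimap::BiHashMap` implementing `FromIterator<(L, R)>`, so the `(TableId, Arc<str>)` pairs gathered from `updated_or_new_tables` land directly in the bidirectional map. A tiny standalone illustration (the `u32` ids and table names here are made up):

use std::sync::Arc;

use bimap::BiHashMap;

fn main() {
    // Collect (id, name) pairs straight into a BiHashMap, mirroring how
    // new_table_maps is built from updated_or_new_tables above.
    let table_map: BiHashMap<u32, Arc<str>> = ["cpu", "mem"]
        .iter()
        .enumerate()
        .map(|(id, name)| (id as u32, Arc::from(*name)))
        .collect();

    assert_eq!(table_map.get_by_left(&0).map(Arc::clone), Some(Arc::from("cpu")));
}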
@@ -681,6 +616,14 @@ impl DatabaseSchema {
     pub fn tables(&self) -> impl Iterator<Item = &TableDefinition> {
         self.tables.values()
     }
+
+    pub fn table_name_to_id(&self, table_name: Arc<str>) -> Option<TableId> {
+        self.table_map.get_by_right(&table_name).copied()
+    }
+
+    pub fn table_id_to_name(&self, table_id: TableId) -> Option<Arc<str>> {
+        self.table_map.get_by_left(&table_id).map(Arc::clone)
+    }
 }
 
 #[derive(Debug, Eq, PartialEq, Clone)]
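With these accessors, table name/id lookups go through the `DatabaseSchema` returned by `Catalog::db_schema` rather than the removed catalog-level `table_map`. A hypothetical caller-side sketch (the function, the `db_id` argument, and the "cpu" table name are illustrative only, not taken from the change):

// Sketch: resolve a table id by name via the schema's bidirectional map.
fn resolve_table_id(catalog: &Catalog, db_id: DbId) -> Option<TableId> {
    let db_schema = catalog.db_schema(&db_id)?;
    db_schema.table_name_to_id("cpu".into())
}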
@@ -960,6 +903,12 @@ mod tests {
             id: DbId::from(0),
             name: "test_db".into(),
             tables: BTreeMap::new(),
+            table_map: {
+                let mut map = BiHashMap::new();
+                map.insert(TableId::from(1), "test_table_1".into());
+                map.insert(TableId::from(2), "test_table_2".into());
+                map
+            },
         };
         use InfluxColumnType::*;
         use InfluxFieldType::*;
@@ -1106,6 +1055,7 @@ mod tests {
             id: DbId::from(0),
             name: "test".into(),
             tables: BTreeMap::new(),
+            table_map: BiHashMap::new(),
         };
         database.tables.insert(
             TableId::from(0),
@@ -1142,6 +1092,11 @@ mod tests {
             id: DbId::from(0),
             name: "test_db".into(),
             tables: BTreeMap::new(),
+            table_map: {
+                let mut map = BiHashMap::new();
+                map.insert(TableId::from(1), "test_table_1".into());
+                map
+            },
         };
         use InfluxColumnType::*;
         use InfluxFieldType::*;
@@ -1188,6 +1143,11 @@ mod tests {
             id: DbId::from(0),
             name: "test_db".into(),
             tables: BTreeMap::new(),
+            table_map: {
+                let mut map = BiHashMap::new();
+                map.insert(TableId::from(0), "test".into());
+                map
+            },
         };
         use InfluxColumnType::*;
         use InfluxFieldType::*;