@@ -9,7 +9,7 @@ import axios from 'axios';
99import { configs } from '../constants' ;
1010import { getOrigin } from './origin' ;
1111import { get } from 'lodash' ;
12- import { setMetadataHash } from '../state/settings/reducer' ;
12+ import { MetadataProcessingResult , setMetadataHash , setMetadataProcessingCache , clearMetadataProcessingCache } from '../state/settings/reducer' ;
1313import { getState } from '../state/store' ;
1414import { dispatch } from '../state/dispatch' ;
1515import { getAllCards , getAllCardsLegacy , getDatabaseTablesAndModelsWithoutFields , getAllFields } from '../../../apps/src/metabase/helpers/metabaseAPIHelpers' ;
@@ -70,6 +70,9 @@ async function calculateMetadataHash(metadataType: string, metadataValue: any, v
7070// Global map to track ongoing uploads by hash
7071const ongoingUploads = new Map < string , Promise < string > > ( ) ;
7272
73+ // Global map to track ongoing metadata processing by dbId
// NOTE(review): mirrors the `ongoingUploads` dedup pattern above — concurrent
// processAllMetadata() callers for the same dbId await one shared in-flight
// Promise instead of re-firing the expensive API fan-out. The entry is removed
// in that run's `finally` block, so a rejected run does not leave a stuck entry.
74+ const ongoingMetadataProcessing = new Map < number , Promise < MetadataProcessingResult > > ( ) ;
75+
7376/**
7477 * Generic function to upload any metadata type to the backend
7578 * @param metadataType The type of metadata (e.g., 'cards', 'dbSchema')
@@ -151,7 +154,7 @@ async function processMetadataWithCaching(
151154 return currentHash
152155}
153156
154- export async function processAllMetadata ( ) {
// NOTE(review): this span is a unified-diff scrape — `-` lines are the old
// implementation, `+` lines are the new one, and the leading digits on each
// line are diff line numbers fused in by the page extraction, not code.
157+ export async function processAllMetadata ( ) : Promise < MetadataProcessingResult > {
// Explicit Promise<MetadataProcessingResult> return type added in the new
// version — the body is now checked against the declared result shape.
155158 console . log ( '[minusx] Starting coordinated metadata processing with parallel API calls...' )
156159
157160 // Step 1: Start all expensive API calls in parallel
@@ -162,82 +165,126 @@ export async function processAllMetadata() {
// NOTE(review): the hunk boundary above omits old lines 158-161 / new 161-164 —
// the code that obtains `selectedDbId` is not visible in this view; verify the
// omitted lines contain no await between entry and the cache check below.
162165 throw new Error ( 'No database selected for metadata processing' )
163166 }
164167
// Old version: fetched everything inline on every call (no cache, no dedup).
165- const [ dbSchema , { cards, tables : referencedTables } , allFields ] = await Promise . all ( [
166- getDatabaseTablesAndModelsWithoutFields ( ) ,
167- getAllCards ( ) ,
168- fetchDatabaseFields ( { db_id : selectedDbId } )
169- ] )
170-
171- console . log ( '[minusx] All API calls completed. Processing data...' )
172-
173- // Step 2: Create sets for efficient lookup of existing tables
174- const existingTableNames = new Set < string > ( )
// New version: synchronous Redux-state read keyed by dbId; no await here.
168+ // Check cache for this database ID first (synchronous)
169+ const currentState = getState ( )
170+ const cacheEntry = currentState . settings . metadataProcessingCache [ selectedDbId ]
175171
176- // Add tables from dbSchema
177- if ( dbSchema . tables ) {
178- dbSchema . tables . forEach ( ( table : any ) => {
179- const tableName = table . name
180- const schemaName = table . schema || dbSchema . default_schema
181- const fullName = schemaName ? `${ schemaName } .${ tableName } ` : tableName
182-
183- existingTableNames . add ( tableName )
184- existingTableNames . add ( fullName )
185- } )
172+ if ( cacheEntry ) {
// 7-day TTL: entries older than this are treated as stale and recomputed.
173+ const SEVEN_DAYS_MS = 7 * 24 * 60 * 60 * 1000
174+ const isStale = Date . now ( ) - cacheEntry . timestamp > SEVEN_DAYS_MS
175+
176+ if ( ! isStale ) {
177+ console . log ( `[minusx] Using cached metadata for database ${ selectedDbId } ` )
178+ return cacheEntry . result
179+ } else {
180+ console . log ( `[minusx] Cached metadata for database ${ selectedDbId } is stale, clearing cache` )
181+ // Clear stale cache entry using proper Redux action
// NOTE(review): the fresh result below would overwrite this entry anyway;
// the eager clear guards against a failed recompute leaving stale data behind.
182+ dispatch ( clearMetadataProcessingCache ( selectedDbId ) )
183+ }
186184 }
187185
188- // Add models from dbSchema
189- if ( dbSchema . models ) {
190- dbSchema . models . forEach ( ( model : any ) => {
191- existingTableNames . add ( model . name )
192- } )
186+ // Check if processing is already in progress for this database ID
187+ if ( ongoingMetadataProcessing . has ( selectedDbId ) ) {
188+ console . log ( `[minusx] Metadata processing already in progress for database ${ selectedDbId } , returning existing promise` )
// The non-null assertion is safe: guarded by the .has() check on the line above.
189+ return await ongoingMetadataProcessing . get ( selectedDbId ) !
193190 }
194191
195- console . log ( '[minusx] Found existing tables/models:' , existingTableNames . size )
196-
197- // Step 3: Find intersection of referenced tables that actually exist
198- const validReferencedTables = referencedTables . filter ( ( table : any ) => {
199- const tableName = table . name
200- const schemaName = table . schema
201- const fullName = schemaName ? `${ schemaName } .${ tableName } ` : tableName
202-
203- return existingTableNames . has ( tableName ) || existingTableNames . has ( fullName )
204- } )
205-
206- console . log ( '[minusx] Valid referenced tables:' , validReferencedTables . length , 'out of' , referencedTables . length )
207-
208- // Step 4: Filter fields in-memory using table names
209- const validTableNames = new Set ( validReferencedTables . map ( ( table : any ) => {
210- const schemaName = table . schema
211- return schemaName ? `${ schemaName } .${ table . name } ` : table . name
212- } ) )
213-
214- console . log ( '[minusx] Filtering fields for' , validTableNames . size , 'valid tables...' )
215-
216- const filteredFields = allFields . filter ( ( field : any ) => {
217- const tableName = get ( field , 'table_name' )
218- const tableSchema = get ( field , 'schema' )
219- const fullTableName = tableSchema ? `${ tableSchema } .${ tableName } ` : tableName
220-
221- return validTableNames . has ( tableName ) || validTableNames . has ( fullTableName )
222- } )
223-
224- console . log ( '[minusx] Fields after filtering:' , filteredFields . length , 'out of' , allFields . length )
225-
226- // Step 5: Process metadata for all three with filtered data
227- console . log ( '[minusx] Processing metadata with filtered data...' )
// NOTE(review): no await occurs between the .has() check above and the .set()
// further below (the IIFE suspends at its first internal await), so on a
// single-threaded event loop two callers cannot both start work for one dbId.
192+ // Create and store the processing promise
193+ const processingPromise = ( async ( ) => {
194+ try {
195+
196+ const [ dbSchema , { cards, tables : referencedTables } , allFields ] = await Promise . all ( [
197+ getDatabaseTablesAndModelsWithoutFields ( ) ,
198+ getAllCards ( ) ,
199+ fetchDatabaseFields ( { db_id : selectedDbId } )
200+ ] )
201+
202+ console . log ( '[minusx] All API calls completed. Processing data...' )
203+
204+ // Step 2: Create sets for efficient lookup of existing tables
// Both bare and schema-qualified names are added so lookups match either form.
205+ const existingTableNames = new Set < string > ( )
206+
207+ // Add tables from dbSchema
208+ if ( dbSchema . tables ) {
209+ dbSchema . tables . forEach ( ( table : any ) => {
210+ const tableName = table . name
211+ const schemaName = table . schema || dbSchema . default_schema
212+ const fullName = schemaName ? `${ schemaName } .${ tableName } ` : tableName
213+
214+ existingTableNames . add ( tableName )
215+ existingTableNames . add ( fullName )
216+ } )
217+ }
218+
219+ // Add models from dbSchema
220+ if ( dbSchema . models ) {
221+ dbSchema . models . forEach ( ( model : any ) => {
222+ existingTableNames . add ( model . name )
223+ } )
224+ }
225+
226+ console . log ( '[minusx] Found existing tables/models:' , existingTableNames . size )
227+
228+ // Step 3: Find intersection of referenced tables that actually exist
229+ const validReferencedTables = referencedTables . filter ( ( table : any ) => {
230+ const tableName = table . name
231+ const schemaName = table . schema
232+ const fullName = schemaName ? `${ schemaName } .${ tableName } ` : tableName
233+
234+ return existingTableNames . has ( tableName ) || existingTableNames . has ( fullName )
235+ } )
236+
237+ console . log ( '[minusx] Valid referenced tables:' , validReferencedTables . length , 'out of' , referencedTables . length )
238+
239+ // Step 4: Filter fields in-memory using table names
240+ const validTableNames = new Set ( validReferencedTables . map ( ( table : any ) => {
241+ const schemaName = table . schema
242+ return schemaName ? `${ schemaName } .${ table . name } ` : table . name
243+ } ) )
244+
245+ console . log ( '[minusx] Filtering fields for' , validTableNames . size , 'valid tables...' )
246+
247+ const filteredFields = allFields . filter ( ( field : any ) => {
248+ const tableName = get ( field , 'table_name' )
249+ const tableSchema = get ( field , 'schema' )
250+ const fullTableName = tableSchema ? `${ tableSchema } .${ tableName } ` : tableName
251+
252+ return validTableNames . has ( tableName ) || validTableNames . has ( fullTableName )
253+ } )
254+
255+ console . log ( '[minusx] Fields after filtering:' , filteredFields . length , 'out of' , allFields . length )
256+
257+ // Step 5: Process metadata for all three with filtered data
258+ console . log ( '[minusx] Processing metadata with filtered data...' )
259+
260+ const [ cardsHash , dbSchemaHash , fieldsHash ] = await Promise . all ( [
261+ processMetadataWithCaching ( 'cards' , async ( ) => cards ) ,
262+ processMetadataWithCaching ( 'dbSchema' , async ( ) => dbSchema ) ,
263+ processMetadataWithCaching ( 'fields' , async ( ) => filteredFields )
264+ ] )
265+
266+ console . log ( '[minusx] Coordinated metadata processing complete' )
267+
268+ const result = {
269+ cardsHash,
270+ dbSchemaHash,
271+ fieldsHash
272+ }
228273
229- const [ cardsHash , dbSchemaHash , fieldsHash ] = await Promise . all ( [
230- processMetadataWithCaching ( 'cards' , async ( ) => cards ) ,
231- processMetadataWithCaching ( 'dbSchema' , async ( ) => dbSchema ) ,
232- processMetadataWithCaching ( 'fields' , async ( ) => filteredFields )
233- ] )
// Only successful runs reach this dispatch — a rejected run skips the cache
// write, so the next caller recomputes rather than caching a failure.
274+ // Cache the result for this database ID
275+ dispatch ( setMetadataProcessingCache ( { dbId : selectedDbId , result } ) )
276+ console . log ( `[minusx] Cached metadata processing result for database ${ selectedDbId } ` )
277+
278+ return result
279+ } finally {
280+ // Clean up the ongoing processing tracking
// Runs on success and failure alike, so errored runs can be retried.
281+ ongoingMetadataProcessing . delete ( selectedDbId )
282+ }
283+ } ) ( )
234284
235- console . log ( '[minusx] Coordinated metadata processing complete' )
285+ // Store the promise in the map
286+ ongoingMetadataProcessing . set ( selectedDbId , processingPromise )
236287
237- return {
238- cardsHash,
239- dbSchemaHash,
240- fieldsHash
241- }
288+ return await processingPromise
242289}
243290
0 commit comments