@@ -467,7 +467,6 @@ def calc_megakey(fileset):
 def db_insert(data_arr, username=None, skiplog=False):
     header = data_arr[0]
     game_data = data_arr[1]
-    resources = data_arr[2]
     filepath = data_arr[3]
 
     try:
@@ -533,9 +532,6 @@ def db_insert(data_arr, username=None, skiplog=False):
             insert_game(
                 engine_name, engineid, title, gameid, extra, platform, lang, conn
             )
-        elif src == "dat":
-            if "romof" in fileset and fileset["romof"] in resources:
-                fileset["rom"] = fileset["rom"] + resources[fileset["romof"]]["rom"]
 
     log_text = f"size {os.path.getsize(filepath)}, author {author}, version {version}. State {status}."
 
@@ -854,6 +850,7 @@ def match_fileset(data_arr, username=None, skiplog=False):
                 skiplog,
             )
         else:
+            game_data_lookup = {fs["name"]: fs for fs in game_data}
             for fileset in game_data:
                 process_fileset(
                     fileset,
@@ -867,6 +864,7 @@ def match_fileset(data_arr, username=None, skiplog=False):
                     version,
                     source_status,
                     user,
+                    game_data_lookup,
                 )
         finalize_fileset_insertion(
             conn, transaction_id, src, filepath, author, version, source_status, user
@@ -905,9 +903,25 @@ def set_process(
     set_to_candidate_dict = defaultdict(list)
     id_to_fileset_dict = defaultdict(dict)
 
+    game_data_lookup = {fs["name"]: fs for fs in game_data}
+
     for fileset in game_data:
-        if "romof" in fileset and fileset["romof"] in resources:
-            fileset["rom"] += resources[fileset["romof"]]["rom"]
+        # Ideally romof should be enough, but adding in case of an edge case
+        current_name = fileset.get("romof") or fileset.get("cloneof")
+
+        # Iteratively check for extra files if linked to multiple filesets
+        while current_name:
+            if current_name in resources:
+                fileset["rom"] += resources[current_name]["rom"]
+                break
+
+            elif current_name in game_data_lookup:
+                linked = game_data_lookup[current_name]
+                fileset["rom"] += linked.get("rom", [])
+                current_name = linked.get("romof") or linked.get("cloneof")
+            else:
+                break
+
         key = calc_key(fileset)
         megakey = ""
         log_text = f"State {source_status}."
@@ -938,7 +952,7 @@ def set_process(
             fileset_description = (
                 fileset["description"] if "description" in fileset else ""
             )
-            log_text = f"Drop fileset as no matching candidates. Name: {fileset_name}, Description: {fileset_description}"
+            log_text = f"Drop fileset as no matching candidates. Name: {fileset_name}, Description: {fileset_description}."
             create_log(
                 escape_string(category_text), user, escape_string(log_text), conn
             )
@@ -955,14 +969,31 @@ def set_process(
             value_to_keys[candidates[0]].append(set_fileset)
     for candidate, set_filesets in value_to_keys.items():
         if len(set_filesets) > 1:
+            query = """
+                SELECT e.engineid, g.gameid, g.platform, g.language
+                FROM fileset fs
+                JOIN game g ON fs.game = g.id
+                JOIN engine e ON e.id = g.engine
+                WHERE fs.id = %s
+            """
+            result = None
+            with conn.cursor() as cursor:
+                cursor.execute(query, (candidate,))
+                result = cursor.fetchone()
+
+            engine = result["engineid"]
+            gameid = result["gameid"]
+            platform = result["platform"]
+            language = result["language"]
+
             for set_fileset in set_filesets:
                 fileset = id_to_fileset_dict[set_fileset]
                 category_text = "Drop set fileset - B"
                 fileset_name = fileset["name"] if "name" in fileset else ""
                 fileset_description = (
                     fileset["description"] if "description" in fileset else ""
                 )
-                log_text = f"Drop fileset, multiple filesets mapping to single detection. Name: {fileset_name}, Description: {fileset_description}"
+                log_text = f"Drop fileset, multiple filesets mapping to single detection. Name: {fileset_name}, Description: {fileset_description}. Clashed with Fileset:{candidate} ({engine}:{gameid}-{platform}-{language})"
                 create_log(
                     escape_string(category_text), user, escape_string(log_text), conn
                 )
@@ -996,7 +1027,8 @@ def set_process(
     # Final log
     with conn.cursor() as cursor:
         cursor.execute(
-            f"SELECT COUNT(fileset) from transactions WHERE `transaction` = {transaction_id}"
+            "SELECT COUNT(fileset) from transactions WHERE `transaction` = %s",
+            (transaction_id,),
         )
         fileset_insertion_count = cursor.fetchone()["COUNT(fileset)"]
         category_text = f"Uploaded from {src}"
@@ -1037,7 +1069,7 @@ def set_perform_match(
             set_populate_file(fileset, matched_fileset_id, conn, detection)
             auto_merged_filesets += 1
             if not skiplog:
-                log_matched_fileset(
+                set_log_matched_fileset(
                     src,
                     fileset_id,
                     matched_fileset_id,
@@ -1087,7 +1119,7 @@ def set_perform_match(
             set_populate_file(fileset, candidate_fileset, conn, detection)
             auto_merged_filesets += 1
             if not skiplog:
-                log_matched_fileset(
+                set_log_matched_fileset(
                     src,
                     fileset_id,
                     candidate_fileset,
@@ -1185,17 +1217,28 @@ def set_filter_candidate_filesets(fileset_id, fileset, transaction_id, conn):
         FROM candidate_fileset cf
         JOIN set_fileset sf ON cf.name = sf.name AND (cf.size = sf.size OR cf.size = -1)
         GROUP BY cf.fileset_id
-        )
-        SELECT mdf.fileset_id
+        ),
+        valid_matched_detection_files AS (
+            SELECT mdf.fileset_id, mdf.match_files_count AS valid_match_files_count
             FROM matched_detection_files mdf
-        JOIN total_detection_files tdf ON mdf.fileset_id = tdf.fileset_id
-        WHERE mdf.match_files_count = tdf.detection_files_found
-        ORDER BY mdf.match_files_count DESC;
+            JOIN total_detection_files tdf ON tdf.fileset_id = mdf.fileset_id
+            WHERE tdf.detection_files_found = mdf.match_files_count
+        ),
+        max_match_count AS (
+            SELECT MAX(valid_match_files_count) AS max_count FROM valid_matched_detection_files
+        )
+        SELECT vmdf.fileset_id
+        FROM valid_matched_detection_files vmdf
+        JOIN total_detection_files tdf ON vmdf.fileset_id = tdf.fileset_id
+        JOIN max_match_count mmc ON vmdf.valid_match_files_count = mmc.max_count
+        WHERE vmdf.valid_match_files_count = tdf.detection_files_found;
         """
+
         cursor.execute(
             query, (fileset_id, fileset["sourcefile"], transaction_id, fileset_id)
         )
         rows = cursor.fetchall()
+
         candidates = []
         if rows:
             for row in rows:
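The rewritten query changes the selection rule, not just its shape: the old version returned every candidate whose matched-file count equaled its detection-file count, ordered by that count; the new CTEs keep only the candidates tied for the highest such count. A self-contained sketch of that filtering against an in-memory SQLite database (toy tables and counts, not the production MySQL schema):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE total_detection_files (fileset_id INTEGER, detection_files_found INTEGER);
        CREATE TABLE matched_detection_files (fileset_id INTEGER, match_files_count INTEGER);
        INSERT INTO total_detection_files VALUES (1, 3), (2, 2), (3, 3);
        INSERT INTO matched_detection_files VALUES (1, 3), (2, 2), (3, 2);
    """)
    rows = conn.execute("""
        WITH valid AS (
            -- candidates where every one of their detection files matched
            SELECT mdf.fileset_id, mdf.match_files_count AS n
            FROM matched_detection_files mdf
            JOIN total_detection_files tdf ON tdf.fileset_id = mdf.fileset_id
            WHERE tdf.detection_files_found = mdf.match_files_count
        )
        SELECT fileset_id FROM valid
        WHERE n = (SELECT MAX(n) FROM valid);
    """).fetchall()

    # Fileset 2 fully matches its own two detection files, but fileset 1
    # matched three, so only fileset 1 survives: prints [(1,)]
    print(rows)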
@@ -1216,11 +1259,26 @@ def process_fileset(
     version,
     source_status,
     user,
+    game_data_lookup,
 ):
     if detection:
         insert_game_data(fileset, conn)
-    elif src == "dat" and "romof" in fileset and fileset["romof"] in resources:
-        fileset["rom"] += resources[fileset["romof"]]["rom"]
+
+    # Ideally romof should be enough, but adding in case of an edge case
+    current_name = fileset.get("romof") or fileset.get("cloneof")
+
+    # Iteratively check for extra files if linked to multiple filesets
+    while current_name:
+        if current_name in resources:
+            fileset["rom"] += resources[current_name]["rom"]
+            break
+
+        elif current_name in game_data_lookup:
+            linked = game_data_lookup[current_name]
+            fileset["rom"] += linked.get("rom", [])
+            current_name = linked.get("romof") or linked.get("cloneof")
+        else:
+            break
 
     key = calc_key(fileset) if not detection else ""
     megakey = calc_megakey(fileset) if detection else ""
@@ -1639,6 +1697,17 @@ def log_matched_fileset(src, fileset_last, fileset_id, state, user, conn):
     update_history(fileset_last, fileset_id, conn, log_last)
 
 
+def set_log_matched_fileset(src, fileset_last, fileset_id, state, user, conn):
+    category_text = f"Matched from {src}"
+    log_text = (
+        f"Matched Fileset:{fileset_last} with Fileset:{fileset_id}. State {state}."
+    )
+    log_last = create_log(
+        escape_string(category_text), user, escape_string(log_text), conn
+    )
+    update_history(fileset_last, fileset_id, conn, log_last)
+
+
 def finalize_fileset_insertion(
     conn, transaction_id, src, filepath, author, version, source_status, user
 ):