@@ -67,34 +67,20 @@ def _discover_new_files(self, for_metadata: bool = False):
         Returns:
             list: A list of new files.
         """
-        listed_image_files = StateTracker.get_image_files(
+        all_image_files = StateTracker.get_image_files(
             data_backend_id=self.data_backend.id
         )
-        if listed_image_files is None:
+        if all_image_files is None:
             logger.debug("No image file cache available, retrieving fresh")
-            listed_image_files = self.data_backend.list_files(
+            all_image_files = self.data_backend.list_files(
                 instance_data_root=self.instance_data_root,
                 str_pattern="*.[jJpP][pPnN][gG]",
             )
-            # flatten the os.path.walk results into a dictionary
-            all_image_files = []
-            for sublist in listed_image_files:
-                logger.debug(f"Listed image files sublist: {sublist}")
-                root, dirs, files = sublist
-                for file in files:
-                    all_image_files.append(os.path.join(root, file))
-
-            StateTracker.set_image_files(
+            all_image_files = StateTracker.set_image_files(
                 all_image_files, data_backend_id=self.data_backend.id
             )
         else:
             logger.debug("Using cached image file list")
-            all_image_files = listed_image_files
-            del listed_image_files
-
-        logger.debug(
-            f"Before flattening, all image files: {json.dumps(all_image_files, indent=4)}"
-        )

         # Flatten the list if it contains nested lists
         if any(isinstance(i, list) for i in all_image_files):
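Note on the contract this change leans on: the removed block flattened os.walk-style (root, dirs, files) tuples by hand, while the new path simply re-binds all_image_files to whatever StateTracker.set_image_files returns and leaves any residual nesting to the existing isinstance check above. The following is a minimal sketch of that assumed behavior, using hypothetical stand-ins (_FakeStateTracker, _list_files) rather than the project's real classes; it is not the actual implementation.

# Minimal sketch of the assumed contract, not the project's real code.
import fnmatch
import os


class _FakeStateTracker:
    """Hypothetical stand-in for StateTracker's per-backend image-file cache."""

    _cache = {}

    @classmethod
    def get_image_files(cls, data_backend_id):
        # Returns None on a cache miss, mirroring the branch in the diff.
        return cls._cache.get(data_backend_id)

    @classmethod
    def set_image_files(cls, files, data_backend_id):
        cls._cache[data_backend_id] = files
        # Assumption: the real method returns the stored list, so the caller
        # can write `all_image_files = StateTracker.set_image_files(...)`.
        return files


def _list_files(instance_data_root, str_pattern="*.[jJpP][pPnN][gG]"):
    """Hypothetical flat listing that replaces the removed manual flattening."""
    matches = []
    for root, _dirs, files in os.walk(instance_data_root):
        for name in files:
            # The character-class pattern matches .jpg/.png in any letter case.
            if fnmatch.fnmatch(name, str_pattern):
                matches.append(os.path.join(root, name))
    return matches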