From 5d7a1dcde55af4d5ce020b9ae56414825327b92f Mon Sep 17 00:00:00 2001 From: Will Miao <13051207myq@gmail.com> Date: Mon, 18 Aug 2025 16:46:16 +0800 Subject: [PATCH] refactor: Comment out duplicate filename logging in ModelScanner for cleaner cache build process, fixes #365 --- py/services/model_scanner.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/py/services/model_scanner.py b/py/services/model_scanner.py index dd995ee5..4b6c391a 100644 --- a/py/services/model_scanner.py +++ b/py/services/model_scanner.py @@ -303,11 +303,11 @@ class ModelScanner: self._tags_count[tag] = self._tags_count.get(tag, 0) + 1 # Log duplicate filename warnings after building the index - duplicate_filenames = self._hash_index.get_duplicate_filenames() - if duplicate_filenames: - logger.warning(f"Found {len(duplicate_filenames)} filename(s) with duplicates during {self.model_type} cache build:") - for filename, paths in duplicate_filenames.items(): - logger.warning(f" Duplicate filename '{filename}': {paths}") + # duplicate_filenames = self._hash_index.get_duplicate_filenames() + # if duplicate_filenames: + # logger.warning(f"Found {len(duplicate_filenames)} filename(s) with duplicates during {self.model_type} cache build:") + # for filename, paths in duplicate_filenames.items(): + # logger.warning(f" Duplicate filename '{filename}': {paths}") # Update cache self._cache.raw_data = raw_data @@ -375,11 +375,11 @@ class ModelScanner: self._tags_count[tag] = self._tags_count.get(tag, 0) + 1 # Log duplicate filename warnings after building the index - duplicate_filenames = self._hash_index.get_duplicate_filenames() - if duplicate_filenames: - logger.warning(f"Found {len(duplicate_filenames)} filename(s) with duplicates during {self.model_type} cache build:") - for filename, paths in duplicate_filenames.items(): - logger.warning(f" Duplicate filename '{filename}': {paths}") + # duplicate_filenames = self._hash_index.get_duplicate_filenames() + # if duplicate_filenames: + # logger.warning(f"Found {len(duplicate_filenames)} filename(s) with duplicates during {self.model_type} cache build:") + # for filename, paths in duplicate_filenames.items(): + # logger.warning(f" Duplicate filename '{filename}': {paths}") # Update cache self._cache = ModelCache(