feat(localization): update toast messages for consistency and improved error handling across various components

This commit is contained in:
Will Miao
2025-08-31 18:38:42 +08:00
parent 7bef562541
commit f80e266d02
8 changed files with 366 additions and 120 deletions

View File

@@ -1,67 +1,22 @@
{ {
"common": { "common": {
"file": "File",
"folder": "Folder",
"name": "Name",
"size": "Size",
"date": "Date",
"type": "Type",
"path": "Path",
"fileSize": {
"zero": "0 Bytes",
"bytes": "Bytes",
"kb": "KB",
"mb": "MB",
"gb": "GB",
"tb": "TB"
},
"actions": { "actions": {
"save": "Save", "save": "Save",
"cancel": "Cancel", "cancel": "Cancel",
"delete": "Delete", "delete": "Delete",
"edit": "Edit",
"copy": "Copy",
"move": "Move", "move": "Move",
"refresh": "Refresh", "refresh": "Refresh",
"download": "Download",
"upload": "Upload",
"search": "Search",
"filter": "Filter",
"sort": "Sort",
"select": "Select",
"selectAll": "Select All",
"deselectAll": "Deselect All",
"confirm": "Confirm",
"close": "Close",
"back": "Back", "back": "Back",
"next": "Next", "next": "Next",
"previous": "Previous",
"view": "View",
"preview": "Preview",
"details": "Details",
"backToTop": "Back to top", "backToTop": "Back to top",
"settings": "Settings", "settings": "Settings",
"help": "Help", "help": "Help"
"about": "About"
}, },
"status": { "status": {
"loading": "Loading...", "loading": "Loading...",
"saving": "Saving...",
"saved": "Saved",
"error": "Error",
"success": "Success",
"warning": "Warning",
"info": "Information",
"processing": "Processing...",
"completed": "Completed",
"failed": "Failed",
"cancelled": "Cancelled",
"pending": "Pending",
"ready": "Ready",
"unknown": "Unknown" "unknown": "Unknown"
}, },
"language": { "language": {
"current": "Language",
"select": "Select Language", "select": "Select Language",
"select_help": "Choose your preferred language for the interface", "select_help": "Choose your preferred language for the interface",
"english": "English", "english": "English",
@@ -304,24 +259,10 @@
} }
}, },
"checkpoints": { "checkpoints": {
"title": "Checkpoint Models", "title": "Checkpoint Models"
"info": {
"filename": "Filename",
"modelName": "Model Name",
"baseModel": "Base Model",
"fileSize": "File Size",
"dateAdded": "Date Added"
}
}, },
"embeddings": { "embeddings": {
"title": "Embedding Models", "title": "Embedding Models"
"info": {
"filename": "Filename",
"modelName": "Model Name",
"triggerWords": "Trigger Words",
"fileSize": "File Size",
"dateAdded": "Date Added"
}
}, },
"sidebar": { "sidebar": {
"modelRoot": "Model Root", "modelRoot": "Model Root",

View File

@@ -430,7 +430,7 @@ async function handleImportFiles(files, modelHash, importContainer) {
} }
} catch (error) { } catch (error) {
console.error('Error importing examples:', error); console.error('Error importing examples:', error);
showToast('import.importFailed', { message: error.message }, 'error'); showToast('toast.import.importFailed', { message: error.message }, 'error');
} }
} }

View File

@@ -418,7 +418,7 @@ export class DownloadManager {
const config = this.apiClient.apiConfig.config; const config = this.apiClient.apiConfig.config;
if (!modelRoot) { if (!modelRoot) {
showToast('models.pleaseSelectRoot', { type: config.displayName }, 'error'); showToast('toast.models.pleaseSelectRoot', { type: config.displayName }, 'error');
return; return;
} }

View File

@@ -285,7 +285,7 @@ class ExampleImagesManager {
// Close settings modal // Close settings modal
modalManager.closeModal('settingsModal'); modalManager.closeModal('settingsModal');
} else { } else {
showToast('exampleImages.downloadStartFailed', { error: data.error }, 'error'); showToast('toast.exampleImages.downloadStartFailed', { error: data.error }, 'error');
} }
} catch (error) { } catch (error) {
console.error('Failed to start download:', error); console.error('Failed to start download:', error);
@@ -321,7 +321,7 @@ class ExampleImagesManager {
this.updateDownloadButtonText(); this.updateDownloadButtonText();
showToast('toast.exampleImages.downloadPaused', {}, 'info'); showToast('toast.exampleImages.downloadPaused', {}, 'info');
} else { } else {
showToast('exampleImages.pauseFailed', { error: data.error }, 'error'); showToast('toast.exampleImages.pauseFailed', { error: data.error }, 'error');
} }
} catch (error) { } catch (error) {
console.error('Failed to pause download:', error); console.error('Failed to pause download:', error);
@@ -357,7 +357,7 @@ class ExampleImagesManager {
this.updateDownloadButtonText(); this.updateDownloadButtonText();
showToast('toast.exampleImages.downloadResumed', {}, 'success'); showToast('toast.exampleImages.downloadResumed', {}, 'success');
} else { } else {
showToast('exampleImages.resumeFailed', { error: data.error }, 'error'); showToast('toast.exampleImages.resumeFailed', { error: data.error }, 'error');
} }
} catch (error) { } catch (error) {
console.error('Failed to resume download:', error); console.error('Failed to resume download:', error);

View File

@@ -77,7 +77,7 @@ export class DownloadManager {
if (!result.success) { if (!result.success) {
// Handle save error // Handle save error
console.error("Failed to save recipe:", result.error); console.error("Failed to save recipe:", result.error);
showToast('import.recipeSaveFailed', { error: result.error }, 'error'); showToast('toast.import.recipeSaveFailed', { error: result.error }, 'error');
// Close modal // Close modal
modalManager.closeModal('importModal'); modalManager.closeModal('importModal');
return; return;
@@ -107,7 +107,7 @@ export class DownloadManager {
} catch (error) { } catch (error) {
console.error('Error:', error); console.error('Error:', error);
showToast('import.processingError', { message: error.message }, 'error'); showToast('toast.import.processingError', { message: error.message }, 'error');
} finally { } finally {
this.importManager.loadingManager.hide(); this.importManager.loadingManager.hide();
} }

View File

@@ -136,7 +136,7 @@ export class FolderBrowser {
this.initializeFolderBrowser(); this.initializeFolderBrowser();
} catch (error) { } catch (error) {
console.error('Error in API calls:', error); console.error('Error in API calls:', error);
showToast('import.folderBrowserError', { message: error.message }, 'error'); showToast('toast.import.folderBrowserError', { message: error.message }, 'error');
} }
} }

View File

@@ -5,6 +5,7 @@
import { i18n } from '../i18n/index.js'; import { i18n } from '../i18n/index.js';
import { initializePageI18n, t, formatFileSize, formatDate, formatNumber } from '../utils/i18nHelpers.js'; import { initializePageI18n, t, formatFileSize, formatDate, formatNumber } from '../utils/i18nHelpers.js';
import { findUnusedTranslationKeys, findMissingTranslationKeys, extractLeafKeys } from '../i18n/validator.js';
// Mock DOM elements for testing // Mock DOM elements for testing
function createMockDOM() { function createMockDOM() {
@@ -96,6 +97,35 @@ function testLanguageDetection() {
console.log(`Browser language: ${navigator.language}`); console.log(`Browser language: ${navigator.language}`);
} }
// Test unused translations detection
// Verify detection of unused and missing translation keys.
function testUnusedTranslationsDetection() {
  console.log('=== Testing Unused Translations Detection ===');

  // A small sample of keys we pretend the codebase actually uses.
  const sampleUsedKeys = [
    'common.actions.save',
    'common.actions.cancel',
    'header.appTitle'
  ];

  // Full translation catalog currently loaded by the i18n system.
  const catalog = i18n.getTranslations();

  // Unused = declared in the catalog but absent from the sample (leaf nodes only).
  const unused = findUnusedTranslationKeys(catalog, sampleUsedKeys);
  console.log(`Found ${unused.length} unused translation keys`);
  console.log('First 5 unused keys:', unused.slice(0, 5));

  // Missing = referenced in code but absent from the catalog; probe with a
  // deliberately non-existent key to exercise the detection path.
  const probeKeys = [...sampleUsedKeys, 'non.existent.key'];
  const missing = findMissingTranslationKeys(catalog, probeKeys);
  console.log(`Found ${missing.length} missing translation keys:`, missing);
}
// Run all tests // Run all tests
function runTests() { function runTests() {
console.log('Starting i18n System Tests...'); console.log('Starting i18n System Tests...');
@@ -110,6 +140,9 @@ function runTests() {
testDOMTranslation(); testDOMTranslation();
} }
// Test unused translations detection
testUnusedTranslationsDetection();
console.log('====================================='); console.log('=====================================');
console.log('i18n System Tests Completed!'); console.log('i18n System Tests Completed!');
} }

View File

@@ -7,7 +7,10 @@ This tests both JavaScript loading and Python server-side functionality.
import os import os
import sys import sys
import json import json
import re
import asyncio import asyncio
import glob
from typing import Set, List, Dict
# Add the parent directory to the path so we can import the modules # Add the parent directory to the path so we can import the modules
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__))) sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
@@ -15,42 +18,7 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
def test_json_files_exist(): def test_json_files_exist():
"""Test that all JSON locale files exist and are valid JSON.""" """Test that all JSON locale files exist and are valid JSON."""
print("Testing JSON locale files...") print("Testing JSON locale files...")
return test_json_structure_validation()
locales_dir = os.path.join(os.path.dirname(__file__), 'locales')
if not os.path.exists(locales_dir):
print("❌ Locales directory does not exist!")
return False
expected_locales = ['en', 'zh-CN', 'zh-TW', 'ja', 'ru', 'de', 'fr', 'es', 'ko']
for locale in expected_locales:
file_path = os.path.join(locales_dir, f'{locale}.json')
if not os.path.exists(file_path):
print(f"{locale}.json does not exist!")
return False
try:
with open(file_path, 'r', encoding='utf-8') as f:
data = json.load(f)
# Check that required sections exist
required_sections = ['common', 'header', 'loras', 'recipes', 'modals']
for section in required_sections:
if section not in data:
print(f"{locale}.json missing required section: {section}")
return False
print(f"{locale}.json is valid")
except json.JSONDecodeError as e:
print(f"{locale}.json has invalid JSON: {e}")
return False
except Exception as e:
print(f"❌ Error reading {locale}.json: {e}")
return False
print("✅ All JSON locale files are valid")
return True
def test_server_i18n(): def test_server_i18n():
"""Test the Python server-side i18n system.""" """Test the Python server-side i18n system."""
@@ -117,17 +85,7 @@ def test_translation_completeness():
with open(os.path.join(locales_dir, 'en.json'), 'r', encoding='utf-8') as f: with open(os.path.join(locales_dir, 'en.json'), 'r', encoding='utf-8') as f:
en_data = json.load(f) en_data = json.load(f)
def get_all_keys(data, prefix=''): en_keys = get_all_translation_keys(en_data)
"""Recursively get all keys from nested dictionary."""
keys = set()
for key, value in data.items():
full_key = f"{prefix}.{key}" if prefix else key
keys.add(full_key)
if isinstance(value, dict):
keys.update(get_all_keys(value, full_key))
return keys
en_keys = get_all_keys(en_data)
print(f"English has {len(en_keys)} translation keys") print(f"English has {len(en_keys)} translation keys")
# Check other languages # Check other languages
@@ -137,7 +95,7 @@ def test_translation_completeness():
with open(os.path.join(locales_dir, f'{locale}.json'), 'r', encoding='utf-8') as f: with open(os.path.join(locales_dir, f'{locale}.json'), 'r', encoding='utf-8') as f:
locale_data = json.load(f) locale_data = json.load(f)
locale_keys = get_all_keys(locale_data) locale_keys = get_all_translation_keys(locale_data)
missing_keys = en_keys - locale_keys missing_keys = en_keys - locale_keys
extra_keys = locale_keys - en_keys extra_keys = locale_keys - en_keys
@@ -158,13 +116,323 @@ def test_translation_completeness():
return True return True
def extract_i18n_keys_from_js(file_path: str) -> Set[str]:
    """Extract translation keys referenced in a JavaScript file.

    Scans for ``translate()``, ``showToast()`` and ``t()`` calls whose first
    argument is a string literal.  Comments are stripped first so that
    commented-out calls are not counted.  Bare (dot-free) words are kept
    only if they appear in a small whitelist of common status/action keys.

    Args:
        file_path: Path to the JavaScript file to scan.

    Returns:
        Set of translation keys found in the file (empty on read errors).
    """
    # Dot-free words that are still accepted as translation keys; everything
    # else without a dot is assumed to be a false positive.
    allowed_single_words = {
        'loading', 'error', 'success', 'warning', 'info', 'cancel', 'save', 'delete'
    }
    keys: Set[str] = set()
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()

        # Remove comments to avoid false positives.
        content = re.sub(r'//.*$', '', content, flags=re.MULTILINE)      # single-line
        content = re.sub(r'/\*.*?\*/', '', content, flags=re.DOTALL)     # multi-line

        # All three i18n entry points share the same call shape:
        #   fn('key.name', ...) or fn("key.name", ...)
        # so one loop replaces the three duplicated pattern/filter passes.
        for func_name in ('translate', 'showToast', 't'):
            pattern = r"\b" + func_name + r"\s*\(\s*['\"]([a-zA-Z0-9._-]+)['\"]"
            for key in re.findall(pattern, content):
                if '.' in key or key in allowed_single_words:
                    keys.add(key)
    except Exception as e:
        print(f"⚠️ Error reading {file_path}: {e}")
    return keys
def extract_i18n_keys_from_html(file_path: str) -> Set[str]:
    """Extract translation keys from an HTML (Jinja2) template file.

    Finds ``t('key')`` calls inside ``{{ ... }}`` / ``{% ... %}`` template
    expressions and i18n calls inside inline ``<script>`` blocks.  HTML
    comments are stripped first to avoid false positives.

    Args:
        file_path: Path to the HTML template to scan.

    Returns:
        Set of translation keys found (empty on read errors).
    """
    # Dot-free words that are still accepted as translation keys.
    allowed_single_words = {
        'loading', 'error', 'success', 'warning', 'info', 'cancel', 'save', 'delete'
    }
    call_pattern = r"\bt\s*\(\s*['\"]([a-zA-Z0-9._-]+)['\"]"
    keys: Set[str] = set()
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()

        # Remove HTML comments so commented-out markup is not counted.
        content = re.sub(r'<!--.*?-->', '', content, flags=re.DOTALL)

        # t() calls inside Jinja2 template expressions.  Expressions are
        # extracted first so that EVERY t() call inside an expression is
        # found; the previous single-regex approach captured only the first
        # call per {{ ... }} / {% ... %} block.
        expr_pattern = r"(?:\{\{|\{%)(.*?)(?:\}\}|%\})"
        for expression in re.findall(expr_pattern, content, flags=re.DOTALL):
            for key in re.findall(call_pattern, expression):
                if '.' in key or key in allowed_single_words:
                    keys.add(key)

        # i18n calls inside inline <script> blocks.  Previously only
        # translate() was checked here; showToast() and t() are now scanned
        # too, consistent with extract_i18n_keys_from_js().  Script keys
        # must contain a dot (no whitelist), matching the old behavior.
        script_pattern = r'<script[^>]*>(.*?)</script>'
        for script_content in re.findall(script_pattern, content, flags=re.DOTALL):
            for func_name in ('translate', 'showToast', 't'):
                pattern = r"\b" + func_name + r"\s*\(\s*['\"]([a-zA-Z0-9._-]+)['\"]"
                for key in re.findall(pattern, script_content):
                    if '.' in key:
                        keys.add(key)
    except Exception as e:
        print(f"⚠️ Error reading {file_path}: {e}")
    return keys
def get_all_translation_keys(data: dict, prefix: str = '') -> Set[str]:
    """Collect every key path in a nested translation dictionary.

    Both intermediate (namespace) keys and leaf keys are returned, each as a
    dotted path rooted at ``prefix`` (e.g. ``common.actions.save``).

    Args:
        data: Nested dictionary of translations.
        prefix: Dotted path prepended to every key; '' for the root.

    Returns:
        Set of dotted key paths.
    """
    collected: Set[str] = set()
    # Explicit stack instead of recursion; the resulting set is identical.
    pending = [(prefix, data)]
    while pending:
        path, node = pending.pop()
        for name, child in node.items():
            dotted = f"{path}.{name}" if path else name
            collected.add(dotted)
            if isinstance(child, dict):
                pending.append((dotted, child))
    return collected
def test_static_code_analysis():
    """Test static code analysis to detect missing translation keys.

    Cross-references the keys used in JavaScript/HTML sources against the
    keys declared in en.json, reporting both missing keys (used but not
    declared — a failure) and unused keys (declared but never used — a
    warning only).

    Returns:
        True when no missing keys were found, False otherwise.
    """
    print("\nTesting static code analysis for translation keys...")

    # Load English translations as the reference key set.
    locales_dir = os.path.join(os.path.dirname(__file__), 'locales')
    with open(os.path.join(locales_dir, 'en.json'), 'r', encoding='utf-8') as f:
        en_data = json.load(f)
    available_keys = get_all_translation_keys(en_data)
    print(f"Available translation keys in en.json: {len(available_keys)}")

    # Known false positives to exclude from analysis.  These are typically
    # HTML attributes, CSS classes, or other non-translation strings.
    false_positives = {
        'checkpoint', 'civitai_api_key', 'div', 'embedding', 'lora', 'show_only_sfw',
        'model', 'type', 'name', 'value', 'id', 'class', 'style', 'src', 'href',
        'data', 'width', 'height', 'size', 'format', 'version', 'url', 'path',
        'file', 'folder', 'image', 'text', 'number', 'boolean', 'array', 'object'
    }

    # Extract keys from JavaScript files (recursive walk of static/js).
    js_dir = os.path.join(os.path.dirname(__file__), 'static', 'js')
    js_files = []
    if os.path.exists(js_dir):
        for root, dirs, files in os.walk(js_dir):
            for file in files:
                if file.endswith('.js'):
                    js_files.append(os.path.join(root, file))

    js_keys = set()
    js_files_with_keys = []
    for js_file in js_files:
        file_keys = extract_i18n_keys_from_js(js_file) - false_positives
        js_keys.update(file_keys)
        if file_keys:
            rel_path = os.path.relpath(js_file, os.path.dirname(__file__))
            js_files_with_keys.append((rel_path, len(file_keys)))
            print(f" Found {len(file_keys)} keys in {rel_path}")

    print(f"Total unique keys found in JavaScript files: {len(js_keys)}")

    # Extract keys from HTML template files (top level and subdirectories).
    templates_dir = os.path.join(os.path.dirname(__file__), 'templates')
    html_files = []
    if os.path.exists(templates_dir):
        html_files = glob.glob(os.path.join(templates_dir, '*.html'))
        html_files.extend(glob.glob(os.path.join(templates_dir, '**', '*.html'), recursive=True))

    html_keys = set()
    html_files_with_keys = []
    for html_file in html_files:
        file_keys = extract_i18n_keys_from_html(html_file) - false_positives
        html_keys.update(file_keys)
        if file_keys:
            rel_path = os.path.relpath(html_file, os.path.dirname(__file__))
            html_files_with_keys.append((rel_path, len(file_keys)))
            print(f" Found {len(file_keys)} keys in {rel_path}")

    print(f"Total unique keys found in HTML templates: {len(html_keys)}")

    all_used_keys = js_keys.union(html_keys)
    print(f"Total unique keys used in code: {len(all_used_keys)}")

    missing_keys = all_used_keys - available_keys
    unused_keys = available_keys - all_used_keys

    success = True
    if missing_keys:
        print(f"\n❌ Found {len(missing_keys)} missing translation keys:")
        for key in sorted(missing_keys):
            print(f" - {key}")
        success = False

        # Group missing keys by category for better analysis.
        key_categories = {}
        for key in missing_keys:
            category = key.split('.')[0] if '.' in key else 'root'
            key_categories.setdefault(category, []).append(key)
        print(f"\n Missing keys by category:")
        for category, keys in sorted(key_categories.items()):
            print(f" {category}: {len(keys)} keys")

        print(f"\n💡 If these are false positives, add them to the false_positives set in test_static_code_analysis()")
    else:
        print("\n✅ All translation keys used in code are available in en.json")

    if unused_keys:
        print(f"\n⚠️ Found {len(unused_keys)} unused translation keys in en.json:")
        # Only show the first 20 to avoid cluttering the output.
        for key in sorted(unused_keys)[:20]:
            print(f" - {key}")
        if len(unused_keys) > 20:
            print(f" ... and {len(unused_keys) - 20} more")

        # Group unused keys by category for better analysis.
        unused_categories = {}
        for key in unused_keys:
            category = key.split('.')[0] if '.' in key else 'root'
            unused_categories.setdefault(category, []).append(key)
        print(f"\n Unused keys by category:")
        for category, keys in sorted(unused_categories.items()):
            print(f" {category}: {len(keys)} keys")

    # Summary statistics.
    print(f"\n📊 Static Code Analysis Summary:")
    print(f" JavaScript files analyzed: {len(js_files)}")
    print(f" JavaScript files with translations: {len(js_files_with_keys)}")
    print(f" HTML template files analyzed: {len(html_files)}")
    print(f" HTML template files with translations: {len(html_files_with_keys)}")
    print(f" Translation keys in en.json: {len(available_keys)}")
    print(f" Translation keys used in code: {len(all_used_keys)}")
    # BUGFIX: guard against ZeroDivisionError when en.json defines no keys.
    if available_keys:
        print(f" Usage coverage: {len(all_used_keys)/len(available_keys)*100:.1f}%")

    return success
def test_json_structure_validation():
    """Test JSON file structure and syntax validation.

    Checks that every expected locale file exists, parses as JSON, has an
    object at its root, contains the required top-level sections, and
    warns about empty translation values.

    Returns:
        True when all locale files validate, False otherwise.
    """
    print("\nTesting JSON file structure and syntax validation...")

    locales_dir = os.path.join(os.path.dirname(__file__), 'locales')
    if not os.path.exists(locales_dir):
        print("❌ Locales directory does not exist!")
        return False

    expected_locales = ['en', 'zh-CN', 'zh-TW', 'ja', 'ru', 'de', 'fr', 'es', 'ko']
    all_valid = True

    for locale in expected_locales:
        file_path = os.path.join(locales_dir, f'{locale}.json')
        if not os.path.exists(file_path):
            print(f"{locale}.json does not exist!")
            all_valid = False
            continue

        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                data = json.load(f)

            # The top level must be an object mapping sections to entries.
            if not isinstance(data, dict):
                print(f"{locale}.json root must be an object/dictionary")
                all_valid = False
                continue

            # Every locale must provide these top-level sections.
            required_sections = ['common', 'header', 'loras', 'recipes', 'modals']
            missing_sections = [s for s in required_sections if s not in data]
            if missing_sections:
                print(f"{locale}.json missing required sections: {', '.join(missing_sections)}")
                all_valid = False

            # Depth-first walk collecting paths whose value is None or a
            # blank string; these are reported as warnings, not failures.
            empty_values = []

            def collect_empty(node, path=''):
                for key, value in node.items():
                    current_path = f"{path}.{key}" if path else key
                    if isinstance(value, dict):
                        collect_empty(value, current_path)
                    elif isinstance(value, str) and not value.strip():
                        empty_values.append(current_path)
                    elif value is None:
                        empty_values.append(current_path)

            collect_empty(data)
            if empty_values:
                print(f"⚠️ {locale}.json has {len(empty_values)} empty translation values:")
                for path in empty_values[:5]:  # Show first 5
                    print(f" - {path}")
                if len(empty_values) > 5:
                    print(f" ... and {len(empty_values) - 5} more")

            print(f"{locale}.json structure is valid")
        except json.JSONDecodeError as e:
            print(f"{locale}.json has invalid JSON syntax: {e}")
            all_valid = False
        except Exception as e:
            print(f"❌ Error validating {locale}.json: {e}")
            all_valid = False

    return all_valid
def main(): def main():
"""Run all tests.""" """Run all tests."""
print("🚀 Testing updated i18n system...\n") print("🚀 Testing updated i18n system...\n")
success = True success = True
# Test JSON files # Test JSON files structure and syntax
if not test_json_files_exist(): if not test_json_files_exist():
success = False success = False
@@ -172,10 +440,14 @@ def main():
if not test_server_i18n(): if not test_server_i18n():
success = False success = False
# Test completeness # Test translation completeness
if not test_translation_completeness(): if not test_translation_completeness():
success = False success = False
# Test static code analysis
if not test_static_code_analysis():
success = False
print(f"\n{'🎉 All tests passed!' if success else '❌ Some tests failed!'}") print(f"\n{'🎉 All tests passed!' if success else '❌ Some tests failed!'}")
return success return success