chore(logging): remove autocomplete debug logs (#895)

This commit is contained in:
Will Miao
2026-04-15 20:42:55 +08:00
parent 4514ca94b7
commit 083de395b1
3 changed files with 1 addition and 80 deletions

View File

@@ -2434,7 +2434,6 @@ class CustomWordsHandler:
even without category filtering. even without category filtering.
""" """
try: try:
started_at = time.perf_counter()
search_term = request.query.get("search", "") search_term = request.query.get("search", "")
limit = int(request.query.get("limit", "20")) limit = int(request.query.get("limit", "20"))
offset = max(0, int(request.query.get("offset", "0"))) offset = max(0, int(request.query.get("offset", "0")))
@@ -2446,16 +2445,6 @@ class CustomWordsHandler:
if category_param: if category_param:
categories = self._parse_category_param(category_param) categories = self._parse_category_param(category_param)
logger.info(
"LM custom words request search=%r category_param=%r categories=%s limit=%s offset=%s enriched=%s",
search_term,
category_param,
categories,
limit,
offset,
enriched_param,
)
results = self._service.search_words( results = self._service.search_words(
search_term, search_term,
limit, limit,
@@ -2464,14 +2453,6 @@ class CustomWordsHandler:
enriched=enriched_param, enriched=enriched_param,
) )
elapsed_ms = (time.perf_counter() - started_at) * 1000
logger.info(
"LM custom words response search=%r result_count=%s elapsed_ms=%.2f",
search_term,
len(results),
elapsed_ms,
)
return web.json_response({"success": True, "words": results}) return web.json_response({"success": True, "words": results})
except Exception as exc: except Exception as exc:
logger.error("Error searching custom words: %s", exc, exc_info=True) logger.error("Error searching custom words: %s", exc, exc_info=True)

View File

@@ -96,32 +96,11 @@ class CustomWordsService:
logger.debug("Skipping prompt-like custom words query: %s", normalized_search) logger.debug("Skipping prompt-like custom words query: %s", normalized_search)
return [] return []
logger.info(
"LM custom words service start search=%r categories=%s limit=%s offset=%s enriched=%s",
normalized_search,
categories,
limit,
offset,
enriched,
)
tag_index = self._get_tag_index() tag_index = self._get_tag_index()
if tag_index is not None: if tag_index is not None:
logger.info( return tag_index.search(
"LM custom words service tag_index ready=%s indexing=%s",
getattr(tag_index, "is_ready", lambda: "unknown")(),
getattr(tag_index, "is_indexing", lambda: "unknown")(),
)
results = tag_index.search(
normalized_search, categories=categories, limit=limit, offset=offset normalized_search, categories=categories, limit=limit, offset=offset
) )
logger.info(
"LM custom words service done search=%r result_count=%s",
normalized_search,
len(results),
)
return results
logger.debug("TagFTSIndex not available, returning empty results") logger.debug("TagFTSIndex not available, returning empty results")
return [] return []

View File

@@ -464,17 +464,6 @@ class TagFTSIndex:
List of dictionaries with tag_name, category, post_count, List of dictionaries with tag_name, category, post_count,
rank_score, and optionally matched_alias. rank_score, and optionally matched_alias.
""" """
search_started_at = time.perf_counter()
logger.info(
"LM tag FTS search start query=%r categories=%s limit=%s offset=%s ready=%s indexing=%s",
query,
categories,
limit,
offset,
self.is_ready(),
self.is_indexing(),
)
# Ensure index is ready (lazy initialization) # Ensure index is ready (lazy initialization)
if not self.ensure_ready(): if not self.ensure_ready():
if not self._warned_not_ready: if not self._warned_not_ready:
@@ -489,18 +478,10 @@ class TagFTSIndex:
if not fts_query: if not fts_query:
return [] return []
logger.info(
"LM tag FTS search built query=%r fts_query=%r",
query,
fts_query,
)
query_lower = query.lower().strip() query_lower = query.lower().strip()
try: try:
logger.info("LM tag FTS search waiting_for_lock query=%r", query)
with self._lock: with self._lock:
logger.info("LM tag FTS search acquired_lock query=%r", query)
conn = self._connect(readonly=True) conn = self._connect(readonly=True)
try: try:
sql, params = self._build_search_statement( sql, params = self._build_search_statement(
@@ -510,21 +491,8 @@ class TagFTSIndex:
limit=limit, limit=limit,
offset=offset, offset=offset,
) )
logger.info(
"LM tag FTS search executing_sql query=%r query_len=%s category_count=%s",
query,
len(query_lower),
len(categories) if categories else 0,
)
cursor = conn.execute(sql, params) cursor = conn.execute(sql, params)
logger.info("LM tag FTS search execute_returned query=%r", query)
rows = cursor.fetchall() rows = cursor.fetchall()
logger.info(
"LM tag FTS search fetchall_returned query=%r row_count=%s",
query,
len(rows),
)
results = [] results = []
for row in rows: for row in rows:
result = { result = {
@@ -548,13 +516,6 @@ class TagFTSIndex:
result["matched_alias"] = matched_alias result["matched_alias"] = matched_alias
results.append(result) results.append(result)
elapsed_ms = (time.perf_counter() - search_started_at) * 1000
logger.info(
"LM tag FTS search done query=%r result_count=%s elapsed_ms=%.2f",
query,
len(results),
elapsed_ms,
)
return results return results
finally: finally:
conn.close() conn.close()