From 05c62d86e3e87b0946707c9dd13d8808b965cffd Mon Sep 17 00:00:00 2001
From: Eunhak Lee <lee@enak.kr>
Date: Mon, 9 Dec 2024 13:00:53 +0900
Subject: [PATCH] feat: enhance logging

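Replace ad-hoc print statements with the standard logging module. Records
are written to stdout and to /var/log/hwinfo.log (falling back to
./hwinfo.log in the working directory when the system path is not
writable), prefixed with the hostname, timestamp, level, and source
location. The finalize-transaction SQL and the traceback of any uncaught
exception are also logged.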
---
 push_to_db.py | 77 ++++++++++++++++++++++++++++++++++-----------------
 1 file changed, 51 insertions(+), 26 deletions(-)

diff --git a/push_to_db.py b/push_to_db.py
index a7e68e4..7aebcf6 100644
--- a/push_to_db.py
+++ b/push_to_db.py
@@ -6,6 +6,8 @@ import re
 import sys
 import time
 import redis
+import socket
+import logging
 import psycopg2
 import traceback
 
@@ -14,6 +16,28 @@ import utils
 
 from datetime import datetime, timedelta
 
+logger = logging.getLogger()
+logger.setLevel(logging.DEBUG)
+formatter = logging.Formatter('[' + socket.gethostname() + '] - [%(asctime)s] -- %(levelname)s -- %(filename)s:%(lineno)d -- %(message)s')
+
+stdoutHandler = logging.StreamHandler(sys.stdout)
+stdoutHandler.setFormatter(formatter)
+stdoutHandler.setLevel(logging.DEBUG)
+logger.addHandler(stdoutHandler)
+
+fileHandler = None
+try:
+    fileHandler = logging.FileHandler("/var/log/hwinfo.log", encoding="UTF-8")
+    print("FIle out to /var/log/hwinfo.log")
+except:
+    fileHandler = logging.FileHandler("./hwinfo.log", encoding="UTF-8")
+    print("FIle out to", os.getcwd())
+finally:
+    if fileHandler:
+        fileHandler.setLevel(logging.DEBUG)
+        fileHandler.setFormatter(formatter)
+        logger.addHandler(fileHandler)
+
 
 def check_similarity(_conn, name, part_type):
     _cursor = _conn.cursor()
@@ -23,9 +47,6 @@ def check_similarity(_conn, name, part_type):
     _cursor.execute("WITH comp AS (SELECT id, name, similarity(name::text, %s::text) as sim FROM parts " + where_clause + ") SELECT * FROM comp WHERE sim >=%s ORDER BY sim DESC LIMIT 5;", (name, _auto_threshold,))
 
     data = _cursor.fetchall()
-    # for row in (data or []):
-    #     print(row)
-
     return data[:1] if data else None
 
 
@@ -122,7 +143,9 @@ def match_part_obj_into_db(_part, part_type):
 
 
 def finalize_transaction(_cursor, _transaction_id, _user_id, _combination_id):
-    _cursor.execute("UPDATE transactions SET combination_id=%s WHERE id=%s and user_id=%s;", (_combination_id, _transaction_id, _user_id,))
+    sql = _cursor.mogrify("UPDATE transactions SET combination_id=%s WHERE id=%s and user_id=%s;", (_combination_id, _transaction_id, _user_id,))
+    logger.debug(sql)
+    _cursor.execute(sql)
 
 
 def func():
@@ -131,14 +154,14 @@ def func():
         return
 
     code = value.decode()
-    print("Processing code:", code)
+    logger.info("Processing code: {}".format(code))
 
     _user_id = R.get(f"mypc:code:{code}:user_id")
     _transaction_id = R.get(f"mypc:code:{code}:transaction_id")
     _xml_document = R.get(f"mypc:code:{code}:document")
-    print("(User ID (raw):", _user_id)
-    print("Transaction ID: (raw)", _transaction_id)
-    print("XML starts with (raw):", _xml_document)
+    logger.info("(User ID (raw): {}".format(_user_id))
+    logger.info("Transaction ID (raw): {}".format(_transaction_id))
+    logger.info("XML starts with (raw): {}".format(_xml_document[:50]))
 
     assert _user_id is not None, "userId must not be null"
     assert _transaction_id is not None, "transactionId must not be null"
@@ -148,15 +171,15 @@ def func():
     _transaction_id = _transaction_id.decode()
     _xml_document = _xml_document.decode()
 
-    print("User ID:", _user_id)
-    print("Transaction ID:", _transaction_id)
-    print("XML starts with:", repr(_xml_document[:50]))
+    logger.info("User ID: {}".format(_user_id))
+    logger.info("Transaction ID: {}".format(_transaction_id))
+    logger.info("XML starts with: {}".format(repr(_xml_document[:50])))
 
     _parts = parse.handle_xml_body(_xml_document)
     _part_ids = []
     _created_parts = False
 
-    print("Matching parts")
+    logger.info("Matching parts")
 
     for part_type, parts in _parts.items():  # iterate over part types
         for part in parts:  # iterate over parts
@@ -164,7 +187,7 @@ def func():
                 disk_part_type = (part.pop("type", None) or "not_set").upper()
 
                 if disk_part_type not in ("SSD", "HDD",):
-                    print("[WARNING] not supported disk type", disk_part_type, file=sys.stderr)
+                    logger.warning("unsupported disk type={}".format(disk_part_type))
                     continue
 
                 part_type = disk_part_type
@@ -173,7 +196,7 @@ def func():
 
             if not _matched_id:  # if the part is not in the DB
                 _created_parts = True
-                print(".. creating new part with", part["name"])
+                logger.info(".. creating new part with {}".format(part["name"]))
                 with db_conn.cursor() as cursor:
                     _matched_id = create_new_one(part, part_type, cursor)
                     db_conn.commit()
@@ -182,44 +205,44 @@ def func():
                 _part_ids.append(_matched_id,)
                 continue
 
-            print(f"[WARNING] failed to match part type={part_type}, part:", part, file=sys.stderr)
+            logger.warn(f"[WARNING] failed to match part type={part_type}, part:{part}")
 
 
     _combination_id = None
     # the combination already exists
     if not _created_parts:
-        print("Checking combination existance")
+        logger.info("Checking combination existance")
         with db_conn.cursor() as cursor:
             _combination_id = check_if_parts_exist(cursor, _user_id, _part_ids)
 
             if _combination_id is not None:
-                print("Combination exist. Using combination id", _combination_id)
+                logger.info("Combination exist. Using combination id={}".format(_combination_id))
 
     # creating a new combination
     if _combination_id is not None:
-        print("Creating combination")
+        logger.info("Creating combination")
         with db_conn.cursor() as cursor:
             _combination_id = create_combination(cursor, None, _user_id)
 
         if not _combination_id:
-            print("Failed to create combination", file=sys.stderr)
+            logger.warn("Failed to create combination")
             exit(1)
 
-        print("Wiring parts", _part_ids, "to combination id", _combination_id)
+        logger.info(f"Wiring parts {repr(_part_ids)} to combination id={_combination_id}")
         with db_conn.cursor() as cursor:
             wire_part_to_combination(cursor, _combination_id, *_part_ids)
 
-        print("Committing to DB")
+        logger.info("Committing to DB")
         db_conn.commit()
 
-    print("Finalizing transaction")
+    logger.info("Finalizing transaction")
     with db_conn.cursor() as cursor:
         finalize_transaction(cursor, _transaction_id, _user_id, _combination_id)
 
-    print("Committing to DB")
+    logger.info("Committing to DB")
     db_conn.commit()
 
-    print("Removing anchor key from Redis")
+    logger.info("Removing anchor key from Redis")
     R.delete(f"mypc:code:{code}:document")
 
 
@@ -227,6 +250,7 @@ if __name__ == "__main__":
     _first_run = True
 
     global R
+    logger.debug("Opening redis connection")
     R = utils.env_connect_redis()
     if not R:
         exit(1)
@@ -237,7 +261,7 @@ if __name__ == "__main__":
         exit(1)
 
     _next_run_delta = timedelta(seconds=int(os.getenv("PARSE_DAEMON_REFRESH_INTERVAL", 5)))
-    print("Check Redis as interval", _next_run_delta)
+    logger.info("Check Redis as interval {}".format(_next_run_delta))
 
     last_run = datetime.now()
     last_run -= timedelta(microseconds=last_run.microsecond)
@@ -256,9 +280,10 @@ if __name__ == "__main__":
     except KeyboardInterrupt:
         exit(0)
     except Exception as e:
+        logger.error(traceback.format_exc())
         raise e
     finally:
-        print("Closing connections")
+        logger.info("Closing connections")
         R.close()
         db_conn.close()
 
-- 
GitLab