Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Target project: meanspec/meanspec-hwinfo-daemon

Commits on Source (3)
@@ -6,6 +6,8 @@ import re
import sys
import time
import redis
import socket
import logging
import psycopg2
import traceback
@@ -14,18 +16,39 @@ import utils
from datetime import datetime, timedelta
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('[' + socket.gethostname() + '] - [%(asctime)s] -- %(levelname)s -- %(filename)s:%(lineno)d -- %(message)s')
stdoutHandler = logging.StreamHandler()
stdoutHandler.setFormatter(formatter)
stdoutHandler.setLevel(logging.DEBUG)
logger.addHandler(stdoutHandler)
fileHandler = None
try:
fileHandler = logging.FileHandler("/var/log/hwinfo.log", encoding="UTF-8")
print("FIle out to /var/log/hwinfo.log")
except:
fileHandler = logging.FileHandler("./hwinfo.log", encoding="UTF-8")
print("FIle out to", os.getcwd())
finally:
if fileHandler:
fileHandler.setLevel(logging.DEBUG)
fileHandler.setFormatter(formatter)
logger.addHandler(fileHandler)
def check_similarity(_conn, name, part_type):
_cursor = _conn.cursor()
where_clause = "WHERE type = '{}'".format(part_type.upper()) if part_type else ""
_auto_threshold = 1.0 if part_type in ('RAM', 'SSD', 'HDD',) else 0.5
_cursor.execute("WITH comp AS (SELECT id, name, similarity(name::text, %s::text) as sim FROM parts " + where_clause + ") SELECT * FROM comp WHERE sim >=%s ORDER BY sim DESC LIMIT 5;", (name, _auto_threshold,))
sql = _cursor.mogrify("WITH comp AS (SELECT id, name, similarity(name::text, %s::text) as sim FROM parts " + where_clause + ") SELECT * FROM comp WHERE sim >=%s ORDER BY sim DESC LIMIT 5;", (name, _auto_threshold,))
logger.debug(sql)
_cursor.execute(sql)
data = _cursor.fetchall()
# for row in (data or []):
# print(row)
return data[:1] if data else None
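The similarity() call above is provided by PostgreSQL's pg_trgm extension, so the query only works against a database where that extension is installed. A minimal sketch for verifying it on the same connection (assumes sufficient privileges; sample strings are illustrative):

with db_conn.cursor() as cursor:
    # Enable trigram matching once per database, then sanity-check the function.
    cursor.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm;")
    cursor.execute("SELECT similarity('GeForce RTX 3070'::text, 'RTX 3070'::text);")
    print(cursor.fetchone())  # a single float in [0, 1]; higher means a closer match
db_conn.commit()

The _auto_threshold of 1.0 for RAM, SSD and HDD effectively demands an exact trigram match for those types, while other part types accept anything at or above 0.5.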
@@ -65,7 +88,9 @@ def create_new_one(_dict, part_type, _cursor):
INSERT INTO part_info_{type} (part_id, {columns})
(SELECT rows.id, {value_entries} FROM rows)
RETURNING part_id;""".format(type=_part_type_enum, columns=columns, value_entries=", ".join(["%s"] * len(values)))
_cursor.execute(sql, (name, _part_type_enum, "/static/img/placeholder/{}.png".format(part_type.lower()), *values))
sql = _cursor.mogrify(sql, (name, _part_type_enum, "/static/img/placeholder/{}.png".format(part_type.lower()), *values))
logger.debug(sql)
_cursor.execute(sql)
data = _cursor.fetchall()
if data:
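The mogrify / logger.debug / execute triplet now appears in every database helper; a small wrapper expressing the same pattern once is sketched below (a hypothetical helper, not part of this codebase; the name log_and_execute is invented for illustration):

def log_and_execute(cursor, query, params=None):
    # Render the statement with its bound parameters for the debug log,
    # then execute that same rendered statement (parameters are still
    # escaped by mogrify, so this mirrors the behaviour above).
    sql = cursor.mogrify(query, params)
    logger.debug(sql)
    cursor.execute(sql)
    return cursor

Executing the pre-rendered bytes keeps the logged SQL byte-for-byte identical to what actually runs.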
@@ -77,7 +102,9 @@ def check_if_parts_exist(_cursor, _user_id, _part_ids):
sql = """WITH result AS (SELECT combinations.id AS combination_id,
ARRAY(SELECT part_id FROM relations WHERE relations.combination_id = combinations.id ORDER BY relations.part_id) AS parts
FROM combinations WHERE combinations.owner_id = %s) SELECT combination_id FROM result WHERE parts = %s;"""
_cursor.execute(sql, (_user_id, _part_ids))
sql = _cursor.mogrify(sql, (_user_id, _part_ids))
logger.debug(sql)
_cursor.execute(sql)
data = _cursor.fetchall()
if not data:
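The stored parts array in that query is built with ORDER BY relations.part_id, so the equality against %s only holds when the candidate list arrives in the same order; psycopg2 adapts a Python list to a PostgreSQL array for the comparison. A hedged usage sketch (sorted() added for illustration, ids invented):

with db_conn.cursor() as cursor:
    # Sort the candidate ids so they line up with the ORDER BY in the query.
    existing = check_if_parts_exist(cursor, _user_id, sorted([203, 101, 157]))
    if existing is not None:
        logger.info("Reusing combination id={}".format(existing))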
@@ -92,7 +119,9 @@ def create_combination(_cursor, nickname, _user_id):
if nickname:
_pc_name = "" + str(nickname) + "의 PC"
_cursor.execute("INSERT INTO combinations (owner_id, name) VALUES (%s, %s) RETURNING id;", (_user_id, _pc_name,))
sql = _cursor.mogrify("INSERT INTO combinations (owner_id, name) VALUES (%s, %s) RETURNING id;", (_user_id, _pc_name,))
logger.debug(sql)
_cursor.execute(sql)
data = _cursor.fetchall()
if data:
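create_combination names the new build "<nickname>의 PC" ("<nickname>'s PC") when a nickname is given; otherwise the fallback name assigned earlier in the function (outside this hunk) is kept. A quick usage sketch with an illustrative nickname:

with db_conn.cursor() as cursor:
    # "철수" -> a combination named "철수의 PC"; passing None keeps the default name.
    combination_id = create_combination(cursor, "철수", _user_id)
db_conn.commit()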
@@ -107,7 +136,9 @@ def wire_part_to_combination(_cursor, _combination_id, *_part_ids):
values.append(_combination_id)
values.append(_part_id)
_cursor.execute("INSERT INTO relations (combination_id, part_id) VALUES " + ", ".join(["(%s, %s)" for _ in _part_ids]) + "RETURNING id, part_id", values)
sql = _cursor.mogrify("INSERT INTO relations (combination_id, part_id) VALUES " + ", ".join(["(%s, %s)" for _ in _part_ids]) + "RETURNING id, part_id", values)
logger.debug(sql)
_cursor.execute(sql)
return _cursor.fetchall()
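For two part ids the statement above mogrifies to roughly the following (ids illustrative); note that the string concatenation leaves no space before RETURNING, which PostgreSQL's lexer accepts after a closing parenthesis, though an explicit space would read better in the debug log:

INSERT INTO relations (combination_id, part_id) VALUES (7, 101), (7, 102)RETURNING id, part_id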
@@ -122,7 +153,9 @@ def match_part_obj_into_db(_part, part_type):
def finalize_transaction(_cursor, _transaction_id, _user_id, _combination_id):
_cursor.execute("UPDATE transactions SET combination_id=%s WHERE id=%s and user_id=%s;", (_combination_id, _transaction_id, _user_id,))
sql = _cursor.mogrify("UPDATE transactions SET combination_id=%s WHERE id=%s and user_id=%s;", (_combination_id, _transaction_id, _user_id,))
logger.debug(sql)
_cursor.execute(sql)
def func():
@@ -131,14 +164,14 @@ def func():
return
code = value.decode()
print("Processing code:", code)
logger.info("Processing code: {}".format(code))
_user_id = R.get(f"mypc:code:{code}:user_id")
_transaction_id = R.get(f"mypc:code:{code}:transaction_id")
_xml_document = R.get(f"mypc:code:{code}:document")
print("(User ID (raw):", _user_id)
print("Transaction ID: (raw)", _transaction_id)
print("XML starts with (raw):", _xml_document)
logger.info("(User ID (raw): {}".format(_user_id))
logger.info("Transaction ID (raw): {}".format(_transaction_id))
logger.info("XML starts with (raw): {}".format(_xml_document[:50]))
assert _user_id is not None, "userId must not be null"
assert _transaction_id is not None, "transactionId must not be null"
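The daemon reads three Redis keys per upload code: mypc:code:{code}:user_id, :transaction_id and :document. The key the loop pops the code itself from sits outside this diff. A hypothetical snippet for seeding the three keys when exercising the daemon locally (host, code and values are illustrative):

import redis

r = redis.Redis()  # assumes a local Redis on the default port
code = "TESTCODE"
r.set(f"mypc:code:{code}:user_id", "42")
r.set(f"mypc:code:{code}:transaction_id", "1001")
r.set(f"mypc:code:{code}:document", open("sample_hwinfo.xml", "rb").read())  # exported hardware XML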
@@ -148,15 +181,15 @@ def func():
_transaction_id = _transaction_id.decode()
_xml_document = _xml_document.decode()
print("User ID:", _user_id)
print("Transaction ID:", _transaction_id)
print("XML starts with:", repr(_xml_document[:50]))
logger.info("User ID: {}".format(_user_id))
logger.info("Transaction ID: {}".format(_transaction_id))
logger.info("XML starts with: {}".format(repr(_xml_document[:50])))
_parts = parse.handle_xml_body(_xml_document)
_part_ids = []
_created_parts = False
print("Matching parts")
logger.info("Matching parts")
for part_type, parts in _parts.items(): # iterate over each part type
for part in parts: # iterate over each part
@@ -164,7 +197,7 @@ def func():
disk_part_type = (part.pop("type", None) or "not_set").upper()
if disk_part_type not in ("SSD", "HDD",):
print("[WARNING] not supported disk type", disk_part_type, file=sys.stderr)
logger.warn("[WARNING] not supported disk type={}".format(disk_part_type))
continue
part_type = disk_part_type
@@ -173,7 +206,7 @@ def func():
if not _matched_id: # if the part is not in the DB yet
_created_parts = True
print(".. creating new part with", part["name"])
logger.info(".. creating new part with {}".format(part["name"]))
with db_conn.cursor() as cursor:
_matched_id = create_new_one(part, part_type, cursor)
db_conn.commit()
@@ -182,44 +215,44 @@ def func():
_part_ids.append(_matched_id,)
continue
print(f"[WARNING] failed to match part type={part_type}, part:", part, file=sys.stderr)
logger.warn(f"[WARNING] failed to match part type={part_type}, part:{part}")
_combination_id = None
# case: the combination already exists
if not _created_parts:
print("Checking combination existance")
logger.info("Checking combination existance")
with db_conn.cursor() as cursor:
_combination_id = check_if_parts_exist(cursor, _user_id, _part_ids)
if _combination_id is not None:
print("Combination exist. Using combination id", _combination_id)
logger.info("Combination exist. Using combination id={}".format(_combination_id))
# case: a new combination must be created
if _combination_id is not None:
print("Creating combination")
if _combination_id is None:
logger.info("Creating combination")
with db_conn.cursor() as cursor:
_combination_id = create_combination(cursor, None, _user_id)
if not _combination_id:
print("Failed to create combination", file=sys.stderr)
logger.warn("Failed to create combination")
exit(1)
print("Wiring parts", _part_ids, "to combination id", _combination_id)
logger.info(f"Wiring parts {repr(_part_ids)} to combination id={_combination_id}")
with db_conn.cursor() as cursor:
wire_part_to_combination(cursor, _combination_id, *_part_ids)
print("Committing to DB")
logger.info("Committing to DB")
db_conn.commit()
print("Finalizing transaction")
logger.info("Finalizing transaction")
with db_conn.cursor() as cursor:
finalize_transaction(cursor, _transaction_id, _user_id, _combination_id)
print("Committing to DB")
logger.info("Committing to DB")
db_conn.commit()
print("Removing anchor key from Redis")
logger.info("Removing anchor key from Redis")
R.delete(f"mypc:code:{code}:document")
@@ -227,6 +260,7 @@ if __name__ == "__main__":
_first_run = True
global R
logger.debug("Opening redis connection")
R = utils.env_connect_redis()
if not R:
exit(1)
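utils.env_connect_redis is defined elsewhere in this repository; the sketch below is only a plausible shape for an environment-driven connector of that kind, not the project's actual helper:

import os
import redis

def env_connect_redis():
    # Illustrative only: connect from environment variables and verify with a ping.
    try:
        client = redis.Redis(host=os.getenv("REDIS_HOST", "localhost"),
                             port=int(os.getenv("REDIS_PORT", "6379")),
                             db=int(os.getenv("REDIS_DB", "0")))
        client.ping()
        return client
    except redis.RedisError:
        return None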
@@ -237,7 +271,7 @@ if __name__ == "__main__":
exit(1)
_next_run_delta = timedelta(seconds=int(os.getenv("PARSE_DAEMON_REFRESH_INTERVAL", 5)))
print("Check Redis as interval", _next_run_delta)
logger.info("Check Redis as interval {}".format(_next_run_delta))
last_run = datetime.now()
last_run -= timedelta(microseconds=last_run.microsecond)
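PARSE_DAEMON_REFRESH_INTERVAL (seconds, default 5) sets how often the loop re-checks Redis. The loop body itself is outside this diff; its interval handling presumably resembles the sketch below (purely illustrative):

# Hypothetical outline of the polling loop; the real body is not shown in this diff.
while True:
    now = datetime.now()
    if now - last_run >= _next_run_delta:
        last_run = now - timedelta(microseconds=now.microsecond)
        func()
    time.sleep(1)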
@@ -256,9 +290,10 @@ if __name__ == "__main__":
except KeyboardInterrupt:
exit(0)
except Exception as e:
logger.warn(traceback.format_exc())
raise e
finally:
print("Closing connections")
logger.info("Closing connections")
R.close()
db_conn.close()