fix: script-relative config/log paths and previous_scans duplication bug

Configs and logs are now opened through paths anchored to each module's own
directory (via dirname(__file__)) instead of the current working directory,
so the script can be started from anywhere, which makes debugging easier.
Also fixed a bug where the entries in scans.json were duplicated and their
amounts multiplied every time items were scanned.
Lunaresk 2021-12-20 11:58:45 +01:00
parent 67e726f95e
commit 821b8161df
6 changed files with 52 additions and 41 deletions
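Every Python module in this commit applies the same pattern for the path fix: instead of opening config.yaml and ../logs/*.log relative to the current working directory, each module anchors those paths to its own location on disk. A minimal standalone sketch of that pattern, with an illustrative example.log name (not a file from this repository):

from os import makedirs
from os.path import dirname, exists
import logging

# Anchor all paths to the directory containing this file, not to
# whatever directory the interpreter was started from.
DIR = dirname(__file__) + "/"

# The log directory sits one level above the source files.
if not exists(DIR + "../logs"):
    makedirs(DIR + "../logs")

LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.FileHandler(DIR + "../logs/example.log"))

# Config files are resolved the same way.
if exists(DIR + "config.yaml"):
    with open(DIR + "config.yaml", "r") as file:
        raw = file.read()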

.gitignore

@@ -130,3 +130,4 @@ dmypy.json
 # Misc
 config.yaml
+scans.json


@@ -1,14 +1,17 @@
+from os.path import dirname
 from requests import get, put, post, delete
 from yaml import safe_load
 import logging
+DIR = dirname(__file__) + "/"
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 logFormatter = logging.Formatter(
     "%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")
-fileHandler = logging.FileHandler("../logs/connection.log")
+fileHandler = logging.FileHandler(DIR + "../logs/connection.log")
 fileHandler.setFormatter(logFormatter)
 fileHandler.setLevel(logging.INFO)
 LOGGER.addHandler(fileHandler)
@@ -18,7 +21,7 @@ consoleHandler.setLevel(logging.DEBUG)
 LOGGER.addHandler(consoleHandler)
-with open("config.yaml", 'r') as file:
+with open(DIR + "config.yaml", 'r') as file:
     data = safe_load(file)['server']
 SERVER = data['host']
 PORT = data['port']


@@ -1,8 +1,11 @@
+from os.path import dirname
 from yaml import safe_load
+DIR = dirname(__file__) + "/"
 class Barcode_CodeID:
-    with open("config.yaml", 'r') as file:
+    with open(DIR + "config.yaml", 'r') as file:
         data = safe_load(file)['options']
     CODEID_POS = data['barcode']['codeid']['position'] if data and 'barcode' in data and 'codeid' in data['barcode'] and 'position' in data['barcode']['codeid'] else None
     CODE128 = data['barcode']['codeid']['Code128'] if data and 'barcode' in data and 'codeid' in data['barcode'] and 'Code128' in data['barcode']['codeid'] else "A"
@@ -11,7 +14,7 @@ class Barcode_CodeID:
     del(data)
 class Offline_Login:
-    with open("config.yaml", 'r') as file:
+    with open(DIR + "config.yaml", 'r') as file:
         data = safe_load(file)['options']
     OFFLINE_LOGIN = data['users'] if "users" in data else ""
     del(data)


@@ -3,7 +3,7 @@ from copy import deepcopy
 from datetime import date
 from json import dump as jdump, load as jload
 from os import makedirs, remove
-from os.path import exists
+from os.path import exists, dirname
 from select import select as timedInput
 from sys import stdin
 from yaml import safe_load
@@ -11,15 +11,17 @@ import connection
 import logging
-if not exists("../logs"):
-    makedirs("../logs")
+DIR = dirname(__file__) + "/"
+if not exists(DIR + "../logs"):
+    makedirs(DIR + "../logs")
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 logFormatter = logging.Formatter(
     "%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")
-fileHandler = logging.FileHandler("../logs/client.log")
+fileHandler = logging.FileHandler(DIR + "../logs/client.log")
 fileHandler.setFormatter(logFormatter)
 fileHandler.setLevel(logging.INFO)
 LOGGER.addHandler(fileHandler)
@@ -28,7 +30,7 @@ consoleHandler = logging.StreamHandler()
 consoleHandler.setLevel(logging.DEBUG)
 LOGGER.addHandler(consoleHandler)
-TEMPFILE = "scans.json"
+TEMPFILE = DIR + "scans.json"
 TIMEOUT = 60 # Number of seconds for a timeout after being logged in
 SET_AMOUNTS = ["1", "2", "5", "10"]
@@ -39,8 +41,6 @@ def main() -> None:
         user = login()
         if not user:
             continue
-        if user == "quit":
-            break
         LOGGER.debug("Login successful")
         scanning(user)
@@ -84,22 +84,18 @@ def delete(scanned: list[dict[int: int]]):
     except ValueError as e:
         LOGGER.exception("")
-def group_previous_scans(previous_scans: list):
-    newscans = []
-    for scan in previous_scans:
-        found = False
-        for newscan in newscans:
-            if newscan['date'] == scan['date'] and newscan['user'] == scan['user']:
-                for key, value in scan['items'].items():
-                    if key in newscan['items']:
-                        newscan['items'][key] += value
-                    else:
-                        newscan['items'][key] = value
-                found = True
-                break
-        if not found:
-            newscans.append(deepcopy(scan))
-    return newscans
+def group_previous_scans(previous_scans: list[dict[any: any]]):
+    for i in range(len(previous_scans))[::-1]:
+        for j in range(len(previous_scans[i:])-1):
+            j = i+j+1
+            if previous_scans[i]['date'] == previous_scans[j]['date'] and previous_scans[i]['user'] == previous_scans[j]['user']:
+                for key, value in previous_scans[i]['items'].items():
+                    if key in previous_scans[j]['items']:
+                        previous_scans[j]['items'][key] += value
+                    else:
+                        previous_scans[j]['items'][key] = value
+                del(previous_scans[i])
+                break
 def group_scanning(scanned: list[dict[int: int]]) -> dict[int: int]:
     scan_dict = {}
@@ -137,7 +133,7 @@ def scanning(user: str) -> dict[int: int]:
         if not i:
             break # send a short timeout message before break
         scan = stdin.readline().strip()
-        codeid, scan = split_codeid(scan, "A")
+        codeid, scan = split_codeid(scan, Barcode_CodeID.EAN8)
         match codeid:
             case Barcode_CodeID.EAN8:
                 scanned.append({scan: amount})
@@ -169,17 +165,21 @@
     scanned = group_scanning(scanned)
     send_scan(user, scanned)
-def send_scan(user: str, scanned: dict[int: int], previous_scans: list[dict] = []):
+def send_scan(user: str, scanned: dict[int: int], previous_scans: list[dict[any: any]] = None):
+    LOGGER.debug(previous_scans)
+    if not previous_scans:
+        previous_scans = []
     if exists(TEMPFILE):
         with open(TEMPFILE, "r") as file:
             previous_scans.extend(jload(file))
-    result = connection.send_scan(user, scanned)
-    if result != True:
-        result['date'] = str(date.today())
-        previous_scans.append(result)
+    if scanned:
+        result = connection.send_scan(user, scanned)
+        if result != True:
+            result['date'] = str(date.today())
+            previous_scans.append(result)
     if previous_scans:
-        previous_scans = group_previous_scans(previous_scans)
-        for bought in list(previous_scans):
+        group_previous_scans(previous_scans)
+        for bought in previous_scans:
             result = connection.send_scan(bought['user'], bought['items'], bought['date'])
             previous_scans.remove(bought)
             if result != True:
@@ -189,7 +189,6 @@ def send_scan(user: str, scanned: dict[int: int], previous_scans: list[dict] = []):
             jdump(previous_scans, file)
     elif exists(TEMPFILE): # if no scans remain, delete the json
         remove(TEMPFILE)
-    LOGGER.info(previous_scans)
 def split_codeid(scan: str, default_codeid: str = ""):
     match Barcode_CodeID.CODEID_POS:
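The scans.json duplication fixed in the diff above is consistent with Python's shared-mutable-default pitfall that the old send_scan signature carried: previous_scans: list[dict] = [] builds the default list once, at definition time, so every later call that omits the argument keeps appending to, and re-reading scans.json into, that same list. A small self-contained sketch of the pitfall and of the None-default pattern the new code switches to (function and argument names here are illustrative, not part of the project):

def send_buggy(entry, accumulated=[]):
    # The default list is created once and shared by every call
    # that omits the argument, so state leaks between calls.
    accumulated.append(entry)
    return accumulated

def send_fixed(entry, accumulated=None):
    # Mirrors the new send_scan: fall back to a fresh list per call.
    if not accumulated:
        accumulated = []
    accumulated.append(entry)
    return accumulated

if __name__ == "__main__":
    print(send_buggy("a"))  # ['a']
    print(send_buggy("b"))  # ['a', 'b']  leftover state from the first call
    print(send_fixed("a"))  # ['a']
    print(send_fixed("b"))  # ['b']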


@@ -1,14 +1,17 @@
+from os.path import dirname
 from psycopg2 import connect as psyconn, ProgrammingError, errors
 from yaml import safe_load
 import logging
+DIR = dirname(__file__) + "/"
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 logFormatter = logging.Formatter(
     "%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")
-fileHandler = logging.FileHandler("../logs/database.log")
+fileHandler = logging.FileHandler(DIR + "../logs/database.log")
 fileHandler.setFormatter(logFormatter)
 fileHandler.setLevel(logging.INFO)
 LOGGER.addHandler(fileHandler)
@@ -24,7 +27,7 @@ class Database:
         pass
     def connect(self, **kwargs):
-        with open('config.yaml', 'r') as file:
+        with open(DIR + 'config.yaml', 'r') as file:
             data = safe_load(file)['database']
         LOGGER.debug('Merging passed arguments with default arguments.')
         for key, value in data.items():


@@ -3,19 +3,21 @@ from flask import Flask, abort, request
 from flask.json import jsonify
 from gevent.pywsgi import WSGIServer
 from os import makedirs
-from os.path import exists
+from os.path import exists, dirname
 import logging
-if not exists("../logs"):
-    makedirs("../logs")
+DIR = dirname(__file__) + "/"
+if not exists(DIR + "../logs"):
+    makedirs(DIR + "../logs")
 LOGGER = logging.getLogger(__name__)
 LOGGER.setLevel(logging.DEBUG)
 logFormatter = logging.Formatter(
     "%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")
-fileHandler = logging.FileHandler("../logs/server.log")
+fileHandler = logging.FileHandler(DIR + "../logs/server.log")
 fileHandler.setFormatter(logFormatter)
 fileHandler.setLevel(logging.INFO)
 LOGGER.addHandler(fileHandler)