major: new repo structure

Moved the client and server into separate repositories.

This commit is contained in:
parent 971ca19d79
commit e73cd46611
.gitignore (vendored, 2 changes)

@@ -104,6 +104,7 @@ celerybeat.pid
# Environments
.env
.venv
.flaskenv
env/
venv/
ENV/
@@ -132,3 +133,4 @@ dmypy.json
config.yaml
scans.json
test.*
app.db
app/__init__.py (new file, 13 lines)

@@ -0,0 +1,13 @@
from flask import Flask
from flask_login import LoginManager
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from yaml import safe_load

app = Flask(__name__)
app.config.from_file("config.yaml", safe_load)
db = SQLAlchemy(app)
migrate = Migrate(app, db, render_as_batch=True)
login = LoginManager(app)

from app import views, models
app/forms.py (new file, 29 lines)

@@ -0,0 +1,29 @@
from app.models import Brand, Category
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SubmitField, SelectMultipleField, DateField, IntegerField, SelectField, FloatField
from wtforms.validators import DataRequired

class LoginForm(FlaskForm):
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Remember Me')
    submit = SubmitField('Sign In')

class NewItemForm(FlaskForm):
    id = IntegerField("Product EAN", validators=[DataRequired()])
    name = StringField("Name", validators=[DataRequired()])
    description = StringField("Description", validators=[DataRequired()])
    date = DateField("Insert Date", validators=[DataRequired()])
    price_change = FloatField("Price", validators=[DataRequired()])
    amount_change = IntegerField("Amount", validators=[DataRequired()])
    category = SelectMultipleField("Categories", choices=[(c.id, c.name) for c in Category.query.order_by("name").all()], validators=[DataRequired()])
    brand = SelectField("Brand", choices=[(b.id, b.name) for b in Brand.query.order_by("name").all()], validators=[DataRequired()])
    submit = SubmitField("Submit")

class NewCategoryForm(FlaskForm):
    name = StringField("Name", validators=[DataRequired()])
    submit = SubmitField("Submit")

class NewBrandForm(FlaskForm):
    name = StringField("Name", validators=[DataRequired()])
    submit = SubmitField("Submit")
app/models.py (new file, 84 lines)

@@ -0,0 +1,84 @@
from app import db, login
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash

item_category = db.Table("item_category",
    db.Column("item", db.ForeignKey("item.id"), primary_key=True),
    db.Column("category", db.ForeignKey("category.id"), primary_key=True)
)

class User(UserMixin, db.Model):
    id = db.Column(db.String(10), primary_key=True)
    name = db.Column(db.String(64))
    password_hash = db.Column(db.String(128))

    Bought = db.relationship("Bought", backref='User', lazy='dynamic')

    def set_password(self, password):
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        return check_password_hash(self.password_hash, password)

    def __repr__(self) -> str:
        return f"<User {self.id} ({self.name})>"

class Brand(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(32))

    def __repr__(self) -> str:
        return f"<Brand {self.id} ({self.name})>"

class Category(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(32))

    Item = db.relationship("Item", secondary=item_category, lazy="dynamic", back_populates="Category")

    def __repr__(self) -> str:
        return f"<Category {self.id} ({self.name})>"

class Item(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    brand = db.Column(db.ForeignKey('brand.id'))
    description = db.Column(db.Text)

    Category = db.relationship("Category", secondary=item_category, lazy="dynamic", back_populates="Item")

    product = db.relationship("Bought", backref='Item', lazy='dynamic')

    def __repr__(self) -> str:
        return f"<Item {self.id} ({self.name})>"

class Bought(db.Model):
    user = db.Column(db.ForeignKey('user.id'), primary_key=True)
    item = db.Column(db.ForeignKey('item.id'), primary_key=True)
    date = db.Column(db.Date)
    amount = db.Column(db.SmallInteger)



    def __repr__(self) -> str:
        return f"<Bought Object>"

class Price_Change(db.Model):
    item = db.Column(db.ForeignKey('item.id'), primary_key=True)
    date = db.Column(db.Date, primary_key=True)
    price = db.Column(db.SmallInteger)

    def __repr__(self) -> str:
        return f"<Price_Change {self.item} ({self.date})>"

class Amount_Change(db.Model):
    item = db.Column(db.ForeignKey('item.id'), primary_key=True)
    date = db.Column(db.Date, primary_key=True)
    Amount = db.Column(db.SmallInteger)

    def __repr__(self) -> str:
        return f"<Amount_Change {self.item} ({self.date})>"

@login.user_loader
def load_user(id):
    return User.query.get(int(id))
app/templates/base.html (new file, 43 lines)

@@ -0,0 +1,43 @@
<!doctype html>
<html>
    <head>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-1BmE4kWBq78iYhFldvKuhfTAU6auU8tT94WrHftjDbrCEXSU1oBoqyl2QvZ6jIW3" crossorigin="anonymous">
        <link rel="stylesheet" href="style.css">
    </head>
    <body>
        <nav class="navbar navbar-expand-lg navbar-light bg-light">
            <div class="container-fluid">
                <a class="navbar-brand" href="#">Navbar</a>
                <button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarSupportedContent" aria-controls="navbarSupportedContent" aria-expanded="false" aria-label="Toggle navigation">
                    <span class="navbar-toggler-icon"></span>
                </button>
                <div class="collapse navbar-collapse" id="navbarSupportedContent">
                    <ul class="navbar-nav me-auto mb-2 mb-lg-0">
                        <li class="nav-item">
                            <a class="nav-link active" aria-current="page" href="#">Home</a>
                        </li>
                        <li class="nav-item dropdown">
                            <a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-bs-toggle="dropdown" aria-expanded="false">
                                Overview
                            </a>
                            <ul class="dropdown-menu" aria-labelledby="navbarDropdown">
                                <li><a class="dropdown-item" href="#">Monthly</a></li>
                                <li><a class="dropdown-item" href="#">Yearly</a></li>
                                <li><hr class="dropdown-divider"></li>
                                <li><a class="dropdown-item" href="#">Full Overview</a></li>
                            </ul>
                        </li>
                    </ul>
                </div>
            </div>
        </nav>
        <div class="container">
            <div class="row-md-3">
                {% block content %}{% endblock %}
            </div>
        </div>
        <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p" crossorigin="anonymous"></script>
    </body>
</html>
@@ -1,5 +1,6 @@
from app import app
from app.database import Database
from app.forms import NewItemForm
from flask import abort, request, render_template
from flask.json import jsonify
from os import makedirs
@@ -35,6 +36,11 @@ APPNAME = "scan2kasse"
def index():
    return "<h1>Hello, World!</h>", 200

@app.route('/test')
def test():
    form = NewItemForm()
    return render_template("test.html", form=form)


@app.route(f'/{APPNAME}/login')
def login():
@@ -1,11 +0,0 @@
server:
  host: "http://localhost"
  port: "5000"

options:
  barcode:
    codeid:
      position: Null
      Code128: "A"
      EAN8: "C"
      EAN13: "D"
@@ -1,53 +0,0 @@
from os.path import dirname
from requests import get, put, post, delete
from yaml import safe_load
import logging


DIR = dirname(__file__) + "/"

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
logFormatter = logging.Formatter(
    "%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")

fileHandler = logging.FileHandler(DIR + "../logs/connection.log")
fileHandler.setFormatter(logFormatter)
fileHandler.setLevel(logging.INFO)
LOGGER.addHandler(fileHandler)

consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.DEBUG)
LOGGER.addHandler(consoleHandler)


with open(DIR + "config.yaml", 'r') as file:
    data = safe_load(file)['server']
    SERVER = data['host']
    PORT = data['port']
    del(data)


def check_login(login: str) -> bool:
    try:
        response = get(url=":".join([SERVER, str(PORT)]) + '/scan2kasse/login', json={'login': login}, timeout=1)
    except Exception as e:
        LOGGER.debug("Server not reachable.")
        return False
    else:
        if response.status_code == 200:
            return True
        return False


def send_scan(user: str, scanned: dict[int: int], date:str = None):
    infos = {'user': user, 'items': scanned}
    if date:
        infos['date'] = date
    try:
        response = post(url=":".join([SERVER, str(
            PORT)]) + '/scan2kasse/insert', json=infos, timeout=1)
        return True if response.status_code == 201 else response.json()
    except Exception as e:
        LOGGER.exception("")
        return infos
@@ -1,24 +0,0 @@
from os.path import dirname
from yaml import safe_load


DIR = dirname(__file__) + "/"

class Barcode_CodeID:
    with open(DIR + "config.yaml", 'r') as file:
        data = safe_load(file)['options']
        CODEID_POS = data['barcode']['codeid']['position'] if data and 'barcode' in data and 'codeid' in data['barcode'] and 'position' in data['barcode']['codeid'] else None
        CODE128 = data['barcode']['codeid']['Code128'] if data and 'barcode' in data and 'codeid' in data['barcode'] and 'Code128' in data['barcode']['codeid'] else "A"
        EAN8 = data['barcode']['codeid']['EAN8'] if data and 'barcode' in data and 'codeid' in data['barcode'] and 'EAN8' in data['barcode']['codeid'] else "C"
        EAN13 = data['barcode']['codeid']['EAN13'] if data and 'barcode' in data and 'codeid' in data['barcode'] and 'EAN13' in data['barcode']['codeid'] else "D"
        del(data)

class Offline_Login:
    with open(DIR + "config.yaml", 'r') as file:
        data = safe_load(file)['options']
        OFFLINE_LOGIN = data['users'] if "users" in data else ""
        del(data)

class Scan_Options:
    DELETE = "delete"
    LOGOUT = "logout"
@@ -1,203 +0,0 @@
from constants import Barcode_CodeID, Offline_Login, Scan_Options
from copy import deepcopy
from datetime import date
from json import dump as jdump, load as jload
from os import makedirs, remove
from os.path import dirname, exists
from select import select as timedInput
from sys import stdin
from yaml import safe_load
import connection
import logging


DIR = dirname(__file__) + "/"

if not exists(DIR + "../logs"):
    makedirs(DIR + "../logs")

LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
logFormatter = logging.Formatter(
    "%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")

fileHandler = logging.FileHandler(DIR + "../logs/client.log")
fileHandler.setFormatter(logFormatter)
fileHandler.setLevel(logging.INFO)
LOGGER.addHandler(fileHandler)

consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.DEBUG)
LOGGER.addHandler(consoleHandler)

TEMPFILE = DIR + "scans.json"

TIMEOUT = 60  # Number of seconds for a timeout after being logged in
SET_AMOUNTS = ["1", "2", "5", "10"]


def main() -> None:
    while True:
        user = login()
        if not user:
            continue
        LOGGER.debug("Login successful")
        scanning(user)

def delete(scanned: list[dict[int: int]]):
    amount = 1
    while True:
        i, _, _ = timedInput([stdin], [], [], TIMEOUT)
        if not i:
            return #TODO send a short timeout message before return
        scan = stdin.readline().strip()
        codeid, scan = split_codeid(scan, "")
        match codeid:
            case Barcode_CodeID.EAN8:
                try:
                    scanned.remove({scan: amount})
                except ValueError as e:
                    scanned.insert(0, {scan: -amount})
                    LOGGER.debug(f"Tried to delete {scan} with amount {amount}.")
                finally:
                    break
            case Barcode_CodeID.EAN13:
                try:
                    scanned.remove({scan: amount})
                except ValueError as e:
                    scanned.insert(0, {scan: -amount})
                    LOGGER.debug(f"Tried to delete {scan} with amount {amount}.")
                finally:
                    break
            case Barcode_CodeID.CODE128:
                match scan:
                    case Scan_Options.DELETE:
                        try:
                            scanned.pop()
                        except IndexError as e:
                            LOGGER.exception("")
                        finally:
                            break
                    case _:
                        try:
                            amount += int(scan)
                        except ValueError as e:
                            LOGGER.exception("")

def group_previous_scans(previous_scans: list[dict[any: any]]):
    for i in range(len(previous_scans))[::-1]:
        for j in range(len(previous_scans[i:])-1):
            j = i+j+1
            if previous_scans[i]['date'] == previous_scans[j]['date'] and previous_scans[i]['user'] == previous_scans[j]['user']:
                for key, value in previous_scans[i]['items'].items():
                    if key in previous_scans[j]['items']:
                        previous_scans[j]['items'][key] += value
                    else:
                        previous_scans[j]['items'][key] = value
                del(previous_scans[i])
                break

def group_scanning(scanned: list[dict[int: int]]) -> dict[int: int]:
    scan_dict = {}
    for scan in scanned:
        for key, value in scan.items():
            if key not in scan_dict:
                scan_dict[key] = value
            else:
                scan_dict[key] += value
    for key, value in scan_dict.items():
        if value <= 0:
            del(scan_dict[key])
    return scan_dict

def login(user: str = None):
    if not user:
        user = input("Enter Login: ")
        codeid, user = split_codeid(user, Barcode_CodeID.CODE128)
    else:
        codeid = Barcode_CodeID.CODE128
    if codeid != Barcode_CodeID.CODE128:
        return None
    if not connection.check_login(user):
        LOGGER.debug("Login failed")
        if not user in Offline_Login.OFFLINE_LOGIN:
            return None
        LOGGER.debug("Using local login")
    return user

def scanning(user: str) -> dict[int: int]:
    scan, scanned = "", []
    amount = 1
    while True:
        i, _, _ = timedInput([stdin], [], [], TIMEOUT)
        if not i:
            break # send a short timeout message before break
        scan = stdin.readline().strip()
        codeid, scan = split_codeid(scan, Barcode_CodeID.EAN8)
        match codeid:
            case Barcode_CodeID.EAN8:
                scanned.append({scan: amount})
                amount = 1
            case Barcode_CodeID.EAN13:
                scanned.append({scan: amount})
                amount = 1
            case Barcode_CodeID.CODE128:
                match scan:
                    case Scan_Options.LOGOUT:
                        break
                    case Scan_Options.DELETE:
                        delete(scanned)
                    case _:
                        try:
                            if scan in SET_AMOUNTS:
                                amount = int(scan)
                            else:
                                amount += int(scan)
                        except:
                            altuser = login(scan)
                            if not altuser:
                                continue
                            LOGGER.debug("Login successful")
                            scanning(altuser)
                            break
            case _:
                LOGGER.debug(f"Unknown barcode scanned: {codeid}_{scan}")
    scanned = group_scanning(scanned)
    send_scan(user, scanned)

def send_scan(user: str, scanned: dict[int: int], previous_scans: list[dict[any: any]] = None):
    if not previous_scans:
        previous_scans = []
    if exists(TEMPFILE):
        with open(TEMPFILE, "r") as file:
            previous_scans.extend(jload(file))
    if scanned:
        result = connection.send_scan(user, scanned)
        if result != True:
            result['date'] = str(date.today())
            previous_scans.append(result)
    if previous_scans:
        group_previous_scans(previous_scans)
        for bought in deepcopy(previous_scans):
            result = connection.send_scan(bought['user'], bought['items'], bought['date'])
            previous_scans.remove(bought)
            if result != True:
                previous_scans.append(result)
    if previous_scans: # if previous scans still present, save it
        with open(TEMPFILE, "w") as file:
            jdump(previous_scans, file)
    elif exists(TEMPFILE): # if no scans remain, delete the json
        remove(TEMPFILE)
    LOGGER.info(previous_scans)

def split_codeid(scan: str, default_codeid: str = ""):
    match Barcode_CodeID.CODEID_POS:
        case "prefix":
            return(scan[0], scan[1:])
        case "suffix":
            return(scan[-1], scan[:-1])
        case _:
            return(default_codeid, scan)

if __name__ == '__main__':
    main()
Binary file not shown.
migrations/README (new file, 1 line)

@@ -0,0 +1 @@
Single-database configuration for Flask.
migrations/alembic.ini (new file, 50 lines)

@@ -0,0 +1,50 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
migrations/env.py (new file, 91 lines)

@@ -0,0 +1,91 @@
from __future__ import with_statement

import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option(
    'sqlalchemy.url',
    str(current_app.extensions['migrate'].db.get_engine().url).replace(
        '%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []
                logger.info('No changes in schema detected.')

    connectable = current_app.extensions['migrate'].db.get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            process_revision_directives=process_revision_directives,
            **current_app.extensions['migrate'].configure_args
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
migrations/script.py.mako (new file, 24 lines)

@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
migrations/versions/97738dc497a8_full_structure.py (new file, 64 lines)

@@ -0,0 +1,64 @@
"""full structure

Revision ID: 97738dc497a8
Revises: cec6bb222997
Create Date: 2022-02-01 01:21:55.570500

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '97738dc497a8'
down_revision = 'cec6bb222997'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('brand',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=32), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('category',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=32), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('amount__change',
    sa.Column('item', sa.Integer(), nullable=False),
    sa.Column('date', sa.Date(), nullable=False),
    sa.Column('Amount', sa.SmallInteger(), nullable=True),
    sa.ForeignKeyConstraint(['item'], ['item.id'], ),
    sa.PrimaryKeyConstraint('item', 'date')
    )
    op.create_table('item_category',
    sa.Column('item', sa.Integer(), nullable=False),
    sa.Column('category', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['category'], ['category.id'], ),
    sa.ForeignKeyConstraint(['item'], ['item.id'], ),
    sa.PrimaryKeyConstraint('item', 'category')
    )
    op.create_table('price__change',
    sa.Column('item', sa.Integer(), nullable=False),
    sa.Column('date', sa.Date(), nullable=False),
    sa.Column('price', sa.SmallInteger(), nullable=True),
    sa.ForeignKeyConstraint(['item'], ['item.id'], ),
    sa.PrimaryKeyConstraint('item', 'date')
    )
    op.create_foreign_key(None, 'item', 'brand', ['brand'], ['id'])
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'item', type_='foreignkey')
    op.drop_table('price__change')
    op.drop_table('item_category')
    op.drop_table('amount__change')
    op.drop_table('category')
    op.drop_table('brand')
    # ### end Alembic commands ###
migrations/versions/c9dcd301d327_users_table.py (new file, 32 lines)

@@ -0,0 +1,32 @@
"""users table

Revision ID: c9dcd301d327
Revises:
Create Date: 2022-01-20 22:58:43.707307

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'c9dcd301d327'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('user',
    sa.Column('id', sa.String(length=10), nullable=False),
    sa.Column('name', sa.String(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('user')
    # ### end Alembic commands ###
migrations/versions/cec6bb222997_bought_and_item.py (new file, 44 lines)

@@ -0,0 +1,44 @@
"""bought and item

Revision ID: cec6bb222997
Revises: c9dcd301d327
Create Date: 2022-01-21 09:38:53.679649

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'cec6bb222997'
down_revision = 'c9dcd301d327'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('item',
    sa.Column('id', sa.BigInteger(), nullable=False),
    sa.Column('name', sa.String(length=64), nullable=True),
    sa.Column('brand', sa.String(length=32), nullable=True),
    sa.Column('description', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('bought',
    sa.Column('user', sa.String(length=10), nullable=False),
    sa.Column('item', sa.BigInteger(), nullable=False),
    sa.Column('date', sa.Date(), nullable=True),
    sa.Column('amount', sa.SmallInteger(), nullable=True),
    sa.ForeignKeyConstraint(['item'], ['item.id'], ),
    sa.ForeignKeyConstraint(['user'], ['user.id'], ),
    sa.PrimaryKeyConstraint('user', 'item')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('bought')
    op.drop_table('item')
    # ### end Alembic commands ###
run.py (new file, 11 lines)

@@ -0,0 +1,11 @@
from app import app, db
from app.models import *
from gevent.pywsgi import WSGIServer

@app.shell_context_processor
def make_shell_context():
    return {'db': db, 'User': User, 'Bought': Bought, 'Item': Item}

if __name__ == '__main__':
    http_server = WSGIServer(('', 5000), app)
    http_server.serve_forever()
@@ -1,5 +0,0 @@
from flask import Flask

app = Flask(__name__)

from app import views
@@ -1,15 +0,0 @@
<!doctype html>
<html>
    <head>
        <meta charset="utf-8">
        <meta name="viewport" content="width=device-width, initial-scale=1">
        <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-1BmE4kWBq78iYhFldvKuhfTAU6auU8tT94WrHftjDbrCEXSU1oBoqyl2QvZ6jIW3" crossorigin="anonymous">
        <link rel="stylesheet" href="style.css">
    </head>
    <body>
        <div class="container">
            {% block content %}{% endblock %}
        </div>
        <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p" crossorigin="anonymous"></script>
    </body>
</html>
@@ -1,6 +0,0 @@
from app import app
from gevent.pywsgi import WSGIServer

if __name__ == '__main__':
    http_server = WSGIServer(('', 5000), app)
    http_server.serve_forever()