Commit 03e8cd45 authored by David Huss

Refactor once more

parent 26554da5
@@ -4,6 +4,6 @@ To make a development server run on localhost:
```bash
export FLASK_APP=stechuhr_server/server.py
export FLASK_ENV=development
poetry run flask run
poetry run flask run --cert=adhoc
```
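With `--cert=adhoc` the development server presents a self-signed certificate, so any test client has to skip certificate verification. A minimal sketch of posting a movement to the dev server; the URL, port, and payload keys are assumptions based on the handler code further down, and `requests` is not part of the project's dependencies:

```python3
# Sketch only: assumes the dev server from above is running on Flask's
# default port 5000 and that the POST handler expects these JSON keys.
import requests

payload = {
    "location": "library",
    "entrance": "main",
    "direction": "in",
    "id": "1234",
}

# verify=False because --cert=adhoc serves a self-signed certificate
response = requests.post("https://localhost:5000/", json=payload, verify=False)
print(response.status_code)
```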
click==7.1.2 \
--hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc \
--hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a
flask==1.1.2 \
--hash=sha256:8a4fdd8936eba2512e9c85df320a37e694c93945b33ef33c89946a340a238557 \
--hash=sha256:4efa1ae2d7c9865af48986de8aeb8504bf32c7f3d6fdc9353d34b21f4b127060
gunicorn==20.0.4 \
--hash=sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c \
--hash=sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626
itsdangerous==1.1.0 \
--hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 \
--hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19
jinja2==2.11.2 \
--hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \
--hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0
markupsafe==1.1.1 \
--hash=sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161 \
--hash=sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7 \
--hash=sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183 \
--hash=sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b \
--hash=sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e \
--hash=sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f \
--hash=sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1 \
--hash=sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5 \
--hash=sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1 \
--hash=sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735 \
--hash=sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21 \
--hash=sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235 \
--hash=sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b \
--hash=sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f \
--hash=sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905 \
--hash=sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1 \
--hash=sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d \
--hash=sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff \
--hash=sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473 \
--hash=sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e \
--hash=sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66 \
--hash=sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5 \
--hash=sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d \
--hash=sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e \
--hash=sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6 \
--hash=sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2 \
--hash=sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c \
--hash=sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15 \
--hash=sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2 \
--hash=sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42 \
--hash=sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b \
--hash=sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be \
--hash=sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b
toml==0.10.2 \
--hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \
--hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f
werkzeug==1.0.1 \
--hash=sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43 \
--hash=sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c
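The hash pins above let pip verify downloads when installing with `--require-hashes`. As an illustration of what such a pin checks, one could recompute the digest of a downloaded artifact and compare it with the value listed here; the wheel filename below is an assumption for illustration, not taken from the repository:

```python3
import hashlib

# Hypothetical check: recompute the sha256 of a locally downloaded click 7.1.2
# wheel and compare it with the first hash pinned above.
with open("click-7.1.2-py2.py3-none-any.whl", "rb") as wheel:
    digest = hashlib.sha256(wheel.read()).hexdigest()

print(digest == "dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc")
```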
@@ -44,7 +44,7 @@ def get_dirs(application_name):
return dirs
def initialize_config(application_name: str, default_config: str) -> dict:
def initialize_config(application_name: str, default_config: str, app) -> dict:
"""
Initialize a configuration. If none exists, create a default one
"""
@@ -52,6 +52,7 @@ def initialize_config(application_name: str, default_config: str) -> dict:
dirs = get_dirs(application_name)
# Convert the default config to a dict
default_strconfig = default_config
default_config = toml.loads(default_config)
# Generate an environment variable name from the application name
@@ -65,18 +66,46 @@ def initialize_config(application_name: str, default_config: str) -> dict:
# Create the config_dir if it doesn't exist
if not Path.is_dir(Path(config_dir)):
Path(config_dir).mkdir(parents=True, exist_ok=True)
print("Config directory didn't exist, created directory: {}".format(config_dir))
app.logger.info("Config directory didn't exist, created directory: {}".format(config_dir))
# Create a default config if it doesn't exist
if not Path.is_file(Path(config_path)):
write_config(config_path, default_config)
print("Created new default config.toml at:\n{}".format(config_path))
app.logger.info("Created new default config.toml at:\n{}".format(config_path))
config = default_config
else:
config = read_config(config_path)
print("Read config from: {}".format(config_path))
if config_has_missing_keys(config, default_config, default_strconfig, app):
app.logger.error("Error: there are keys missing in the config. Delete to let {} create a new one, or add the missing keys manually".format(application_name))
exit(1)
app.logger.info("Read config from: {}".format(config_path))
return config
def config_has_missing_keys(config, default_config, default_strconfig, app) -> bool:
"""
Returns True if any of the keys from the default config are missing
Logs those missing keys and the default config, if this is the case
"""
missing = []
for default_key in default_config.keys():
if not default_key in config.keys():
app.logger.error("Your config.toml is missing the [{}] section!".format(default_key))
missing.append(True)
else:
for sub_key in default_config[default_key].keys():
if not sub_key in config[default_key].keys():
app.logger.error("Your config.toml is missing the key \"{}\" in the [{}] section!".format(sub_key, default_key))
missing.append(True)
if any(missing):
app.logger.error("The default config looks like this:")
app.logger.error(default_strconfig)
return True
else:
return False
def write_config(config_path: str, config: dict):
"""
......
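To see how the new missing-key check behaves, here is a minimal sketch exercising `config_has_missing_keys` with a stripped-down default config. The Flask app is only needed for its `.logger`, so a stand-in class is used; the import path, the stand-in class, and the example TOML are assumptions, not part of the repository:

```python3
import logging

import toml

# Assumed import path for the function shown in the diff above
from stechuhr_server.config import config_has_missing_keys

# Tiny default config, only for illustration
default_strconfig = """[database]
path = "stechuhr.db"
"""
default_config = toml.loads(default_strconfig)

# A user config where the "path" key is missing from the [database] section
user_config = toml.loads("[database]\n")


class FakeApp:
    # Stand-in for the Flask app, which is only used for its logger here
    logger = logging.getLogger("stechuhr_server")


logging.basicConfig(level=logging.INFO)
print(config_has_missing_keys(user_config, default_config, default_strconfig, FakeApp()))
# Expected: the logger reports the missing "path" key and the default config, then True is printed
```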
@@ -30,27 +30,17 @@ id_patterns = [
# minimum and maximum lengths for the received strings
min_entrance_length = 1
max_entrance_length = 128
min_place_length = 1
max_place_length = 128
[log]
level = "INFO"
format = "[%(asctime)s] %(levelname)s in %(module)s: %(message)s"
min_location_length = 1
max_location_length = 128
"""
# Initialize the configuration (create a default one if needed)
config = initialize_config(APPLICATION_NAME, DEFAULT_CONFIG)
# Compile the patterns for the ids once at startup
config["database"]["id_patterns"] = [re.compile(p) for p in config["database"]["id_patterns"]]
# Config for the logger; there should be no need to make
# manual changes here
dictConfig({
'version': 1,
'formatters': {'default': {
'format': config["log"]["format"],
'format': '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
}},
'handlers': {'wsgi': {
'class': 'logging.StreamHandler',
@@ -58,15 +48,20 @@ dictConfig({
'formatter': 'default'
}},
'root': {
'level': config["log"]["level"],
'level': 'INFO',
'handlers': ['wsgi']
}
})
# Initialization
app = Flask(APPLICATION_NAME)
# Initialize the configuration (create a default one if needed)
config = initialize_config(APPLICATION_NAME, DEFAULT_CONFIG, app)
# Compile the patterns for the ids once at startup
config["database"]["id_patterns"] = [re.compile(p) for p in config["database"]["id_patterns"]]
# Create an initial connection and create the needed tables if they
# don't exist yet.
conn = sqlite3.connect(config["database"]["path"])
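The diff does not show where the compiled `id_patterns` are used, but compiling them once at startup only pays off if incoming ids are matched against them on every request. A sketch of what such a check might look like; the helper name and the example patterns are assumptions, not taken from the repository:

```python3
import re

# Example patterns, not taken from the repository config
id_patterns = [re.compile(p) for p in [r"^[0-9]{4}$", r"^[A-Z]{2}-[0-9]{3}$"]]


def id_matches_any_pattern(candidate: str) -> bool:
    # Hypothetical helper: accept the id if any configured pattern matches it fully
    return any(pattern.fullmatch(candidate) for pattern in id_patterns)


print(id_matches_any_pattern("1234"))  # True
print(id_matches_any_pattern("abcd"))  # False
```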
@@ -74,7 +69,7 @@ cursor = conn.cursor()
sqlite_create_table_query = '''CREATE TABLE visitors (
movement_id TEXT PRIMARY KEY,
time timestamp NOT NULL,
place TEXT NOT NULL,
location TEXT NOT NULL,
entrance TEXT NOT NULL,
direction TEXT NOT NULL,
id TEXT NOT NULL);'''
@@ -88,6 +83,7 @@ except sqlite3.OperationalError as e:
pass
conn.close()
app.logger.info('Ready to take requests')
def register_movement(data: dict, conn, cursor):
"""
@@ -101,11 +97,11 @@ def register_movement(data: dict, conn, cursor):
# 1. Draft SQL-Statement
sqlite_insert_with_param = """INSERT INTO 'visitors'
('movement_id', 'id', 'place', 'entrance', 'direction', 'time')
('movement_id', 'id', 'location', 'entrance', 'direction', 'time')
VALUES (?, ?, ?, ?, ?, ?);"""
# 2. Draft parameters
data_tuple = (data["movement_id"], data["id"], data["place"], data["entrance"], data["direction"], data["time"])
data_tuple = (data["movement_id"], data["id"], data["location"], data["entrance"], data["direction"], data["time"])
# Execute both and commit to db
cursor.execute(sqlite_insert_with_param, data_tuple)
@@ -124,10 +120,10 @@ def construct_movement_id(data: dict) -> dict:
# Use isoformat time like 2020-11-12T07:06:08.595770 ...
time = data["time"].isoformat()
# ... but limit the precision of sub seconds to two places
# ... but limit the precision of sub seconds to two decimal places
time = "{}.{}".format(time.rsplit(".")[0], time.rsplit(".")[1][:2])
# Construct final movement ID.
movement_id = "{}--{}-{}_{}/{}".format(time, data["id"], data["place"], data["entrance"], data["direction"])
movement_id = "{}--{}-{}_{}/{}".format(time, data["id"], data["location"], data["entrance"], data["direction"])
data["movement_id"] = movement_id
return data
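A worked example of the id construction above, using the renamed `location` key; the sample values are made up:

```python3
from datetime import datetime

# Made-up request data matching the shape handled above
data = {
    "time": datetime(2020, 11, 12, 7, 6, 8, 595770),
    "id": "1234",
    "location": "library",
    "entrance": "main",
    "direction": "in",
}

time = data["time"].isoformat()  # '2020-11-12T07:06:08.595770'
# Keep only two decimal places of the sub-second part
time = "{}.{}".format(time.rsplit(".")[0], time.rsplit(".")[1][:2])
movement_id = "{}--{}-{}_{}/{}".format(
    time, data["id"], data["location"], data["entrance"], data["direction"])
print(movement_id)  # 2020-11-12T07:06:08.59--1234-library_main/in
```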
@@ -137,12 +133,12 @@ def get_records(conn, cursor):
Get all existing movement records
"""
# Draft select query
sqlite_select_query = """SELECT place, entrance, direction, time, id from visitors"""
sqlite_select_query = """SELECT location, entrance, direction, time, id from visitors"""
try:
# Execute query and fill visitors with results
cursor.execute(sqlite_select_query)
records = cursor.fetchall()
visitors = [{"place":r[0], "entrance":r[1], "direction":r[2], "time":r[3], "id":r[4]} for r in records]
visitors = [{"location":r[0], "entrance":r[1], "direction":r[2], "time":r[3], "id":r[4]} for r in records]
except sqlite3.OperationalError as e:
# If there is no table or no records, just return an empty list
app.logger.info('Couldn\'t retrieve visitor records: {}'.format(e))
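Taken together, the renamed schema and the insert/select statements can be exercised end to end against a throwaway in-memory database; a minimal sketch, independent of the config and the Flask app:

```python3
import sqlite3
from datetime import datetime

conn = sqlite3.connect(":memory:")
cursor = conn.cursor()

# Same table layout as above, with the renamed "location" column
cursor.execute("""CREATE TABLE visitors (
    movement_id TEXT PRIMARY KEY,
    time timestamp NOT NULL,
    location TEXT NOT NULL,
    entrance TEXT NOT NULL,
    direction TEXT NOT NULL,
    id TEXT NOT NULL);""")

# Insert one made-up movement using the parametrized statement from the diff
cursor.execute(
    """INSERT INTO 'visitors'
       ('movement_id', 'id', 'location', 'entrance', 'direction', 'time')
       VALUES (?, ?, ?, ?, ?, ?);""",
    ("2020-11-12T07:06:08.59--1234-library_main/in", "1234", "library",
     "main", "in", datetime(2020, 11, 12, 7, 6, 8).isoformat()))
conn.commit()

# Read it back with the renamed select query
cursor.execute("SELECT location, entrance, direction, time, id from visitors")
print(cursor.fetchall())
conn.close()
```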
@@ -154,11 +150,8 @@ def is_valid_data(data: dict) -> bool:
"""
Check if the body data of the request is valid
"""
expected_keys = ["place", "entrance", "direction", "id"]
expected_keys = ["location", "entrance", "direction", "id"]
expected_directions = ["in", "out"]
max_entrance_length = 128
max_place_length = 128
max_id_length = 128
has_all_keys = all([k in data.keys() for k in expected_keys ])
# Check if all needed keys are contained
@@ -171,8 +164,8 @@ def is_valid_data(data: dict) -> bool:
app.logger.info('400, JSON "direction"-key invalid: was \"{}\" should be one of {}'.format(data["direction"], expected_directions))
return False
# Basic length check for place
if not length_check(data, "place", config["database"]["min_place_length"], config["database"]["max_place_length"]):
# Basic length check for location
if not length_check(data, "location", config["database"]["min_location_length"], config["database"]["max_location_length"]):
return False
# Basic length check for entrance
@@ -220,10 +213,11 @@ def post():
"""
This function runs when a POST request on / is received
"""
if not request.data:
# Missing data body, reject
app.logger.info('400, Missing data body')
return "", 400
app.logger.info('Received request: {}'.format(request))
# if not request.data:
# # Missing data body, reject
# app.logger.info('400, Missing data body')
# return "", 400
if not request.is_json:
# Non-JSON-Body, reject
app.logger.info('400, Non-JSON-body')
@@ -252,7 +246,7 @@ def get():
Can be deactivated in the config with the ignore_get_requests setting
"""
if config["application"]["ignore_get_requests"]:
app.logger.info('501, Get Request Ignored')
app.logger.info('501, Get Request Ignored due to config settings')
return "", 501
else:
conn = sqlite3.connect(config["database"]["path"])
......