Less invasive modification to shorten UUIDs for AVRs that can't handle them. Uses the already established file caching mechanism.

Canuma 2021-11-15 20:46:30 +01:00
parent f349a2686c
commit 468b8e75f3
3 changed files with 67 additions and 14 deletions
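
For context, the scheme works like this: derive a 12-character id from the station uuid (an XOR-folded MD5 checksum, see get_checksum below) and write the full uuid into a cache file named after that short id, so the lookup path can resolve it back later. A minimal standalone sketch of the round trip (illustrative only: a temporary directory stands in for the real cache folder under ~/.ycast, and short_checksum mirrors the get_checksum added in this commit):

import hashlib
import tempfile
from pathlib import Path

def short_checksum(feed, charlimit=12):
    # MD5 the input, XOR-fold the 16-byte digest down to 8 bytes, hex-encode, truncate.
    digest = hashlib.md5(feed.encode()).digest()
    folded = bytearray(digest[:8])
    for i, b in enumerate(digest[8:]):
        folded[i] ^= b
    return folded.hex()[:charlimit]

cache_dir = Path(tempfile.mkdtemp())  # stand-in for the real cache folder under ~/.ycast/cache
full_uuid = "9617a958-0601-11e8-ae97-52543be04c81"  # example radiobrowser-style uuid

short_id = short_checksum(full_uuid)          # what write_uuid() derives via get_checksum()
(cache_dir / short_id).write_text(full_uuid)  # write_uuid(): file named after the short id
resolved = (cache_dir / short_id).read_text() # read_uuid(): short id back to full uuid
assert resolved == full_uuid
print(short_id, "->", resolved)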


@@ -4,6 +4,7 @@ import argparse
import logging
import sys
from ycast.generic import clear_cache
from ycast import __version__
from ycast import server
@@ -23,6 +24,8 @@ def launch_server():
logging.debug("Debug logging enabled")
else:
logging.getLogger('werkzeug').setLevel(logging.WARNING)
# TODO Should cleaning cache be optional?
clear_cache()
server.run(arguments.config, arguments.address, arguments.port)


@@ -1,9 +1,12 @@
import logging
import os
import shutil
import hashlib
USER_AGENT = 'YCast'
VAR_PATH = os.path.expanduser("~") + '/.ycast'
CACHE_PATH = VAR_PATH + '/cache'
CACHE_NAME = 'shortid'
class Directory:
@@ -23,6 +26,11 @@ def generate_stationid_with_prefix(uid, prefix):
if not uid:
logging.error("Missing station id for full station id generation")
return None
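# Non-'MY' ids are full uuids that some AVRs can't handle, so swap them for a short cached id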
if prefix != 'MY':
uid = write_uuid(str(uid))
if not uid:
logging.error("Unable to store uuid. See previous errors")
return None
return str(prefix) + '_' + str(uid)
@@ -37,7 +45,10 @@ def get_stationid_without_prefix(uid):
if len(uid) < 4:
logging.error("Could not extract stationid (Invalid station id length)")
return None
return uid[3:]
wopid = uid[3:]
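# 'MY' ids are already short checksums; any other short id is mapped back to the stored full uuid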
if get_stationid_prefix(uid) != 'MY':
return read_uuid(wopid)
return wopid
def get_cache_path(cache_name):
@@ -50,3 +61,54 @@ def get_cache_path(cache_name):
logging.error("Could not create cache folders (%s) because of access permissions", cache_path)
return None
return cache_path
def get_checksum(feed, charlimit=12):
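# MD5 the feed, XOR-fold the 16-byte digest down to 8 bytes and return the first charlimit hex chars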
hash_feed = feed.encode()
hash_object = hashlib.md5(hash_feed)
digest = hash_object.digest()
xor_fold = bytearray(digest[:8])
for i, b in enumerate(digest[8:]):
xor_fold[i] ^= b
digest_xor_fold = ''.join(format(x, '02x') for x in bytes(xor_fold))
return digest_xor_fold[:charlimit]
def write_uuid(uid):
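# Store the full uuid in a cache file named after its short checksum and return that short id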
cache_path = get_cache_path(CACHE_NAME)
if not cache_path:
return None
shortid = get_checksum(uid)
id_file = cache_path + '/' + shortid
try:
with open(id_file, 'w') as file:
file.write(uid)
except PermissionError:
logging.error("Could not access station id file in cache (%s) because of access permissions",
id_file)
return None
return shortid
def read_uuid(shortid):
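# Resolve a short id back to the full uuid stored by write_uuid()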
cache_path = get_cache_path(CACHE_NAME)
if not cache_path:
return None
id_file = cache_path + '/' + shortid
try:
with open(id_file, 'r') as file:
uid = file.read()
except PermissionError:
logging.error("Could not access station id file in cache (%s) because of access permissions",
id_file)
return None
return uid
def clear_cache():
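# Remove all cache subfolders (and the short-id files inside them) below CACHE_PATH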
try:
for root, dirs, files in os.walk(CACHE_PATH):
for d in dirs:
shutil.rmtree(os.path.join(root, d))
except OSError:
logging.error("Could not clean cache)")


@@ -1,5 +1,4 @@
import logging
import hashlib
import yaml
@@ -71,17 +70,6 @@ def get_stations_by_category(category):
if my_stations_yaml and category in my_stations_yaml:
for station_name in my_stations_yaml[category]:
station_url = my_stations_yaml[category][station_name]
station_id = str(get_checksum(station_name + station_url)).upper()
station_id = str(generic.get_checksum(station_name + station_url)).upper()
stations.append(Station(station_id, station_name, station_url, category))
return stations
def get_checksum(feed, charlimit=12):
hash_feed = feed.encode()
hash_object = hashlib.md5(hash_feed)
digest = hash_object.digest()
xor_fold = bytearray(digest[:8])
for i, b in enumerate(digest[8:]):
xor_fold[i] ^= b
digest_xor_fold = ''.join(format(x, '02x') for x in bytes(xor_fold))
return digest_xor_fold[:charlimit]