Commit 5da41416 authored by Nelso Jost

NEW: full RTC support; better settings management;

parent 2310b102
Makefile
@@ -44,13 +44,13 @@ clean-venv:
 	rm -rf ${VENV}
 
 run:
-	sudo ${VENV}/bin/python${PYVER} logger.py
+	${VENV}/bin/python${PYVER} run.py
 
 deploy:
-	sudo ${VENV}/bin/python${PYVER} deploy.py
+	sudo ${VENV}/bin/python${PYVER} app/deploy.py
 
 undeploy:
-	sudo ${VENV}/bin/python${PYVER} deploy.py -u
+	sudo ${VENV}/bin/python${PYVER} app/deploy.py -u
 
 tail-exec:
 	tail -F logs/execution.log
app/__init__.py
+from .main import Meteorologger
app/deploy.py
@@ -6,6 +6,7 @@ import sys
 PROCESS_NAME = 'meteorologger'
 
 BASE_DIR = os.path.abspath(os.path.dirname(__file__))
+BASE_DIR = BASE_DIR[:BASE_DIR.rfind(os.path.sep)]
 
 SUPERVISOR_CONFIG_FILENAME = '/etc/supervisor/conf.d/{}.conf'\
                              .format(PROCESS_NAME)
@@ -13,7 +14,7 @@ PID_FILENAME = 'logs/pid_{}'.format(PROCESS_NAME)
 
 def deploy_supervisor():
-    with open('supervisor.conf') as f_temp:
+    with open('app/supervisor.conf') as f_temp:
         template = jinja2.Template(f_temp.read())
         config_file_str = template.render(base_dir=BASE_DIR,
                                           process_name=PROCESS_NAME)
@@ -37,12 +38,17 @@ def deploy_supervisor():
     proc.wait()
     pid = proc.stdout.read().decode('ascii').strip()
 
-    with open(PID_FILENAME, 'w') as f:
-        f.write(pid + '\n')
-
-    print("\nPID: {} (saved at '{}')".format(pid, PID_FILENAME))
-    print('\n[{} process is running]'.format(PROCESS_NAME))
-    print('\nYou can manage it with supervisorctl tool.')
+    try:
+        pid = int(pid)
+        with open(PID_FILENAME, 'w') as f:
+            f.write(str(pid) + '\n')
+
+        print("\nPID: {} (saved at '{}')".format(pid, PID_FILENAME))
+        print('\n[{} process is running]'.format(PROCESS_NAME))
+        print('\nYou can manage it with supervisorctl tool.')
+    except:
+        print("\nSomething went wrong and the daemon process was NOT created.")
 
 def undeploy_supervisor():
 ...
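Note: deploy_supervisor() renders app/supervisor.conf as a Jinja2 template with base_dir and process_name, then checks that the captured PID really is an integer before declaring success. The template file itself is not part of this diff, so the sketch below uses a hypothetical template body and install path purely to illustrate the rendering step.

    import jinja2

    # Hypothetical one-program template standing in for app/supervisor.conf
    # (the real template is not shown in this commit).
    template = jinja2.Template(
        '[program:{{ process_name }}]\n'
        'command={{ base_dir }}/venv/bin/python {{ base_dir }}/run.py\n'
        'directory={{ base_dir }}\n')

    print(template.render(base_dir='/home/pi/meteorologger',   # assumed path
                          process_name='meteorologger'))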
app/main.py
@@ -19,31 +19,23 @@ import json
 import yaml
 import jinja2
 
-SETTINGS_YAML_SCHEMA =\
-"""
-SERVER:
-    API_POST_URL: str
-
-LOGGER:
-    READING_INTERVAL: {seconds: int, minutes: int, hours: int, days: int}
-    DATETIME_FORMAT: str
-
-SENSORS:
-    - {nickname: str, data_format: str}
-
-ARDUINO:
-    SERIAL_PORT: str
-    BAUD_RATE: int
-
-FILES:
-    DATALOG_CSV: str
-    DATALOG_CSV_SEP: str
-    SERVER_OUTGOING_JSON: str
-"""
-
-def make_current_file_path(filename):
-    ''' Append filename to the current __file__ path. '''
-    return os.path.join(os.path.abspath(os.path.dirname(__file__)), filename)
+from pykwalify.core import Core
+
+
+def make_current_file_path(filename):
+    ''' Append filename to the current __file__ path. '''
+    return os.path.join(os.path.abspath(os.path.dirname(__file__)), filename)
+
+
+class RTCDateTime:
+    RTC_DT_FMT = '%Y-%m-%d %H:%M:%S'
+    __qualname__ = "RTCDateTime fmt='{}'".format(RTC_DT_FMT)
+
+    def __init__(self, s):
+        self.dt = datetime.strptime(s, self.RTC_DT_FMT)
+
+    def __str__(self):
+        return self.dt.strftime('%Y%m%d%H%M%S')
 
 
 class Meteorologger:
     '''
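Note: the new RTCDateTime wrapper parses the timestamp string coming from the RTC and, via __str__, compacts it back to a plain digit string. A minimal usage sketch, assuming a reading in the '%Y-%m-%d %H:%M:%S' format declared above:

    from datetime import datetime

    class RTCDateTime:                      # condensed copy of the class above
        RTC_DT_FMT = '%Y-%m-%d %H:%M:%S'

        def __init__(self, s):
            self.dt = datetime.strptime(s, self.RTC_DT_FMT)

        def __str__(self):
            return self.dt.strftime('%Y%m%d%H%M%S')

    stamp = RTCDateTime('2017-06-05 14:30:00')
    print(stamp.dt)       # 2017-06-05 14:30:00
    print(str(stamp))     # 20170605143000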
@@ -55,11 +47,17 @@ class Meteorologger:
 
     Call the run() method to start the logging process.
     '''
-    EXECUTION_LOG_FILENAME = 'logs/execution.log'
+    SETTINGS_SCHEMA_FILENAME = 'app/settings_schema.yaml'
     SETTINGS_FILENAME = 'settings.yaml'
+    EXECUTION_LOG_FILENAME = 'logs/execution.log'
+    OUTGOING_BASENAME = 'outgoing.json'
+    FILE_TIMESTAMP_FORMAT = '%Y-%m-%d-%H-%M-%S'
 
-    CSV_SEP = ','
-    DATA_FORMATS = {'int': int, 'float': float, 'str': str}
+    SERIAL_CSV_SEP = ','
+    DATA_FORMATS = {'int': int, 'float': float, 'str': str,
+                    'datetime': RTCDateTime}
 
     SERIAL_READ_TIMEOUT = 1.5   # seconds
     FIND_PORT_TIMEOUT = 10      # seconds
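Note: SETTINGS_SCHEMA_FILENAME points at app/settings_schema.yaml, which takes over from the removed inline SETTINGS_YAML_SCHEMA string but is not included in this diff. A hypothetical fragment in pykwalify schema syntax, limited to keys that the code below actually reads, might look like:

    # app/settings_schema.yaml (hypothetical fragment, not from this commit)
    type: map
    mapping:
      SERVER:
        type: map
        mapping:
          API_POST_URL: {type: str, required: true}
      LOGGER:
        type: map
        mapping:
          SENSORS:
            type: seq
            sequence:
              - type: str
          USE_RTC_DATETIME: {type: str}
          RTC_DATETIME_FORMAT: {type: str}
          INTERVAL:
            type: map
            mapping:
              seconds: {type: int}
              minutes: {type: int}
              hours: {type: int}
              days: {type: int}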
@@ -81,160 +79,148 @@ class Meteorologger:
                 print("Invalid bytes!")
         return result
 
-    def _getCFG(self, *args, expected_type=None):
-        r = self.CFG
-        for key in args:
-            try:
-                r = r[key]
-            except:
-                raise Exception("Configuration file '{}' is missing the "
-                                "required key\n  {}"
-                                .format(self.SETTINGS_FILENAME,
-                                        ': '.join(args)))
-        if expected_type is not None:
-            try:
-                if expected_type is int and isinstance(r, float):
-                    raise TypeError
-                elif expected_type is str and not isinstance(r, str):
-                    raise TypeError
-                else:
-                    return expected_type(r)
-            except:
-                raise TypeError("Expected {} on key {} but got\n  {}"
-                                .format(expected_type, '/'.join(args), r))
-        return r
-
     def load_settings(self):
         '''
         Load the configuration file onto the self.CFG attribute.
         Some keys will be tested and filenames will be normalized.
         '''
-        with open(self.SETTINGS_FILENAME) as f:
-            self.CFG = yaml.safe_load(f)
-
-        self.API_POST_URL = self._getCFG('SERVER', 'API_POST_URL',
-                                         expected_type=str)
-
-        self.SENSORS_CSV_LINE = 'read' + self.CSV_SEP + self.CSV_SEP.join(
-            [d['nickname'] for d in self._getCFG('SENSORS',
-                                                 expected_type=list)])
-
-        self.DATALOG_CSV_FILENAME = self._getCFG(
-            'FILES', 'DATALOG_CSV', expected_type=str)
-        self.SERVER_OUTGOING_FILENAME = self._getCFG(
-            'FILES', 'SERVER_OUTGOING_JSON', expected_type=str)
-
-        # convert raw str into normal escaped str (e.g., r'\\t' --> '\t')
-        self.DATALOG_CSV_SEP = bytes(
-            self._getCFG('FILES', 'DATALOG_CSV_SEP', expected_type=str),
-            'utf8').decode('unicode_escape')
-
-        self.SERIAL_PORTS = [p.strip() for p in
-                             self._getCFG('ARDUINO', 'SERIAL_PORT',
-                                          expected_type=str).split(',')]
-
-        self.BOARD_RESPONSE_DELAY = self._getCFG('ARDUINO', 'RESPONSE_DELAY',
-                                                 expected_type=int)
-
-        self.READING_INTERVAL_SECONDS =\
-            self._getCFG('LOGGER', 'READING_INTERVAL', 'seconds',
-                         expected_type=int) + \
-            60 * self._getCFG('LOGGER', 'READING_INTERVAL', 'minutes',
-                              expected_type=int) + \
-            3600 * self._getCFG('LOGGER', 'READING_INTERVAL', 'hours',
-                                expected_type=int) + \
-            86400 * self._getCFG('LOGGER', 'READING_INTERVAL', 'days',
-                                 expected_type=int)
-
-        self.DATETIME_FORMAT = self._getCFG('LOGGER', 'DATETIME_FORMAT',
-                                            expected_type=str)
+        c = Core(source_file=self.SETTINGS_FILENAME,
+                 schema_files=[self.SETTINGS_SCHEMA_FILENAME])
+        try:
+            self.CFG = c.validate(raise_exception=True)
+        except Exception as e:
+            print('-'*60)
+            print("There is something wrong with the configuration file '{}'"
+                  "\nKeep in mind that it uses YAML syntax, which requires "
+                  "proper indentation.\n".format(self.SETTINGS_FILENAME))
+            print(e)
+            print("\nPlease fix it up or regenerate it.")
+            sys.exit(1)
+
+        self.SERIAL_PORTS = self.CFG['ARDUINO']['SERIAL_PORT'].split(',')
+
+        self.READING_INTERVAL_SECONDS =\
+            self.CFG['LOGGER']['INTERVAL']['seconds']\
+            + 60 * self.CFG['LOGGER']['INTERVAL']['minutes']\
+            + 3600 * self.CFG['LOGGER']['INTERVAL']['hours']\
+            + 86400 * self.CFG['LOGGER']['INTERVAL']['days']
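Note: with validation delegated to pykwalify, load_settings() now just reads values off the CFG dict. Based only on the keys referenced in this file, a matching settings.yaml could plausibly look like the sketch below; all sensor nicknames and values are made up for illustration:

    SERVER:
      API_POST_URL: http://example.org/api/readings

    LOGGER:
      SENSORS: [RTC, DHT22_TEMP, DHT22_AH, BMP085_PRESSURE, LDR]
      USE_RTC_DATETIME: RTC
      RTC_DATETIME_FORMAT: '%Y-%m-%d %H:%M:%S'
      INTERVAL: {seconds: 0, minutes: 5, hours: 1, days: 0}
      # READING_INTERVAL_SECONDS = 0 + 60*5 + 3600*1 + 86400*0 = 3900

    SENSORS_AVAILABLE:
      RTC:             {data_format: datetime}
      DHT22_TEMP:      {data_format: float}
      DHT22_AH:        {data_format: float}
      BMP085_PRESSURE: {data_format: float}
      LDR:             {data_format: int}

    DATALOG:
      FILENAME: logs/datalog.csv
      CSV_SEP: '\t'
      DATETIME_FORMAT: '%Y-%m-%d %H:%M:%S'

    ARDUINO:
      SERIAL_PORT: /dev/ttyACM0
      BAUD_RATE: 9600
      RESPONSE_DELAY: 2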
 
-    def create_json_data(self, raw_line):
-        '''
-        Given the raw serial line response (expected to be a CSV line), returns
-        a JSON dict with sensor data including the datetime field.
-        '''
-        raw_sensor_data = {
-            'datetime': datetime.now().strftime(self.DATETIME_FORMAT),
-            'sensors': {}}
-
-        for i, v in enumerate(raw_line.split(self.CSV_SEP)):
-            v = v.strip()
-            type_ = self.DATA_FORMATS[self.CFG['SENSORS'][i]['data_format']]
-            nickname = self.CFG['SENSORS'][i]['nickname']
-            try:
-                v = type_(v)
-            except:
-                logging.error("Cannot convert value '{}' read from {} to {}"
-                              .format(v, nickname, type_))
-                continue
-
-            raw_sensor_data['sensors'][nickname] = v
-
-        logging.info("Resulting JSON: {}".format(raw_sensor_data))
-        return raw_sensor_data
+    def create_json(self, raw_line):
+        '''
+        Given the raw serial line response (CSV string), builds and returns
+        a JSON dict with validated, server-ready sensor data.
+        '''
+        d = {'datetime': {'format': self.CFG['DATALOG']['DATETIME_FORMAT']},
+             'sensors': {}}
+
+        rtc = self.CFG['LOGGER']['USE_RTC_DATETIME']
+        using_rtc = rtc and rtc in self.CFG['LOGGER']['SENSORS']
+
+        if using_rtc:
+            d['datetime']['source'] = rtc
+            rtc_datetime_fmt = self.CFG['LOGGER']['RTC_DATETIME_FORMAT']
+        else:
+            d['datetime']['source'] = 'logger'
+            d['datetime']['value'] = datetime.now().strftime(
+                d['datetime']['format'])
+
+        for i, v in enumerate(raw_line.split(self.SERIAL_CSV_SEP)):
+            nickname = self.CFG['LOGGER']['SENSORS'][i]
+            type_name = self.CFG['SENSORS_AVAILABLE'][nickname]['data_format']
+
+            if type_name == 'datetime':
+                if using_rtc:
+                    d['datetime']['value'] = datetime.strptime(
+                        v, rtc_datetime_fmt).strftime(d['datetime']['format'])
+                continue
+
+            try:
+                v = self.DATA_FORMATS[type_name](v.strip())
+            except:
+                logging.error("[{}]: '{}' is not a valid {}"
+                              .format(nickname, v, type_name))
+                continue
+
+            d['sensors'][nickname] = v
+
+        logging.info("Validated JSON: {}".format(d))
+        return d
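Note: a worked example of the structure create_json() now returns, using the hypothetical settings sketched above and an RTC reading as the first CSV field (values are illustrative):

    raw_line = '2017-06-05 14:30:00,23.5,61.2,101325,412'

    # Expected result, with the RTC field routed into 'datetime' and the
    # remaining fields converted according to SENSORS_AVAILABLE:
    expected = {
        'datetime': {'format': '%Y-%m-%d %H:%M:%S',
                     'source': 'RTC',
                     'value': '2017-06-05 14:30:00'},
        'sensors': {'DHT22_TEMP': 23.5,
                    'DHT22_AH': 61.2,
                    'BMP085_PRESSURE': 101325.0,
                    'LDR': 412},
    }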
 
-    def write_data_log(self, json_data):
+    def write_datalog(self, json_data):
         '''
         For backup purposes, write the given JSON data onto the file
         DATALOG_CSV as specified on self.SETTINGS_FILENAME.
         '''
-        csv_line = json_data['datetime'] + self.DATALOG_CSV_SEP
-        for sensor in self.CFG['SENSORS']:
-            if sensor['nickname'] in json_data['sensors']:
-                csv_line += str(json_data['sensors']
-                                [sensor['nickname']])
-                csv_line += self.DATALOG_CSV_SEP
+        # convert raw str into normal escaped str (e.g., r'\\t' --> '\t')
+        csv_sep = bytes(self.CFG['DATALOG']['CSV_SEP'],
+                        'utf8').decode('unicode_escape')
+
+        csv_line = json_data['datetime']['value'] + csv_sep
+        for nickname in self.CFG['LOGGER']['SENSORS']:
+            if nickname in json_data['sensors']:
+                csv_line += str(json_data['sensors'][nickname])
+                csv_line += csv_sep
         csv_line = csv_line[:-1]
 
         try:
-            with open(self.CFG['FILES']['DATALOG_CSV'], 'a') as f:
+            datalog_filename = self.CFG['DATALOG']['FILENAME']
+            with open(datalog_filename, 'a') as f:
                 f.write(csv_line + '\n')
-            logging.info("Done! CSV line '{}' was appended to the file '{}'"
-                         .format(csv_line, self.CFG['FILES']['DATALOG_CSV']))
-        except:
-            logging.error("Unable to write data log at '{}'"
-                          .format(self.CFG['FILES']['DATALOG_CSV']))
+            logging.info("Updated datalog file: '{}'".format(datalog_filename))
+        except Exception as e:
+            logging.error("Unable to write datalog at '{}'"
+                          .format(datalog_filename))
+            logging.info("Exception: {}".format(e))
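Note: the CSV separator is stored in the settings file as an escaped literal such as '\t' and turned into the real control character on the fly; write_datalog() reuses the same trick the old load_settings() applied. For reference:

    raw_sep = '\\t'   # two characters, backslash and 't', as read from YAML
    csv_sep = bytes(raw_sep, 'utf8').decode('unicode_escape')
    print(repr(csv_sep))   # '\t'  (a single tab character)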
 
-    def send_data_to_server(self, json_data):
-        logging.info("URL: {}".format(self.API_POST_URL))
-
+    def send_to_server(self, json_data):
         r = None
+        outgoing_filename = os.path.join(
+            os.path.dirname(self.CFG['DATALOG']['FILENAME']),
+            'outgoing.json')
+        URL = self.CFG['SERVER']['API_POST_URL']
+
         try:
-            if os.path.exists(self.CFG['FILES']['SERVER_OUTGOING_JSON']):
-                logging.info("Outgoing data exists! Trying to send it first..")
-                with open(self.CFG['FILES']['SERVER_OUTGOING_JSON']) as f:
+            if os.path.exists(outgoing_filename):
+                logging.debug("Outgoing data exists! Will try to send it.")
+                with open(outgoing_filename) as f:
                     for i, line in enumerate(f):
-                        r = requests.post(self.API_POST_URL,
-                                          json=json.loads(line,
-                                                          encoding='utf-8'))
+                        d = json.loads(line, encoding='utf-8')
+                        r = requests.post(URL, json=d)
                         if r.status_code != 200:
                             raise Exception
                         logging.info('Line {}: {}'.format(i, r))
-                os.remove(self.CFG['FILES']['SERVER_OUTGOING_JSON'])
-                logging.info("Done! Server data should be up to date.")
 
-            r = requests.post(self.API_POST_URL, json=json_data)
+                os.remove(outgoing_filename)
+                logging.info("Removed file '{}'".format(outgoing_filename))
+
+            r = requests.post(URL, json=json_data)
             if r.status_code != 200:
                 raise Exception
-            logging.info("Done! Request: {}".format(r))
+            logging.info("Request: {}".format(r))
         except:
-            logging.error("Unable to reach the server.")
-            logging.info("Response request: {}".format(r))
-            logging.info("Attempting to write current data on local file '{}'"
-                         .format(self.CFG['FILES']['SERVER_OUTGOING_JSON']))
-            with open(self.CFG['FILES']['SERVER_OUTGOING_JSON'], 'a') as f:
-                f.write(json.dumps(json_data) + '\n')
-            logging.info("Done! Data is saved an will be shipped as soon as "
-                         "the server is back on.")
-            return
+            logging.error("Unable to reach the server at '{}'. Request: {}"
+                          .format(URL, r))
+            try:
+                with open(outgoing_filename, 'a') as f:
+                    f.write(json.dumps(json_data) + '\n')
+                logging.info("Updated outgoing file '{}'"
+                             .format(outgoing_filename))
+            except Exception as e:
+                logging.error("[DATALOST] Unable to write outgoing file '{}'".
+                              format(outgoing_filename))
+                logging.info("Exception: {}".format(e))
 
-    def serial_read_sensors(self, port_index=None):
+    def serial_read(self, port_index=None):
         '''
         Sends the 'csv_nickname_list' string to the serial port of index
         'port_index' (for self.SERIAL_PORTS) and returns the response line.
@@ -243,6 +229,8 @@ class Meteorologger:
         Example: 'DHT22_TEMP,DHT22_AH,BMP085_PRESSURE,LDR'
         '''
         result_line, ser = None, None
+        read_command = self.SERIAL_CSV_SEP.join(
+            ['read'] + self.CFG['LOGGER']['SENSORS'])
 
         try:
             if isinstance(port_index, int):
                 serial_port = self.SERIAL_PORTS[port_index]
@@ -250,23 +238,23 @@
                 serial_port = self.SERIAL_PORTS[0]
 
             # if present, the board will be reset
-            ser = serial.Serial(serial_port,
-                                self._getCFG('ARDUINO', 'BAUD_RATE',
-                                             expected_type=int),
-                                timeout=self.SERIAL_READ_TIMEOUT,
-                                xonxoff=True)
-            logging.info(str(ser))
+            ser = serial.Serial(serial_port, self.CFG['ARDUINO']['BAUD_RATE'],
+                                timeout=self.SERIAL_READ_TIMEOUT, xonxoff=True)
+            logging.info("Serial open <port='{}', baudrate={}>"
+                         .format(ser.port, ser.baudrate))
 
             time.sleep(self.BOARD_RESET_TIMEOUT)
+            ser.flush()
 
             while bool(result_line) is False:
-                result = ser.write(bytes(self.SENSORS_CSV_LINE, 'utf8'))
+                result = ser.write(bytes(read_command, 'utf8'))
                 logging.info("sent: '{}' ({} bytes)".format(
-                    self.SENSORS_CSV_LINE, result))
+                    read_command, result))
 
-                time.sleep(self.BOARD_RESPONSE_DELAY)
+                time.sleep(self.CFG['ARDUINO']['RESPONSE_DELAY'])
 
                 result_line = ser.readline()
@@ -276,20 +264,21 @@
                 result_line = self._decode_bytes(result_line)
                 if result_line is None:
                     logging.error("Unable to decode line as ASCII.")
-                    logging.info("Trying a new reading..")
+                    logging.debug("Trying a new reading..")
                     continue
 
             ser.close()
             return result_line
 
         except KeyboardInterrupt:
             raise KeyboardInterrupt
-        except:
+        except Exception as e:
             logging.error("Unable to open serial port '{}'"
                           .format(serial_port))
+            logging.error(e)
         finally:
             if ser:
                 ser.close()
-                logging.info("serial closed")
+                logging.debug("serial closed")
 
         return None
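Note: serial_read() now builds its request from the configured sensor list instead of the removed SENSORS_CSV_LINE attribute. With the nicknames from the docstring example, the command sent over the wire would be:

    SERIAL_CSV_SEP = ','
    sensors = ['DHT22_TEMP', 'DHT22_AH', 'BMP085_PRESSURE', 'LDR']
    read_command = SERIAL_CSV_SEP.join(['read'] + sensors)
    print(read_command)   # read,DHT22_TEMP,DHT22_AH,BMP085_PRESSURE,LDR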
 
     def setup_logging(self):
@@ -298,7 +287,7 @@
             #                     level=logging.DEBUG)
 
             root = logging.getLogger('')
-            root.setLevel(logging.DEBUG)
+            root.setLevel(logging.INFO)
 
             console = logging.StreamHandler()
             console.setFormatter(logging.Formatter(
                 fmt='%(asctime)s : %(levelname)s : %(message)s',
@@ -306,58 +295,60 @@
             root.addHandler(console)
         else:
             logging.basicConfig(
-                level=logging.DEBUG,
+                level=logging.INFO,
                 filename=self.EXECUTION_LOG_FILENAME,
                 format='%(asctime)s : %(levelname)s : %(message)s',
                 datefmt='%Y-%m-%d %H:%M:%S')
 
     def run(self):
         '''
-        Starts the logger main loop, which iterate over the procedures:
-          1. Read sensor data via serial port;
-          2. If successful, save data on
+        Starts the logger main loop, which keeps reading data from the serial
+        port and trying to send it to the server.
+
+        Basically, the loop consists of the following steps:
+          1. serial_read()      # send a string, receive a string
+          2. create_json()      # validate data and make it server-ready
+          3. write_datalog()    # write current data on a local file for backup
+          4. send_to_server()   # try to send; if it fails, save data for later
         '''
         self.setup_logging()
 
+        logging.info('='*40)
         logging.info('EXECUTION START')
 
         port_index = 0
         try:
             while True:
                 logging.info('='*40)
-                logging.info('Attempting to read from serial')
-                csv_result = self.serial_read_sensors(port_index)
+                logging.debug('Attempting to read from serial')
+
+                csv_result = self.serial_read(port_index)
 
                 if csv_result is not None:
-                    logging.info("csv_result: '{}'".format(csv_result))
-
-                    logging.info('-'*40)
-                    logging.info('Attempting create valid JSON data')
-                    json_data = self.create_json_data(csv_result)
+                    logging.debug('-'*40)
+                    logging.debug('Attempting to create valid JSON data')
+                    json_data = self.create_json(csv_result)