EMM / meteorolog · Commits · 5da41416

Commit 5da41416 authored Jun 29, 2015 by Nelso Jost
    NEW: full RTC support; better settings management;

parent 2310b102
Changes: 10 files
logger/Makefile
@@ -44,13 +44,13 @@ clean-venv:
 	rm -rf ${VENV}
 
 run:
-	sudo ${VENV}/bin/python${PYVER} logger.py
+	${VENV}/bin/python${PYVER} run.py
 
 deploy:
-	sudo ${VENV}/bin/python${PYVER} deploy.py
+	sudo ${VENV}/bin/python${PYVER} app/deploy.py
 
 undeploy:
-	sudo ${VENV}/bin/python${PYVER} deploy.py -u
+	sudo ${VENV}/bin/python${PYVER} app/deploy.py -u
 
 tail-exec:
 	tail -F logs/execution.log
logger/app/__init__.py  (new file, 0 → 100644)

+from .main import Meteorologger
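The new package marker re-exports Meteorologger, so callers can import it from the app package instead of reaching into main.py. Together with the Makefile's run target now pointing at run.py and the if __name__ == '__main__' block being dropped from main.py later in this commit, the entry point presumably looks something like the sketch below; run.py itself is not part of this diff, so its exact contents are an assumption:

    # run.py, hypothetical entry point (not shown in this commit)
    import sys

    from app import Meteorologger   # re-exported by logger/app/__init__.py

    if __name__ == '__main__':
        # mirrors the removed __main__ block: '-s' silences console output
        Meteorologger(verbose='-s' not in sys.argv).run()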
logger/deploy.py → logger/app/deploy.py

@@ -6,6 +6,7 @@ import sys
 
 PROCESS_NAME = 'meteorologger'
 BASE_DIR = os.path.abspath(os.path.dirname(__file__))
+BASE_DIR = BASE_DIR[:BASE_DIR.rfind(os.path.sep)]
 SUPERVISOR_CONFIG_FILENAME = '/etc/supervisor/conf.d/{}.conf'\
     .format(PROCESS_NAME)

@@ -13,7 +14,7 @@ PID_FILENAME = 'logs/pid_{}'.format(PROCESS_NAME)
 
 def deploy_supervisor():
-    with open('supervisor.conf') as f_temp:
+    with open('app/supervisor.conf') as f_temp:
         template = jinja2.Template(f_temp.read())
         config_file_str = template.render(base_dir=BASE_DIR,
                                           process_name=PROCESS_NAME)

@@ -37,12 +38,17 @@ def deploy_supervisor():
     proc.wait()
     pid = proc.stdout.read().decode('ascii').strip()
 
-    with open(PID_FILENAME, 'w') as f:
-        f.write(pid + '\n')
+    try:
+        pid = int(pid)
 
-    print("\nPID: {} (saved at '{}')".format(pid, PID_FILENAME))
-    print('\n[{} process is running]'.format(PROCESS_NAME))
-    print('\nYou can manage it with supervisorctl tool.')
+        with open(PID_FILENAME, 'w') as f:
+            f.write(str(pid) + '\n')
+
+        print("\nPID: {} (saved at '{}')".format(pid, PID_FILENAME))
+        print('\n[{} process is running]'.format(PROCESS_NAME))
+        print('\nYou can manage it with supervisorctl tool.')
+    except:
+        print("\nSomething went wrong and the daemon process was NOT created.")
 
 
 def undeploy_supervisor():
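Since deploy.py now lives one directory deeper (logger/app/ instead of logger/), the added BASE_DIR line strips the last path component so BASE_DIR keeps pointing at the logger root rather than at app/. A small illustration of what that slice does, with a made-up install path:

    import os

    BASE_DIR = '/home/pi/meteorolog/logger/app'         # what os.path.dirname(__file__) would give
    BASE_DIR = BASE_DIR[:BASE_DIR.rfind(os.path.sep)]   # -> '/home/pi/meteorolog/logger'

    # same result, spelled with the stdlib helper:
    assert BASE_DIR == os.path.dirname('/home/pi/meteorolog/logger/app')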
logger/logger.py → logger/app/main.py
@@ -19,31 +19,23 @@ import json
 import yaml
 import jinja2
 
-SETTINGS_YAML_SCHEMA = \
-"""
-SERVER:
-    API_POST_URL: str
+from pykwalify.core import Core
 
-LOGGER:
-    READING_INTERVAL: {seconds: int, minutes: int, hours: int, days: int}
-    DATETIME_FORMAT: str
 
+def make_current_file_path(filename):
+    ''' Append filename to the current __file__ path. '''
+    return os.path.join(os.path.abspath(os.path.dirname(__file__)), filename)
 
-SENSORS:
-    - {nickname: str, data_format: str}
-
-ARDUINO:
-    SERIAL_PORT: str
-    BAUD_RATE: int
 
+class RTCDateTime:
+    RTC_DT_FMT = '%Y-%m-%d %H:%M:%S'
+    __qualname__ = "RTCDateTime fmt='{}'".format(RTC_DT_FMT)
 
-FILES:
-    DATALOG_CSV: str
-    DATALOG_CSV_SEP: str
-    SERVER_OUTGOING_JSON: str
-"""
+    def __init__(self, s):
+        self.dt = datetime.strptime(s, self.RTC_DT_FMT)
 
-
-def make_current_file_path(filename):
-    ''' Append filename to the current __file__ path. '''
-    return os.path.join(os.path.abspath(os.path.dirname(__file__)), filename)
+    def __str__(self):
+        return self.dt.strftime('%Y%m%d%H%M%S')
 
 
 class Meteorologger:
     '''
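RTCDateTime is the piece that gives the commit its "full RTC support": a timestamp string coming from the RTC sensor is parsed with RTC_DT_FMT and can be re-serialized in a compact form. A standalone sketch of the same class, exercised with a made-up RTC reading:

    from datetime import datetime

    class RTCDateTime:
        RTC_DT_FMT = '%Y-%m-%d %H:%M:%S'

        def __init__(self, s):
            self.dt = datetime.strptime(s, self.RTC_DT_FMT)

        def __str__(self):
            return self.dt.strftime('%Y%m%d%H%M%S')

    reading = RTCDateTime('2015-06-29 14:30:00')   # sample string as the RTC would send it
    print(str(reading))                            # -> 20150629143000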
@@ -55,11 +47,17 @@ class Meteorologger:
-    EXECUTION_LOG_FILENAME = 'logs/execution.log'
-    SETTINGS_SCHEMA_FILENAME = 'app/settings_schema.yaml'
-    SETTINGS_FILENAME = 'settings.yaml'
-
-    CSV_SEP = ','
-    DATA_FORMATS = {'int': int, 'float': float, 'str': str}
+    EXECUTION_LOG_FILENAME = 'logs/execution.log'
+    OUTGOING_BASENAME = 'outgoing.json'
+    FILE_TIMESTAMP_FORMAT = '%Y-%m-%d-%H-%M-%S'
+
+    SERIAL_CSV_SEP = ','
+    DATA_FORMATS = {'int': int, 'float': float, 'str': str,
+                    'datetime': RTCDateTime}
+
+    SERIAL_READ_TIMEOUT = 1.5   # seconds
+    FIND_PORT_TIMEOUT = 10      # seconds
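Registering RTCDateTime under the 'datetime' key lets RTC readings flow through the same lookup-and-call conversion as plain int/float/str values, which is how create_json() uses DATA_FORMATS in the next hunk. A tiny sketch of that dispatch with invented sensor values:

    converters = {'int': int, 'float': float, 'str': str}   # the real map also has 'datetime': RTCDateTime

    raw_values = ['23.5', ' 1013', 'ok']
    formats = ['float', 'int', 'str']
    print([converters[f](v.strip()) for f, v in zip(formats, raw_values)])   # -> [23.5, 1013, 'ok']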
@@ -81,160 +79,148 @@ class Meteorologger:
                 print("Invalid bytes!")
         return result
 
-    def _getCFG(self, *args, expected_type=None):
-        r = self.CFG
-        for key in args:
-            try:
-                r = r[key]
-            except:
-                raise Exception("Configuration file '{}' is missing the "
-                                "required key\n{}".format(
-                                    self.SETTINGS_FILENAME, ': '.join(args)))
-        if expected_type is not None:
-            try:
-                if expected_type is int and isinstance(r, float):
-                    raise TypeError
-                elif expected_type is str and not isinstance(r, str):
-                    raise TypeError
-                else:
-                    return expected_type(r)
-            except:
-                raise TypeError("Expected {} on key {} but got\n{}".format(
-                    expected_type, '/'.join(args), r))
-        return r
     def load_settings(self):
         '''
             Load the configuration file onto the self.CFG attribute.
             Some keys will be tested and filenames will be normalized.
         '''
-        with open(self.SETTINGS_FILENAME) as f:
-            self.CFG = yaml.safe_load(f)
+        c = Core(source_file=self.SETTINGS_FILENAME,
+                 schema_files=[self.SETTINGS_SCHEMA_FILENAME])
 
-        self.API_POST_URL = self._getCFG('SERVER', 'API_POST_URL',
-                                         expected_type=str)
+        try:
+            self.CFG = c.validate(raise_exception=True)
+        except Exception as e:
+            print('-' * 60)
+            print("There is something wrong with the configuration file '{}'"
+                  "\nKeep in mind that it uses YAML syntax which require "
+                  "proper identation.\n".format(self.SETTINGS_FILENAME))
+            print(e)
+            print("\nPlease fix it up or regenerate it.")
+            sys.exit(1)
 
+        self.SERIAL_PORTS = self.CFG['ARDUINO']['SERIAL_PORT'].split(',')
-        self.SENSORS_CSV_LINE = 'read' + self.CSV_SEP + self.CSV_SEP.join(
-            [d['nickname'] for d in self._getCFG('SENSORS', expected_type=list)])
 
+        self.READING_INTERVAL_SECONDS = \
+            self.CFG['LOGGER']['INTERVAL']['seconds'] \
+            + 60 * self.CFG['LOGGER']['INTERVAL']['minutes'] \
+            + 3600 * self.CFG['LOGGER']['INTERVAL']['hours'] \
+            + 86400 * self.CFG['LOGGER']['INTERVAL']['days']
 
-        self.DATALOG_CSV_FILENAME = self._getCFG('FILES', 'DATALOG_CSV',
-                                                 expected_type=str)
-        self.SERVER_OUTGOING_FILENAME = self._getCFG('FILES',
-                                                     'SERVER_OUTGOING_JSON',
-                                                     expected_type=str)
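The pykwalify call above is the "better settings management" half of the commit: instead of the hand-rolled _getCFG() type checks, settings.yaml is validated once against app/settings_schema.yaml and the rest of the class can index self.CFG directly. A minimal standalone sketch of the same Core usage, written against throwaway files whose names and contents are purely illustrative:

    from pykwalify.core import Core

    with open('/tmp/schema.yaml', 'w') as f:
        f.write("type: map\nmapping:\n  SERVER:\n    required: true\n    type: map\n"
                "    mapping:\n      API_POST_URL:\n        required: true\n        type: str\n")
    with open('/tmp/settings.yaml', 'w') as f:
        f.write("SERVER:\n  API_POST_URL: http://example.com/api\n")

    c = Core(source_file='/tmp/settings.yaml', schema_files=['/tmp/schema.yaml'])
    cfg = c.validate(raise_exception=True)   # raises a pykwalify error if the settings do not match
    print(cfg['SERVER']['API_POST_URL'])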
+    def create_json(self, raw_line):
+        '''
+            Given the raw serial line response (CSV string), builds and returns
+            a JSON dict with validated, server-ready, sensor data.
+        '''
+        d = {'datetime': {'format': self.CFG['DATALOG']['DATETIME_FORMAT']},
+             'sensors': {}}
 
-        # convert raw str into normal escaped str (e.g., r'\\t' --> '\t')
-        self.DATALOG_CSV_SEP = bytes(self._getCFG('FILES', 'DATALOG_CSV_SEP',
-                                                  expected_type=str),
-                                     'utf8').decode('unicode_escape')
+        rtc = self.CFG['LOGGER']['USE_RTC_DATETIME']
+        using_rtc = rtc and rtc in self.CFG['LOGGER']['SENSORS']
 
-        self.SERIAL_PORTS = [p.strip() for p in
-                             self._getCFG('ARDUINO', 'SERIAL_PORT',
-                                          expected_type=str).split(',')]
+        if using_rtc:
+            d['datetime']['source'] = rtc
+            rtc_datetime_fmt = self.CFG['LOGGER']['RTC_DATETIME_FORMAT']
+        else:
+            d['datetime']['source'] = 'logger'
+            d['datetime']['value'] = datetime.now().strftime(d['datetime']['format'])
 
-        self.BOARD_RESPONSE_DELAY = self._getCFG('ARDUINO', 'RESPONSE_DELAY',
-                                                 expected_type=int)
+        for i, v in enumerate(raw_line.split(self.SERIAL_CSV_SEP)):
+            nickname = self.CFG['LOGGER']['SENSORS'][i]
+            type_name = self.CFG['SENSORS_AVAILABLE'][nickname]['data_format']
 
+            if type_name == 'datetime':
+                if using_rtc:
+                    d['datetime']['value'] = datetime.strptime(
+                        v, rtc_datetime_fmt).strftime(d['datetime']['format'])
+                continue
 
-        self.READING_INTERVAL_SECONDS = \
-            self._getCFG('LOGGER', 'READING_INTERVAL', 'seconds', expected_type=int) + \
-            60 * self._getCFG('LOGGER', 'READING_INTERVAL', 'minutes', expected_type=int) + \
-            3600 * self._getCFG('LOGGER', 'READING_INTERVAL', 'hours', expected_type=int) + \
-            86400 * self._getCFG('LOGGER', 'READING_INTERVAL', 'days', expected_type=int)
-        self.DATETIME_FORMAT = self._getCFG('LOGGER', 'DATETIME_FORMAT',
-                                            expected_type=str)
 
-    def create_json_data(self, raw_line):
-        '''
-            Given the raw serial line response (expected to be a CSV line), returns
-            a JSON dict with sensor data including the datetime field.
-        '''
-        raw_sensor_data = {'datetime': datetime.now().strftime(self.DATETIME_FORMAT),
-                           'sensors': {}}
 
-        for i, v in enumerate(raw_line.split(self.CSV_SEP)):
-            v = v.strip()
-            type_ = self.DATA_FORMATS[self.CFG['SENSORS'][i]['data_format']]
-            nickname = self.CFG['SENSORS'][i]['nickname']
             try:
-                v = type_(v)
+                v = self.DATA_FORMATS[type_name](v.strip())
             except:
-                logging.error("Cannot convert value '{}' read from {} to {}"
-                              .format(v, nickname, type_))
+                logging.error("[{}]: '{}' is not a valid {}"
+                              .format(nickname, v, type_name))
                 continue
 
-            raw_sensor_data['sensors'][nickname] = v
+            d['sensors'][nickname] = v
 
-        logging.info("Resulting JSON: {}".format(raw_sensor_data))
+        logging.info("Validated JSON: {}".format(d))
 
-        return raw_sensor_data
+        return d
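For reference, the dict that the new create_json() hands to the rest of the pipeline has this shape; the values below are invented and assume an RTC sensor nicknamed RTC plus one temperature sensor:

    json_data = {
        'datetime': {
            'format': '%Y-%m-%d %H:%M:%S',   # CFG['DATALOG']['DATETIME_FORMAT']
            'source': 'RTC',                 # or 'logger' when no RTC sensor is configured
            'value': '2015-06-29 14:30:00',
        },
        'sensors': {
            'DHT22_TEMP': 23.5,
        },
    }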
-    def write_data_log(self, json_data):
+    def write_datalog(self, json_data):
         '''
             For backup purposes, write the given JSON data onto the file
             DATALOG_CSV as specficied on self.SETTINGS_FILENAME.
         '''
-        csv_line = json_data['datetime'] + self.DATALOG_CSV_SEP
-        for sensor in self.CFG['SENSORS']:
-            if sensor['nickname'] in json_data['sensors']:
-                csv_line += str(json_data['sensors'][sensor['nickname']])
-                csv_line += self.DATALOG_CSV_SEP
+        # convert raw str into normal escaped str (e.g., r'\\t' --> '\t')
+        csv_sep = bytes(self.CFG['DATALOG']['CSV_SEP'],
+                        'utf8').decode('unicode_escape')
+
+        csv_line = json_data['datetime']['value'] + csv_sep
+        for nickname in self.CFG['LOGGER']['SENSORS']:
+            if nickname in json_data['sensors']:
+                csv_line += str(json_data['sensors'][nickname])
+                csv_line += csv_sep
 
         csv_line = csv_line[:-1]
 
         try:
-            with open(self.CFG['FILES']['DATALOG_CSV'], 'a') as f:
+            datalog_filename = self.CFG['DATALOG']['FILENAME']
+            with open(datalog_filename, 'a') as f:
                 f.write(csv_line + '\n')
-            logging.info("Done! CSV line '{}' was appended to the file '{}'"
-                         .format(csv_line, self.CFG['FILES']['DATALOG_CSV']))
-        except:
-            logging.error("Unable to write data log at '{}'"
-                          .format(self.CFG['FILES']['DATALOG_CSV']))
+            logging.info("Updated datalog file: '{}'".format(datalog_filename))
+        except Exception as e:
+            logging.error("Unable to write datalog at '{}'"
+                          .format(datalog_filename))
+            logging.info("Exception: {}".format(e))
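The csv_sep conversion above is needed because a separator such as a tab comes out of YAML as the two literal characters backslash and t; round-tripping through unicode_escape turns it into the real control character:

    raw = r'\t'                                         # what yaml.safe_load returns for an unquoted \t
    sep = bytes(raw, 'utf8').decode('unicode_escape')   # -> '\t', an actual tab
    assert sep == '\t' and len(sep) == 1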
-    def send_data_to_server(self, json_data):
-        logging.info("URL: {}".format(self.API_POST_URL))
+    def send_to_server(self, json_data):
         r = None
+        outgoing_filename = os.path.join(
+            os.path.dirname(self.CFG['DATALOG']['FILENAME']), 'outgoing.json')
+        URL = self.CFG['SERVER']['API_POST_URL']
 
         try:
-            if os.path.exists(self.CFG['FILES']['SERVER_OUTGOING_JSON']):
-                logging.info("Outgoing data exists! Trying to send it first..")
+            if os.path.exists(outgoing_filename):
+                logging.debug("Outgoing data exists! Will try to send it.")
 
-                with open(self.CFG['FILES']['SERVER_OUTGOING_JSON']) as f:
+                with open(outgoing_filename) as f:
                     for i, line in enumerate(f):
-                        r = requests.post(self.API_POST_URL,
-                                          json=json.loads(line, encoding='utf-8'))
+                        d = json.loads(line, encoding='utf-8')
+                        r = requests.post(URL, json=d)
                         if r.status_code != 200:
                             raise Exception
                         logging.info('Line {}: {}'.format(i, r))
 
-                os.remove(self.CFG['FILES']['SERVER_OUTGOING_JSON'])
-                logging.info("Done! Server data should be up to date.")
-
-            r = requests.post(self.API_POST_URL, json=json_data)
+                os.remove(outgoing_filename)
+                logging.info("Removed file '{}'".format(outgoing_filename))
 
+            r = requests.post(URL, json=json_data)
             if r.status_code != 200:
                 raise Exception
 
-            logging.info("Done! Request: {}".format(r))
+            logging.info("Request: {}".format(r))
 
         except:
-            logging.error("Unable to reach the server.")
-            logging.info("Response request: {}".format(r))
+            logging.error("Unable to reach the server at '{}'. Request: {}"
+                          .format(URL, r))
 
-            logging.info("Attempting to write current data on local file '{}'"
-                         .format(self.CFG['FILES']['SERVER_OUTGOING_JSON']))
-            with open(self.CFG['FILES']['SERVER_OUTGOING_JSON'], 'a') as f:
-                f.write(json.dumps(json_data) + '\n')
+            try:
+                with open(outgoing_filename, 'a') as f:
+                    f.write(json.dumps(json_data) + '\n')
 
-            logging.info("Done! Data is saved an will be shipped as soon as "
-                         "the server is back on.")
-            return
+                logging.info("Updated outgoing file '{}'".format(outgoing_filename))
+            except Exception as e:
+                logging.error("[DATALOST] Unable to write outgoing file '{}'"
+                              .format(outgoing_filename))
+                logging.info("Exception: {}".format(e))
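The rewritten send_to_server() keeps the store-and-forward behaviour but derives the buffer path from the datalog location: readings that cannot be delivered are appended, one JSON document per line, to outgoing.json next to the datalog, and the whole file is replayed and removed the next time the server answers with 200. The flow, condensed into a sketch with a placeholder URL and file name:

    import json
    import os

    import requests

    def post_with_buffer(url, payload, buffer_path='outgoing.json'):
        try:
            if os.path.exists(buffer_path):
                with open(buffer_path) as f:
                    for line in f:
                        if requests.post(url, json=json.loads(line)).status_code != 200:
                            raise Exception
                os.remove(buffer_path)
            if requests.post(url, json=payload).status_code != 200:
                raise Exception
        except Exception:
            # server unreachable: keep the reading for the next attempt
            with open(buffer_path, 'a') as f:
                f.write(json.dumps(payload) + '\n')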
-    def serial_read_sensors(self, port_index=None):
+    def serial_read(self, port_index=None):
         '''
             Sends the 'csv_nickname_list' string to the serial port of index
             'port_index' (for self.SERIAL_PORTS) and returns the response line.
@@ -243,6 +229,8 @@ class Meteorologger:
             Example: 'DHT22_TEMP,DHT22_AH,BMP085_PRESSURE,LDR'
         '''
         result_line, ser = None, None
+        read_command = self.SERIAL_CSV_SEP.join(
+            ['read'] + self.CFG['LOGGER']['SENSORS'])
         try:
             if isinstance(port_index, int):
                 serial_port = self.SERIAL_PORTS[port_index]
@@ -250,46 +238,47 @@ class Meteorologger:
                 serial_port = self.SERIAL_PORTS[0]
 
             # if present, the board will be reseted
-            ser = serial.Serial(serial_port,
-                                self._getCFG('ARDUINO', 'BAUD_RATE', expected_type=int),
-                                timeout=self.SERIAL_READ_TIMEOUT, xonxoff=True)
-            logging.info(str(ser))
+            ser = serial.Serial(serial_port, self.CFG['ARDUINO']['BAUD_RATE'],
+                                timeout=self.SERIAL_READ_TIMEOUT, xonxoff=True)
+            logging.info("Serial open <port='{}', baudrate={}>"
+                         .format(ser.port, ser.baudrate))
 
             time.sleep(self.BOARD_RESET_TIMEOUT)
             ser.flush()
 
             while bool(result_line) is False:
-                result = ser.write(bytes(self.SENSORS_CSV_LINE, 'utf8'))
+                result = ser.write(bytes(read_command, 'utf8'))
                 logging.info("sent: '{}' ({} bytes)"
-                             .format(self.SENSORS_CSV_LINE, result))
+                             .format(read_command, result))
 
-                time.sleep(self.BOARD_RESPONSE_DELAY)
+                time.sleep(self.CFG['ARDUINO']['RESPONSE_DELAY'])
 
                 result_line = ser.readline()
                 logging.info("read: {} ({} bytes)".format(result_line,
                                                           len(result_line)))
 
                 result_line = self._decode_bytes(result_line)
                 if result_line is None:
                     logging.error("Unable to decode line as ASCII.")
-                    logging.info("Trying a new reading..")
+                    logging.debug("Trying a new reading..")
                     continue
 
             ser.close()
             return result_line
 
         except KeyboardInterrupt:
             raise KeyboardInterrupt
-        except:
+        except Exception as e:
             logging.error("Unable to open serial port '{}'".format(serial_port))
+            logging.error(e)
         finally:
             if ser:
                 ser.close()
-                logging.info("serial closed")
+                logging.debug("serial closed")
 
         return None
 
     def setup_logging(self):
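serial_read() boils down to a timed request/response exchange: open the port with a read timeout, give the board a moment after the reset that opening the port triggers, write the comma-separated command, wait for the configured response delay, then read one line back. Reduced to its essentials (port name, baud rate, delays and sensor names below are placeholders):

    import time

    import serial   # pyserial

    ser = serial.Serial('/dev/ttyACM0', 9600, timeout=1.5, xonxoff=True)
    time.sleep(2)                                    # let the board reset after the port opens
    ser.flush()
    ser.write(bytes('read,DHT22_TEMP,LDR', 'utf8'))  # same framing as read_command
    time.sleep(1)                                    # the ARDUINO RESPONSE_DELAY setting
    line = ser.readline()                            # e.g. b'23.5,512\r\n'
    ser.close()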
@@ -298,7 +287,7 @@ class Meteorologger:
            #                     level=logging.DEBUG)
 
             root = logging.getLogger('')
-            root.setLevel(logging.DEBUG)
+            root.setLevel(logging.INFO)
 
             console = logging.StreamHandler()
             console.setFormatter(logging.Formatter(
                 fmt='%(asctime)s : %(levelname)s : %(message)s',
@@ -306,58 +295,60 @@ class Meteorologger:
             root.addHandler(console)
         else:
-            logging.basicConfig(level=logging.DEBUG,
+            logging.basicConfig(level=logging.INFO,
                                 filename=self.EXECUTION_LOG_FILENAME,
                                 format='%(asctime)s : %(levelname)s : %(message)s',
                                 datefmt='%Y-%m-%d %H:%M:%S')
 
     def run(self):
         '''
-            Starts the logger main loop, which iterate over the procedures:
-                1. Read sensor data via serial port;
-                2. If successful, save data on
+            Starts the logger main loop, which keeps reading data from the serial
+            port and trying to send it to the server.
+
+            Basically, the loop consists of the following steps:
+
+                1. serial_read()      # send a string, recieves a string
+                2. create_json()      # validate data and make it server-ready
+                3. write_datalog()    # write current data on local file for backup
+                4. send_to_server()   # try to send; if fails, save data for later
         '''
         self.setup_logging()
 
         logging.info('=' * 40)
         logging.info('EXECUTION START')
 
         port_index = 0
         try:
             while True:
-                logging.info('=' * 40)
-                logging.info('Attempting to read from serial')
-                csv_result = self.serial_read_sensors(port_index)
+                logging.debug('Attempting to read from serial')
-                if csv_result is not None:
-                    logging.info("csv_result: '{}'".format(csv_result))
+                csv_result = self.serial_read(port_index)
 
-                    logging.info('-' * 40)
-                    logging.info('Attempting create valid JSON data')
-                    json_data = self.create_json_data(csv_result)
+                if csv_result is not None:
+                    logging.debug('-' * 40)
+                    logging.debug('Attempting create valid JSON data')
+                    json_data = self.create_json(csv_result)
 
-                    logging.info('-' * 40)
-                    logging.info('Attempting to write local data log')
-                    self.write_data_log(json_data)
+                    logging.debug('-' * 40)
+                    logging.debug('Attempting to write local data log')
+                    self.write_datalog(json_data)
 
-                    logging.info('-' * 40)
-                    logging.info('Attempting to send data to the server')
-                    self.send_data_to_server(json_data)
+                    logging.debug('-' * 40)
+                    logging.debug('Attempting to send data to the server')
+                    self.send_to_server(json_data)
                 else:
                     if port_index < len(self.SERIAL_PORTS) - 1:
                         port_index += 1
                     else:
                         port_index = 0
-                    logging.info("Trying another port in about {} seconds.."
+                    logging.debug("Trying another port in about {} seconds.."
                                  .format(self.FIND_PORT_TIMEOUT))
                     time.sleep(self.FIND_PORT_TIMEOUT)
                     continue
 
-                logging.info('-' * 40)
+                logging.debug('-' * 40)
                 logging.info("Going to sleep now for {days} days, {hours} "
                              "hours, {minutes} minutes and {seconds} seconds.."
-                             .format(**self.CFG['LOGGER']['READING_INTERVAL']))
+                             .format(**self.CFG['LOGGER']['INTERVAL']))
                 time.sleep(self.READING_INTERVAL_SECONDS)
@@ -365,6 +356,3 @@ class Meteorologger:
             logging.info('KeyboardInterrupt: EXECUTION FINISHED')
             pass
 
-
-if __name__ == '__main__':
-    Meteorologger(verbose=False if '-s' in sys.argv else True).run()
logger/requirements.pip → logger/app/requirements.pip

docopt==0.6.2
ipython==3.1.0
Jinja2==2.7.3
MarkupSafe==0.23
pykwalify==1.2.0
pyreadline==2.0
pyserial==2.7
python-dateutil==2.4.2
PyYAML==3.11
requests==2.6.0
six==1.9.0
logger/app/settings_schema.yaml  (new file, 0 → 100644)

type: map
mapping:
    SERVER:
        required: true
        type: map
        mapping:
            API_POST_URL:
                required: true
                type: str

    LOGGER:
        required: true
        type: map
        mapping:
            SENSORS:
                required: true
                type: seq
                sequence:
                    - type: str
                      unique: true

            INTERVAL:
                # default: {days: 0, hours: 0, minutes: 0, seconds: 10}
                required: true
                type: map
                mapping:
                    days:
                        required: true
                        type: int
                        range:
                            min: 0
                    hours:
                        required: true
                        type: int
                        range:
                            min: 0
                    minutes:
                        required: true
                        type: int
                        range:
                            min: 0
                    seconds:
                        required: true
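A settings.yaml fragment that satisfies the portion of the schema shown above would look roughly like this; sensor nicknames and the URL are placeholders, and only the keys visible in this hunk are covered:

    import yaml

    sample = yaml.safe_load("""
    SERVER:
        API_POST_URL: http://example.com/api/readings

    LOGGER:
        SENSORS: [RTC, DHT22_TEMP, DHT22_AH, BMP085_PRESSURE, LDR]
        INTERVAL: {days: 0, hours: 0, minutes: 0, seconds: 10}
    """)

    print(sample['LOGGER']['INTERVAL']['seconds'])   # -> 10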