Merge branch 'dev' into 'master'

Version 0.3

See merge request s3lph/icalendar-timeseries-server!3
Commit 3574e04d6c
7 changed files with 68 additions and 63 deletions
CHANGELOG.md (14 additions)

@@ -1,6 +1,20 @@
 # iCalendar Timeseries Server Changelog
 
+<!-- BEGIN RELEASE v0.3 -->
+
+## Version 0.3
+
+### Changes
+
+<!-- BEGIN CHANGES 0.3 -->
+- Replace print statements by proper logging
+- Fix: Ensure scrape interval is positive
+- Fix: Keep showing events that already started, but have not finished yet
+<!-- END CHANGES 0.3 -->
+
+<!-- END RELEASE v0.3 -->
+
 <!-- BEGIN RELEASE v0.2 -->
 
 ## Version 0.2
 
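The third changelog entry is easiest to read as a window check: an occurrence stays visible as long as any part of it still overlaps the queried time window, not only if it begins inside it. Below is a minimal sketch of that rule, matching the condition changed in the scraper diff further down; the helper name `event_visible` and the sample values are illustrative, not part of the project.

```python
from datetime import datetime, timedelta


def event_visible(start: datetime, end: datetime,
                  occurrence: datetime, duration: timedelta) -> bool:
    # Old rule was `start <= occurrence < end`, which dropped events that had
    # already begun before `start`. The new rule keeps an event while any part
    # of it still overlaps the [start, end) window.
    return start <= occurrence + duration and occurrence < end


window_start = datetime(2021, 6, 1, 12, 0)
window_end = window_start + timedelta(days=60)

# An event that started an hour ago but runs for two more hours is still shown.
print(event_visible(window_start, window_end,
                    window_start - timedelta(hours=1), timedelta(hours=3)))  # True
```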
JSON configuration file (deleted in this commit)

@@ -1,32 +0,0 @@
-{
-  "addr": "127.0.0.1",
-  "port": 8090,
-  "start_delta": "-PT3H",
-  "end_delta": "P60D",
-  "cache": "PT3M",
-  "tz": "Europe/Zurich",
-  "calendars": {
-    "tlstest": {
-      "interval": "PT5M",
-      "url": "https://localhost/private.ics",
-      "ca": "/home/sebastian/tlstest/ca/ca/ca.crt",
-      "auth": {
-        "type": "tls",
-        "keyfile": "/home/sebastian/tlstest/client/combined.pem"
-      }
-    },
-    "filetest": {
-      "interval": "PT1M",
-      "url": "file:///srv/http/private.ics"
-    }
-  },
-  "key_replace": {
-    "summary": "a_summary",
-    "description": "b_description"
-  },
-  "value_replace": {
-    "summary": "{{ summary|truncate(100, end=' \\N{HORIZONTAL ELLIPSIS}') }}",
-    "description": "{{ description|truncate(100, end=' \\N{HORIZONTAL ELLIPSIS}') }}",
-    "useless_metric": "{{ start.timestamp() + end.timestamp() }}"
-  }
-}
Prometheus API endpoint (`prometheus_api()`)

@@ -1,8 +1,7 @@
 from typing import List
 
 import json
-from urllib.error import HTTPError
-import traceback
+import logging
 
 import bottle
 
@@ -26,7 +25,7 @@ def prometheus_api():
             'error': str(e)
         }
         bottle.response.status = 400
-        traceback.print_exc()
+        logging.exception('Cannot parse PromQL query')
         bottle.response.add_header('Content-Type', 'application/json')
         return json.dumps(response)
 
@@ -42,14 +41,6 @@ def prometheus_api():
                 'result': [e.serialize() for e in events]
             }
         }
-    except HTTPError as e:
-        response = {
-            'status': 'error',
-            'errorType': 'internal',
-            'error': str(e)
-        }
-        bottle.response.status = 500
-        traceback.print_exc()
     except BaseException:
         response = {
             'status': 'error',
@@ -57,7 +48,7 @@ def prometheus_api():
             'error': 'An internal error occurred.'
        }
         bottle.response.status = 500
-        traceback.print_exc()
+        logging.exception('An internal error occurred')
 
     bottle.response.add_header('Content-Type', 'application/json')
     return json.dumps(response)
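The recurring change in this file swaps `traceback.print_exc()` for `logging.exception()`, which records the same stack trace but routes it through the logging system at ERROR level together with a message. A standalone sketch of the pattern, independent of the project's code; the `parse` stand-in is hypothetical.

```python
import logging

logging.basicConfig(level=logging.INFO)


def parse(q: str) -> int:
    # Hypothetical stand-in for the real PromQL query parsing.
    return int(q)


try:
    parse('not-a-number')
except ValueError:
    # Must be called from inside an exception handler; the current
    # traceback is appended to the ERROR-level log record.
    logging.exception('Cannot parse PromQL query')
```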
Calendar scraper (`_scrape_calendar()`)

@@ -2,6 +2,7 @@ from typing import Dict, List, Iterable
 
 import sys
 import urllib.request
+import logging
 from datetime import datetime, date, timedelta
 from threading import Lock, Timer
 
@@ -54,19 +55,24 @@ def _scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: da
     events = []
 
     opener: urllib.request.OpenerDirector = config.get_url_opener()
-    with opener.open(config.url) as response:
-        data = response.read().decode('utf-8')
+    try:
+        with opener.open(config.url) as response:
+            data = response.read().decode('utf-8')
+    except BaseException:
+        logging.exception(f'An error occurred while scraping the calendar endpoint "{name}" ({config.url})')
+        return
     calendar = cal.Calendar.from_ical(data)
 
     for element in calendar.walk():
         if element.name == "VEVENT":
             dtstart = element.get('dtstart').dt
-            if isinstance(dtstart, date):
+            # Apparently datetime is a subclass of date...
+            if isinstance(dtstart, date) and not isinstance(dtstart, datetime):
                 dtstart = datetime(dtstart.year, dtstart.month, dtstart.day, tzinfo=start.tzinfo)
             # Process either end timestamp or duration, if present
             if 'dtend' in element:
                 evend = element.get('dtend').dt
-                if isinstance(evend, date):
+                if isinstance(evend, date) and not isinstance(evend, datetime):
                     evend = datetime(evend.year, evend.month, evend.day, tzinfo=start.tzinfo)
                 duration = evend - dtstart
             elif 'duration' in element:
@@ -78,7 +84,7 @@ def _scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: da
             else:
                 occurences = [dtstart]
             for occurence in occurences:
-                if start <= occurence < end:
+                if start <= occurence + duration and occurence < end:
                     events.append(Event(name, element, occurence, occurence + duration))
     with _SCRAPE_CACHE_LOCK:
         _SCRAPE_CACHE[name] = events
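The comment added above ("Apparently datetime is a subclass of date...") is the reason for the tightened isinstance checks: a bare `isinstance(x, date)` is also true for `datetime` values, so all-day dates and full timestamps ended up in the same branch. A quick standard-library illustration, independent of the project:

```python
from datetime import date, datetime

all_day = date(2021, 6, 1)             # e.g. a DTSTART;VALUE=DATE value
timed = datetime(2021, 6, 1, 9, 30)    # e.g. a DTSTART with a time component

print(isinstance(timed, date))         # True -- datetime inherits from date
print(isinstance(all_day, datetime))   # False


def is_pure_date(value) -> bool:
    # Mirrors the changed check: only all-day dates get expanded to midnight.
    return isinstance(value, date) and not isinstance(value, datetime)


print(is_pure_date(all_day))  # True
print(is_pure_date(timed))    # False
```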
Configuration handling (`CalendarConfig`, `_keycheck()`, `load_config()`)

@@ -6,6 +6,7 @@ from datetime import timedelta
 import ssl
 import urllib.request
 import sys
+import logging
 
 import pytz
 import jinja2
@@ -27,7 +28,8 @@ class CalendarConfig:
 
     def __init__(self, config: Dict[str, Any], config_path: str) -> None:
         self._url: str = _keycheck('url', config, str, config_path)
-        self._scrape_interval: Duration = _parse_timedelta('interval', config, config_path, default_value='PT15M')
+        self._scrape_interval: Duration = _parse_timedelta('interval', config, config_path, default_value='PT15M',
+                                                           force_positive=True)
         self._ca: Optional[str] = _keycheck('ca', config, str, config_path, optional=True)
         auth: Dict[str, Any] = _keycheck('auth', config, dict, config_path, default_value={'type': 'none'})
         self._authtype: str = _keycheck('type', auth, str, f'{config_path}.auth',
@@ -155,7 +157,7 @@ def _keycheck(key: str,
         raise KeyError(f'Expected member "{key}" not found at path {path}')
     value: Any = config[key]
     if not isinstance(value, typ):
-        raise TypeError(f'Expected {typ}, not {type(value).__name__} for path {path}.{key}')
+        raise TypeError(f'Expected {typ.__name__}, not {type(value).__name__} for path {path}.{key}')
     if valid_values is not None:
         if value not in valid_values:
             raise ValueError(f'Expected one of {", ".join(valid_values)} ({typ}), not {value} for path {path}.{key}')
@@ -216,10 +218,17 @@ def get_jenv() -> jinja2.Environment:
 
 def load_config(filename: str):
     global CONFIG, JENV
-    with open(filename, 'r') as f:
-        json_config = json.loads(f.read())
-    CONFIG = Config(json_config)
-    JENV = jinja2.Environment()
+    try:
+        with open(filename, 'r') as f:
+            json_config = json.loads(f.read())
+        CONFIG = Config(json_config)
+        JENV = jinja2.Environment()
+    except json.JSONDecodeError as e:
+        logging.exception('Cannot parse config JSON')
+        raise e
+    except Exception as e:
+        logging.error(e)
+        raise e
 
 
 def load_default_config():
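The `_keycheck` tweak only affects the wording of the error: interpolating a class object into an f-string yields its repr, while `__name__` gives the bare type name. For illustration (the sample values are made up):

```python
typ = str
value = 42

print(f'Expected {typ}, not {type(value).__name__}')
# Expected <class 'str'>, not int

print(f'Expected {typ.__name__}, not {type(value).__name__}')
# Expected str, not int
```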
Application entry point (`main()`)

@@ -1,4 +1,5 @@
 import sys
+import logging
 
 import bottle
 
@@ -11,17 +12,32 @@ from icalendar_timeseries_server.api import prometheus_api
 
 
 def main():
-    if len(sys.argv) == 1:
-        load_default_config()
-    elif len(sys.argv) == 2:
-        load_config(sys.argv[1])
-    else:
-        print(f'Can only read one config file, got "{" ".join(sys.argv[1:])}"')
-        exit(1)
-    config = get_config()
+    # Set up logger
+    log_handler = logging.StreamHandler()
+    log_handler.setFormatter(logging.Formatter(
+        '%(asctime)s %(filename)s:%(lineno)d(%(funcName)s) [%(levelname)s]: %(message)s'))
+    logging.getLogger().addHandler(log_handler)
+
+    # Load configuration
+    config = get_config()
+    try:
+        if len(sys.argv) == 1:
+            load_default_config()
+        elif len(sys.argv) == 2:
+            load_config(sys.argv[1])
+        else:
+            logging.log(logging.FATAL, f'Can only read one config file, got "{" ".join(sys.argv[1:])}"')
+            exit(1)
+        # Re-fetch config after parsing
+        config = get_config()
+    except BaseException:
+        logging.fatal('Could not parse configuration file')
+        exit(1)
 
     # Schedule calendar scraping in the background
     for calname in config.calendars.keys():
         start_scrape_calendar(calname, config.calendars[calname])
 
     # Start the Bottle HTTP server
     bottle.run(host=config.addr, port=get_config().port)
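The handler set up at the top of `main()` attaches a formatter to the root logger, so every later `logging.*` call shares the timestamp / file:line(function) / level prefix. A minimal standalone sketch of the same setup; the file name and line number in the sample output are hypothetical.

```python
import logging

log_handler = logging.StreamHandler()
log_handler.setFormatter(logging.Formatter(
    '%(asctime)s %(filename)s:%(lineno)d(%(funcName)s) [%(levelname)s]: %(message)s'))
logging.getLogger().addHandler(log_handler)

logging.warning('Could not parse configuration file')
# Prints something along the lines of:
# 2021-06-01 12:00:00,123 example.py:8(<module>) [WARNING]: Could not parse configuration file
```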
PromQL query parser (`MetricQuery`)

@@ -1,6 +1,7 @@
 from typing import Dict
 
 import re
+import logging
 
 LABEL_MATCH_OPERATORS = [
     '=',
@@ -64,7 +65,7 @@ class MetricQuery:
         self.__parse(q)
 
     def __parse(self, q: str):
-        print(q)
+        logging.debug(f'Parsing PromQL query string: {q}')
         # globalstate:
         # 0 = parsing metric name
         # 1 = parsing filters
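One practical note on the `print(q)` replacement: the root logger defaults to the WARNING level, so these `logging.debug()` parser traces stay silent unless the level is lowered explicitly. A hedged sketch of how that could look during troubleshooting; the query string is made up, and nothing in this merge request configures the level itself.

```python
import logging

logging.basicConfig(
    format='%(asctime)s [%(levelname)s]: %(message)s',
    level=logging.DEBUG,  # anything stricter than DEBUG would hide the parser traces
)

q = 'event{calendar="tlstest"}'
logging.debug(f'Parsing PromQL query string: {q}')
```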