Compare commits


No commits in common. "main" and "v0.1" have entirely different histories.
main ... v0.1

21 changed files with 385 additions and 515 deletions

@ -1,38 +0,0 @@
---
on:
push:
tags:
- "v*"
jobs:
build_wheel:
runs-on: docker
steps:
- uses: https://code.forgejo.org/actions/checkout@v4
- name: Build Python wheel
run: |
apt update; apt install -y python3-pip
pip3 install --break-system-packages -e .[test]
python3 setup.py egg_info bdist_wheel
- uses: https://git.kabelsalat.ch/s3lph/forgejo-action-wheel-package-upload@v3
with:
username: ${{ secrets.API_USERNAME }}
password: ${{ secrets.API_PASSWORD }}
build_debian:
runs-on: docker
steps:
- uses: https://code.forgejo.org/actions/checkout@v4
- uses: https://git.kabelsalat.ch/s3lph/forgejo-action-python-debian-package@v5
with:
python_module: icalendar_timeseries_server
package_name: icalendar-timeseries-server
package_root: package/debian/icalendar-timeseries-server
package_output_path: package/debian
- uses: https://git.kabelsalat.ch/s3lph/forgejo-action-debian-package-upload@v2
with:
username: ${{ secrets.API_USERNAME }}
password: ${{ secrets.API_PASSWORD }}
deb: "package/debian/*.deb"

@ -1,27 +0,0 @@
---
on: push
jobs:
test:
runs-on: docker
steps:
- uses: https://code.forgejo.org/actions/checkout@v4
- name: test
run: |
apt update; apt install --yes python3-pip
pip3 install --break-system-packages -e .[test]
python3 -m coverage run --rcfile=setup.cfg -m unittest discover icalendar_timeseries_server
python3 -m coverage combine
python3 -m coverage report --rcfile=setup.cfg
codestyle:
runs-on: docker
steps:
- uses: https://code.forgejo.org/actions/checkout@v4
- name: codestyle
run: |
apt update; apt install --yes python3-pip
pip3 install --break-system-packages -e .[test]
pycodestyle icalendar_timeseries_server

.gitlab-ci.yml Normal file
@ -0,0 +1,91 @@
---
image: s3lph/icalendar-timeseries-server-ci:20190820-01
stages:
- test
- build
- release
before_script:
- export ITS_VERSION=$(python -c 'import icalendar_timeseries_server; print(icalendar_timeseries_server.__version__)')
test:
stage: test
script:
- pip3 install -e .
- sudo -u its python3 -m coverage run --rcfile=setup.cfg -m unittest discover icalendar_timeseries_server
- sudo -u its python3 -m coverage combine
- sudo -u its python3 -m coverage report --rcfile=setup.cfg
codestyle:
stage: test
script:
- pip3 install -e .
- sudo -u its pycodestyle icalendar_timeseries_server
build_wheel:
stage: build
script:
- pip3 install -e .
- python3 setup.py egg_info bdist_wheel
- cd dist
- sha256sum *.whl > SHA256SUMS
artifacts:
paths:
- "dist/*.whl"
- dist/SHA256SUMS
only:
- tags
build_debian:
stage: build
script:
# The Python package name provided by the python3-magic Debian package is "python-magic" rather than "file-magic".
- sed -re 's/file-magic/python-magic/' -i setup.py
- echo -n > package/debian/icalendar-timeseries-server/usr/share/doc/icalendar-timeseries-server/changelog
- |
for version in "$(cat CHANGELOG.md | grep '<!-- BEGIN CHANGES' | cut -d ' ' -f 4)"; do
echo "icalendar-timeseries-server (${version}-1); urgency=medium\n" >> package/debian/icalendar-timeseries-server/usr/share/doc/icalendar-timeseries-server/changelog
cat CHANGELOG.md | grep -A 1000 "<"'!'"-- BEGIN CHANGES ${version} -->" | grep -B 1000 "<"'!'"-- END CHANGES ${version} -->" | tail -n +2 | head -n -1 | sed -re 's/^-/ */g' >> package/debian/icalendar-timeseries-server/usr/share/doc/icalendar-timeseries-server/changelog
echo "\n -- ${PACKAGE_AUTHOR} $(date -R)\n" >> package/debian/icalendar-timeseries-server/usr/share/doc/icalendar-timeseries-server/changelog
done
- gzip -9n package/debian/icalendar-timeseries-server/usr/share/doc/icalendar-timeseries-server/changelog
- python3.7 setup.py egg_info install --root=package/debian/icalendar-timeseries-server/ --prefix=/usr --optimize=1
- cd package/debian
- mkdir -p icalendar-timeseries-server/usr/lib/python3/dist-packages/
- rsync -a icalendar-timeseries-server/usr/lib/python3.7/site-packages/ icalendar-timeseries-server/usr/lib/python3/dist-packages/
- rm -rf icalendar-timeseries-server/usr/lib/python3.7/
- find icalendar-timeseries-server/usr/lib/python3/dist-packages -name __pycache__ -exec rm -r {} \; 2>/dev/null || true
- find icalendar-timeseries-server/usr/lib/python3/dist-packages -name '*.pyc' -exec rm {} \;
- mv icalendar-timeseries-server/usr/bin/icalendar-timeseries-server icalendar-timeseries-server/usr/lib/icalendar-timeseries-server/icalendar-timeseries-server
- rm -rf icalendar-timeseries-server/usr/bin
- sed -re 's$#!/usr/local/bin/python3.7$#!/usr/bin/python3$' -i icalendar-timeseries-server/usr/lib/icalendar-timeseries-server/icalendar-timeseries-server
- find icalendar-timeseries-server -type f -exec chmod 0644 {} \;
- find icalendar-timeseries-server -type d -exec chmod 755 {} \;
- find icalendar-timeseries-server -type f -name .gitkeep -delete
- chmod +x icalendar-timeseries-server/usr/lib/icalendar-timeseries-server/icalendar-timeseries-server icalendar-timeseries-server/DEBIAN/postinst icalendar-timeseries-server/DEBIAN/prerm icalendar-timeseries-server/DEBIAN/postrm
- dpkg-deb --build icalendar-timeseries-server
- mv icalendar-timeseries-server.deb "icalendar-timeseries-server_${ITS_VERSION}-1_all.deb"
- sudo -u nobody lintian "icalendar-timeseries-server_${ITS_VERSION}-1_all.deb"
- sha256sum *.deb > SHA256SUMS
artifacts:
paths:
- "package/debian/*.deb"
- package/debian/SHA256SUMS
only:
- tags
release:
stage: release
script:
- python package/release.py
only:
- tags

@ -1,156 +1,5 @@
# iCalendar Timeseries Server Changelog
<!-- BEGIN RELEASE v0.6.3 -->
## Version 0.6.3
### Changes
<!-- BEGIN CHANGES 0.6.3 -->
- Migration from Woodpecker to Forgejo Actions.
<!-- END CHANGES 0.6.3 -->
<!-- END RELEASE v0.6.3 -->
<!-- BEGIN RELEASE v0.6.2 -->
## Version 0.6.2
### Changes
<!-- BEGIN CHANGES 0.6.2 -->
- Migration from Gitlab-CI to Woodpecker
<!-- END CHANGES 0.6.2 -->
<!-- END RELEASE v0.6.2 -->
<!-- BEGIN RELEASE v0.6.1 -->
## Version 0.6.1
### Changes
<!-- BEGIN CHANGES 0.6.1 -->
- Same fix, but for todo as well as events.
<!-- END CHANGES 0.6.1 -->
<!-- END RELEASE v0.6.1 -->
<!-- BEGIN RELEASE v0.6 -->
## Version 0.6
### Changes
<!-- BEGIN CHANGES 0.6 -->
- Fix: A specific API field has to be a string rather than float, and recent Grafana versions validate this.
<!-- END CHANGES 0.6 -->
<!-- END RELEASE v0.6 -->
<!-- BEGIN RELEASE v0.5 -->
## Version 0.5
### Changes
<!-- BEGIN CHANGES 0.5 -->
- Retry calendar scraping with exponential backoff.
<!-- END CHANGES 0.5 -->
<!-- END RELEASE v0.5 -->
<!-- BEGIN RELEASE v0.4.1 -->
## Version 0.4.1
### Changes
<!-- BEGIN CHANGES 0.4.1 -->
- Fix todo sorting by due date.
- Update README regarding `todo` time series.
<!-- END CHANGES 0.4.1 -->
<!-- END RELEASE v0.4.1 -->
<!-- BEGIN RELEASE v0.4.0 -->
## Version 0.4.0
### Changes
<!-- BEGIN CHANGES 0.4.0 -->
- VTODO components are exported in a second time series, `todo` . Todo recurrence is not supported yet though.
<!-- END CHANGES 0.4.0 -->
<!-- END RELEASE v0.4.0 -->
<!-- BEGIN RELEASE v0.3.3 -->
## Version 0.3.3
### Changes
<!-- BEGIN CHANGES 0.3.3 -->
- Fix type confusion bug in recurring events
- Remove pytz dependency in favor of dateutil.tz
<!-- END CHANGES 0.3.3 -->
<!-- END RELEASE v0.3.3 -->
<!-- BEGIN RELEASE v0.3.2 -->
## Version 0.3.2
### Changes
<!-- BEGIN CHANGES 0.3.2 -->
- Fix Debian package build process
<!-- END CHANGES 0.3.2 -->
<!-- END RELEASE v0.3.2 -->
<!-- BEGIN RELEASE v0.3.1 -->
## Version 0.3.1
### Changes
<!-- BEGIN CHANGES 0.3.1 -->
- Bump Version Number
<!-- END CHANGES 0.3.1 -->
<!-- END RELEASE v0.3.1 -->
<!-- BEGIN RELEASE v0.3 -->
## Version 0.3
### Changes
<!-- BEGIN CHANGES 0.3 -->
- Replace print statements by proper logging
- Fix: Ensure scrape interval is positive
- Fix: Keep showing events that already started, but have not finished yet
<!-- END CHANGES 0.3 -->
<!-- END RELEASE v0.3 -->
<!-- BEGIN RELEASE v0.2 -->
## Version 0.2
### Changes
<!-- BEGIN CHANGES 0.2 -->
- Scrape intervals are now configured per calendar
- Calendar scraping now happens in the background
<!-- END CHANGES 0.2 -->
<!-- END RELEASE v0.2 -->
<!-- BEGIN RELEASE v0.1 -->
## Version 0.1

@ -4,8 +4,8 @@ This project is a small service that scrapes iCalendar files served
over HTTP, parses their contents and returns the data in a timeseries
format compatible to the `/api/v1/query` API endpoint of a Prometheus
server. This allows e.g. a Grafana administrator to add a Prometheus
data source pointing at this server, returning calendar events in the
`event` metric and todos in the `todo` metric.
data source pointing at this server, returning the events in the
calendars in the `event` metric.
## Example
@ -78,6 +78,7 @@ The server would transform this into the following API response:
- `icalendar`: Parse iCalendar
- `isodate`: Parse ISO-8601 time periods
- `jinja2`: Template value replacements
- `pytz`: Work with timezones
## Configuration
@ -91,6 +92,7 @@ Configuration is done through a JSON config file:
"port": 8090,
"start_delta": "-PT3H",
"end_delta": "P30D",
"cache": "PT15M",
"tz": "Europe/Zurich",
"calendars": {
"private": {
@ -102,7 +104,6 @@ Configuration is done through a JSON config file:
}
},
"public": {
"interval": "P1D",
"url": "https://example.cloud/dav/me/public.ics"
},
"confidential": {
@ -135,11 +136,11 @@ Configuration is done through a JSON config file:
| `port` | int | The port to listen on. |
| `start_delta` | string | A signed ISO 8601 duration string, describing the event range start offset relative to the current time. |
| `end_delta` | string | An unsigned ISO 8601 duration string, describing the event range end offset relative to the current time. |
| `cache` | string | An unsigned ISO 8601 duration string, describing the cache timeout duration. |
| `tz` | string | The local timezone. |
| `calendars` | dict | The calendars to scrape. |
| `keys(calendars)` | string | Name of the calendar. |
| `calendars.*.url` | string | The HTTP or HTTPS URL to scrape. |
| `calendars.*.interval` | string | An unsigned ISO 8601 duration string, describing the scrape interval for this calendar. |
| `calendars.*.ca` | string | Path to the CA certificate file to validate the server's TLS certificate against, in PEM format (optional). |
| `calendars.*.auth` | dict | Authorization config for the calendar. |
| `calendars.*.auth[].type` | string | Authorization type, one of `none` (no authorization), `basic` (HTTP Basic Authentication), `tls` (TLS client certificate). |
@ -147,7 +148,7 @@ Configuration is done through a JSON config file:
| `calendars.*.auth[?type=='basic'].password` | string | The Basic Auth password to authenticate with. |
| `calendars.*.auth[?type=='tls'].keyfile` | string | Path to the key file containing the TLS private key, client certificate and certificate chain, in PEM format. |
| `calendars.*.auth[?type=='tls'].passphrase` | string | Passphrase for the private key (optional). |
| `key_replace` | dict | Labels to rename, might be necessary e.g. for column ordering in Grafana 6 and earlier. |
| `key_replace` | dict | Labels to rename, might be necessary e.g. for column ordering in Grafana. |
| `keys(key_replace)` | string | The labels to rename. |
| `key_replace.*` | string | The names to rename the labels to. |
| `value_replace` | dict | Label values to postprocess. |
@ -168,12 +169,6 @@ In addition, PromQL label filters can be used.
event{calendar="public",foo=~".*"}
```
Alongside with events, todos are exported in a second time series:
```
todo{status!="COMPLETED"}
```
## Why Prometheus API
- It's JSON. A JSON generator is builtin in Python, so no further dependency.
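For orientation, here is a minimal, illustrative sketch of querying the endpoint the README above describes, the way a Prometheus-compatible client would. It assumes the server is reachable at 127.0.0.1:8090 (the address and port from the example configuration) and reuses the `event{calendar="public",foo=~".*"}` selector shown above; this code is not part of the repository.

```
# Illustrative sketch: query the event metric like a Prometheus data source would.
# Assumes the server runs with the example configuration on 127.0.0.1:8090.
import json
import urllib.parse
import urllib.request

query = 'event{calendar="public",foo=~".*"}'
url = 'http://127.0.0.1:8090/api/v1/query?' + urllib.parse.urlencode({'query': query})
with urllib.request.urlopen(url) as response:
    payload = json.loads(response.read().decode('utf-8'))

# Each result carries the rendered labels plus a [timestamp, value] pair.
for result in payload.get('data', {}).get('result', []):
    print(result['metric'].get('summary', ''), result['value'])
```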

@ -0,0 +1,29 @@
{
"addr": "127.0.0.1",
"port": 8090,
"start_delta": "-PT3H",
"end_delta": "P60D",
"cache": "PT3M",
"tz": "Europe/Zurich",
"calendars": {
"tlstest": {
"url": "https://localhost/private.ics",
"ca": "/home/sebastian/tlstest/ca/ca/ca.crt",
"auth": {
"type": "tls",
"keyfile": "/home/sebastian/tlstest/client/combined.pem"
}
}
},
"key_replace": {
"summary": "a_summary",
"description": "b_description",
"calendar": "c_calendar"
},
"value_replace": {
"summary": "{{ summary|truncate(100, end=' \\N{HORIZONTAL ELLIPSIS}') }}",
"description": "{{ description|truncate(100, end=' \\N{HORIZONTAL ELLIPSIS}') }}",
"calendar": "{{ 0 if calendar == 'private' else 1 }}",
"useless_metric": "{{ start.timestamp() + end.timestamp() }}"
}
}
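The `value_replace` entries in this example configuration are Jinja2 templates rendered against each event's attributes (the `get_jenv().from_string(...).render(**tmp)` calls appear further down in this diff). A rough, illustrative sketch of that mechanism, with made-up attribute values:

```
# Illustrative sketch of how a value_replace template is rendered with Jinja2.
import jinja2

env = jinja2.Environment()
summary_tpl = env.from_string("{{ summary|truncate(100) }}")
calendar_tpl = env.from_string("{{ 0 if calendar == 'private' else 1 }}")

attrs = {'summary': 'A rather long event summary ' * 10, 'calendar': 'private'}
print(summary_tpl.render(**attrs))   # summary shortened to roughly 100 characters
print(calendar_tpl.render(**attrs))  # "0", because the calendar is 'private'
```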

@ -1,2 +1,2 @@
__version__ = '0.6.3'
__version__ = '0.1'

@ -1,20 +1,29 @@
from typing import List
import json
import logging
from datetime import datetime
from urllib.error import HTTPError
import traceback
import bottle
from isodate import Duration
from icalendar_timeseries_server.config import get_config
from icalendar_timeseries_server.event import Metric
from icalendar_timeseries_server.cal import get_calendar_events, get_calendar_todos
from icalendar_timeseries_server.event import Event
from icalendar_timeseries_server.cal import scrape_calendar
from icalendar_timeseries_server.query import MetricQuery
@bottle.route('/api/v1/query')
@bottle.route('/api/v1/query_range')
def prometheus_api():
events: List[Metric] = []
tz = get_config().tz
now: datetime = datetime.now(tz)
start_delta: Duration = get_config().start_delta
end_delta: Duration = get_config().end_delta
start: datetime = now + start_delta
end: datetime = now + end_delta
events: List[Event] = []
try:
q = MetricQuery(bottle.request.query['query'])
@ -25,21 +34,15 @@ def prometheus_api():
'error': str(e)
}
bottle.response.status = 400
logging.exception('Cannot parse PromQL query')
traceback.print_exc()
bottle.response.add_header('Content-Type', 'application/json')
return json.dumps(response)
try:
for name in get_config().calendars.keys():
if q.name == 'event':
events.extend(get_calendar_events(name))
events = list(filter(q, events))
events.sort(key=lambda e: e.start)
elif q.name == 'todo':
events.extend(get_calendar_todos(name))
events = list(filter(q, events))
# Sort by due date and priority
events.sort(key=lambda e: (e.due is None, e.due, e.priority))
for name, caldef in get_config().calendars.items():
events.extend(scrape_calendar(name, caldef, start, end))
events = list(filter(q, events))
events.sort(key=lambda e: e.start)
response = {
'status': 'success',
'data': {
@ -47,6 +50,14 @@ def prometheus_api():
'result': [e.serialize() for e in events]
}
}
except HTTPError as e:
response = {
'status': 'error',
'errorType': 'internal',
'error': str(e)
}
bottle.response.status = 500
traceback.print_exc()
except BaseException:
response = {
'status': 'error',
@ -54,7 +65,7 @@ def prometheus_api():
'error': 'An internal error occurred.'
}
bottle.response.status = 500
logging.exception('An internal error occurred')
traceback.print_exc()
bottle.response.add_header('Content-Type', 'application/json')
return json.dumps(response)

@ -1,24 +1,18 @@
from typing import Dict, List, Iterable
from typing import Dict, List, Iterable, Tuple
import sys
import urllib.request
import logging
from datetime import datetime, date, timedelta
from threading import Lock, Timer
from dateutil import rrule
from icalendar import cal
from isodate import Duration
from icalendar_timeseries_server import __version__
from icalendar_timeseries_server.config import get_config, CalendarConfig
from icalendar_timeseries_server.event import Event
from icalendar_timeseries_server.todo import Todo
_EVENT_SCRAPE_CACHE: Dict[str, List[Event]] = dict()
_TODO_SCRAPE_CACHE: Dict[str, List[Todo]] = dict()
_SCRAPE_CACHE_LOCK: Lock = Lock()
_SCRAPE_CACHE: Dict[str, Tuple[datetime, List[Event]]] = dict()
__py_version: str = f'{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}'
USER_AGENT: str = f'icalendar-timeseries-server/{__version__} (Python/{__py_version})'
@ -28,8 +22,6 @@ def _parse_recurring(event: cal.Event, start: datetime, end: datetime, duration:
occurences: List[datetime] = []
evstart = event.get('dtstart').dt
if isinstance(evstart, date) and not isinstance(evstart, datetime):
evstart = datetime(evstart.year, evstart.month, evstart.day, tzinfo=start.tzinfo)
# First occurence lies in the future; no need to process further
if evstart >= end:
return occurences
@ -54,26 +46,17 @@ def _parse_recurring(event: cal.Event, start: datetime, end: datetime, duration:
return occurences
def _scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: datetime):
global _EVENT_SCRAPE_CACHE, _TODO_SCRAPE_CACHE, _SCRAPE_CACHE_LOCK
def _parse_calendar(name: str, calendar: cal.Calendar, start: datetime, end: datetime) -> List[Event]:
events = []
todos = []
opener: urllib.request.OpenerDirector = config.get_url_opener()
with opener.open(config.url) as response:
data = response.read().decode('utf-8')
calendar = cal.Calendar.from_ical(data)
for element in calendar.walk():
if element.name == "VEVENT":
dtstart = element.get('dtstart').dt
# Apparently datetime is a subclass of date...
if isinstance(dtstart, date) and not isinstance(dtstart, datetime):
if isinstance(dtstart, date):
dtstart = datetime(dtstart.year, dtstart.month, dtstart.day, tzinfo=start.tzinfo)
# Process either end timestamp or duration, if present
if 'dtend' in element:
evend = element.get('dtend').dt
if isinstance(evend, date) and not isinstance(evend, datetime):
if isinstance(evend, date):
evend = datetime(evend.year, evend.month, evend.day, tzinfo=start.tzinfo)
duration = evend - dtstart
elif 'duration' in element:
@ -85,60 +68,25 @@ def _scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: da
else:
occurences = [dtstart]
for occurence in occurences:
if start <= occurence + duration and occurence < end:
if start <= occurence < end:
events.append(Event(name, element, occurence, occurence + duration))
elif element.name == "VTODO":
dtstart = element.get('dtstamp').dt
duration = timedelta(0)
if 'dtstart' in element:
dtstart = element.get('dtstart').dt
if 'duration' in element:
duration = element.get('duration').dt
todos.append(Todo(name, element, dtstart, dtstart + duration))
with _SCRAPE_CACHE_LOCK:
_EVENT_SCRAPE_CACHE[name] = events
_TODO_SCRAPE_CACHE[name] = todos
return events
def scrape_calendar(name: str, config: CalendarConfig, retry: int):
# Get current time in configured timezone
tz = get_config().tz
now: datetime = datetime.now(tz)
# Only scrape at most once a minute
interval = max(int(config.interval.totimedelta(start=now).total_seconds()), 60)
# Compute interval for which to return events
start_delta: Duration = get_config().start_delta
end_delta: Duration = get_config().end_delta
start: datetime = now + start_delta
end: datetime = now + end_delta
# Scrape and parse the calendar
try:
_scrape_calendar(name, config, start, end)
# Reschedule calendar scraping
cron = Timer(interval, lambda: scrape_calendar(name, config, 0))
except BaseException:
# reschedule with exponential backoff, but no more than the regular scrape interval
backoff_seconds = min(60 * 2**retry, interval)
logging.exception(f'An error occurred while scraping the calendar endpoint "{name}" '
f'({config.url}), retrying in {backoff_seconds}s.')
cron = Timer(backoff_seconds, lambda: scrape_calendar(name, config, retry+1))
cron.start()
def scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: datetime) -> List[Event]:
global _SCRAPE_CACHE
now: datetime = datetime.now(tz=get_config().tz)
if get_config().cache.total_seconds() > 0 and name in _SCRAPE_CACHE:
cache_timeout, cached = _SCRAPE_CACHE[name]
if now < cache_timeout:
print('serving cached')
return cached
print('doing request')
def start_scrape_calendar(name: str, config: CalendarConfig):
# Schedule first calendar scraping
cron = Timer(0, lambda: scrape_calendar(name, config, retry=0))
cron.start()
def get_calendar_events(name: str):
global _EVENT_SCRAPE_CACHE
with _SCRAPE_CACHE_LOCK:
return _EVENT_SCRAPE_CACHE.get(name, [])
def get_calendar_todos(name: str):
global _TODO_SCRAPE_CACHE
with _SCRAPE_CACHE_LOCK:
return _TODO_SCRAPE_CACHE.get(name, [])
opener: urllib.request.OpenerDirector = config.get_url_opener()
with opener.open(config.url) as response:
data = response.read().decode('utf-8')
calendar = cal.Calendar.from_ical(data)
parsed: List[Event] = _parse_calendar(name, calendar, start, end)
_SCRAPE_CACHE[name] = now + get_config().cache, parsed
return parsed
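In the v0.1 code above, `scrape_calendar` keeps the parsed events per calendar and serves them again until `now + get_config().cache` has passed. A stripped-down, illustrative sketch of that expiry-based cache pattern (the names here are not part of the module's API):

```
# Illustrative sketch of an expiry-based cache: store the value together with
# the time at which it becomes stale, and refetch once that time has passed.
from datetime import datetime, timedelta
from typing import Callable, Dict, List, Tuple

_CACHE: Dict[str, Tuple[datetime, List[str]]] = {}

def cached(name: str, fetch: Callable[[], List[str]], ttl: timedelta) -> List[str]:
    now = datetime.now()
    if ttl.total_seconds() > 0 and name in _CACHE:
        stale_at, value = _CACHE[name]
        if now < stale_at:
            return value               # cache entry is still fresh
    value = fetch()                    # scrape again and refresh the entry
    _CACHE[name] = (now + ttl, value)
    return value
```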

@ -6,12 +6,10 @@ from datetime import timedelta
import ssl
import urllib.request
import sys
import logging
import pytz
import jinja2
from isodate import Duration, parse_duration
from dateutil import tz
from datetime import tzinfo
from icalendar_timeseries_server import __version__
@ -29,8 +27,6 @@ class CalendarConfig:
def __init__(self, config: Dict[str, Any], config_path: str) -> None:
self._url: str = _keycheck('url', config, str, config_path)
self._scrape_interval: Duration = _parse_timedelta('interval', config, config_path, default_value='PT15M',
force_positive=True)
self._ca: Optional[str] = _keycheck('ca', config, str, config_path, optional=True)
auth: Dict[str, Any] = _keycheck('auth', config, dict, config_path, default_value={'type': 'none'})
self._authtype: str = _keycheck('type', auth, str, f'{config_path}.auth',
@ -60,10 +56,6 @@ class CalendarConfig:
def url(self) -> str:
return self._url
@property
def interval(self) -> Duration:
return self._scrape_interval
def get_url_opener(self) -> urllib.request.OpenerDirector:
if self._authtype == 'tls':
@ -94,9 +86,10 @@ class Config:
config = dict()
self._addr: str = _keycheck('addr', config, str, '', default_value='127.0.0.1')
self._port: int = _keycheck('port', config, int, '', default_value=8090)
self._tz: tzinfo = _parse_timezone('tz', config, '', default_value='UTC')
self._tz: pytz.tzinfo = _parse_timezone('tz', config, '', default_value='UTC')
self._start_delta: Duration = _parse_timedelta('start_delta', config, '', default_value='PT')
self._end_delta: Duration = _parse_timedelta('end_delta', config, '', default_value='P30D')
self._cache: Duration = _parse_timedelta('cache', config, '', default_value='PT', force_positive=True)
self._calendars: Dict[str, CalendarConfig] = self._parse_calendars_config('calendars', config, '')
self._key_replace = _parse_key_replace('key_replace', config, '')
self._value_replace = _parse_value_replace('value_replace', config, '')
@ -121,7 +114,7 @@ class Config:
return self._port
@property
def tz(self) -> tzinfo:
def tz(self) -> pytz.tzinfo:
return self._tz
@property
@ -132,6 +125,10 @@ class Config:
def end_delta(self) -> Duration:
return self._end_delta
@property
def cache(self) -> Duration:
return self._cache
@property
def calendars(self) -> Dict[str, CalendarConfig]:
return self._calendars
@ -158,7 +155,7 @@ def _keycheck(key: str,
raise KeyError(f'Expected member "{key}" not found at path {path}')
value: Any = config[key]
if not isinstance(value, typ):
raise TypeError(f'Expected {typ.__name__}, not {type(value).__name__} for path {path}.{key}')
raise TypeError(f'Expected {typ}, not {type(value).__name__} for path {path}.{key}')
if valid_values is not None:
if value not in valid_values:
raise ValueError(f'Expected one of {", ".join(valid_values)} ({typ}), not {value} for path {path}.{key}')
@ -186,10 +183,7 @@ def _parse_timezone(key: str,
path: str,
default_value: Any = None) -> Any:
zonename: str = _keycheck(key, config, str, path, default_value=default_value)
zone: zoneinfo = tz.gettz(zonename)
if zone is None:
raise ValueError(f'Unknown timezone: {zonename}')
return zone
return pytz.timezone(zonename)
def _parse_key_replace(key: str,
@ -222,17 +216,10 @@ def get_jenv() -> jinja2.Environment:
def load_config(filename: str):
global CONFIG, JENV
try:
with open(filename, 'r') as f:
json_config = json.loads(f.read())
CONFIG = Config(json_config)
JENV = jinja2.Environment()
except json.JSONDecodeError as e:
logging.exception('Cannot parse config JSON')
raise e
except Exception as e:
logging.error(e)
raise e
with open(filename, 'r') as f:
json_config = json.loads(f.read())
CONFIG = Config(json_config)
JENV = jinja2.Environment()
def load_default_config():

@ -1,4 +1,4 @@
from typing import Any, Dict, List
from typing import Any, Dict, List, Set
import icalendar
import jinja2
@ -38,7 +38,7 @@ class Event(Metric):
for attr in _ATTRIBUTES:
tmp[attr] = event.get(attr, '')
substitution_keys = set(_ATTRIBUTES)
substitution_keys.update(tmp.keys())
substitution_keys.update(['start', 'end'])
substitution_keys.update(get_config().key_replace.keys())
substitution_keys.update(get_config().value_replace.keys())
for attr in substitution_keys:
@ -59,7 +59,7 @@ class Event(Metric):
},
'value': [
self.start.timestamp(),
"1"
1
]
}
event['metric'].update(self._labels)

@ -1,9 +1,7 @@
import sys
import logging
import bottle
from icalendar_timeseries_server.cal import start_scrape_calendar
from icalendar_timeseries_server.config import load_config, load_default_config, get_config
# Contains decorated bottle handler function for /api/v1/query
@ -12,34 +10,14 @@ from icalendar_timeseries_server.api import prometheus_api
def main():
# Set up logger
log_handler = logging.StreamHandler()
log_handler.setFormatter(logging.Formatter(
'%(asctime)s %(filename)s:%(lineno)d(%(funcName)s) [%(levelname)s]: %(message)s'))
logging.getLogger().addHandler(log_handler)
# Load configuration
config = get_config()
try:
if len(sys.argv) == 1:
load_default_config()
elif len(sys.argv) == 2:
load_config(sys.argv[1])
else:
logging.log(logging.FATAL, f'Can only read one config file, got "{" ".join(sys.argv[1:])}"')
exit(1)
# Re-fetch config after parsing
config = get_config()
except BaseException:
logging.fatal('Could not parse configuration file')
if len(sys.argv) == 1:
load_default_config()
elif len(sys.argv) == 2:
load_config(sys.argv[1])
else:
print(f'Can only read one config file, got "{" ".join(sys.argv[1:])}"')
exit(1)
# Schedule calendar scraping in the background
for calname in config.calendars.keys():
start_scrape_calendar(calname, config.calendars[calname])
# Start the Bottle HTTP server
bottle.run(host=config.addr, port=get_config().port)
bottle.run(host=get_config().addr, port=get_config().port)
if __name__ == '__main__':

@ -1,7 +1,6 @@
from typing import Dict
import re
import logging
LABEL_MATCH_OPERATORS = [
'=',
@ -65,7 +64,7 @@ class MetricQuery:
self.__parse(q)
def __parse(self, q: str):
logging.debug(f'Parsing PromQL query string: {q}')
print(q)
# globalstate:
# 0 = parsing metric name
# 1 = parsing filters
@ -172,7 +171,7 @@ class MetricQuery:
elif filterstate != 0:
raise ValueError('Unexpected EOF')
def __call__(self, metric: Metric) -> bool:
def __call__(self, metric: Metric):
"""
Applies the filter deducted from the query string to the given metric.
@ -188,7 +187,3 @@ class MetricQuery:
return False
# Return True if all filters matched
return True
@property
def name(self) -> str:
return self._metric_name
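`MetricQuery` parses a PromQL-style selector and is then used as a predicate over metrics (see the `filter(q, events)` calls in the api.py hunks above). The parser itself lies mostly outside these hunks; the following is only an illustrative sketch of how a single label filter could be evaluated, assuming the matchers behave like PromQL's `=`, `!=`, `=~` and `!~` (anchored regular expressions):

```
# Illustrative only: evaluating one PromQL-style label matcher against a label set.
import re
from typing import Dict

def label_matches(labels: Dict[str, str], key: str, op: str, expected: str) -> bool:
    actual = labels.get(key, '')
    if op == '=':
        return actual == expected
    if op == '!=':
        return actual != expected
    if op == '=~':
        return re.fullmatch(expected, actual) is not None
    if op == '!~':
        return re.fullmatch(expected, actual) is None
    raise ValueError(f'unsupported operator: {op}')

# label_matches({'calendar': 'public'}, 'calendar', '=~', '.*') -> True
```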

@ -2,9 +2,9 @@
import unittest
import json
from datetime import timedelta, tzinfo
import pytz
from datetime import timedelta
from dateutil import tz
from isodate.duration import Duration
from icalendar_timeseries_server.config import _keycheck, _parse_timedelta, _parse_timezone, Config
@ -16,6 +16,7 @@ _CONFIG_VALID = """
"port": 8090,
"start_delta": "-PT3H",
"end_delta": "P30D",
"cache": "PT15M",
"tz": "Europe/Zurich",
"calendars": {
"private": {
@ -27,12 +28,10 @@ _CONFIG_VALID = """
}
},
"public": {
"url": "https://example.cloud/dav/me/public.ics",
"interval": "P1D"
"url": "https://example.cloud/dav/me/public.ics"
},
"confidential": {
"url": "https://example.cloud/dav/me/confidential.ics",
"interval": "PT5M",
"ca": "/etc/ssl/ca.pem",
"auth": {
"type": "tls",
@ -113,10 +112,10 @@ class ConfigTest(unittest.TestCase):
'tz': 'Europe/Zurich',
'notz': 'North/Winterfell'
}
self.assertEqual(_parse_timezone('tz', config, ''), tz.gettz('Europe/Zurich'))
self.assertEqual(_parse_timezone('tz', config, ''), pytz.timezone('Europe/Zurich'))
self.assertEqual(_parse_timezone('def', config, '', default_value='Europe/Berlin'),
tz.gettz('Europe/Berlin'))
with self.assertRaises(ValueError):
pytz.timezone('Europe/Berlin'))
with self.assertRaises(pytz.exceptions.UnknownTimeZoneError):
_parse_timezone('notz', config, '')
def test_parse_full_config_valid(self):
@ -125,24 +124,5 @@ class ConfigTest(unittest.TestCase):
self.assertEqual(config.port, 8090)
self.assertEqual(config.start_delta, Duration(hours=-3))
self.assertEqual(config.end_delta, Duration(days=30))
self.assertEqual(config.tz, tz.gettz('Europe/Zurich'))
def test_parse_calendars(self):
config = Config(json.loads(_CONFIG_VALID))
self.assertEqual({'public', 'private', 'confidential'}, config.calendars.keys())
self.assertEqual('https://example.cloud/dav/me/public.ics', config.calendars['public'].url)
self.assertEqual(Duration(days=1), config.calendars['public'].interval)
self.assertEqual('none', config.calendars['public']._authtype)
self.assertEqual('https://example.cloud/dav/me/private.ics', config.calendars['private'].url)
self.assertEqual(Duration(minutes=15), config.calendars['private'].interval)
self.assertEqual('basic', config.calendars['private']._authtype)
self.assertEqual('Basic bWU6bXlzdXBlcnNlY3VyZXBhc3N3b3Jk',
config.calendars['private']._request_headers['Authorization'])
self.assertEqual('https://example.cloud/dav/me/confidential.ics', config.calendars['confidential'].url)
self.assertEqual(Duration(minutes=5), config.calendars['confidential'].interval)
self.assertEqual('tls', config.calendars['confidential']._authtype)
self.assertEqual('/etc/ssl/client.pem', config.calendars['confidential']._tls_keyfile)
self.assertEqual('mysupersecurepassword', config.calendars['confidential']._tls_passphrase)
self.assertEqual(config.cache, Duration(minutes=15))
self.assertEqual(config.tz, pytz.timezone('Europe/Zurich'))

@ -1,77 +0,0 @@
from typing import Any, Dict, List
import icalendar
import jinja2
from datetime import datetime, date, timedelta
from icalendar_timeseries_server.config import get_config, get_jenv
from icalendar_timeseries_server.query import Metric
_ATTRIBUTES: List[str] = [
'class',
'description',
'geo',
'location',
'organizer',
'percent-complete',
'priority',
'status',
'summary',
'url',
'attach'
]
class Todo(Metric):
def __init__(self, cname: str, todo: icalendar.cal.Todo, start: datetime, end: datetime):
self.calendar: str = cname
self.start = start
due = todo.get('due', None)
if due:
if isinstance(due.dt, datetime):
self.due = due.dt
elif isinstance(due.dt, date):
self.due = datetime.combine(due.dt, datetime.min.time())
self.due = self.due.replace(tzinfo=get_config().tz)
else:
self.due = None
# self.attributes: Dict[str, str] = dict()
attributes: Dict[str, str] = dict()
tmp: Dict[str, Any] = {
'calendar': cname,
'start': start,
'end': end
}
if self.due:
tmp['due'] = str(self.due)
for attr in _ATTRIBUTES:
tmp[attr] = todo.get(attr, '')
substitution_keys = set(_ATTRIBUTES)
substitution_keys.update(tmp.keys())
substitution_keys.update(get_config().key_replace.keys())
substitution_keys.update(get_config().value_replace.keys())
for attr in substitution_keys:
newkey: str = get_config().key_replace.get(attr, attr)
value: str = tmp.get(attr, '')
newval_template: str = get_config().value_replace.get(attr, str(value))
jtemplate: jinja2.Template = get_jenv().from_string(newval_template)
newvalue: str = jtemplate.render(**tmp)
attributes[newkey] = newvalue
self.uid: str = f'{cname}-{start.strftime("%Y%m%dT%H%M%S%Z")}'
self.priority = todo.get('priority', '0')
super().__init__('todo', attributes)
def serialize(self) -> Dict[str, Any]:
todo: Dict[str, Any] = {
'metric': {
'__name__': 'todo',
'calendar': self.calendar
},
'value': [
self.start.timestamp(),
"1"
]
}
todo['metric'].update(self._labels)
return todo

@ -1,14 +1,14 @@
Package: icalendar-timeseries-server
Version: __VERSION__
Maintainer: s3lph <s3lph@kabelsalat.ch>
Version: 0.1
Maintainer: s3lph <account-gitlab-ideynizv@kernelpanic.lol>
Section: web
Priority: optional
Architecture: all
Depends: python3 (>= 3.7), python3-jinja2, python3-bottle, python3-dateutil, python3-icalendar, python3-isodate
Description: Scrape iCalendar endpoints and present their data in a timeseries format.
A small service that scrapes iCalendar files served over HTTP, parses
their contents and returns a timeseries format compatible to the
/api/v1/query API endpoint of a Prometheus server. This allows e.g. a
Grafana administrator to add a Prometheus data source pointing at
this server, returning the events in the calendars in the event
metric.
Depends: python3 (>= 3.7), python3-jinja2, python3-bottle, python3-dateutil, python3-icalendar, python3-isodate, python3-tz
Description: Scrape iCalendar endpoints and present their data in a
timeseries format. A small service that scrapes iCalendar files
served over HTTP, parses their contents and returns a timeseries
format compatible to the /api/v1/query API endpoint of a Prometheus
server. This allows e.g. a Grafana administrator to add a Prometheus
data source pointing at this server, returning the events in the
calendars in the event metric.

@ -15,7 +15,6 @@ if [[ "$1" == "configure" ]]; then
chown its:its /var/lib/its
chmod 0750 /var/lib/its
deb-systemd-helper enable icalendar-timeseries-server.service
deb-systemd-invoke restart icalendar-timeseries-server.service
systemctl daemon-reload || true
fi

@ -4,6 +4,6 @@ set -e
if [[ "$1" == "remove" ]]; then
deb-systemd-invoke stop icalendar-timeseries-server.service
userdel its
fi

package/release.py Executable file
@ -0,0 +1,157 @@
#!/usr/bin/env python3
from typing import Any, Dict, List, Optional, Tuple
import os
import sys
import json
import urllib.request
import http.client
from urllib.error import HTTPError
def parse_changelog(tag: str) -> Optional[str]:
release_changelog: str = ''
with open('CHANGELOG.md', 'r') as f:
in_target: bool = False
done: bool = False
for line in f.readlines():
if in_target:
if f'<!-- END RELEASE {tag} -->' in line:
done = True
break
release_changelog += line
elif f'<!-- BEGIN RELEASE {tag} -->' in line:
in_target = True
continue
if not done:
return None
return release_changelog
def fetch_job_ids(project_id: int, pipeline_id: int, api_token: str) -> Dict[str, str]:
url: str = f'https://gitlab.com/api/v4/projects/{project_id}/pipelines/{pipeline_id}/jobs'
headers: Dict[str, str] = {
'Private-Token': api_token
}
req = urllib.request.Request(url, headers=headers)
try:
resp: http.client.HTTPResponse = urllib.request.urlopen(req)
except HTTPError as e:
print(e.read().decode())
sys.exit(1)
resp_data: bytes = resp.read()
joblist: List[Dict[str, Any]] = json.loads(resp_data.decode())
jobidmap: Dict[str, str] = {}
for job in joblist:
name: str = job['name']
job_id: str = job['id']
jobidmap[name] = job_id
return jobidmap
def fetch_single_shafile(url: str) -> str:
req = urllib.request.Request(url)
try:
resp: http.client.HTTPResponse = urllib.request.urlopen(req)
except HTTPError as e:
print(e.read().decode())
sys.exit(1)
resp_data: bytes = resp.readline()
shafile: str = resp_data.decode()
filename: str = shafile.strip().split(' ')[-1].strip()
return filename
def fetch_wheel_url(base_url: str, job_ids: Dict[str, str]) -> Optional[Tuple[str, str]]:
mybase: str = f'{base_url}/jobs/{job_ids["build_wheel"]}/artifacts/raw'
wheel_sha_url: str = f'{mybase}/dist/SHA256SUMS'
wheel_filename: str = fetch_single_shafile(wheel_sha_url)
wheel_url: str = f'{mybase}/dist/{wheel_filename}'
return wheel_url, wheel_sha_url
def fetch_debian_url(base_url: str, job_ids: Dict[str, str]) -> Optional[Tuple[str, str]]:
mybase: str = f'{base_url}/jobs/{job_ids["build_debian"]}/artifacts/raw'
debian_sha_url: str = f'{mybase}/package/debian/SHA256SUMS'
debian_filename: str = fetch_single_shafile(debian_sha_url)
debian_url: str = f'{mybase}/package/debian/{debian_filename}'
return debian_url, debian_sha_url
def main():
api_token: Optional[str] = os.getenv('GITLAB_API_TOKEN')
release_tag: Optional[str] = os.getenv('CI_COMMIT_TAG')
project_name: Optional[str] = os.getenv('CI_PROJECT_PATH')
project_id: Optional[str] = os.getenv('CI_PROJECT_ID')
pipeline_id: Optional[str] = os.getenv('CI_PIPELINE_ID')
if api_token is None:
print('GITLAB_API_TOKEN is not set.', file=sys.stderr)
sys.exit(1)
if release_tag is None:
print('CI_COMMIT_TAG is not set.', file=sys.stderr)
sys.exit(1)
if project_name is None:
print('CI_PROJECT_PATH is not set.', file=sys.stderr)
sys.exit(1)
if project_id is None:
print('CI_PROJECT_ID is not set.', file=sys.stderr)
sys.exit(1)
if pipeline_id is None:
print('CI_PIPELINE_ID is not set.', file=sys.stderr)
sys.exit(1)
changelog: Optional[str] = parse_changelog(release_tag)
if changelog is None:
print('Changelog could not be parsed.', file=sys.stderr)
sys.exit(1)
job_ids: Dict[str, str] = fetch_job_ids(project_id, pipeline_id, api_token)
base_url: str = f'https://gitlab.com/{project_name}/-'
wheel_url, wheel_sha_url = fetch_wheel_url(base_url, job_ids)
debian_url, debian_sha_url = fetch_debian_url(base_url, job_ids)
augmented_changelog = f'''{changelog.strip()}
### Download
- [Python Wheel]({wheel_url}) ([sha256]({wheel_sha_url}))
- [Debian Package]({debian_url}) ([sha256]({debian_sha_url}))'''
post_body: str = json.dumps({'description': augmented_changelog})
gitlab_release_api_url: str = \
f'https://gitlab.com/api/v4/projects/{project_id}/repository/tags/{release_tag}/release'
headers: Dict[str, str] = {
'Private-Token': api_token,
'Content-Type': 'application/json; charset=utf-8'
}
request = urllib.request.Request(
gitlab_release_api_url,
post_body.encode('utf-8'),
headers=headers,
method='POST'
)
try:
response: http.client.HTTPResponse = urllib.request.urlopen(request)
except HTTPError as e:
print(e.read().decode())
sys.exit(1)
response_bytes: bytes = response.read()
response_str: str = response_bytes.decode()
response_data: Dict[str, Any] = json.loads(response_str)
if response_data['tag_name'] != release_tag:
print('Something went wrong...', file=sys.stderr)
print(response_str, file=sys.stderr)
sys.exit(1)
print(response_data['description'])
if __name__ == '__main__':
main()

@ -12,25 +12,18 @@ setup(
description='',
license='MIT',
keywords='ical,icalendar,timeseries,prometheus,grafana',
url='https://git.kabelsalat.ch/s3lph/icalendar-timeseries-server',
url='https://gitlab.com/s3lph/icalendar-timeseries-server',
packages=find_packages(exclude=['*.test']),
long_description='',
python_requires='>=3.6',
install_requires=[
'bottle',
'python-dateutil>=2.8',
'python-dateutil',
'icalendar',
'isodate',
'jinja2'
'jinja2',
'pytz'
],
extras_require={
'test': [
'coverage',
'pycodestyle',
'mypy',
'twine'
]
},
entry_points={
'console_scripts': [
'icalendar-timeseries-server = icalendar_timeseries_server:main'