Secure all critical sections using _SCRAPE_CACHE with a lock.

s3lph 2019-08-21 13:52:25 +02:00
parent 8d3e28a11d
commit 4cc8adc7b9

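For context: _scrape_calendar runs on background threading.Timer threads (started via start_scrape_calendar, visible in the last hunk) and rewrites _SCRAPE_CACHE, while get_calendar reads the same dict from serving code. The change serializes both paths through a single module-level Lock. A minimal, self-contained sketch of the resulting pattern — event payloads are simplified to plain strings here as an assumption, not the module's real Event type:

from threading import Lock
from typing import Dict, List

# Shared state: written by background scraper threads, read by request
# handlers. Simplification: events are plain strings, not Event objects.
_SCRAPE_CACHE: Dict[str, List[str]] = dict()
_SCRAPE_CACHE_LOCK: Lock = Lock()


def _scrape(name: str) -> None:
    # The expensive work (fetching, parsing, recurrence expansion) stays
    # outside the lock; only publishing the result is a critical section.
    events = [f'{name}-event-{i}' for i in range(3)]
    with _SCRAPE_CACHE_LOCK:
        _SCRAPE_CACHE[name] = events


def get_calendar(name: str) -> List[str]:
    # Readers take the same lock, so a read never interleaves with a write.
    with _SCRAPE_CACHE_LOCK:
        return _SCRAPE_CACHE.get(name, [])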

@@ -3,7 +3,7 @@ from typing import Dict, List, Iterable
 import sys
 import urllib.request
 from datetime import datetime, date, timedelta
-from threading import Timer
+from threading import Lock, Timer
 from dateutil import rrule
 from icalendar import cal
@@ -15,6 +15,7 @@ from icalendar_timeseries_server.event import Event
 _SCRAPE_CACHE: Dict[str, List[Event]] = dict()
+_SCRAPE_CACHE_LOCK: Lock = Lock()
 __py_version: str = f'{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}'
 USER_AGENT: str = f'icalendar-timeseries-server/{__version__} (Python/{__py_version})'
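The lock is created once at import time, right next to the dict it guards, so every thread in the process shares the same Lock instance. The `with` statement used in the hunks below is equivalent to an explicit acquire/release pair, with the release guaranteed even on exceptions; a small sketch:

from threading import Lock

lock = Lock()

# The form used in this commit ...
with lock:
    pass  # critical section

# ... is equivalent to pairing acquire() with release() in a finally
# block, so the lock is released even if the critical section raises.
lock.acquire()
try:
    pass  # critical section
finally:
    lock.release()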
@@ -49,7 +50,7 @@ def _parse_recurring(event: cal.Event, start: datetime, end: datetime, duration:
 def _scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: datetime):
-    global _SCRAPE_CACHE
+    global _SCRAPE_CACHE, _SCRAPE_CACHE_LOCK
     events = []
     opener: urllib.request.OpenerDirector = config.get_url_opener()
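Strictly speaking, `global` is only required for names a function rebinds; acquiring the lock merely reads `_SCRAPE_CACHE_LOCK`, so listing it in the `global` statement is documentation rather than a functional necessity. A generic illustration (hypothetical names, not from the module):

from threading import Lock

counter = 0
counter_lock = Lock()


def bump() -> None:
    # `global` is needed for `counter`, which is rebound below; reading
    # `counter_lock` in order to acquire it needs no declaration.
    global counter
    with counter_lock:
        counter += 1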
@@ -79,7 +80,8 @@ def _scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: da
             for occurence in occurences:
                 if start <= occurence < end:
                     events.append(Event(name, element, occurence, occurence + duration))
-    _SCRAPE_CACHE[name] = events
+    with _SCRAPE_CACHE_LOCK:
+        _SCRAPE_CACHE[name] = events
 
 
 def scrape_calendar(name: str, config: CalendarConfig):
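Note the shape of the writer's critical section: the events list is assembled entirely outside the lock, which is then held only for a single dict assignment. Because the writer rebinds _SCRAPE_CACHE[name] to a fresh list instead of mutating the old one in place, a reader that already fetched the previous list keeps a consistent snapshot. A self-contained demonstration of that property (generic names, not from the module):

from threading import Lock
from typing import Dict, List

cache: Dict[str, List[str]] = {'cal': ['old']}
lock = Lock()

with lock:
    snapshot = cache.get('cal', [])  # a reader takes a reference

with lock:
    cache['cal'] = ['new']           # the writer rebinds, never mutates

# The snapshot is unaffected: the writer replaced the list object
# rather than changing the one the reader is still holding.
assert snapshot == ['old']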
@@ -110,4 +112,5 @@ def start_scrape_calendar(name: str, config: CalendarConfig):
 
 def get_calendar(name: str):
     global _SCRAPE_CACHE
-    return _SCRAPE_CACHE.get(name, [])
+    with _SCRAPE_CACHE_LOCK:
+        return _SCRAPE_CACHE.get(name, [])
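On the reader side, returning from inside the `with` block is safe: the context manager releases the lock as the frame unwinds, before control returns to the caller. A quick check:

from threading import Lock

lock = Lock()


def read() -> int:
    # The lock is released on return, exactly as if the function had
    # stored the value, left the with block, and then returned it.
    with lock:
        return 42


print(read())         # 42
print(lock.locked())  # False: released despite the early return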