From 4cc8adc7b924d101eec1767235a7b613dcbe3336 Mon Sep 17 00:00:00 2001
From: s3lph
Date: Wed, 21 Aug 2019 13:52:25 +0200
Subject: [PATCH] Secure all critical sections using _SCRAPE_CACHE with a lock.

---
 icalendar_timeseries_server/cal.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/icalendar_timeseries_server/cal.py b/icalendar_timeseries_server/cal.py
index fb5269d..0e582ec 100644
--- a/icalendar_timeseries_server/cal.py
+++ b/icalendar_timeseries_server/cal.py
@@ -3,7 +3,7 @@ from typing import Dict, List, Iterable
 import sys
 import urllib.request
 from datetime import datetime, date, timedelta
-from threading import Timer
+from threading import Lock, Timer
 
 from dateutil import rrule
 from icalendar import cal
@@ -15,6 +15,7 @@ from icalendar_timeseries_server.event import Event
 
 
 _SCRAPE_CACHE: Dict[str, List[Event]] = dict()
+_SCRAPE_CACHE_LOCK: Lock = Lock()
 
 __py_version: str = f'{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}'
 USER_AGENT: str = f'icalendar-timeseries-server/{__version__} (Python/{__py_version})'
@@ -49,7 +50,7 @@ def _parse_recurring(event: cal.Event, start: datetime, end: datetime, duration:
 
 
 def _scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: datetime):
-    global _SCRAPE_CACHE
+    global _SCRAPE_CACHE, _SCRAPE_CACHE_LOCK
     events = []
 
     opener: urllib.request.OpenerDirector = config.get_url_opener()
@@ -79,7 +80,8 @@ def _scrape_calendar(name: str, config: CalendarConfig, start: datetime, end: da
         for occurence in occurences:
             if start <= occurence < end:
                 events.append(Event(name, element, occurence, occurence + duration))
-    _SCRAPE_CACHE[name] = events
+    with _SCRAPE_CACHE_LOCK:
+        _SCRAPE_CACHE[name] = events
 
 
 def scrape_calendar(name: str, config: CalendarConfig):
@@ -110,4 +112,5 @@ def start_scrape_calendar(name: str, config: CalendarConfig):
 
 def get_calendar(name: str):
     global _SCRAPE_CACHE
-    return _SCRAPE_CACHE.get(name, [])
+    with _SCRAPE_CACHE_LOCK:
+        return _SCRAPE_CACHE.get(name, [])