#!/usr/bin/env python3

import os
import urllib.request
import json
import base64

import pyproj
from SPARQLWrapper import SPARQLWrapper, JSON
from geopy import Nominatim


# Map area of interest as (longitude, latitude): [bottom-left, top-right].
BOUNDING_BOX = [(4.27775, 46.7482713), (19.2403594, 54.9833021)]


# Semantic-MediaWiki JSON searches on doku.ccc.de: active Erfa-Kreise /
# Chaostreffs with their city, address and country printout properties.
ERFA_URL = 'https://doku.ccc.de/Spezial:Semantische_Suche/format%3Djson/limit%3D50/link%3Dall/headers%3Dshow/searchlabel%3DJSON/class%3Dsortable-20wikitable-20smwtable/sort%3D/order%3Dasc/offset%3D0/-5B-5BKategorie:Erfa-2DKreise-5D-5D-20-5B-5BChaostreff-2DActive::wahr-5D-5D/-3FChaostreff-2DCity/-3FChaostreff-2DPhysical-2DAddress/-3FChaostreff-2DPhysical-2DHousenumber/-3FChaostreff-2DPhysical-2DPostcode/-3FChaostreff-2DPhysical-2DCity/-3FChaostreff-2DCountry/mainlabel%3D/prettyprint%3Dtrue/unescape%3Dtrue'

CHAOSTREFF_URL = 'https://doku.ccc.de/Spezial:Semantische_Suche/format%3Djson/limit%3D50/link%3Dall/headers%3Dshow/searchlabel%3DJSON/class%3Dsortable-20wikitable-20smwtable/sort%3D/order%3Dasc/offset%3D0/-5B-5BKategorie:Chaostreffs-5D-5D-20-5B-5BChaostreff-2DActive::wahr-5D-5D/-3FChaostreff-2DCity/-3FChaostreff-2DPhysical-2DAddress/-3FChaostreff-2DPhysical-2DHousenumber/-3FChaostreff-2DPhysical-2DPostcode/-3FChaostreff-2DPhysical-2DCity/-3FChaostreff-2DCountry/mainlabel%3D/prettyprint%3Dtrue/unescape%3Dtrue'


def fetch_wikidata_states(target='shapes_states'):
    """Download the GeoJSON shapes of all German federal states into *target*.

    Queries the Wikidata SPARQL endpoint for every item that is an instance
    of 'federal state of Germany' (Q1221156) and has a 'geoshape' (P3896),
    then downloads each shape file as <QID>.json.
    """
    sparql = SPARQLWrapper('https://query.wikidata.org/sparql')
    # NOTE(review): the PREFIX IRIs were lost in transit (markup stripped);
    # restored to the standard Wikidata prefixes.
    sparql.setQuery('''
PREFIX wd: <http://www.wikidata.org/entity/>
PREFIX wdt: <http://www.wikidata.org/prop/direct/>

SELECT DISTINCT ?item ?map WHERE {
  # ?item is instance of federal state of germany and has geoshape ?map
  ?item wdt:P31 wd:Q1221156;
        wdt:P3896 ?map
}
''')
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()

    # Map QID (last segment of the item IRI) -> geoshape data URL.
    shape_urls = {result['item']['value'].split('/')[-1]: result['map']['value']
                  for result in results["results"]["bindings"]}

    os.makedirs(target, exist_ok=True)
    for item, url in shape_urls.items():
        with urllib.request.urlopen(url) as resp:
            with open(os.path.join(target, item + '.json'), 'wb') as f:
                f.write(resp.read())


def fetch_wikidata_countries(target='shapes_countries'):
    """Download the GeoJSON shapes of European countries into *target*.

    Queries Wikidata for items that are instances of country (Q6256) or
    sovereign state (Q3624078), are transitively part of Europe (Q46) or
    the EEA (Q8932), and have a geoshape (P3896); downloads each shape
    as <QID>.json.  Individual download failures are logged and skipped.
    """
    sparql = SPARQLWrapper('https://query.wikidata.org/sparql')
    # NOTE(review): PREFIX IRIs restored here as well (see above).
    sparql.setQuery('''
PREFIX wd: <http://www.wikidata.org/entity/>
PREFIX wdt: <http://www.wikidata.org/prop/direct/>

SELECT DISTINCT ?item ?map WHERE {
  # ?item is instance of country or sovereign state
  ?item wdt:P31 ?stateclass.
  # ?item is transitively part of Europe (Contintent) or EEA
  ?item wdt:P361+ ?euroclass;
        # ?item has geoshape ?map
        wdt:P3896 ?map.
  FILTER (?stateclass = wd:Q6256 || ?stateclass = wd:Q3624078).
  FILTER (?euroclass = wd:Q46 || ?euroclass = wd:Q8932).
}
''')
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()

    shape_urls = {result['item']['value'].split('/')[-1]: result['map']['value']
                  for result in results["results"]["bindings"]}

    os.makedirs(target, exist_ok=True)
    for item, url in shape_urls.items():
        try:
            with urllib.request.urlopen(url) as resp:
                with open(os.path.join(target, item + '.json'), 'wb') as f:
                    f.write(resp.read())
        except Exception as e:
            # Was `except BaseException`, which would also swallow
            # KeyboardInterrupt/SystemExit.  Best-effort: log and continue.
            print(e)
            print(url)
def filter_boundingbox(source='shapes_countries', target='shapes_filtered'):
    """Copy country shapes that touch BOUNDING_BOX from *source* to *target*.

    A shape is kept if any vertex of any polygon's outer ring lies inside
    BOUNDING_BOX.  Q183.json is always skipped — presumably Germany, which
    is rendered per-state instead; TODO confirm.
    """
    files = os.listdir(source)
    os.makedirs(target, exist_ok=True)
    for f in files:
        if not f.endswith('.json') or 'Q183.json' in f:
            continue
        path = os.path.join(source, f)
        with open(path, 'r') as sf:
            shapedata = sf.read()
        shape = json.loads(shapedata)
        keep = False
        geo = shape['data']['features'][0]['geometry']
        # Normalize a plain Polygon to MultiPolygon-style nesting so the
        # loop below handles both geometry types uniformly.
        if geo['type'] == 'Polygon':
            geo['coordinates'] = [geo['coordinates']]
        for poly in geo['coordinates']:
            for point in poly[0]:  # outer ring only
                if BOUNDING_BOX[0][0] <= point[0] <= BOUNDING_BOX[1][0] \
                        and BOUNDING_BOX[0][1] <= point[1] <= BOUNDING_BOX[1][1]:
                    keep = True
                    break
            if keep:
                break
        if keep:
            with open(os.path.join(target, f), 'w') as sf:
                sf.write(shapedata)


def address_lookup(erfa):
    """Geocode an Erfa record's "<city>, <country>" via Nominatim.

    Returns (longitude, latitude), or None when geocoding finds nothing.
    """
    # NOTE(review): Nominatim's usage policy requires an identifying
    # user_agent — 'foobar' should be replaced with a real project name.
    locator = Nominatim(user_agent='foobar')
    city = erfa['printouts']['Chaostreff-City'][0]
    country = erfa['printouts']['Chaostreff-Country'][0]
    address = f'{city}, {country}'
    response = locator.geocode(address)
    if response is None:
        return None
    return response.longitude, response.latitude


def fetch_erfas(target='erfa-info.json', url=ERFA_URL, bbox=None):
    """Fetch Erfa records from doku.ccc.de and write city->location JSON.

    Requires HTTP basic-auth credentials in the environment variable
    DOKU_CCC_DE_BASICAUTH ("username:password").  Locations that cannot
    be geocoded are stored as None (the renderer skips them).  If *bbox*
    is a list, it is extended in place to the bounding box
    [(min_lon, min_lat), (max_lon, max_lat)] of all located entries.
    """
    userpw = os.getenv('DOKU_CCC_DE_BASICAUTH')
    if userpw is None:
        print('Please set environment variable DOKU_CCC_DE_BASICAUTH=username:password')
        # SystemExit works even when the site module's exit() is absent.
        raise SystemExit(1)
    auth = base64.b64encode(userpw.encode()).decode()
    erfas = {}
    req = urllib.request.Request(url, headers={'Authorization': f'Basic {auth}'})
    with urllib.request.urlopen(req) as resp:
        erfadata = json.loads(resp.read().decode())
    for name, erfa in erfadata['results'].items():
        location = address_lookup(erfa)
        if location is None:
            print(f'WARNING: No location for {name}')
        city = erfa['printouts']['Chaostreff-City'][0]
        erfas[city] = location
        # BUGFIX: previously the bbox update ran even for location=None
        # (TypeError on location[0]) and crashed with the default bbox=None
        # (len(None)).  Only accumulate real locations into a caller-
        # supplied list.
        if bbox is not None and location is not None:
            if len(bbox) == 0:
                bbox.append(location)
                bbox.append(location)
            else:
                bbox[0] = (min(bbox[0][0], location[0]), min(bbox[0][1], location[1]))
                bbox[1] = (max(bbox[1][0], location[0]), max(bbox[1][1], location[1]))
    with open(target, 'w') as f:
        json.dump(erfas, f)


def fetch_chaostreffs(target='chaostreff-info.json', bbox=None):
    """Same as fetch_erfas, but for the Chaostreff search results."""
    fetch_erfas(target=target, url=CHAOSTREFF_URL, bbox=bbox)
def _load_shapes(srcdir, project):
    """Load every GeoJSON file in *srcdir*; return a list of rings, each a
    list of projected (x, y) points taken from the outer ring of every
    polygon (Polygon geometries are normalized to MultiPolygon nesting)."""
    shapes = []
    for fname in os.listdir(srcdir):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(srcdir, fname), 'r') as sf:
            shape = json.load(sf)
        geo = shape['data']['features'][0]['geometry']
        if geo['type'] == 'Polygon':
            geo['coordinates'] = [geo['coordinates']]
        for poly in geo['coordinates']:
            shapes.append([project(x, y) for x, y in poly[0]])
    return shapes


def _load_points(path, project):
    """Load a city -> (lon, lat) JSON file written by fetch_erfas; return
    city -> projected (x, y), skipping entries whose geocoding failed."""
    with open(path, 'r') as f:
        data = json.load(f)
    return {city: project(*loc) for city, loc in data.items() if loc is not None}


def create_svg(source_states='shapes_states', source_countries='shapes_filtered',
               source_erfa='erfa-info.json', source_ct='chaostreff-info.json'):
    """Render state/country outlines plus Erfa and Chaostreff markers to map.svg.

    NOTE(review): the original SVG template strings were destroyed when this
    file was mangled in transit (all markup stripped).  The element markup
    emitted below is a best-effort reconstruction — colors, radii, offsets
    and font size are guesses; compare against the committed map.svg.
    """
    transformer = pyproj.Transformer.from_crs('epsg:4326', 'epsg:4258')
    scalex = 130
    scaley = 200
    blt = transformer.transform(*BOUNDING_BOX[0])
    trt = transformer.transform(*BOUNDING_BOX[1])
    jtm_bounding_box = [
        (scalex * blt[0], scaley * trt[1]),
        (scalex * trt[0], scaley * blt[1]),
    ]
    origin = jtm_bounding_box[0]
    svg_box = (jtm_bounding_box[1][0] - origin[0], origin[1] - jtm_bounding_box[1][1])

    def project(x, y):
        # Project WGS84 lon/lat to scaled map coordinates with a top-left
        # origin; SVG's y axis grows downwards, hence the subtraction.
        xt, yt = transformer.transform(x, y)
        return xt * scalex - origin[0], origin[1] - yt * scaley

    shapes_states = _load_shapes(source_states, project)
    shapes_countries = _load_shapes(source_countries, project)
    chaostreffs = _load_points(source_ct, project)
    erfas = _load_points(source_erfa, project)

    # Grow the viewport so no projected vertex is clipped.
    rectbox = [0, 0, svg_box[0], svg_box[1]]
    for shape in shapes_states + shapes_countries:
        for lon, lat in shape:
            rectbox[0] = min(lon, rectbox[0])
            rectbox[1] = min(lat, rectbox[1])
            rectbox[2] = max(lon, rectbox[2])
            rectbox[3] = max(lat, rectbox[3])

    parts = [
        f'<svg xmlns="http://www.w3.org/2000/svg" '
        f'viewBox="{rectbox[0]} {rectbox[1]} {rectbox[2] - rectbox[0]} {rectbox[3] - rectbox[1]}">'
    ]

    # Render shortest shapes last s.t. Berlin, Hamburg and Bremen are
    # rendered on top of their surrounding states.
    for shape in sorted(shapes_states, key=lambda s: -len(s)):
        points = ' '.join(f'{lon},{lat}' for lon, lat in shape)
        parts.append(f'<polygon points="{points}" fill="#ffffff" stroke="#000000" stroke-width="2"/>')

    for shape in shapes_countries:
        points = ' '.join(f'{lon},{lat}' for lon, lat in shape)
        parts.append(f'<polygon points="{points}" fill="#eeeeee" stroke="#000000" stroke-width="2"/>')

    for city, (x, y) in erfas.items():
        # The original emitted two elements per Erfa — presumably a filled
        # marker with an inner ring; reconstructed as such.
        parts.append(f'<circle cx="{x}" cy="{y}" r="20" fill="#000000"/>')
        parts.append(f'<circle cx="{x}" cy="{y}" r="12" fill="#ffffff"/>')

    for city, (x, y) in chaostreffs.items():
        parts.append(f'<circle cx="{x}" cy="{y}" r="10" fill="#000000"/>')

    for city, (x, y) in erfas.items():
        # Heuristic label placement: count Erfa markers inside the would-be
        # label box on each side of the marker and label on the emptier side
        # (box: 25 units away, ~15 units per character, 40 units tall).
        weight_right = sum(1 for ox, oy in erfas.values()
                           if x + 25 < ox < x + 25 + len(city) * 15
                           and y - 15 < oy < y + 25)
        weight_left = sum(1 for ox, oy in erfas.values()
                          if x - 25 - len(city) * 15 < ox < x - 25
                          and y - 15 < oy < y + 25)
        if weight_right > weight_left:
            parts.append(f'<text x="{x - 25}" y="{y + 10}" text-anchor="end" font-size="30">{city}</text>')
        else:
            parts.append(f'<text x="{x + 25}" y="{y + 10}" text-anchor="start" font-size="30">{city}</text>')

    parts.append('</svg>')

    with open('map.svg', 'w') as mapfile:
        mapfile.write('\n'.join(parts))


def main():
    """Run the pipeline steps that are currently enabled.

    The wikidata fetch steps are commented out because their outputs are
    cached on disk; uncomment to refresh the shape files.
    """
    bbox = []
    fetch_erfas(bbox=bbox)
    #fetch_chaostreffs(bbox=bbox)
    #print(bbox)
    #fetch_wikidata_states()
    #fetch_wikidata_countries()
    filter_boundingbox()
    create_svg()


if __name__ == '__main__':
    main()

# Q347 P361 Q46
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Erlangen + + Paderborn + + Hamburg + + Aachen + + Basel + + Berlin + + Bremen + + Köln + + Darmstadt + + Dresden + + Düsseldorf + + Frankfurt am Main + + Freiburg + + Göttingen + + Hannover + + Karlsruhe + + Mannheim + + München + + Salzburg + + Stuttgart + + Ulm + + Wien + + Wiesbaden + + Zürich + + Siegen + + Kaiserslautern + + Essen + + Dortmund + + Fulda + + Würzburg + + Bamberg + + Kassel + \ No newline at end of file