#!/usr/bin/env python3
# vim:tabstop=4 softtabstop=4 shiftwidth=4 textwidth=160 smarttab expandtab colorcolumn=160
#
# Copyright (C) 2021 Daniel Friesel
#
# SPDX-License-Identifier: AGPL-3.0-only

import argparse
import psycopg2
import aiohttp
from aiohttp import web
from datetime import datetime, timedelta
import dateutil.parser
from geopy.distance import distance
import json
import logging
import numpy as np
import os
import pytz
import sys


class Stop:
    def __init__(self, eva, name, coord, ts):
        self.eva = eva
        self.name = name
        self.coord = coord
        self.when = ts

    def to_json(self):
        return self.eva, self.name, self.when.strftime("%H:%M")


class Train:
    def __init__(self, train):
        self.stopovers = train["previousStopovers"]
        self.tripId = train["tripId"]
        self.train_type, self.line_no = train["line"]["name"].split()
        self.train_no = train["line"]["fahrtNr"]
        self.request_eva = int(train["stop"]["id"])

        if train["when"]:
            self.arrival = dateutil.parser.parse(train["when"])
        else:
            self.arrival = None

        if train["delay"] is not None:
            self.delay = timedelta(seconds=train["delay"])
        else:
            self.delay = timedelta()

        # preferred candidate for position estimation?
        self.preferred = False

        # previous/next stop and progress between those
        self.prev_stop = None
        self.next_stop = None
        self.progress_ratio = None

        # location and distance to requested position
        self.coarse_location = None
        self.location = None
        self.distance = None

        self.prepare_stopovers()

    def prepare_stopovers(self):
        """
        Parse arrival/departure into datetimes and add delay.

        /arrivals results have delay information ("when" ≠ "plannedWhen").
        "previousStopovers" do not ("departure" == "plannedDeparture" in all cases).
        """
        for stopover in self.stopovers:
            if (
                stopover["arrival"]
                and stopover["plannedArrival"]
                and stopover["arrival"] != stopover["plannedArrival"]
            ):
                # arrival should be realtime. Didn't observe this case in practice yet.
                stopover["arrival"] = dateutil.parser.parse(stopover["arrival"])
            elif stopover["plannedArrival"]:
                # no realtime data available
                stopover["arrival"] = (
                    dateutil.parser.parse(stopover["plannedArrival"]) + self.delay
                )

            if (
                stopover["departure"]
                and stopover["plannedDeparture"]
                and stopover["departure"] != stopover["plannedDeparture"]
            ):
                stopover["departure"] = dateutil.parser.parse(stopover["departure"])
            elif stopover["plannedDeparture"]:
                stopover["departure"] = (
                    dateutil.parser.parse(stopover["plannedDeparture"]) + self.delay
                )

    def set_coarse_location(self, lat, lon):
        now = datetime.now(pytz.utc)
        train_evas = None
        legs = list()
        leg_distances = list()

        # includes train["stop"] -- but with arrival instead of departure
        # FIXME stopovers do not have realtime data :(
        for i, stopover in enumerate(self.stopovers):
            ts = None
            if stopover["departure"]:
                ts = stopover["departure"]
            elif stopover["arrival"]:
                ts = stopover["arrival"]

            if i == 0:
                if ts and ts > now and (ts - now).seconds > 300:
                    # train has not departed from its initial stop yet and is not about to depart
                    return
                continue

            prev_eva = int(self.stopovers[i - 1]["stop"]["id"])
            this_eva = int(stopover["stop"]["id"])

            prev_coord = (
                self.stopovers[i - 1]["stop"]["location"]["latitude"],
                self.stopovers[i - 1]["stop"]["location"]["longitude"],
            )
            this_coord = (
                stopover["stop"]["location"]["latitude"],
                stopover["stop"]["location"]["longitude"],
            )

            # TODO normalize coordinates (distance is skewed towards longitude)
            prev_c = np.array(prev_coord)
            this_c = np.array(this_coord)
            req_c = np.array((lat, lon))

            # unnormalized point-to-line distance between the requested position and this leg
            min_dist = np.abs(
                np.cross(prev_c - this_c, this_c - req_c)
            ) / np.linalg.norm(prev_c - this_c)
            legs.append((prev_eva, this_eva))
            leg_distances.append(min_dist)

            # The previousStopovers list starts with the train's origin. (planned)arrival is always
            # null except for the last entry (the stop where arrivals were requested).
            if ts and ts > now and not train_evas:
                train_evas = (prev_eva, this_eva)
                train_stops = (
                    self.stopovers[i - 1]["stop"]["name"],
                    stopover["stop"]["name"],
                )
                train_coords = (prev_coord, this_coord)
                # XXX known bug: we're saving departure at i-1 and (possibly) departure at i.
                # For a more accurate coarse position estimate later on, we need to track
                # departure at i-1 and arrival at i. But we don't always have it.
                train_times = (self.stopovers[i - 1]["departure"], ts)

        if not train_evas:
            return

        if not train_times[0]:
            return

        closest_leg_index = np.argmin(leg_distances)
        closest_evas = legs[closest_leg_index]

        if train_evas != closest_evas:
            return

        self.set_stops(
            Stop(train_evas[0], train_stops[0], train_coords[0], train_times[0]),
            Stop(train_evas[1], train_stops[1], train_coords[1], train_times[1]),
        )

        # The time (i.e., number of minutes) the train needs to travel to reach the requested
        # position might be a better metric than raw distance.
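        # geodesic distance (in km) between the coarse location estimate and the requested position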
        self.distance = distance(self.coarse_location, (lat, lon)).km

    def set_stops(self, prev_stop, next_stop):
        self.prev_stop = prev_stop
        self.next_stop = next_stop

        now = datetime.now(pytz.utc)

        self.progress_ratio = 1 - (
            (next_stop.when.timestamp() - now.timestamp())
            / (next_stop.when.timestamp() - prev_stop.when.timestamp())
        )
        self.progress_ratio = max(0, min(1, self.progress_ratio))

        if self.progress_ratio == 0:
            self.location = self.coarse_location = prev_stop.coord
        elif self.progress_ratio == 1:
            self.location = self.coarse_location = next_stop.coord
        else:
            ratio = self.progress_ratio
            self.coarse_location = (
                next_stop.coord[0] * ratio + prev_stop.coord[0] * (1 - ratio),
                next_stop.coord[1] * ratio + prev_stop.coord[1] * (1 - ratio),
            )
            if distance(prev_stop.coord, next_stop.coord).km < 20:
                # Do not request a polyline if the train is between stops less than 20 km apart.
                # This speeds up requests (and reduces transport.rest load) at a hopefully low
                # impact on accuracy.
                self.location = self.coarse_location

        if next_stop.eva == self.request_eva:
            # We can compare departure at the previous stop with arrival at this stop.
            # This is the most accurate data for position estimation.
            self.preferred = True

    def merge_with(self, instance):
        # might be useful in the future
        pass

    def to_json(self):
        return {
            "line": f"{self.train_type} {self.line_no}",
            "train": f"{self.train_type} {self.train_no}",
            "tripId": self.tripId,
            "location": self.coarse_location,
            "distance": round(self.distance, 1),
            "stops": [self.prev_stop.to_json(), self.next_stop.to_json()],
        }


headers = {
    "Access-Control-Allow-Origin": "*",
    "Content-Type": "application/json; charset=utf-8",
}

conn = psycopg2.connect(
    dbname=os.getenv("GEOLOOKUP_DBNAME", "geo_to_stations"),
    user=os.getenv("GEOLOOKUP_DBUSER", "geo_to_stations"),
    password=os.getenv("GEOLOOKUP_DBPASS"),
    host=os.getenv("GEOLOOKUP_DBHOST", "localhost"),
)

db_rest_api = os.getenv("GEOLOOKUP_DB_REST_API", "https://v5.db.transport.rest")

conn.autocommit = True
conn.set_session(readonly=True)

arrivals_request_count = 0
polyline_request_count = 0


async def set_location(train):
    trip_id = train.tripId
    line = f"{train.train_type} {train.line_no}"
    url = f"{db_rest_api}/trips/{trip_id}?lineName={line}&polyline=true"
    # Polyline-based fine location estimation is not implemented yet; bail out early.
    # The code below is currently unreachable.
    return
    logging.debug(f"Requesting polyline for {line}: {url}")
    global polyline_request_count
    polyline_request_count += 1
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            content = await response.text()
            content = json.loads(content)


def is_in_transit(train):
    return 0 < train.progress_ratio < 1


async def handle_stats(request):
    response = {
        "arrivals_request_count": arrivals_request_count,
        "polyline_request_count": polyline_request_count,
    }
    return web.Response(body=json.dumps(response), headers=headers)


async def handle_search(request):
    try:
        lat = float(request.query.get("lat"))
        lon = float(request.query.get("lon"))
    except TypeError:
        return web.HTTPBadRequest(text="lat/lon are mandatory")
    except ValueError:
        return web.HTTPBadRequest(text="lat/lon must be floating-point numbers")

    lut_lat = round(lat * 1000)
    lut_lon = round(lon * 1000)

    evas = set()
    with conn.cursor() as cur:
        cur.execute(
            "select stations from stations where lat between %s and %s and lon between %s and %s",
            (lut_lat - 3, lut_lat + 3, lut_lon - 3, lut_lon + 3),
        )
        for eva_list in cur.fetchall():
            evas.update(eva_list[0])

    if not evas:
        response = {"evas": list(), "trains": list()}
        return web.Response(body=json.dumps(response), headers=headers)

    arrivals = list()
    candidates = list()

    # deliberately not parallelized to minimize load on transport.rest
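    # /arrivals is requested with stopovers=true so that every train carries its previousStopovers
    # list; bus, subway, and tram results are excluded.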
    for eva in evas:
        logging.debug(f"Requesting arrivals at {eva}")
        global arrivals_request_count
        arrivals_request_count += 1
        async with aiohttp.ClientSession() as session:
            async with session.get(
                f"{db_rest_api}/stops/{eva}/arrivals?results=40&duration=120&stopovers=true&bus=false&subway=false&tram=false"
            ) as response:
                content = await response.text()
                content = json.loads(content)
                arrivals.append(content)

    for train_list in arrivals:
        for train in train_list:
            for stop in train["previousStopovers"]:
                if (
                    int(stop["stop"]["id"]) in evas
                    and stop["stop"]["id"] != train["stop"]["id"]
                ):
                    candidates.append(Train(train))
                    break

    logging.debug(
        f"{len(candidates)} trains travel between at least two requested evas"
    )

    for train in candidates:
        train.set_coarse_location(lat, lon)

    candidates = list(filter(lambda train: train.coarse_location, candidates))
    logging.debug(f"{len(candidates)} trains have a coarse location")

    candidates = sorted(
        candidates, key=lambda train: 0 if train.preferred else train.distance
    )

    # Remove duplicates. For now, we keep the preferred version, or the one with the lowest
    # estimated distance. Later on, we'll need to request polylines and perform accurate calculations.
    # TODO polyline requests are not needed for trains currently located at a station (ratio == 0 / == 1).
    # It should also be fine to skip them if the distance between stops[0] and stops[1] is less than ~ 20 km.
    # A train that is currently at a station (ratio == 0 / == 1) and several km away can also be
    # filtered out entirely.
    seen = dict()
    trains = list()
    for train in candidates:
        if train.preferred:
            trains.append(train)
            seen[train.train_no] = train
        elif train.train_no not in seen:
            trains.append(train)
            seen[train.train_no] = train
        else:
            seen[train.train_no].merge_with(train)

    logging.debug(f"{len(trains)} trains remain after deduplication")

    # If a train's coarse location is 50 km or more away, its fine (polyline-based) location is
    # highly unlikely to be much closer.
    trains = list(filter(lambda train: train.distance < 50, trains))

    # Trains that do not have a fine location yet and are currently between two stops need a polyline.
    need_fine = list(filter(lambda train: not train.location, trains))
    need_fine = list(filter(is_in_transit, need_fine))
    logging.debug(f"{len(need_fine)} trains need a polyline")

    for train in need_fine:
        await set_location(train)

    trains = sorted(trains, key=lambda train: train.distance)
    trains = list(map(lambda train: train.to_json(), trains[:10]))

    response = {"evas": list(evas), "trains": trains}

    return web.Response(body=json.dumps(response, ensure_ascii=False), headers=headers)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="geolocation to train estimation service"
    )
    parser.add_argument(
        "--log-level",
        metavar="LEVEL",
        choices=["debug", "info", "warning", "error"],
        default="warning",
        help="Set log level",
    )
    parser.add_argument("--port", type=int, metavar="PORT", default=8080)
    parser.add_argument("--prefix", type=str, metavar="PATH", default="/")
    args = parser.parse_args()

    if args.log_level:
        numeric_level = getattr(logging, args.log_level.upper(), None)
        if not isinstance(numeric_level, int):
            print(f"Invalid log level: {args.log_level}", file=sys.stderr)
            sys.exit(1)
        logging.basicConfig(level=numeric_level)

    app = web.Application()
    app.add_routes(
        [
            web.get(f"{args.prefix}search", handle_search),
            web.get(f"{args.prefix}stats", handle_stats),
        ]
    )
    web.run_app(app, host="localhost", port=args.port)
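

# Example usage (a sketch; the file name "geolookup.py" is a placeholder, and the URLs assume the
# default --prefix "/" and --port 8080 as well as a populated "stations" lookup table reachable via
# the GEOLOOKUP_* environment variables):
#
#   GEOLOOKUP_DBPASS=... python3 geolookup.py --log-level debug
#   curl 'http://localhost:8080/search?lat=51.45&lon=7.01'
#   curl 'http://localhost:8080/stats'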