Diffstat (limited to 'lib/Travelynx/Command')
-rw-r--r--  lib/Travelynx/Command/database.pm        | 1396
-rw-r--r--  lib/Travelynx/Command/dumpconfig.pm      |    1
-rw-r--r--  lib/Travelynx/Command/dumpstops.pm       |    9
-rw-r--r--  lib/Travelynx/Command/influxdb.pm        |  101
-rw-r--r--  lib/Travelynx/Command/integritycheck.pm  |  201
-rw-r--r--  lib/Travelynx/Command/maintenance.pm     |   16
-rw-r--r--  lib/Travelynx/Command/traewelling.pm     |   38
-rw-r--r--  lib/Travelynx/Command/work.pm            |  834
8 files changed, 2267 insertions(+), 329 deletions(-)
diff --git a/lib/Travelynx/Command/database.pm b/lib/Travelynx/Command/database.pm
index d13b2a7..95d67f5 100644
--- a/lib/Travelynx/Command/database.pm
+++ b/lib/Travelynx/Command/database.pm
@@ -1,14 +1,19 @@
package Travelynx::Command::database;
# Copyright (C) 2020-2023 Birte Kristina Friesel
+# Copyright (C) 2025 networkException <git@nwex.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
use Mojo::Base 'Mojolicious::Command';
use DateTime;
use File::Slurp qw(read_file);
+use List::Util qw();
use JSON;
+use Travel::Status::DE::EFA;
+use Travel::Status::DE::HAFAS;
use Travel::Status::DE::IRIS::Stations;
+use Travel::Status::MOTIS;
has description => 'Initialize or upgrade database layout';
@@ -1918,7 +1923,7 @@ my @migrations = (
# v49 -> v50
# travelynx 2.0 introduced proper HAFAS support, so there is no need for
- # the 'FYI, here is some hAFAS data' kludge anymore.
+ # the 'FYI, here is some HAFAS data' kludge anymore.
sub {
my ($db) = @_;
$db->query(
@@ -1946,6 +1951,1263 @@ my @migrations = (
}
);
},
+
+ # v51 -> v52
+ # Explicitly encode backend type; preparation for multiple HAFAS backends
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ create table backends (
+ id smallserial not null primary key,
+ iris bool not null,
+ hafas bool not null,
+ efa bool not null,
+ ris bool not null,
+ name varchar(32) not null,
+ unique (iris, hafas, efa, ris, name)
+ );
+ insert into backends (id, iris, hafas, efa, ris, name) values (0, true, false, false, false, '');
+ insert into backends (id, iris, hafas, efa, ris, name) values (1, false, true, false, false, 'DB');
+ alter sequence backends_id_seq restart with 2;
+ alter table in_transit add column backend_id smallint references backends (id);
+ alter table journeys add column backend_id smallint references backends (id);
+ update in_transit set backend_id = 0 where train_id not like '%|%';
+ update journeys set backend_id = 0 where train_id not like '%|%';
+ update in_transit set backend_id = 1 where train_id like '%|%';
+ update journeys set backend_id = 1 where train_id like '%|%';
+ update journeys set backend_id = 1 where train_id = 'manual';
+ alter table in_transit alter column backend_id set not null;
+ alter table journeys alter column backend_id set not null;
+
+ drop view in_transit_str;
+ drop view journeys_str;
+ create view in_transit_str as select
+ user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.ris as is_ris,
+ backend.name as backend_name,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva
+ left join stations as arr_station on checkout_station_id = arr_station.eva
+ left join backends as backend on backend_id = backend.id
+ ;
+ create view journeys_str as select
+ journeys.id as journey_id, user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.ris as is_ris,
+ backend.name as backend_name,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, edited, route, messages, user_data,
+ dep_platform, arr_platform
+ from journeys
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva
+ left join stations as arr_station on checkout_station_id = arr_station.eva
+ left join backends as backend on backend_id = backend.id
+ ;
+ update schema_version set version = 52;
+ }
+ );
+ },
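As a rough illustration of how the per-checkin backend flags exposed by the new views might be consumed, the helper below is a sketch only and not part of this diff; its name and calling convention are assumptions:

    # Sketch: pick the poll module for a user's active checkin based on the
    # is_iris / is_hafas flags now exposed by in_transit_str.
    sub backend_module_for_checkin {
        my ( $db, $uid ) = @_;
        my $status
          = $db->select( 'in_transit_str', '*', { user_id => $uid } )->hash
          or return;
        return 'Travel::Status::DE::IRIS'  if $status->{is_iris};
        return 'Travel::Status::DE::HAFAS' if $status->{is_hafas};
        return;
    }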
+
+ # v52 -> v53
+ # Extend train_id to be compatible with more recent HAFAS versions
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ drop view in_transit_str;
+ drop view journeys_str;
+ drop view follows_in_transit;
+ alter table in_transit alter column train_id type varchar(384);
+ alter table journeys alter column train_id type varchar(384);
+ create view in_transit_str as select
+ user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.ris as is_ris,
+ backend.name as backend_name,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva
+ left join stations as arr_station on checkout_station_id = arr_station.eva
+ left join backends as backend on backend_id = backend.id
+ ;
+ create view journeys_str as select
+ journeys.id as journey_id, user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.ris as is_ris,
+ backend.name as backend_name,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, edited, route, messages, user_data,
+ dep_platform, arr_platform
+ from journeys
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva
+ left join stations as arr_station on checkout_station_id = arr_station.eva
+ left join backends as backend on backend_id = backend.id
+ ;
+ create view follows_in_transit as select
+ r1.subject_id as follower_id, user_id as followee_id,
+ users.name as followee_name,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join relations as r1 on r1.predicate = 1 and r1.object_id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva
+ left join stations as arr_station on checkout_station_id = arr_station.eva
+ order by checkin_time desc
+ ;
+ update schema_version set version = 53;
+ }
+ );
+ },
+
+ # v53 -> v54
+ # Retrofit lat/lon data onto routes logged before v2.7.8; ensure
+ # consistent name and eva entries as well.
+ sub {
+ my ($db) = @_;
+
+ say
+'Adding lat/lon to routes of journeys logged before v2.7.8 and improving consistency of name/eva data in very old route entries.';
+ say 'This may take a while ...';
+
+ my %legacy_to_new;
+ if ( -r 'share/old_station_names.json' ) {
+ %legacy_to_new = %{ JSON->new->utf8->decode(
+ scalar read_file('share/old_station_names.json')
+ )
+ };
+ }
+
+ my %latlon_by_eva;
+ my %latlon_by_name;
+ my $res = $db->select( 'stations', [ 'name', 'eva', 'lat', 'lon' ] );
+ while ( my $row = $res->hash ) {
+ $latlon_by_eva{ $row->{eva} } = $row;
+ $latlon_by_name{ $row->{name} } = $row;
+ }
+
+ my $total
+ = $db->select( 'journeys', 'count(*) as count' )->hash->{count};
+ my $count = 0;
+ my $total_no_eva = 0;
+ my $total_no_latlon = 0;
+
+ my $json = JSON->new;
+
+ $res = $db->select( 'journeys_str', [ 'route', 'journey_id' ] );
+ while ( my $row = $res->expand->hash ) {
+ my $no_eva = 0;
+ my $no_latlon = 0;
+ my $changed = 0;
+ my @route = @{ $row->{route} };
+ for my $stop (@route) {
+ my $name = $stop->[0];
+ my $eva = $stop->[1];
+
+ if ( not $eva and $stop->[2]{eva} ) {
+ $eva = $stop->[1] = 0 + $stop->[2]{eva};
+ }
+
+ if ( $stop->[2]{eva} and $eva and $eva == $stop->[2]{eva} ) {
+ delete $stop->[2]{eva};
+ }
+
+ if ( $stop->[2]{name} and $name eq $stop->[2]{name} ) {
+ delete $stop->[2]{name};
+ }
+
+ if ( not $eva ) {
+ if ( $latlon_by_name{$name} ) {
+ $eva = $stop->[1] = $latlon_by_name{$name}{eva};
+ $changed = 1;
+ }
+ elsif ( $legacy_to_new{$name}
+ and $latlon_by_name{ $legacy_to_new{$name} } )
+ {
+ $eva = $stop->[1]
+ = $latlon_by_name{ $legacy_to_new{$name} }{eva};
+ $stop->[2]{lat}
+ = $latlon_by_name{ $legacy_to_new{$name} }{lat};
+ $stop->[2]{lon}
+ = $latlon_by_name{ $legacy_to_new{$name} }{lon};
+ $changed = 1;
+ }
+ else {
+ $no_eva = 1;
+ }
+ }
+
+ if ( $stop->[2]{lat} and $stop->[2]{lon} ) {
+ next;
+ }
+
+ if ( $eva and $latlon_by_eva{$eva} ) {
+ $stop->[2]{lat} = $latlon_by_eva{$eva}{lat};
+ $stop->[2]{lon} = $latlon_by_eva{$eva}{lon};
+ $changed = 1;
+ }
+ elsif ( $latlon_by_name{$name} ) {
+ $stop->[2]{lat} = $latlon_by_name{$name}{lat};
+ $stop->[2]{lon} = $latlon_by_name{$name}{lon};
+ $changed = 1;
+ }
+ elsif ( $legacy_to_new{$name}
+ and $latlon_by_name{ $legacy_to_new{$name} } )
+ {
+ $stop->[2]{lat}
+ = $latlon_by_name{ $legacy_to_new{$name} }{lat};
+ $stop->[2]{lon}
+ = $latlon_by_name{ $legacy_to_new{$name} }{lon};
+ $changed = 1;
+ }
+ else {
+ $no_latlon = 1;
+ }
+ }
+ if ($no_eva) {
+ $total_no_eva += 1;
+ }
+ if ($no_latlon) {
+ $total_no_latlon += 1;
+ }
+ if ($changed) {
+ $db->update(
+ 'journeys',
+ {
+ route => $json->encode( \@route ),
+ },
+ { id => $row->{journey_id} }
+ );
+ }
+ if ( $count++ % 10000 == 0 ) {
+ printf( " %2.0f%% complete\n", $count * 100 / $total );
+ }
+ }
+ say ' done';
+ if ($total_no_eva) {
+ printf( " (%d of %d routes still lack some EVA IDs)\n",
+ $total_no_eva, $total );
+ }
+ if ($total_no_latlon) {
+ printf( " (%d of %d routes still lack some lat/lon data)\n",
+ $total_no_latlon, $total );
+ }
+
+ $db->query(
+ qq{
+ update schema_version set version = 54;
+ }
+ );
+ },
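For reference, each route entry handled by this migration is a three-element array of station name, EVA ID, and an info hash; the loop above fills in missing eva/lat/lon fields. A before/after sketch with illustrative values:

    # before: [ "Essen Hbf", undef, {} ]
    # after:  [ "Essen Hbf", 8000098, { lat => 51.4513, lon => 7.0131 } ]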
+
+ # v54 -> v55
+ # do not share stations between backends
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ alter table schema_version add column hafas varchar(12);
+ alter table users drop column external_services;
+ alter table users add column backend_id smallint references backends (id) default 1;
+ alter table stations drop constraint stations_pkey;
+ alter table stations add unique (eva, source);
+ create index eva_by_source on stations (eva, source);
+ create index eva on stations (eva);
+ alter table related_stations drop constraint related_stations_eva_meta_key;
+ drop index rel_eva;
+ alter table related_stations add column backend_id smallint;
+ update related_stations set backend_id = 1;
+ alter table related_stations alter column backend_id set not null;
+ alter table related_stations add constraint backend_fk foreign key (backend_id) references backends (id);
+ alter table related_stations add unique (eva, meta, backend_id);
+ create index related_stations_eva_backend_key on related_stations (eva, backend_id);
+ }
+ );
+
+	# Up until now, IRIS and DB HAFAS shared stations, with IRIS taking
+	# precedence. As of v2.7, this is no longer the case. However, old DB
+	# HAFAS journeys may still reference IRIS-specific stations. So, we
+	# make all IRIS stations available as DB HAFAS stations as well.
+ my $total
+ = $db->select( 'stations', 'count(*) as count', { source => 0 } )
+ ->hash->{count};
+ my $count = 0;
+
+ # Caveat: If this is a fresh installation, there are no IRIS stations
+ # in the database yet. So we have to populate it first.
+ if ( not $total ) {
+ say
+'Preparing to untangle IRIS / HAFAS stations, this may take a while ...';
+ $total = scalar Travel::Status::DE::IRIS::Stations::get_stations();
+ for my $s ( Travel::Status::DE::IRIS::Stations::get_stations() ) {
+ my ( $ds100, $name, $eva, $lon, $lat ) = @{$s};
+ if ( $ENV{__TRAVELYNX_TEST_MINI_IRIS}
+ and ( $eva < 8000000 or $eva > 8000100 ) )
+ {
+ next;
+ }
+ $db->insert(
+ 'stations',
+ {
+ eva => $eva,
+ ds100 => $ds100,
+ name => $name,
+ lat => $lat,
+ lon => $lon,
+ source => 0,
+ archived => 0
+ },
+ );
+ if ( $count++ % 1000 == 0 ) {
+ printf( " %2.0f%% complete\n", $count * 100 / $total );
+ }
+ }
+ $count = 0;
+ }
+
+ say 'Untangling IRIS / HAFAS stations, this may take a while ...';
+ my $res = $db->query(
+ qq{
+ select eva, ds100, name, lat, lon, archived
+ from stations
+ where source = 0;
+ }
+ );
+ while ( my $row = $res->hash ) {
+ $db->insert(
+ 'stations',
+ {
+ eva => $row->{eva},
+ ds100 => $row->{ds100},
+ name => $row->{name},
+ lat => $row->{lat},
+ lon => $row->{lon},
+ archived => $row->{archived},
+ source => 1,
+ }
+ );
+ if ( $count++ % 1000 == 0 ) {
+ printf( " %2.0f%% complete\n", $count * 100 / $total );
+ }
+ }
+
+ # Occasionally, IRIS checkins refer to stations that are not part of
+ # the Travel::Status::DE::IRIS database. Add those as HAFAS stops to
+ # satisfy the upcoming foreign key constraints.
+
+ my %iris_has_eva;
+ $res = $db->query(qq{select eva from stations where source = 0;});
+ while ( my $row = $res->hash ) {
+ $iris_has_eva{ $row->{eva} } = 1;
+ }
+
+ my %hafas_by_eva;
+ $res = $db->query(qq{select * from stations where source = 1;});
+ while ( my $row = $res->hash ) {
+ $hafas_by_eva{ $row->{eva} } = $row;
+ }
+
+ my @iris_ref_stations;
+ $res
+ = $db->query(
+qq{select distinct checkin_station_id from journeys where backend_id = 0;}
+ );
+ while ( my $row = $res->hash ) {
+ push( @iris_ref_stations, $row->{checkin_station_id} );
+ }
+ $res
+ = $db->query(
+qq{select distinct checkout_station_id from journeys where backend_id = 0;}
+ );
+ while ( my $row = $res->hash ) {
+ push( @iris_ref_stations, $row->{checkout_station_id} );
+ }
+ $res
+ = $db->query(
+qq{select distinct checkin_station_id from in_transit where backend_id = 0;}
+ );
+ while ( my $row = $res->hash ) {
+ push( @iris_ref_stations, $row->{checkin_station_id} );
+ }
+ $res
+ = $db->query(
+qq{select distinct checkout_station_id from in_transit where backend_id = 0;}
+ );
+ while ( my $row = $res->hash ) {
+ if ( $row->{checkout_station_id} ) {
+ push( @iris_ref_stations, $row->{checkout_station_id} );
+ }
+ }
+
+ @iris_ref_stations = List::Util::uniq @iris_ref_stations;
+
+ for my $station (@iris_ref_stations) {
+ if ( not $iris_has_eva{$station} ) {
+ $hafas_by_eva{$station}{source} = 0;
+ $hafas_by_eva{$station}{archived} = 1;
+ $db->insert( 'stations', $hafas_by_eva{$station} );
+ }
+ }
+
+ $db->query(
+ qq{
+ alter table in_transit add constraint in_transit_checkin_eva_fk
+ foreign key (checkin_station_id, backend_id)
+ references stations (eva, source);
+ alter table in_transit add constraint in_transit_checkout_eva_fk
+ foreign key (checkout_station_id, backend_id)
+ references stations (eva, source);
+ alter table journeys add constraint journeys_checkin_eva_fk
+ foreign key (checkin_station_id, backend_id)
+ references stations (eva, source);
+ alter table journeys add constraint journeys_checkout_eva_fk
+ foreign key (checkout_station_id, backend_id)
+ references stations (eva, source);
+ drop view in_transit_str;
+ drop view journeys_str;
+ drop view follows_in_transit;
+ create view in_transit_str as select
+ user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.ris as is_ris,
+ backend.name as backend_name, in_transit.backend_id as backend_id,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ left join backends as backend on in_transit.backend_id = backend.id
+ ;
+ create view journeys_str as select
+ journeys.id as journey_id, user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.ris as is_ris,
+ backend.name as backend_name, journeys.backend_id as backend_id,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, edited, route, messages, user_data,
+ dep_platform, arr_platform
+ from journeys
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and journeys.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and journeys.backend_id = arr_station.source
+ left join backends as backend on journeys.backend_id = backend.id
+ ;
+ create view follows_in_transit as select
+ r1.subject_id as follower_id, user_id as followee_id,
+ users.name as followee_name,
+ train_type, train_line, train_no, train_id,
+ in_transit.backend_id as backend_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join relations as r1 on r1.predicate = 1 and r1.object_id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ order by checkin_time desc
+ ;
+ create view users_with_backend as select
+ users.id as id, users.name as name, status, public_level,
+ email, password, registered_at, last_seen,
+ deletion_requested, deletion_notified, use_history,
+ accept_follows, notifications, profile, backend_id, iris,
+ hafas, efa, ris, backend.name as backend_name
+ from users
+ left join backends as backend on users.backend_id = backend.id
+ ;
+ update schema_version set version = 55;
+ update schema_version set hafas = '0';
+ }
+ );
+ say
+ 'This travelynx instance now has support for non-DB HAFAS backends.';
+ say
+'If the migration fails due to a deadlock, re-run it after stopping all background workers.';
+ },
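With stations now keyed by (eva, source), a lookup that previously used the EVA alone needs the backend ID as well. A minimal sketch, assuming a Mojo::Pg database handle in $db and a known $backend_id:

    my $station = $db->select(
        'stations',
        [ 'name', 'lat', 'lon' ],
        { eva => 8000098, source => $backend_id }
    )->hash;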
+
+ # v55 -> v56
+ # include backend data in dumpstops command
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ create view stations_str as
+ select stations.name as name,
+ eva, lat, lon,
+ backends.name as backend,
+ iris as is_iris,
+ hafas as is_hafas,
+ efa as is_efa,
+ ris as is_ris
+ from stations
+ left join backends
+ on source = backends.id;
+ update schema_version set version = 56;
+ }
+ );
+ },
+
+ # v56 -> v57
+ # Berlin Hbf used to be divided between "Berlin Hbf" (8011160) and "Berlin
+ # Hbf (tief)" (8098160). Since 2024, both are called "Berlin Hbf".
+	# As there are some places in the IRIS backend where station names are
+	# mapped to EVA IDs, the name collision may have led to inconsistent EVA
+	# mappings. As of 2.8.21, travelynx deals with this IRIS edge case (and
+	# probably with similar edge cases in Karlsruhe).
+	# Rebuild stats to ensure no bogus data is left in there.
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ truncate journey_stats;
+ update schema_version set version = 57;
+ }
+ );
+ },
+
+ # v57 -> v58
+ # Add backend data to follows_in_transit
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ drop view follows_in_transit;
+ create view follows_in_transit as select
+ r1.subject_id as follower_id, user_id as followee_id,
+ users.name as followee_name,
+ train_type, train_line, train_no, train_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.ris as is_ris,
+ backend.name as backend_name, in_transit.backend_id as backend_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join relations as r1 on r1.predicate = 1 and r1.object_id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ left join backends as backend on in_transit.backend_id = backend.id
+ order by checkin_time desc
+ ;
+ update schema_version set version = 58;
+ }
+ );
+ },
+
+ # v58 -> v59
+ # DB HAFAS is dead. Default to DB IRIS for now.
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ alter table users alter column backend_id set default 0;
+ update schema_version set version = 59;
+ }
+ );
+ },
+
+ # v59 -> v60
+ # Add bahn.de / DBRIS backend
+ sub {
+ my ($db) = @_;
+ $db->insert(
+ 'backends',
+ {
+ iris => 0,
+ hafas => 0,
+ efa => 0,
+ ris => 1,
+ name => 'bahn.de',
+ },
+ );
+ $db->query(
+ qq{
+ update schema_version set version = 60;
+ }
+ );
+ },
+
+ # v60 -> v61
+ # Rename "ris" / "is_ris" to "dbris" / "is_dbris", as it is DB-specific
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ drop view in_transit_str;
+ drop view journeys_str;
+ drop view users_with_backend;
+ drop view follows_in_transit;
+ alter table backends rename column ris to dbris;
+ create view in_transit_str as select
+ user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.name as backend_name, in_transit.backend_id as backend_id,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ left join backends as backend on in_transit.backend_id = backend.id
+ ;
+ create view journeys_str as select
+ journeys.id as journey_id, user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.name as backend_name, journeys.backend_id as backend_id,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, edited, route, messages, user_data,
+ dep_platform, arr_platform
+ from journeys
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and journeys.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and journeys.backend_id = arr_station.source
+ left join backends as backend on journeys.backend_id = backend.id
+ ;
+ create view users_with_backend as select
+ users.id as id, users.name as name, status, public_level,
+ email, password, registered_at, last_seen,
+ deletion_requested, deletion_notified, use_history,
+ accept_follows, notifications, profile, backend_id, iris,
+ hafas, efa, dbris, backend.name as backend_name
+ from users
+ left join backends as backend on users.backend_id = backend.id
+ ;
+ create view follows_in_transit as select
+ r1.subject_id as follower_id, user_id as followee_id,
+ users.name as followee_name,
+ train_type, train_line, train_no, train_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.name as backend_name, in_transit.backend_id as backend_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join relations as r1 on r1.predicate = 1 and r1.object_id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ left join backends as backend on in_transit.backend_id = backend.id
+ order by checkin_time desc
+ ;
+ update schema_version set version = 61;
+ }
+ );
+ },
+
+ # v61 -> v62
+ # Add MOTIS backend type, add RNV and transitous MOTIS backends
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ alter table backends add column motis bool default false;
+ alter table schema_version add column motis varchar(12);
+
+ create table stations_external_ids (
+ eva serial not null primary key,
+ backend_id smallint not null,
+ external_id text not null,
+
+ unique (backend_id, external_id),
+ foreign key (eva, backend_id) references stations (eva, source)
+ );
+
+ create view stations_with_external_ids as select
+ stations.*, stations_external_ids.external_id
+ from stations
+ left join stations_external_ids on
+ stations.eva = stations_external_ids.eva and
+ stations.source = stations_external_ids.backend_id
+ ;
+
+ drop view in_transit_str;
+ drop view journeys_str;
+ drop view users_with_backend;
+ drop view follows_in_transit;
+
+ create view in_transit_str as select
+ user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.motis as is_motis,
+ backend.name as backend_name, in_transit.backend_id as backend_id,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ dep_station_external_id.external_id as dep_external_id,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ arr_station_external_id.external_id as arr_external_id,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ left join stations_external_ids as dep_station_external_id on checkin_station_id = dep_station_external_id.eva and in_transit.backend_id = dep_station_external_id.backend_id
+ left join stations_external_ids as arr_station_external_id on checkout_station_id = arr_station_external_id.eva and in_transit.backend_id = arr_station_external_id.backend_id
+ left join backends as backend on in_transit.backend_id = backend.id
+ ;
+ create view journeys_str as select
+ journeys.id as journey_id, user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.motis as is_motis,
+ backend.name as backend_name, journeys.backend_id as backend_id,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ dep_station_external_id.external_id as dep_external_id,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ arr_station_external_id.external_id as arr_external_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, edited, route, messages, user_data,
+ dep_platform, arr_platform
+ from journeys
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and journeys.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and journeys.backend_id = arr_station.source
+ left join stations_external_ids as dep_station_external_id on checkin_station_id = dep_station_external_id.eva and journeys.backend_id = dep_station_external_id.backend_id
+ left join stations_external_ids as arr_station_external_id on checkout_station_id = arr_station_external_id.eva and journeys.backend_id = arr_station_external_id.backend_id
+ left join backends as backend on journeys.backend_id = backend.id
+ ;
+ create view users_with_backend as select
+ users.id as id, users.name as name, status, public_level,
+ email, password, registered_at, last_seen,
+ deletion_requested, deletion_notified, use_history,
+ accept_follows, notifications, profile, backend_id, iris,
+ hafas, efa, dbris, motis, backend.name as backend_name
+ from users
+ left join backends as backend on users.backend_id = backend.id
+ ;
+ create view follows_in_transit as select
+ r1.subject_id as follower_id, user_id as followee_id,
+ users.name as followee_name,
+ train_type, train_line, train_no, train_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.motis as is_motis,
+ backend.name as backend_name, in_transit.backend_id as backend_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join relations as r1 on r1.predicate = 1 and r1.object_id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ left join backends as backend on in_transit.backend_id = backend.id
+ order by checkin_time desc
+ ;
+ }
+ );
+ $db->query(
+ qq{
+ update schema_version set version = 62;
+ }
+ );
+ },
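The new stations_external_ids table maps non-numeric stop IDs, as used by MOTIS, onto travelynx's internal EVA key space, and stations_with_external_ids joins both tables. A minimal lookup sketch; the external ID value is made up:

    my $stop = $db->select(
        'stations_with_external_ids',
        [ 'eva', 'name', 'lat', 'lon' ],
        { external_id => 'de-DELFI_de:05113:9289', source => $backend_id }
    )->hash;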
+
+ # v62 -> v63
+ # Add EFA backend support
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ alter table schema_version add column efa varchar(12);
+ update schema_version set version = 63;
+ update schema_version set efa = '0';
+ }
+ );
+ },
+
+ # v63 -> v64
+ # Relax train_type length constraints for EFA and MOTIS checkins
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ drop view in_transit_str;
+ drop view journeys_str;
+ drop view users_with_backend;
+ drop view follows_in_transit;
+
+ alter table in_transit alter column train_type type varchar(32);
+ alter table journeys alter column train_type type varchar(32);
+
+ create view in_transit_str as select
+ user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.motis as is_motis,
+ backend.name as backend_name, in_transit.backend_id as backend_id,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ dep_station_external_id.external_id as dep_external_id,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ arr_station_external_id.external_id as arr_external_id,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ left join stations_external_ids as dep_station_external_id on checkin_station_id = dep_station_external_id.eva and in_transit.backend_id = dep_station_external_id.backend_id
+ left join stations_external_ids as arr_station_external_id on checkout_station_id = arr_station_external_id.eva and in_transit.backend_id = arr_station_external_id.backend_id
+ left join backends as backend on in_transit.backend_id = backend.id
+ ;
+ create view journeys_str as select
+ journeys.id as journey_id, user_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.motis as is_motis,
+ backend.name as backend_name, journeys.backend_id as backend_id,
+ train_type, train_line, train_no, train_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ dep_station_external_id.external_id as dep_external_id,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ arr_station_external_id.external_id as arr_external_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, edited, route, messages, user_data,
+ dep_platform, arr_platform
+ from journeys
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and journeys.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and journeys.backend_id = arr_station.source
+ left join stations_external_ids as dep_station_external_id on checkin_station_id = dep_station_external_id.eva and journeys.backend_id = dep_station_external_id.backend_id
+ left join stations_external_ids as arr_station_external_id on checkout_station_id = arr_station_external_id.eva and journeys.backend_id = arr_station_external_id.backend_id
+ left join backends as backend on journeys.backend_id = backend.id
+ ;
+ create view users_with_backend as select
+ users.id as id, users.name as name, status, public_level,
+ email, password, registered_at, last_seen,
+ deletion_requested, deletion_notified, use_history,
+ accept_follows, notifications, profile, backend_id, iris,
+ hafas, efa, dbris, motis, backend.name as backend_name
+ from users
+ left join backends as backend on users.backend_id = backend.id
+ ;
+ create view follows_in_transit as select
+ r1.subject_id as follower_id, user_id as followee_id,
+ users.name as followee_name,
+ train_type, train_line, train_no, train_id,
+ backend.iris as is_iris, backend.hafas as is_hafas,
+ backend.efa as is_efa, backend.dbris as is_dbris,
+ backend.motis as is_motis,
+ backend.name as backend_name, in_transit.backend_id as backend_id,
+ extract(epoch from checkin_time) as checkin_ts,
+ extract(epoch from sched_departure) as sched_dep_ts,
+ extract(epoch from real_departure) as real_dep_ts,
+ checkin_station_id as dep_eva,
+ dep_station.ds100 as dep_ds100,
+ dep_station.name as dep_name,
+ dep_station.lat as dep_lat,
+ dep_station.lon as dep_lon,
+ extract(epoch from checkout_time) as checkout_ts,
+ extract(epoch from sched_arrival) as sched_arr_ts,
+ extract(epoch from real_arrival) as real_arr_ts,
+ checkout_station_id as arr_eva,
+ arr_station.ds100 as arr_ds100,
+ arr_station.name as arr_name,
+ arr_station.lat as arr_lat,
+ arr_station.lon as arr_lon,
+ polyline_id,
+ polylines.polyline as polyline,
+ visibility,
+ coalesce(visibility, users.public_level & 127) as effective_visibility,
+ cancelled, route, messages, user_data,
+ dep_platform, arr_platform, data
+ from in_transit
+ left join polylines on polylines.id = polyline_id
+ left join users on users.id = user_id
+ left join relations as r1 on r1.predicate = 1 and r1.object_id = user_id
+ left join stations as dep_station on checkin_station_id = dep_station.eva and in_transit.backend_id = dep_station.source
+ left join stations as arr_station on checkout_station_id = arr_station.eva and in_transit.backend_id = arr_station.source
+ left join backends as backend on in_transit.backend_id = backend.id
+ order by checkin_time desc
+ ;
+
+ update schema_version set version = 64;
+ }
+ );
+ },
+
+ # v64 -> v65
+ # stations_str: add is_motis
+ sub {
+ my ($db) = @_;
+ $db->query(
+ qq{
+ drop view stations_str;
+ create view stations_str as
+ select stations.name as name,
+ eva, lat, lon,
+ backends.name as backend,
+ dbris as is_dbris,
+ efa as is_efa,
+ iris as is_iris,
+ hafas as is_hafas,
+ motis as is_motis
+ from stations
+ left join backends
+ on source = backends.id;
+ update schema_version set version = 65;
+ }
+ );
+ },
);
sub sync_stations {
@@ -1977,7 +3239,7 @@ sub sync_stations {
},
{
on_conflict => \
-'(eva) do update set archived = false, source = 0, ds100 = EXCLUDED.ds100, name=EXCLUDED.name, lat=EXCLUDED.lat, lon=EXCLUDED.lon'
+'(eva, source) do update set archived = false, source = 0, ds100 = EXCLUDED.ds100, name=EXCLUDED.name, lat=EXCLUDED.lat, lon=EXCLUDED.lon'
}
);
if ( $count++ % 1000 == 0 ) {
@@ -2136,6 +3398,99 @@ sub sync_stations {
}
}
+sub sync_backends_efa {
+ my ($db) = @_;
+ for my $service ( Travel::Status::DE::EFA::get_services() ) {
+ my $present = $db->select(
+ 'backends',
+ 'count(*) as count',
+ {
+ efa => 1,
+ name => $service->{shortname}
+ }
+ )->hash->{count};
+ if ( not $present ) {
+ $db->insert(
+ 'backends',
+ {
+ dbris => 0,
+ efa => 1,
+ hafas => 0,
+ iris => 0,
+ motis => 0,
+ name => $service->{shortname},
+ },
+ { on_conflict => undef }
+ );
+ }
+ }
+
+ $db->update( 'schema_version',
+ { efa => $Travel::Status::DE::EFA::VERSION } );
+}
+
+sub sync_backends_hafas {
+ my ($db) = @_;
+ for my $service ( Travel::Status::DE::HAFAS::get_services() ) {
+ my $present = $db->select(
+ 'backends',
+ 'count(*) as count',
+ {
+ hafas => 1,
+ name => $service->{shortname}
+ }
+ )->hash->{count};
+ if ( not $present ) {
+ $db->insert(
+ 'backends',
+ {
+ dbris => 0,
+ efa => 0,
+ hafas => 1,
+ iris => 0,
+ motis => 0,
+ name => $service->{shortname},
+ },
+ { on_conflict => undef }
+ );
+ }
+ }
+
+ $db->update( 'schema_version',
+ { hafas => $Travel::Status::DE::HAFAS::VERSION } );
+}
+
+sub sync_backends_motis {
+ my ($db) = @_;
+ for my $service ( Travel::Status::MOTIS::get_services() ) {
+ my $present = $db->select(
+ 'backends',
+ 'count(*) as count',
+ {
+ motis => 1,
+ name => $service->{shortname}
+ }
+ )->hash->{count};
+ if ( not $present ) {
+ $db->insert(
+ 'backends',
+ {
+ dbris => 0,
+ efa => 0,
+ hafas => 0,
+ iris => 0,
+ motis => 1,
+ name => $service->{shortname},
+ },
+ { on_conflict => undef }
+ );
+ }
+ }
+
+ $db->update( 'schema_version',
+ { motis => $Travel::Status::MOTIS::VERSION } );
+}
+
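sync_backends_efa, sync_backends_hafas, and sync_backends_motis differ only in the flag column they set and the module they query. A possible shared helper, shown here as a sketch and not part of this change:

    sub sync_backends {
        my ( $db, $flag, @services ) = @_;
        for my $service (@services) {
            my $present = $db->select(
                'backends',
                'count(*) as count',
                { $flag => 1, name => $service->{shortname} }
            )->hash->{count};
            if ( not $present ) {
                $db->insert(
                    'backends',
                    {
                        # all flags default to 0, then the requested one wins
                        dbris => 0, efa => 0, hafas => 0, iris => 0, motis => 0,
                        $flag => 1,
                        name  => $service->{shortname},
                    },
                    { on_conflict => undef }
                );
            }
        }
    }

    # e.g. sync_backends( $db, 'hafas', Travel::Status::DE::HAFAS::get_services() );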
sub setup_db {
my ($db) = @_;
my $tx = $db->begin;
@@ -2202,9 +3557,9 @@ sub migrate_db {
}
my $iris_version = get_schema_version( $db, 'iris' );
- say "Found IRIS station database v${iris_version}";
+ say "Found IRIS station table v${iris_version}";
if ( $iris_version eq $Travel::Status::DE::IRIS::Stations::VERSION ) {
- say 'Station database is up-to-date';
+ say 'Station table is up-to-date';
}
else {
eval {
@@ -2223,6 +3578,39 @@ sub migrate_db {
}
}
+ my $efa_version = get_schema_version( $db, 'efa' );
+ say "Found backend table for EFA v${efa_version}";
+ if ( $efa_version eq $Travel::Status::DE::EFA::VERSION ) {
+ say 'Backend table is up-to-date';
+ }
+ else {
+ say
+"Synchronizing with Travel::Status::DE::EFA $Travel::Status::DE::EFA::VERSION";
+ sync_backends_efa($db);
+ }
+
+ my $hafas_version = get_schema_version( $db, 'hafas' );
+ say "Found backend table for HAFAS v${hafas_version}";
+ if ( $hafas_version eq $Travel::Status::DE::HAFAS::VERSION ) {
+ say 'Backend table is up-to-date';
+ }
+ else {
+ say
+"Synchronizing with Travel::Status::DE::HAFAS $Travel::Status::DE::HAFAS::VERSION";
+ sync_backends_hafas($db);
+ }
+
+ my $motis_version = get_schema_version( $db, 'motis' ) // '0';
+ say "Found backend table for Motis v${motis_version}";
+ if ( $motis_version eq $Travel::Status::MOTIS::VERSION ) {
+ say 'Backend table is up-to-date';
+ }
+ else {
+ say
+"Synchronizing with Travel::Status::MOTIS $Travel::Status::MOTIS::VERSION";
+ sync_backends_motis($db);
+ }
+
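After a successful run, schema_version thus tracks the schema revision alongside the versions of the bundled station and backend lists, roughly as follows (column order and values illustrative):

    version | iris | hafas | efa  | motis | travelynx
    --------+------+-------+------+-------+----------
         65 | 1.74 | 6.03  | 3.01 | 0.04  | 2.x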
$db->update( 'schema_version',
{ travelynx => $self->app->config->{version} } );
diff --git a/lib/Travelynx/Command/dumpconfig.pm b/lib/Travelynx/Command/dumpconfig.pm
index 600ffb0..2c308c9 100644
--- a/lib/Travelynx/Command/dumpconfig.pm
+++ b/lib/Travelynx/Command/dumpconfig.pm
@@ -1,4 +1,5 @@
package Travelynx::Command::dumpconfig;
+
# Copyright (C) 2020-2023 Birte Kristina Friesel
#
# SPDX-License-Identifier: AGPL-3.0-or-later
diff --git a/lib/Travelynx/Command/dumpstops.pm b/lib/Travelynx/Command/dumpstops.pm
index e6740ec..15f5861 100644
--- a/lib/Travelynx/Command/dumpstops.pm
+++ b/lib/Travelynx/Command/dumpstops.pm
@@ -1,6 +1,6 @@
package Travelynx::Command::dumpstops;
-# Copyright (C) 2024 Birte Kristina Friesel
+# Copyright (C) 2024-2025 Birte Kristina Friesel
#
# SPDX-License-Identifier: AGPL-3.0-or-later
@@ -8,7 +8,7 @@ use Mojo::Base 'Mojolicious::Command';
use List::Util qw();
use Text::CSV;
-has description => 'Export HAFAS/IRIS stops to CSV';
+has description => 'Export known stops to CSV';
has usage => sub { shift->extract_usage };
@@ -24,12 +24,13 @@ sub run {
or die("open($filename): $!\n");
my $csv = Text::CSV->new( { eol => "\r\n" } );
- $csv->combine(qw(name eva lat lon source archived));
+ $csv->combine(qw(name eva lat lon backend is_dbris is_efa is_iris is_hafas is_motis));
print $fh $csv->string;
my $iter = $self->app->stations->get_db_iterator;
while ( my $row = $iter->hash ) {
- $csv->combine( @{$row}{qw{name eva lat lon source archived}} );
+ $csv->combine(
+ @{$row}{qw{name eva lat lon backend is_dbris is_efa is_iris is_hafas is_motis}} );
print $fh $csv->string;
}
close($fh);
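The exported CSV now carries one boolean column per backend type instead of the old source/archived pair; a row might look like this (values illustrative):

    name,eva,lat,lon,backend,is_dbris,is_efa,is_iris,is_hafas,is_motis
    Essen Hbf,8000098,51.4513,7.0131,DB,0,0,0,1,0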
diff --git a/lib/Travelynx/Command/influxdb.pm b/lib/Travelynx/Command/influxdb.pm
index f3fc3de..4b779a2 100644
--- a/lib/Travelynx/Command/influxdb.pm
+++ b/lib/Travelynx/Command/influxdb.pm
@@ -29,7 +29,7 @@ sub run {
my $active = $now->clone->subtract( months => 1 );
my @stats;
- my @stations;
+ my @backend_stats;
my @traewelling;
push(
@@ -85,50 +85,31 @@ sub run {
)
);
- push(
- @stations,
- query_to_influx(
- 'iris',
- $db->select(
- 'stations',
- 'count(*) as count',
- {
- source => 0,
- archived => 0
- }
- )->hash->{count}
- )
- );
- push(
- @stations,
- query_to_influx(
- 'hafas',
- $db->select(
- 'stations',
- 'count(*) as count',
- {
- source => 1,
- archived => 0
- }
- )->hash->{count}
- )
- );
- push(
- @stations,
- query_to_influx(
- 'archived',
- $db->select( 'stations', 'count(*) as count', { archived => 1 } )
- ->hash->{count}
- )
- );
- push(
- @stations,
- query_to_influx(
- 'meta',
- $db->select( 'related_stations', 'count(*) as count' )
- ->hash->{count}
- )
- );
+ my @backends = $self->app->stations->get_backends;
+
+ for my $backend (@backends) {
+ push(
+ @backend_stats,
+ [
+ $backend->{iris} ? 'IRIS' : $backend->{name},
+ $db->select(
+ 'stations',
+ 'count(*) as count',
+ {
+ source => $backend->{id},
+ archived => 0
+ }
+ )->hash->{count},
+ $db->select(
+ 'related_stations',
+ 'count(*) as count',
+ {
+ backend_id => $backend->{id},
+ }
+ )->hash->{count}
+ ]
+ );
+ }
push(
@traewelling,
@@ -167,10 +148,18 @@ sub run {
. $self->app->config->{influxdb}->{url}
. ' stats '
. join( ',', @stats ) );
- $self->app->log->debug( 'POST '
- . $self->app->config->{influxdb}->{url}
- . ' stations '
- . join( ',', @stations ) );
+ for my $backend_entry (@backend_stats) {
+ $self->app->log->debug(
+ 'POST '
+ . $self->app->config->{influxdb}->{url}
+ . ' stations,backend='
+ . $backend_entry->[0]
+ . sprintf(
+ ' count=%d,meta=%d',
+ $backend_entry->[1], $backend_entry->[2]
+ )
+ );
+ }
$self->app->log->debug( 'POST '
. $self->app->config->{influxdb}->{url}
. ' traewelling '
@@ -181,10 +170,16 @@ sub run {
$self->app->config->{influxdb}->{url},
'stats ' . join( ',', @stats )
)->wait;
- $self->app->ua->post_p(
- $self->app->config->{influxdb}->{url},
- 'stations ' . join( ',', @stations )
- )->wait;
+ my $buf = q{};
+ for my $backend_entry (@backend_stats) {
+ $buf
+ .= "\nstations,backend="
+ . $backend_entry->[0]
+ . sprintf( ' count=%d,meta=%d',
+ $backend_entry->[1], $backend_entry->[2] );
+ }
+ $self->app->ua->post_p( $self->app->config->{influxdb}->{url}, $buf )
+ ->wait;
$self->app->ua->post_p(
$self->app->config->{influxdb}->{url},
'traewelling ' . join( ',', @traewelling )
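Each InfluxDB write now tags station counts with the backend name, producing one line-protocol entry per backend instead of a single aggregate line, for instance (counts invented):

    stations,backend=IRIS count=8430,meta=2190
    stations,backend=DB count=12045,meta=0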
diff --git a/lib/Travelynx/Command/integritycheck.pm b/lib/Travelynx/Command/integritycheck.pm
index 4894c3d..be5fe71 100644
--- a/lib/Travelynx/Command/integritycheck.pm
+++ b/lib/Travelynx/Command/integritycheck.pm
@@ -9,58 +9,60 @@ use List::Util qw();
use Travel::Status::DE::IRIS::Stations;
sub run {
- my ($self) = @_;
- my $found = 0;
- my $db = $self->app->pg->db;
-
- my $res1 = $db->query(
- qq{
- select checkin_station_id
- from journeys
- left join stations on journeys.checkin_station_id = stations.eva
- where stations.eva is null;
- }
- );
-
- my $res2 = $db->query(
- qq{
- select checkout_station_id
- from journeys
- left join stations on journeys.checkout_station_id = stations.eva
- where stations.eva is null;
- }
- );
-
- my %notified;
- while ( my $row = $res1->hash ) {
- my $eva = $row->{checkin_station_id};
- if ( not $found ) {
- $found = 1;
- say
+ my ( $self, $mode ) = @_;
+ my $found = 0;
+ my $db = $self->app->pg->db;
+
+ if ( $mode eq 'all' or $mode eq 'unknown-evas' ) {
+
+ my %notified;
+ my $res1 = $db->query(
+ qq{
+ select checkin_station_id
+ from journeys
+ left join stations on journeys.checkin_station_id = stations.eva
+ where stations.eva is null;
+ }
+ );
+ my $res2 = $db->query(
+ qq{
+ select checkout_station_id
+ from journeys
+ left join stations on journeys.checkout_station_id = stations.eva
+ where stations.eva is null;
+ }
+ );
+
+ while ( my $row = $res1->hash ) {
+ my $eva = $row->{checkin_station_id};
+ if ( not $found ) {
+ $found = 1;
+ say
'Journeys in the travelynx database contain the following unknown EVA IDs.';
- say '------------8<----------';
- say 'Travel::Status::DE::IRIS v'
- . $Travel::Status::DE::IRIS::Stations::VERSION;
- }
- if ( not $notified{$eva} ) {
- say $eva;
- $notified{$eva} = 1;
+ say '------------8<----------';
+ say 'Travel::Status::DE::IRIS v'
+ . $Travel::Status::DE::IRIS::Stations::VERSION;
+ }
+ if ( not $notified{$eva} ) {
+ say $eva;
+ $notified{$eva} = 1;
+ }
}
- }
- while ( my $row = $res2->hash ) {
- my $eva = $row->{checkout_station_id};
- if ( not $found ) {
- $found = 1;
- say
+ while ( my $row = $res2->hash ) {
+ my $eva = $row->{checkout_station_id};
+ if ( not $found ) {
+ $found = 1;
+ say
'Journeys in the travelynx database contain the following unknown EVA IDs.';
- say '------------8<----------';
- say 'Travel::Status::DE::IRIS v'
- . $Travel::Status::DE::IRIS::Stations::VERSION;
- }
- if ( not $notified{$eva} ) {
- say $eva;
- $notified{$eva} = 1;
+ say '------------8<----------';
+ say 'Travel::Status::DE::IRIS v'
+ . $Travel::Status::DE::IRIS::Stations::VERSION;
+ }
+ if ( not $notified{$eva} ) {
+ say $eva;
+ $notified{$eva} = 1;
+ }
}
}
@@ -70,42 +72,101 @@ sub run {
$found = 0;
}
- my $rename = $self->app->renamed_station;
+ if ( $mode eq 'all' or $mode eq 'unknown-route-entries' ) {
- my $res = $db->select( 'journeys', [ 'route', 'edited' ] )->expand;
- while ( my $j = $res->hash ) {
- if ( $j->{edited} & 0x0010 ) {
- next;
- }
- my @stops = @{ $j->{route} // [] };
- for my $stop (@stops) {
- my $stop_name = $stop->[0];
- if ( $rename->{ $stop->[0] } ) {
- $stop->[0] = $rename->{ $stop->[0] };
+ my %notified;
+ my $rename = $self->app->renamed_station;
+ my $res = $db->select( 'journeys', [ 'route', 'edited' ] )->expand;
+
+ while ( my $j = $res->hash ) {
+ if ( $j->{edited} & 0x0010 ) {
+ next;
+ }
+ my @stops = @{ $j->{route} // [] };
+ for my $stop (@stops) {
+ my $stop_name = $stop->[0];
+ if ( $rename->{ $stop->[0] } ) {
+ $stop->[0] = $rename->{ $stop->[0] };
+ }
+ }
+ my @unknown
+ = $self->app->stations->grep_unknown( map { $_->[0] } @stops );
+ for my $stop_name (@unknown) {
+ if ( not $notified{$stop_name} ) {
+ if ( not $found ) {
+ say
+'Journeys in the travelynx database contain the following unknown route entries.';
+ say
+ 'Note that this check ignores manual route entries.';
+ say
+'All reports refer to routes obtained via HAFAS/IRIS.';
+ say '------------8<----------';
+ say 'Travel::Status::DE::IRIS v'
+ . $Travel::Status::DE::IRIS::Stations::VERSION;
+ $found = 1;
+ }
+ say $stop_name;
+ $notified{$stop_name} = 1;
+ }
}
}
- my @unknown
- = $self->app->stations->grep_unknown( map { $_->[0] } @stops );
- for my $stop_name (@unknown) {
- if ( not $notified{$stop_name} ) {
+ }
+
+ if ($found) {
+ say '------------8<----------';
+ say '';
+ $found = 0;
+ }
+
+ if ( $mode eq 'all' or $mode eq 'checkout-eva-vs-route-eva' ) {
+
+ my $res = $db->select(
+ 'journeys_str',
+ [ 'journey_id', 'sched_arr_ts', 'route', 'arr_name', 'arr_eva' ],
+ { backend_id => 0 }
+ )->expand;
+
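+ # compare each journey's stored checkout EVA ID against the route entry
+ # whose scheduled arrival matches the journey's scheduled arrival;
+ # a mismatch hints at inconsistent route data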
+ journey: while ( my $j = $res->hash ) {
+ my $found_in_route;
+ my $found_arr;
+ for my $stop ( @{ $j->{route} // [] } ) {
+ if ( not $stop->[1] ) {
+ next journey;
+ }
+ if ( $stop->[1] == $j->{arr_eva} ) {
+ $found_in_route = 1;
+ last;
+ }
+ if ( $stop->[2]{sched_arr}
+ and $j->{sched_arr_ts}
+ and $stop->[2]{sched_arr} == int( $j->{sched_arr_ts} ) )
+ {
+ $found_arr = $stop;
+ }
+ }
+ if ( $found_arr and not $found_in_route ) {
if ( not $found ) {
+ say q{};
say
-'Journeys in the travelynx database contain the following unknown route entries.';
- say 'Note that this check ignores manual route entries.';
- say 'All reports refer to routes obtained via HAFAS/IRIS.';
+'The following journeys have route entries which do not agree with the checkout EVA ID.';
+ say
+'checkout station ID (left) vs route entry with matching scheduled arrival (right)';
say '------------8<----------';
- say 'Travel::Status::DE::IRIS v'
- . $Travel::Status::DE::IRIS::Stations::VERSION;
$found = 1;
}
- say $stop_name;
- $notified{$stop_name} = 1;
+ printf(
+ "%7d %d (%s) vs %d (%s)\n",
+ $j->{journey_id}, $j->{arr_eva}, $j->{arr_name},
+ $found_arr->[1], $found_arr->[0]
+ );
}
}
}
+
if ($found) {
say '------------8<----------';
say '';
+ $found = 0;
}
}
diff --git a/lib/Travelynx/Command/maintenance.pm b/lib/Travelynx/Command/maintenance.pm
index c9c7ed6..7baf762 100644
--- a/lib/Travelynx/Command/maintenance.pm
+++ b/lib/Travelynx/Command/maintenance.pm
@@ -153,22 +153,6 @@ sub run {
}
$tx->commit;
-
- # Computing stats may take a while, but we've got all time in the
- # world here. This means users won't have to wait when loading their
- # own journey log.
- say 'Generating missing stats ...';
- for
- my $user ( $db->select( 'users', ['id'], { status => 1 } )->hashes->each )
- {
- $tx = $db->begin;
- $self->app->journeys->generate_missing_stats( uid => $user->{id} );
- $self->app->journeys->get_stats(
- uid => $user->{id},
- year => $now->year
- );
- $tx->commit;
- }
}
1;
diff --git a/lib/Travelynx/Command/traewelling.pm b/lib/Travelynx/Command/traewelling.pm
index 4c47e84..e4e0134 100644
--- a/lib/Travelynx/Command/traewelling.pm
+++ b/lib/Travelynx/Command/traewelling.pm
@@ -20,6 +20,12 @@ sub pull_sync {
my $request_count = 0;
for my $account_data ( $self->app->traewelling->get_pull_accounts ) {
+ if ( -e 'maintenance' ) {
+ $self->app->log->debug(
+ 'traewelling: "maintenance" file found, aborting');
+ return;
+ }
+
my $in_transit = $self->app->in_transit->get(
uid => $account_data->{user_id},
);
@@ -30,6 +36,13 @@ sub pull_sync {
next;
}
+ if ( not defined $account_data->{data}{user_name} ) {
+ $self->app->log->debug(
+"travelynx user $account_data->{user_id} has a Traewellig connection, but no username"
+ );
+ next;
+ }
+
# $account_data->{user_id} is the travelynx uid
# $account_data->{user_name} is the Träwelling username
$request_count += 1;
@@ -39,7 +52,7 @@ sub pull_sync {
# In 'work', the event loop is not running,
# so there's no need to multiply by $request_count at the moment
- Mojo::Promise->timer(1)->then(
+ Mojo::Promise->timer(1.5)->then(
sub {
return $self->app->traewelling_api->get_status_p(
username => $account_data->{data}{user_name},
@@ -77,6 +90,13 @@ sub push_sync {
my %push_result;
for my $candidate ( $self->app->traewelling->get_pushable_accounts ) {
+
+ if ( -e 'maintenance' ) {
+ $self->app->log->debug(
+ 'traewelling: "maintenance" file found, aborting');
+ return;
+ }
+
$self->app->log->debug(
"Pushing to Traewelling for UID $candidate->{uid}");
my $trip_id = $candidate->{journey_data}{trip_id};
@@ -102,12 +122,12 @@ sub push_sync {
my ($status) = @_;
$push_result{ $status->{http} } += 1;
}
- )->catch(
+ )->catch(
sub {
my ($status) = @_;
$push_result{ $status->{http} // 0 } += 1;
}
- )->wait;
+ )->wait;
}
return \%push_result;
@@ -121,6 +141,12 @@ sub run {
my $push_result;
my $pull_result;
+ if ( -e 'maintenance' ) {
+ $self->app->log->debug(
+ 'traewelling: "maintenance" file found, aborting');
+ return;
+ }
+
if ( not $direction or $direction eq 'push' ) {
$push_result = $self->push_sync;
}
@@ -133,6 +159,12 @@ sub run {
my $trwl_pull_finished_at = DateTime->now( time_zone => 'Europe/Berlin' );
+ if ( -e 'maintenance' ) {
+ $self->app->log->debug(
+ 'traewelling: "maintenance" file found, aborting');
+ return;
+ }
+
my $trwl_push_duration = $trwl_push_finished_at->epoch - $started_at->epoch;
my $trwl_pull_duration
= $trwl_pull_finished_at->epoch - $trwl_push_finished_at->epoch;
diff --git a/lib/Travelynx/Command/work.pm b/lib/Travelynx/Command/work.pm
index 10b1b69..071befa 100644
--- a/lib/Travelynx/Command/work.pm
+++ b/lib/Travelynx/Command/work.pm
@@ -1,11 +1,14 @@
package Travelynx::Command::work;
# Copyright (C) 2020-2023 Birte Kristina Friesel
+# Copyright (C) 2025 networkException <git@nwex.de>
#
# SPDX-License-Identifier: AGPL-3.0-or-later
use Mojo::Base 'Mojolicious::Command';
use Mojo::Promise;
+use utf8;
+
use DateTime;
use JSON;
use List::Util;
@@ -15,12 +18,17 @@ has description => 'Update real-time data of active journeys';
has usage => sub { shift->extract_usage };
sub run {
- my ($self) = @_;
+ my ( $self, $backend ) = @_;
my $now = DateTime->now( time_zone => 'Europe/Berlin' );
my $checkin_deadline = $now->clone->subtract( hours => 48 );
my $json = JSON->new;
+ if ( -e 'maintenance' ) {
+ $self->app->log->debug('work: "maintenance" file found, aborting');
+ return;
+ }
+
my $num_incomplete = $self->app->in_transit->delete_incomplete_checkins(
earlier_than => $checkin_deadline );
@@ -28,245 +36,700 @@ sub run {
$self->app->log->debug("Removed ${num_incomplete} incomplete checkins");
}
- my $errors = 0;
+ my $errors = 0;
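+ # $backend_issues counts upstream/backend failures, $rate_limit_counts
+ # counts HTTP 429 responses, and $dbris_rate_limited makes subsequent
+ # DBRIS requests back off; the counters are reported to InfluxDB below
+ # as backend_errors and ratelimit_count.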
+ my $backend_issues = 0;
+ my $rate_limit_counts = 0;
+ my $dbris_rate_limited = 0;
for my $entry ( $self->app->in_transit->get_all_active ) {
+ if ( -e 'maintenance' ) {
+ $self->app->log->debug('work: "maintenance" file found, aborting');
+ return;
+ }
+
my $uid = $entry->{user_id};
my $dep = $entry->{dep_eva};
my $arr = $entry->{arr_eva};
my $train_id = $entry->{train_id};
- if ( $train_id =~ m{[|]} ) {
+ if ( $train_id eq 'manual'
+ and ( not $backend or $backend eq 'manual' ) )
+ {
+ if ( $arr
+ and $entry->{real_arr_ts}
+ and $now->epoch - $entry->{real_arr_ts} > 600 )
+ {
+ $self->app->checkout_p(
+ station => $arr,
+ force => 2,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
+ }
+ }
+
+ elsif ( $entry->{is_dbris} and ( not $backend or $backend eq 'dbris' ) )
+ {
- $self->app->hafas->get_journey_p( trip_id => $train_id )->then(
- sub {
- my ($journey) = @_;
+ eval {
- my $found_dep;
- my $found_arr;
- for my $stop ( $journey->route ) {
- if ( $stop->loc->eva == $dep ) {
- $found_dep = $stop;
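+ # throttle DBRIS requests: back off to 4.5 s after a recent
+ # HTTP 429, otherwise wait a little between consecutive requests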
+ Mojo::Promise->timer(
+ $dbris_rate_limited ? 4.5 : ( $backend ? 1.2 : 1.0 ) )
+ ->then(
+ sub {
+ return $self->app->dbris->get_journey_p(
+ trip_id => $train_id );
+ }
+ )->then(
+ sub {
+ my ($journey) = @_;
+
+ $dbris_rate_limited = 0;
+
+ my $found_dep;
+ my $found_arr;
+ for my $stop ( $journey->route ) {
+ if ( $stop->eva == $dep ) {
+ $found_dep = $stop;
+ }
+ if ( $arr and $stop->eva == $arr ) {
+ $found_arr = $stop;
+ last;
+ }
+ }
+ if ( not $found_dep ) {
+ $self->app->log->debug(
+ "Did not find $dep within journey $train_id");
+ return;
}
- if ( $arr and $stop->loc->eva == $arr ) {
- $found_arr = $stop;
- last;
+
+ if ( $found_dep->rt_dep ) {
+ $self->app->in_transit->update_departure_dbris(
+ uid => $uid,
+ journey => $journey,
+ stop => $found_dep,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ train_id => $train_id,
+ );
+ }
+ if ( $found_dep->sched_dep
+ and $found_dep->dep->epoch > $now->epoch )
+ {
+ $self->app->add_wagonorder(
+ uid => $uid,
+ train_id => $train_id,
+ is_departure => 1,
+ eva => $dep,
+ datetime => $found_dep->sched_dep,
+ train_type => $journey->type,
+ train_no => $journey->number,
+ );
+ $self->app->add_stationinfo( $uid, 1,
+ $train_id, $found_dep->eva );
+ }
+
+ if (
+ $found_arr
+ and
+ ( $found_arr->rt_arr or $found_arr->is_cancelled )
+ )
+ {
+ $self->app->in_transit->update_arrival_dbris(
+ uid => $uid,
+ journey => $journey,
+ train_id => $train_id,
+ stop => $found_arr,
+ dep_eva => $dep,
+ arr_eva => $arr
+ );
+ }
+ if ( $found_arr and $found_arr->rt_arr ) {
+ if ( $found_arr->arr->epoch - $now->epoch < 600 ) {
+ $self->app->add_wagonorder(
+ uid => $uid,
+ train_id => $train_id,
+ is_arrival => 1,
+ eva => $arr,
+ datetime => $found_arr->sched_dep,
+ train_type => $journey->type,
+ train_no => $journey->number,
+ );
+ $self->app->add_stationinfo( $uid, 0,
+ $train_id, $found_dep->eva,
+ $found_arr->eva );
+ }
+ }
+ if ( $found_arr and $found_arr->is_cancelled ) {
+
+ # check out (adds a cancelled journey and resets journey state
+ # to destination selection)
+ $self->app->checkout_p(
+ station => $arr,
+ force => 0,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
}
}
- if ( not $found_dep ) {
+ )->catch(
+ sub {
+ my ($err) = @_;
$self->app->log->debug(
- "Did not find $dep within journey $train_id");
- return;
- }
-
- if ( $found_dep->{rt_dep} ) {
- $self->app->in_transit->update_departure_hafas(
- uid => $uid,
- journey => $journey,
- stop => $found_dep,
- dep_eva => $dep,
- arr_eva => $arr
+"work($uid) @ DBRIS $entry->{backend_name}: journey: $err"
);
+ if ( $err =~ m{HTTP 429} ) {
+ $dbris_rate_limited = 1;
+ $rate_limit_counts += 1;
+ }
+ else {
+ $backend_issues += 1;
+ }
}
+ )->wait;
- if ( $found_arr and $found_arr->{rt_arr} ) {
- $self->app->in_transit->update_arrival_hafas(
- uid => $uid,
- journey => $journey,
- stop => $found_arr,
- dep_eva => $dep,
- arr_eva => $arr
+ if ( $arr
+ and $entry->{real_arr_ts}
+ and $now->epoch - $entry->{real_arr_ts} > 600 )
+ {
+ $self->app->checkout_p(
+ station => $arr,
+ force => 2,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
+ }
+ };
+ if ($@) {
+ $errors += 1;
+ $self->app->log->error(
+ "work($uid) @ DBRIS $entry->{backend_name}: $@");
+ }
+ }
+
+ elsif ( $entry->{is_efa} and ( not $backend or $backend eq 'efa' ) ) {
+ eval {
+ $self->app->efa->get_journey_p(
+ trip_id => $train_id,
+ service => $entry->{backend_name}
+ )->then(
+ sub {
+ my ($journey) = @_;
+
+ my $found_dep;
+ my $found_arr;
+ for my $stop ( $journey->route ) {
+ if ( $stop->id_num == $dep ) {
+ $found_dep = $stop;
+ }
+ if ( $arr and $stop->id_num == $arr ) {
+ $found_arr = $stop;
+ last;
+ }
+ }
+ if ( not $found_dep ) {
+ $self->app->log->debug(
+ "Did not find $dep within journey $train_id");
+ return;
+ }
+
+ if ( $found_dep->rt_dep ) {
+ $self->app->in_transit->update_departure_efa(
+ uid => $uid,
+ journey => $journey,
+ stop => $found_dep,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ trip_id => $train_id,
+ );
+ }
+
+ if (
+ $found_arr
+ and
+ ( $found_arr->rt_arr or $found_arr->is_cancelled )
+ )
+ {
+ $self->app->in_transit->update_arrival_efa(
+ uid => $uid,
+ journey => $journey,
+ stop => $found_arr,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ trip_id => $train_id,
+ );
+ }
+ if ( $found_arr and $found_arr->is_cancelled ) {
+
+ # check out (adds a cancelled journey and resets journey state
+ # to destination selection)
+ $self->app->checkout_p(
+ station => $arr,
+ force => 0,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
+ }
+ }
+ )->catch(
+ sub {
+ my ($err) = @_;
+ $backend_issues += 1;
+ $self->app->log->error(
+"work($uid) @ EFA $entry->{backend_name}: journey: $err"
);
}
+ )->wait;
+
+ if ( $arr
+ and $entry->{real_arr_ts}
+ and $now->epoch - $entry->{real_arr_ts} > 600 )
+ {
+ $self->app->checkout_p(
+ station => $arr,
+ force => 2,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
}
- )->catch(
- sub {
- my ($err) = @_;
- if ( $err =~ m{svcResL\[0\][.]err is (?:FAIL|PARAMETER)$} )
- {
- # HAFAS do be weird. These are not actionable.
- $self->app->log->debug("work($uid)/journey: $err");
+ };
+ if ($@) {
+ $errors += 1;
+ $self->app->log->error(
+ "work($uid) @ EFA $entry->{backend_name}: $@");
+ }
+ }
+
+ elsif ( $entry->{is_motis} and ( not $backend or $backend eq 'motis' ) )
+ {
+
+ eval {
+ $self->app->motis->get_trip_p(
+ service => $entry->{backend_name},
+ trip_id => $train_id,
+ )->then(
+ sub {
+ my ($journey) = @_;
+
+ for my $stopover ( $journey->stopovers ) {
+ if ( not defined $stopover->stop->{eva} ) {
+
+ # Looks like MOTIS / transitous station IDs can change after the fact.
+ # So let's be safe rather than sorry, even if this causes way too many calls to the slow path
+ # (Stations::get_by_external_id uses string lookups).
+ # This function call implicitly sets $stopover->stop->{eva} for MOTIS backends.
+ $self->app->stations->add_or_update(
+ stop => $stopover->stop,
+ motis => $entry->{backend_name},
+ );
+
+ $self->app->log->debug( "mapped "
+ . $stopover->stop->id . " to "
+ . $stopover->stop->{eva} );
+ }
+ }
+
+ my $found_departure;
+ my $found_arrival;
+ for my $stopover ( $journey->stopovers ) {
+ if ( $stopover->stop->{eva} == $dep ) {
+ $found_departure = $stopover;
+ }
+
+ if ( $arr and $stopover->stop->{eva} == $arr ) {
+ $found_arrival = $stopover;
+ last;
+ }
+ }
+
+ if ( not $found_departure ) {
+ $self->app->log->debug(
+ "Did not find $dep within trip $train_id");
+ return;
+ }
+
+ if ( $found_departure->realtime_departure ) {
+ $self->app->in_transit->update_departure_motis(
+ uid => $uid,
+ journey => $journey,
+ stopover => $found_departure,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ train_id => $train_id,
+ );
+ }
+
+ if ( $found_arrival
+ and $found_arrival->realtime_arrival )
+ {
+ $self->app->in_transit->update_arrival_motis(
+ uid => $uid,
+ journey => $journey,
+ train_id => $train_id,
+ stopover => $found_arrival,
+ dep_eva => $dep,
+ arr_eva => $arr
+ );
+ }
}
- else {
- $self->app->log->error("work($uid)/journey: $err");
+ )->catch(
+ sub {
+ my ($err) = @_;
+ $self->app->log->error(
+"work($uid) @ MOTIS $entry->{backend_name}: journey: $err"
+ );
}
+ )->wait;
+
+ if ( $arr
+ and $entry->{real_arr_ts}
+ and $now->epoch - $entry->{real_arr_ts} > 600 )
+ {
+ $self->app->checkout_p(
+ station => $arr,
+ force => 2,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
}
- )->wait;
+ };
+ if ($@) {
+ $errors += 1;
+ $self->app->log->error(
+ "work($uid) @ MOTIS $entry->{backend_name}: $@");
+ }
+ }
- if ( $arr
- and $entry->{real_arr_ts}
- and $now->epoch - $entry->{real_arr_ts} > 600 )
- {
- $self->app->checkout_p(
- station => $arr,
- force => 2,
- dep_eva => $dep,
- arr_eva => $arr,
- uid => $uid
+ elsif ( $entry->{is_hafas} and ( not $backend or $backend eq 'hafas' ) )
+ {
+
+ eval {
+
+ $self->app->hafas->get_journey_p(
+ trip_id => $train_id,
+ service => $entry->{backend_name}
+ )->then(
+ sub {
+ my ($journey) = @_;
+
+ my $found_dep;
+ my $found_arr;
+ for my $stop ( $journey->route ) {
+ if ( $stop->loc->eva == $dep ) {
+ $found_dep = $stop;
+ }
+ if ( $arr and $stop->loc->eva == $arr ) {
+ $found_arr = $stop;
+ last;
+ }
+ }
+ if ( not $found_dep ) {
+ $self->app->log->debug(
+ "Did not find $dep within journey $train_id");
+ return;
+ }
+
+ if ( $found_dep->rt_dep ) {
+ $self->app->in_transit->update_departure_hafas(
+ uid => $uid,
+ journey => $journey,
+ stop => $found_dep,
+ dep_eva => $dep,
+ arr_eva => $arr
+ );
+ }
+ if (
+ $found_dep->sched_dep
+ and ( $entry->{backend_id} <= 1
+ or $entry->{backend_name} eq 'VRN'
+ or $entry->{backend_name} eq 'ÖBB' )
+ and $journey->class <= 16
+ and $found_dep->dep->epoch > $now->epoch
+ )
+ {
+ $self->app->add_wagonorder(
+ uid => $uid,
+ train_id => $journey->id,
+ is_departure => 1,
+ eva => $dep,
+ datetime => $found_dep->sched_dep,
+ train_type => $journey->type =~ s{ +$}{}r,
+ train_no => $journey->number,
+ );
+ $self->app->add_stationinfo( $uid, 1,
+ $journey->id, $found_dep->loc->eva );
+ }
+
+ if ( $found_arr and $found_arr->rt_arr ) {
+ $self->app->in_transit->update_arrival_hafas(
+ uid => $uid,
+ journey => $journey,
+ stop => $found_arr,
+ dep_eva => $dep,
+ arr_eva => $arr
+ );
+ if (
+ (
+ $entry->{backend_id} <= 1
+ or $entry->{backend_name} eq 'VRN'
+ or $entry->{backend_name} eq 'ÖBB'
+ )
+ and $journey->class <= 16
+ and $found_arr->arr->epoch - $now->epoch < 600
+ )
+ {
+ $self->app->add_wagonorder(
+ uid => $uid,
+ train_id => $journey->id,
+ is_arrival => 1,
+ eva => $arr,
+ datetime => $found_arr->sched_dep,
+ train_type => $journey->type,
+ train_no => $journey->number,
+ );
+ $self->app->add_stationinfo( $uid, 0,
+ $journey->id, $found_dep->loc->eva,
+ $found_arr->loc->eva );
+ }
+ }
+ }
+ )->catch(
+ sub {
+ my ($err) = @_;
+ $backend_issues += 1;
+ if ( $err
+ =~ m{svcResL\[0\][.]err is (?:FAIL|PARAMETER)$}
+ or $err =~ m{timeout} )
+ {
+ # These are not actionable.
+ $self->app->log->debug(
+"work($uid) @ HAFAS $entry->{backend_name}: journey: $err"
+ );
+ }
+ else {
+ $self->app->log->error(
+"work($uid) @ HAFAS $entry->{backend_name}: journey: $err"
+ );
+ }
+ }
)->wait;
+
+ if ( $arr
+ and $entry->{real_arr_ts}
+ and $now->epoch - $entry->{real_arr_ts} > 600 )
+ {
+ $self->app->checkout_p(
+ station => $arr,
+ force => 2,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
+ }
+ };
+ if ($@) {
+ $errors += 1;
+ $self->app->log->error(
+ "work($uid) @ HAFAS $entry->{backend_name}: $@");
}
- next;
}
+ # TODO there is a race condition somewhere around here where a fresh check-in (via HAFAS) gets overwritten with IRIS data.
+ # All of these updates really need sanity checks against the train id ...
+
# Note: IRIS data is not always updated in real-time. Both departure and
# arrival delays may take several minutes to appear, especially in case
# of large-scale disturbances. We work around this by continuing to
# update departure data for up to 15 minutes after departure and
# delaying automatic checkout by at least 10 minutes.
- eval {
- if ( $now->epoch - $entry->{real_dep_ts} < 900 ) {
- my $status = $self->app->iris->get_departures(
- station => $dep,
- lookbehind => 30,
- lookahead => 30
- );
- if ( $status->{errstr} ) {
- die("get_departures($dep): $status->{errstr}\n");
- }
-
- my ($train) = List::Util::first { $_->train_id eq $train_id }
- @{ $status->{results} };
+ elsif ( $entry->{is_iris} and ( not $backend or $backend eq 'iris' ) ) {
+ eval {
+ if ( $now->epoch - $entry->{real_dep_ts} < 900 ) {
+ my $status = $self->app->iris->get_departures(
+ station => $dep,
+ lookbehind => 30,
+ lookahead => 30
+ );
+ if ( $status->{errstr} ) {
+ die("get_departures($dep): $status->{errstr}\n");
+ }
- if ( not $train ) {
- $self->app->log->debug(
- "could not find train $train_id at $dep\n");
- return;
- }
+ my ($train)
+ = List::Util::first { $_->train_id eq $train_id }
+ @{ $status->{results} };
- $self->app->in_transit->update_departure(
- uid => $uid,
- train => $train,
- dep_eva => $dep,
- arr_eva => $arr,
- route => [ $self->app->iris->route_diff($train) ]
- );
+ if ( not $train ) {
+ $self->app->log->debug(
+ "could not find train $train_id at $dep\n");
+ return;
+ }
- if ( $train->departure_is_cancelled and $arr ) {
- my $checked_in
- = $self->app->in_transit->update_departure_cancelled(
+ $self->app->in_transit->update_departure(
uid => $uid,
train => $train,
dep_eva => $dep,
arr_eva => $arr,
- );
-
- # depending on the amount of users in transit, some time may
- # have passed between fetching $entry from the database and
- # now. Only check out if the user is still checked into this
- # train.
- if ($checked_in) {
+ route => [ $self->app->iris->route_diff($train) ]
+ );
- # check out (adds a cancelled journey and resets journey state
- # to checkin
- $self->app->checkout_p(
- station => $arr,
- force => 2,
+ if ( $train->departure_is_cancelled and $arr ) {
+ my $checked_in
+ = $self->app->in_transit->update_departure_cancelled(
+ uid => $uid,
+ train => $train,
dep_eva => $dep,
arr_eva => $arr,
- uid => $uid
- )->wait;
+ );
+
+ # depending on the amount of users in transit, some time may
+ # have passed between fetching $entry from the database and
+ # now. Only check out if the user is still checked into this
+ # train.
+ if ($checked_in) {
+
+ # check out (adds a cancelled journey and resets journey state
+ # to checkin)
+ $self->app->checkout_p(
+ station => $arr,
+ force => 2,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
+ }
+ }
+ else {
+ $self->app->add_route_timestamps( $uid, $train, 1 );
+ $self->app->add_wagonorder(
+ uid => $uid,
+ train_id => $train->train_id,
+ is_departure => 1,
+ eva => $dep,
+ datetime => $train->sched_departure,
+ train_type => $train->type,
+ train_no => $train->train_no
+ );
+ $self->app->add_stationinfo( $uid, 1, $train->train_id,
+ $dep, $arr );
}
}
- else {
- $self->app->add_route_timestamps( $uid, $train, 1 );
- }
+ };
+ if ($@) {
+ $errors += 1;
+ $self->app->log->error("work($uid) @ IRIS: departure: $@");
}
- };
- if ($@) {
- $errors += 1;
- $self->app->log->error("work($uid)/departure: $@");
- }
- eval {
- if (
- $arr
- and ( not $entry->{real_arr_ts}
- or $now->epoch - $entry->{real_arr_ts} < 600 )
- )
- {
- my $status = $self->app->iris->get_departures(
- station => $arr,
- lookbehind => 20,
- lookahead => 220
- );
- if ( $status->{errstr} ) {
- die("get_departures($arr): $status->{errstr}\n");
- }
+ eval {
+ if (
+ $arr
+ and ( not $entry->{real_arr_ts}
+ or $now->epoch - $entry->{real_arr_ts} < 600 )
+ )
+ {
+ my $status = $self->app->iris->get_departures(
+ station => $arr,
+ lookbehind => 20,
+ lookahead => 220
+ );
+ if ( $status->{errstr} ) {
+ die("get_departures($arr): $status->{errstr}\n");
+ }
- # Note that a train may pass the same station several times.
- # Notable example: S41 / S42 ("Ringbahn") both starts and
- # terminates at Berlin Südkreuz
- my ($train) = List::Util::first {
- $_->train_id eq $train_id
- and $_->sched_arrival
- and $_->sched_arrival->epoch > $entry->{sched_dep_ts}
- }
- @{ $status->{results} };
+ # Note that a train may pass the same station several times.
+ # Notable example: S41 / S42 ("Ringbahn") both starts and
+ # terminates at Berlin Südkreuz
+ my ($train) = List::Util::first {
+ $_->train_id eq $train_id
+ and $_->sched_arrival
+ and $_->sched_arrival->epoch > $entry->{sched_dep_ts}
+ }
+ @{ $status->{results} };
- $train //= List::Util::first { $_->train_id eq $train_id }
- @{ $status->{results} };
+ $train //= List::Util::first { $_->train_id eq $train_id }
+ @{ $status->{results} };
- if ( not $train ) {
+ if ( not $train ) {
- # If we haven't seen the train yet, its arrival is probably
- # too far in the future. This is not critical.
- return;
- }
+ # If we haven't seen the train yet, its arrival is probably
+ # too far in the future. This is not critical.
+ return;
+ }
- my $checked_in = $self->app->in_transit->update_arrival(
- uid => $uid,
- train => $train,
- route => [ $self->app->iris->route_diff($train) ],
- dep_eva => $dep,
- arr_eva => $arr,
- );
+ my $checked_in = $self->app->in_transit->update_arrival(
+ uid => $uid,
+ train => $train,
+ route => [ $self->app->iris->route_diff($train) ],
+ dep_eva => $dep,
+ arr_eva => $arr,
+ );
- if ( $checked_in and $train->arrival_is_cancelled ) {
+ if ( $checked_in and $train->arrival_is_cancelled ) {
- # check out (adds a cancelled journey and resets journey state
- # to destination selection)
- $self->app->checkout_p(
+ # check out (adds a cancelled journey and resets journey state
+ # to destination selection)
+ $self->app->checkout_p(
+ station => $arr,
+ force => 0,
+ dep_eva => $dep,
+ arr_eva => $arr,
+ uid => $uid
+ )->wait;
+ }
+ else {
+ $self->app->add_route_timestamps(
+ $uid, $train, 0,
+ (
+ defined $entry->{real_arr_ts}
+ and $now->epoch > $entry->{real_arr_ts}
+ ) ? 1 : 0
+ );
+ $self->app->add_wagonorder(
+ uid => $uid,
+ train_id => $train->train_id,
+ is_arrival => 1,
+ eva => $arr,
+ datetime => $train->sched_departure,
+ train_type => $train->type,
+ train_no => $train->train_no
+ );
+ $self->app->add_stationinfo( $uid, 0, $train->train_id,
+ $dep, $arr );
+ }
+ }
+ elsif ( $entry->{real_arr_ts} ) {
+ my ( undef, $error ) = $self->app->checkout_p(
station => $arr,
- force => 0,
+ force => 2,
dep_eva => $dep,
arr_eva => $arr,
uid => $uid
+ )->catch(
+ sub {
+ my ($error) = @_;
+ $backend_issues += 1;
+ $self->app->log->error(
+ "work($uid) @ IRIS: arrival: $error");
+ $errors += 1;
+ }
)->wait;
}
- else {
- $self->app->add_route_timestamps(
- $uid, $train, 0,
- (
- defined $entry->{real_arr_ts}
- and $now->epoch > $entry->{real_arr_ts}
- ) ? 1 : 0
- );
- }
+ };
+ if ($@) {
+ $self->app->log->error("work($uid) @ IRIS: arrival: $@");
+ $errors += 1;
}
- elsif ( $entry->{real_arr_ts} ) {
- my ( undef, $error ) = $self->app->checkout_p(
- station => $arr,
- force => 2,
- dep_eva => $dep,
- arr_eva => $arr,
- uid => $uid
- )->catch(
- sub {
- my ($error) = @_;
- $self->app->log->error("work($uid)/arrival: $error");
- $errors += 1;
- }
- )->wait;
- }
- };
- if ($@) {
- $self->app->log->error("work($uid)/arrival: $@");
- $errors += 1;
+
+ eval { };
}
- eval { };
}
my $started_at = $now;
@@ -274,22 +737,35 @@ sub run {
my $worker_duration = $main_finished_at->epoch - $started_at->epoch;
if ( $self->app->config->{influxdb}->{url} ) {
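+ # when a single backend was requested on the command line, tag the
+ # InfluxDB measurement with it so per-backend runtimes and error
+ # counts can be told apart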
+ my $tags = q{};
+ if ($backend) {
+ $tags .= ",backend=${backend}";
+ }
if ( $self->app->mode eq 'development' ) {
$self->app->log->debug( 'POST '
. $self->app->config->{influxdb}->{url}
- . " worker runtime_seconds=${worker_duration},errors=${errors}"
+ . " worker${tags} runtime_seconds=${worker_duration},errors=${errors},backend_errors=${backend_issues},ratelimit_count=${rate_limit_counts}"
);
}
else {
$self->app->ua->post_p( $self->app->config->{influxdb}->{url},
- "worker runtime_seconds=${worker_duration},errors=${errors}" )
- ->wait;
+"worker${tags} runtime_seconds=${worker_duration},errors=${errors},backend_errors=${backend_issues},ratelimit_count=${rate_limit_counts}"
+ )->wait;
}
}
if ( not $self->app->config->{traewelling}->{separate_worker} ) {
$self->app->start('traewelling');
}
+
+ # add_wagonorder and add_stationinfo assume a permanently running IOLoop
+ # and do not allow Mojolicious commands to wait until they have completed.
+ # Hence, some add_wagonorder and add_stationinfo calls made here may not
+ # complete before the work command exits, and thus have no effect.
+ #
+ # This is not ideal and will need fixing at some point. Until then, here
+ # is the pragmatic solution for 99% of the associated issues.
+ Mojo::Promise->timer(5)->wait;
}
1;