From 65d6dd1d5f4d8bf2ddd868339d22d40821b8983c Mon Sep 17 00:00:00 2001
From: "Steinar H. Gunderson"
Date: Wed, 24 Jun 2015 01:46:54 +0200
Subject: [PATCH] Switch from flat files to PostgreSQL.

The flat files don't work all that well when you have multiple clients
working on them at the same time. I'm not super-happy about requiring a
server to be up all the time (it would seem to reduce resilience by quite
a lot), but Postgres is a pretty stable thing, so hopefully we will
manage.

Include a script to ingest old data into the database; the script will be
removed again immediately afterwards.
---
 config.pm      |  5 +++
 ingest.pl      | 57 ++++++++++++++++++++++++++++++++++
 remoteglot.pl  | 84 ++++++++++++++++++++++++++------------------------
 remoteglot.sql | 16 ++++++++++
 4 files changed, 121 insertions(+), 41 deletions(-)
 create mode 100755 ingest.pl
 create mode 100644 remoteglot.sql

diff --git a/config.pm b/config.pm
index 2bb1974..3a57206 100644
--- a/config.pm
+++ b/config.pm
@@ -66,6 +66,11 @@ our @masters = (
 # you probably need specific prior permission to use this.
 our $tb_serial_key = undef;
 
+# Postgres configuration.
+our $dbistr = "dbi:Pg:dbname=remoteglot";
+our $dbiuser = undef;
+our $dbipass = undef;
+
 eval {
 	my $config_filename = $ENV{'REMOTEGLOT_CONFIG'} // 'config.local.pm';
 	require $config_filename;
diff --git a/ingest.pl b/ingest.pl
new file mode 100755
index 0000000..9e03f8a
--- /dev/null
+++ b/ingest.pl
@@ -0,0 +1,57 @@
+#! /usr/bin/perl
+
+# To ingest old data into the database.
+
+use strict;
+use warnings;
+use DBI;
+use DBD::Pg;
+use JSON::XS;
+use Tie::Persistent;
+
+my $dbh = DBI->connect('dbi:Pg:dbname=remoteglot');
+$dbh->{AutoCommit} = 0;
+$dbh->{RaiseError} = 1;
+
+# Import positions from history.
+for my $filename (<www/history/*.json>) {
+	$filename =~ m#www/history/(.*)\.json#;
+	my $id = $1;
+	print "Analysis: $id...\n";
+
+	my $contents;
+	{
+		local $/ = undef;
+		open my $fh, "<", $filename
+			or die "$filename: $!";
+		$contents = <$fh>;
+		close $fh;
+	}
+
+	#$dbh->do('INSERT INTO analysis VALUES (?)', undef, $contents);
+
+	my $json = JSON::XS::decode_json($contents);
+	if (defined($json->{'plot_score'})) {
+		my $engine = $json->{'id'}{'name'} // die;
+		my $depth = $json->{'depth'} // 0;
+		my $nodes = $json->{'nodes'} // 0;
+
+		$dbh->do('DELETE FROM scores WHERE id=?', undef, $id);
+		$dbh->do('INSERT INTO scores (id, plot_score, short_score, engine, depth, nodes) VALUES (?, ?, ?, ?, ?, ?)',
+			undef, $id, $json->{'plot_score'}, $json->{'short_score'}, $engine, $depth, $nodes);
+	}
+}
+
+# Import clock information.
+tie my %clock_info_for_pos, 'Tie::Persistent', 'clock_info.db', 'rw';
+
+while (my ($id, $clock_info) = each %clock_info_for_pos) {
+	print "Clock: $id...\n";
+	$dbh->do('DELETE FROM clock_info WHERE id=?', undef, $id);
+	$dbh->do('INSERT INTO clock_info (id, white_clock, black_clock, white_clock_target, black_clock_target) VALUES (?, ?, ?, ?, ?)',
+		undef, $id, $clock_info->{'white_clock'}, $clock_info->{'black_clock'},
+		$clock_info->{'white_clock_target'}, $clock_info->{'black_clock_target'});
+}
+
+$dbh->commit;
+
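
A quick way to sanity-check what ingest.pl wrote is sketched below. It is not
part of the patch: it assumes the default dbi:Pg:dbname=remoteglot DSN and the
two tables from remoteglot.sql, and the position id near the end is a made-up
placeholder, not a value from the real data. It uses only plain DBI calls,
mirroring the lookups remoteglot.pl itself will do.

#! /usr/bin/perl
use strict;
use warnings;
use DBI;

my $dbh = DBI->connect('dbi:Pg:dbname=remoteglot', undef, undef, { RaiseError => 1 });

# Row counts for the two tables ingest.pl fills.
my ($num_scores) = $dbh->selectrow_array('SELECT COUNT(*) FROM scores');
my ($num_clocks) = $dbh->selectrow_array('SELECT COUNT(*) FROM clock_info');
print "scores: $num_scores rows, clock_info: $num_clocks rows\n";

# Spot-check a single position the same way remoteglot.pl will look it up.
my $ref = $dbh->selectrow_hashref('SELECT * FROM scores WHERE id=?', undef, 'some-position-id');
if (defined($ref)) {
	print "engine=$ref->{'engine'} depth=$ref->{'depth'} nodes=$ref->{'nodes'}\n";
} else {
	print "no analysis stored for that id\n";
}

$dbh->disconnect;
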
diff --git a/remoteglot.pl b/remoteglot.pl
index d6ecbcb..9502a71 100755
--- a/remoteglot.pl
+++ b/remoteglot.pl
@@ -21,6 +21,8 @@ use Time::HiRes;
 use JSON::XS;
 use URI::Escape;
 use Tie::Persistent;
+use DBI;
+use DBD::Pg;
 require 'Position.pm';
 require 'Engine.pm';
 require 'config.pm';
@@ -38,12 +40,12 @@ my %tb_cache = ();
 my $tb_lookup_running = 0;
 my $last_written_json = undef;
 
-# Persisted so that we can restart.
-tie my %clock_info_for_pos, 'Tie::Persistent', 'clock_info.db', 'rw';
-(tied %clock_info_for_pos)->autosync(1);
-
-tie my %json_for_pos, 'Tie::Persistent', 'analysis_info.db', 'rw';
-(tied %json_for_pos)->autosync(1);
+# Persisted so we can restart.
+# TODO: Figure out an appropriate way to deal with database restarts
+# and/or Postgres going away entirely.
+my $dbh = DBI->connect($remoteglotconf::dbistr, $remoteglotconf::dbiuser, $remoteglotconf::dbipass)
+	or die DBI->errstr;
+$dbh->{RaiseError} = 1;
 
 $| = 1;
 
@@ -782,19 +784,22 @@ sub output_json {
 	if (!$historic_json_only && exists($pos_calculating->{'pretty_history'})) {
 		my %score_history = ();
+		my $q = $dbh->prepare('SELECT * FROM scores WHERE id=?');
 		my $pos = Position->start_pos('white', 'black');
 		my $halfmove_num = 0;
 		for my $move (@{$pos_calculating->{'pretty_history'}}) {
 			my $id = id_for_pos($pos, $halfmove_num);
-			if (exists($json_for_pos{$id}) && defined($json_for_pos{$id}->{'plot_score'})) {
+			my $ref = $dbh->selectrow_hashref($q, undef, $id);
+			if (defined($ref)) {
 				$score_history{$halfmove_num} = [
-					$json_for_pos{$id}->{'plot_score'},
-					$json_for_pos{$id}->{'short_score'}
+					$ref->{'plot_score'},
+					$ref->{'short_score'}
 				];
 			}
 			++$halfmove_num;
 			($pos) = $pos->make_pretty_move($move);
 		}
+		$q->finish;
 
 		# If at any point we are missing 10 consecutive moves,
 		# truncate the history there. This is so we don't get into
@@ -836,7 +841,7 @@
 	# using a different engine, or if we've calculated deeper.
 	# nodes is used as a tiebreaker. Don't bother about Multi-PV
 	# data; it's not that important.
-	my ($old_engine, $old_depth, $old_nodes) = get_json_analysis_stats($filename);
+	my ($old_engine, $old_depth, $old_nodes) = get_json_analysis_stats($id);
 	my $new_depth = $json->{'depth'} // 0;
 	my $new_nodes = $json->{'nodes'} // 0;
 	if (!defined($old_engine) ||
@@ -844,7 +849,14 @@
 	    $new_depth > $old_depth ||
 	    ($new_depth == $old_depth && $new_nodes >= $old_nodes)) {
 		atomic_set_contents($filename, $encoded);
-		$json_for_pos{$id} = $json;
+		if (defined($json->{'plot_score'})) {
+			local $dbh->{AutoCommit} = 0;
+			$dbh->do('DELETE FROM scores WHERE id=?', undef, $id);
+			$dbh->do('INSERT INTO scores (id, plot_score, short_score, engine, depth, nodes) VALUES (?,?,?,?,?,?)', undef,
+				$id, $json->{'plot_score'}, $json->{'short_score'},
+				$json->{'id'}{'name'}, $new_depth, $new_nodes);
+			$dbh->commit;
+		}
 	}
 }
 
@@ -868,25 +880,13 @@ sub id_for_pos {
 }
 
 sub get_json_analysis_stats {
-	my $filename = shift;
-
-	my ($engine, $depth, $nodes);
-
-	open my $fh, "<", $filename
-		or return undef;
-	local $/ = undef;
-	eval {
-		my $json = JSON::XS::decode_json(<$fh>);
-		$engine = $json->{'id'}{'name'} // die;
-		$depth = $json->{'depth'} // 0;
-		$nodes = $json->{'nodes'} // 0;
-	};
-	close $fh;
-	if ($@) {
-		warn "Error in decoding $filename: $@";
-		return undef;
+	my $id = shift;
+	my $ref = $dbh->selectrow_hashref('SELECT * FROM scores WHERE id=?', undef, $id);
+	if (defined($ref)) {
+		return ($ref->{'engine'}, $ref->{'depth'}, $ref->{'nodes'});
+	} else {
+		return ('', 0, 0);
 	}
-	return ($engine, $depth, $nodes);
 }
 
 sub uciprint {
@@ -1138,13 +1138,14 @@ sub find_clock_start {
 	}
 
 	my $id = id_for_pos($pos);
-	if (exists($clock_info_for_pos{$id})) {
-		$pos->{'white_clock'} //= $clock_info_for_pos{$id}{'white_clock'};
-		$pos->{'black_clock'} //= $clock_info_for_pos{$id}{'black_clock'};
+	my $clock_info = $dbh->selectrow_hashref('SELECT * FROM clock_info WHERE id=?', undef, $id);
+	if (defined($clock_info)) {
+		$pos->{'white_clock'} //= $clock_info->{'white_clock'};
+		$pos->{'black_clock'} //= $clock_info->{'black_clock'};
 		if ($pos->{'toplay'} eq 'W') {
-			$pos->{'white_clock_target'} = $clock_info_for_pos{$id}->{'white_clock_target'};
+			$pos->{'white_clock_target'} = $clock_info->{'white_clock_target'};
 		} else {
-			$pos->{'black_clock_target'} = $clock_info_for_pos{$id}->{'black_clock_target'};
+			$pos->{'black_clock_target'} = $clock_info->{'black_clock_target'};
 		}
 		return;
 	}
@@ -1174,16 +1175,17 @@ sub find_clock_start {
 		return;
 	}
 	my $time_left = $pos->{$key};
-	my $clock_info = {
-		white_clock => $pos->{'white_clock'},
-		black_clock => $pos->{'black_clock'}
-	};
+	my ($white_clock_target, $black_clock_target);
 	if ($pos->{'toplay'} eq 'W') {
-		$clock_info->{'white_clock_target'} = $pos->{'white_clock_target'} = time + $time_left;
+		$white_clock_target = $pos->{'white_clock_target'} = time + $time_left;
 	} else {
-		$clock_info->{'black_clock_target'} = $pos->{'black_clock_target'} = time + $time_left;
+		$black_clock_target = $pos->{'black_clock_target'} = time + $time_left;
 	}
-	$clock_info_for_pos{$id} = $clock_info;
+	local $dbh->{AutoCommit} = 0;
+	$dbh->do('DELETE FROM clock_info WHERE id=?', undef, $id);
+	$dbh->do('INSERT INTO clock_info (id, white_clock, black_clock, white_clock_target, black_clock_target) VALUES (?, ?, ?, ?, ?)', undef,
+		$id, $pos->{'white_clock'}, $pos->{'black_clock'}, $white_clock_target, $black_clock_target);
+	$dbh->commit;
 }
 
 sub schedule_tb_lookup {
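
Both output_json() and find_clock_start() now update a row with the same
idiom: DELETE plus INSERT with AutoCommit temporarily off, which acts as an
upsert keyed on the primary key without relying on anything Postgres-specific.
A minimal self-contained sketch of that pattern follows; it is not part of the
patch, it assumes the scores table from remoteglot.sql and the default DSN,
and the id and values in the call at the bottom are made up.

#! /usr/bin/perl
use strict;
use warnings;
use DBI;

my $dbh = DBI->connect('dbi:Pg:dbname=remoteglot', undef, undef, { RaiseError => 1 });

# Replace-or-insert a score row the same way output_json() does it: both
# statements run inside one transaction, so other clients never observe
# the row as missing between the DELETE and the INSERT.
sub upsert_score {
	my ($dbh, $id, $plot_score, $short_score, $engine, $depth, $nodes) = @_;
	local $dbh->{AutoCommit} = 0;
	$dbh->do('DELETE FROM scores WHERE id=?', undef, $id);
	$dbh->do('INSERT INTO scores (id, plot_score, short_score, engine, depth, nodes) VALUES (?,?,?,?,?,?)',
		undef, $id, $plot_score, $short_score, $engine, $depth, $nodes);
	$dbh->commit;
}

upsert_score($dbh, 'some-position-id', 12, '+0.12', 'Stockfish 6', 30, 1_000_000);
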
diff --git a/remoteglot.sql b/remoteglot.sql
new file mode 100644
index 0000000..ed5d1d8
--- /dev/null
+++ b/remoteglot.sql
@@ -0,0 +1,16 @@
+CREATE TABLE scores (
+	id varchar primary key,
+	plot_score integer not null,
+	short_score varchar not null,
+	engine varchar not null,
+	depth bigint not null,
+	nodes bigint not null
+);
+
+CREATE TABLE clock_info (
+	id varchar primary key,
+	white_clock integer,
+	black_clock integer,
+	white_clock_target integer,  -- FIXME: really timestamp with time zone
+	black_clock_target integer   -- FIXME: really timestamp with time zone
+);
-- 
2.39.2
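
The new $dbistr/$dbiuser/$dbipass settings in config.pm follow the existing
pattern of being overridable from config.local.pm, which config.pm already
requires (see the eval block in the context above). A hypothetical override
might look like the sketch below; the host, user and password are
placeholders, not values from the patch.

# config.local.pm (hypothetical example; adjust to the local setup)
package remoteglotconf;

our $dbistr = "dbi:Pg:dbname=remoteglot;host=db.example.com";
our $dbiuser = "remoteglot";
our $dbipass = "changeme";

1;

The database and schema have to exist before remoteglot.pl or ingest.pl are
started; something along the lines of createdb remoteglot followed by
psql remoteglot -f remoteglot.sql should be enough, depending on how Postgres
authentication is set up locally.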