]> git.sesse.net Git - remoteglot/commitdiff
Integrate varnishcount.pl into serve-analysis.js, so there is less chaos in daemons...
authorSteinar H. Gunderson <sgunderson@bigfoot.com>
Thu, 25 Jun 2015 20:57:08 +0000 (22:57 +0200)
committerSteinar H. Gunderson <sgunderson@bigfoot.com>
Thu, 25 Jun 2015 20:57:08 +0000 (22:57 +0200)
varnishcount.pl [deleted file]
www/serve-analysis.js

diff --git a/varnishcount.pl b/varnishcount.pl
deleted file mode 100755 (executable)
index 35e1402..0000000
+++ /dev/null
@@ -1,81 +0,0 @@
-#! /usr/bin/perl
-use AnyEvent;
-use AnyEvent::Handle;
-use EV;
-use LWP::Simple;
-require 'config.pm';
-use strict;
-use warnings;
-no warnings qw(once);
-
-my $url = $ARGV[0] // "/analysis.pl";  # Technically an URL regex, not an URL.
-my $port = $ARGV[1] // 5000;
-
-open my $fh, "-|", "varnishncsa -F '%{%s}t %U %q tffb=%{Varnish:time_firstbyte}x' -q 'ReqURL ~ \"^$url\"'"
-       or die "varnishncsa: $!";
-my %uniques = ();
-
-my $ev = AnyEvent::Handle->new(
-       fh => $fh,
-       on_read => sub {
-               my ($hdl) = @_;
-               $hdl->push_read(
-                       line => sub {
-                               my ($hdl, $line, $eof) = @_;
-                               handle_line($line);
-                       }
-               );
-       },
-);
-my $ev2 = AnyEvent->timer(
-       interval => 1.0,
-       cb => \&output
-);
-EV::run;
-
-sub handle_line {
-       my $line = shift;
-       $line =~ m#(\d+) $url \?ims=\d+&unique=(.*) tffb=(.*)# or return;
-       $uniques{$2} = {
-               last_seen => $1 + $3,
-               grace => undef,
-       };
-       my $now = time;
-       print "[$now] $1 $2 $3\n";
-}
-
-sub output {
-       my $mtime = (stat($remoteglotconf::json_output))[9] - 1;  # Compensate for subsecond issues.
-       my $now = time;
-
-       while (my ($unique, $hash) = each %uniques) {
-               my $last_seen = $hash->{'last_seen'};
-               if ($now - $last_seen <= 5) {
-                       # We've seen this user in the last five seconds;
-                       # it's okay.
-                       next;
-               }
-               if ($last_seen >= $mtime) {
-                       # This user has the latest version;
-                       # they are probably just hanging.
-                       next;
-               }
-               if (!defined($hash->{'grace'})) {
-                       # They have five seconds after a new JSON has been
-                       # provided to get get it, or they're out.
-                       # We don't simply use $mtime, since we don't want to
-                       # reset the grace timer just because a new JSON is
-                       # published.
-                       $hash->{'grace'} = $mtime;
-               }
-               if ($now - $hash->{'grace'} > 5) {
-                       printf "Timing out %s (last_seen=%d, now=%d, mtime=%d, grace=%d)\n",
-                               $unique, $last_seen, $now, $mtime, $hash->{'grace'};
-                       delete $uniques{$unique};
-               }
-       }
-
-       my $num_viewers = scalar keys %uniques; 
-       printf "%d entries in hash, mtime=$mtime\n", scalar keys %uniques;
-       LWP::Simple::get('http://127.0.0.1:' . $port . '/override-num-viewers?num=' . $num_viewers);    
-}
index 9bb33cfd28340d29dc7ae7e871db8b99bd148782..d659b610803811583c8fd12d827adbe60001027c 100644 (file)
@@ -8,11 +8,14 @@ var url = require('url');
 var querystring = require('querystring');
 var path = require('path');
 var zlib = require('zlib');
+var readline = require('readline');
+var child_process = require('child_process');
 var delta = require('./js/json_delta.js');
 
 // Constants.
 var HISTORY_TO_KEEP = 5;
 var MINIMUM_VERSION = null;
+var COUNT_FROM_VARNISH_LOG = true;
 
 // Filename to serve.
 var json_filename = '/srv/analysis.sesse.net/www/analysis.json';
@@ -59,7 +62,7 @@ var last_seen_clients = {};
 var touch_timer = undefined;
 
 // If we are behind Varnish, we can't count the number of clients
-// ourselves, so some external log-tailing daemon needs to tell us.
+// ourselves, so we need to get it from parsing varnishncsa.
 var viewer_count_override = undefined;
 
 var replace_json = function(new_json_contents, mtime) {
@@ -178,21 +181,6 @@ var send_404 = function(response) {
        response.write('Something went wrong. Sorry.');
        response.end();
 }
-var handle_viewer_override = function(request, u, response) {
-       // Only accept requests from localhost.
-       var peer = request.socket.localAddress;
-       if ((peer != '127.0.0.1' && peer != '::1') || request.headers['x-forwarded-for']) {
-               console.log("Refusing viewer override from " + peer);
-               send_404(response);
-       } else {
-               viewer_count_override = (u.query)['num'];
-               response.writeHead(200, {
-                       'Content-Type': 'text/plain',
-               });
-               response.write('OK.');
-               response.end();
-       }
-}
 var send_json = function(response, ims, accept_gzip, num_viewers) {
        var this_json = diff_json[ims] || json;
 
@@ -253,23 +241,85 @@ var count_viewers = function() {
        last_seen_clients = new_last_seen_clients;
        return num_viewers;
 }
+var log = function(str) {
+       console.log("[" + ((new Date).getTime()*1e-3).toFixed(3) + "] " + str);
+}
 
 // Set up a watcher to catch changes to the file, then do an initial read
 // to make sure we have a copy.
 fs.watch(path.dirname(json_filename), reread_file);
 reread_file(null, path.basename(json_filename));
 
+if (COUNT_FROM_VARNISH_LOG) {
+       // Note: We abuse serve_url as a regex.
+       var varnishncsa = child_process.spawn(
+               'varnishncsa', ['-F', '%{%s}t %U %q tffb=%{Varnish:time_firstbyte}x',
+               '-q', 'ReqURL ~ "^' + serve_url + '"']);
+       var rl = readline.createInterface({
+               input: varnishncsa.stdout,
+               output: varnishncsa.stdin,
+               terminal: false
+       });
+
+	var uniques = {};  // keyed by the client's "unique" cookie value, like %uniques in the old Perl script
+       rl.on('line', function(line) {
+               var v = line.match(/(\d+) .*\?ims=\d+&unique=(.*) tffb=(.*)/);
+               if (v) {
+                       uniques[v[2]] = {
+				last_seen: (parseInt(v[1], 10) + parseFloat(v[3])) * 1e3,
+                               grace: null,
+                       };
+                       log(v[1] + " " + v[2] + " " + v[3]);
+               } else {
+                       log("VARNISHNCSA UNPARSEABLE LINE: " + line);
+               }
+       });
+       setInterval(function() {
+               var mtime = json.last_modified - 1000;  // Compensate for subsecond issues.
+               var now = (new Date).getTime();
+               var num_viewers = 0;
+
+               for (var unique in uniques) {
+                       ++num_viewers;
+                       var last_seen = uniques[unique].last_seen;
+                       if (now - last_seen <= 5000) {
+                               // We've seen this user in the last five seconds;
+                               // it's okay.
+                               continue;
+                       }
+                       if (last_seen >= mtime) {
+                               // This user has the latest version;
+                               // they are probably just hanging.
+                               continue;
+                       }
+                       if (uniques[unique].grace === null) {
+                               // They have five seconds after a new JSON has been
+				// provided to get it, or they're out.
+                               // We don't simply use mtime, since we don't want to
+                               // reset the grace timer just because a new JSON is
+                               // published.
+                               uniques[unique].grace = mtime;
+                       }
+                       if (now - uniques[unique].grace > 5000) {
+                               log("Timing out " + unique + " (last_seen=" + last_seen + ", now=" + now +
+				", mtime=" + mtime + ", grace=" + uniques[unique].grace + ")");
+				delete uniques[unique];  // Deleting the current key during for-in is well-defined.
+                               --num_viewers;
+                       }
+               }
+
+               log(num_viewers + " entries in hash, mtime=" + mtime);
+               viewer_count_override = num_viewers;
+       }, 1000);
+}
+
 var server = http.createServer();
 server.on('request', function(request, response) {
        var u = url.parse(request.url, true);
        var ims = (u.query)['ims'];
        var unique = (u.query)['unique'];
 
-       console.log(((new Date).getTime()*1e-3).toFixed(3) + " " + request.url);
-       if (u.pathname === '/override-num-viewers') {
-               handle_viewer_override(request, u, response);
-               return;
-       }
+       log(request.url);
        if (u.pathname !== serve_url) {
                // This is not the request you are looking for.
                send_404(response);