]> git.sesse.net Git - remoteglot/blobdiff - www/serve-analysis.js
Remove an obsolete comment.
[remoteglot] / www / serve-analysis.js
index e45e34f3d3e781e07506bc0f861233e0102569f8..2a53e36ae57714ed3113b2d6bfb24709ab671481 100644 (file)
@@ -8,12 +8,32 @@ var url = require('url');
 var querystring = require('querystring');
 var path = require('path');
 var zlib = require('zlib');
+var readline = require('readline');
+var child_process = require('child_process');
 var delta = require('./js/json_delta.js');
 
 // Constants.
-var JSON_FILENAME = '/srv/analysis.sesse.net/www/analysis.json';
 var HISTORY_TO_KEEP = 5;
-var MINIMUM_VERSION = 2015062104;
+var MINIMUM_VERSION = null;
+var COUNT_FROM_VARNISH_LOG = true;
+
+// Filename to serve.
+var json_filename = '/srv/analysis.sesse.net/www/analysis.json';
+if (process.argv.length >= 3) {
+	json_filename = process.argv[2];
+}
+
+// Expected request path (also abused as a regex against varnishncsa below).
+var serve_url = '/analysis.pl';
+if (process.argv.length >= 4) {
+	serve_url = process.argv[3];
+}
+
+// TCP port to listen on.
+var port = 5000;
+if (process.argv.length >= 5) {
+	port = parseInt(process.argv[4], 10);
+}
 
 // If set to 1, we are already processing a JSON update and should not
 // start a new one. If set to 2, we are _also_ having one in the queue.
@@ -42,7 +62,7 @@ var last_seen_clients = {};
 var touch_timer = undefined;
 
 // If we are behind Varnish, we can't count the number of clients
-// ourselves, so some external log-tailing daemon needs to tell us.
+// ourselves, so we need to get it from parsing varnishncsa.
 var viewer_count_override = undefined;
 
 var replace_json = function(new_json_contents, mtime) {
@@ -104,7 +124,7 @@ var create_json_historic_diff = function(new_json, history_left, new_diff_json,
 }
 
 var reread_file = function(event, filename) {
-       if (filename != path.basename(JSON_FILENAME)) {
+       if (filename != path.basename(json_filename)) {
                return;
        }
        if (json_lock >= 2) {
@@ -118,8 +138,8 @@ var reread_file = function(event, filename) {
        }
        json_lock = 1;
 
-       console.log("Rereading " + JSON_FILENAME);
-       fs.open(JSON_FILENAME, 'r+', function(err, fd) {
+       console.log("Rereading " + json_filename);
+       fs.open(json_filename, 'r+', function(err, fd) {
                if (err) throw err;
                fs.fstat(fd, function(err, st) {
                        if (err) throw err;
@@ -140,7 +160,7 @@ var reread_file = function(event, filename) {
        touch_timer = setTimeout(function() {
                console.log("Touching analysis.json due to no other activity");
                var now = Date.now() / 1000;
-               fs.utimes(JSON_FILENAME, now, now);
+               fs.utimes(json_filename, now, now);
        }, 30000);
 }
 var possibly_wakeup_clients = function() {
@@ -161,21 +181,6 @@ var send_404 = function(response) {
        response.write('Something went wrong. Sorry.');
        response.end();
 }
-var handle_viewer_override = function(request, u, response) {
-       // Only accept requests from localhost.
-       var peer = request.socket.localAddress;
-       if ((peer != '127.0.0.1' && peer != '::1') || request.headers['x-forwarded-for']) {
-               console.log("Refusing viewer override from " + peer);
-               send_404(response);
-       } else {
-               viewer_count_override = (u.query)['num'];
-               response.writeHead(200, {
-                       'Content-Type': 'text/plain',
-               });
-               response.write('OK.');
-               response.end();
-       }
-}
 var send_json = function(response, ims, accept_gzip, num_viewers) {
        var this_json = diff_json[ims] || json;
 
@@ -236,11 +241,77 @@ var count_viewers = function() {
        last_seen_clients = new_last_seen_clients;
        return num_viewers;
 }
+var log = function(msg) {
+	console.log("[" + (Date.now() * 1e-3).toFixed(3) + "] " + msg);
+}
 
 // Set up a watcher to catch changes to the file, then do an initial read
 // to make sure we have a copy.
-fs.watch(path.dirname(JSON_FILENAME), reread_file);
-reread_file(null, path.basename(JSON_FILENAME));
+fs.watch(path.dirname(json_filename), reread_file);
+reread_file(null, path.basename(json_filename));
+
+if (COUNT_FROM_VARNISH_LOG) {
+	// Note: We abuse serve_url as a regex.
+	var varnishncsa = child_process.spawn(
+		'varnishncsa', ['-F', '%{%s}t %U %q tffb=%{Varnish:time_firstbyte}x',
+		'-q', 'ReqURL ~ "^' + serve_url + '"']);
+	var rl = readline.createInterface({
+		input: varnishncsa.stdout,
+		output: varnishncsa.stdin,
+		terminal: false
+	});
+
+	var uniques = {};  // Keyed by the client's unique ID; a plain dictionary.
+	rl.on('line', function(line) {
+		var v = line.match(/(\d+) .*\?ims=\d+&unique=(.*) tffb=(.*)/);
+		if (v) {
+			uniques[v[2]] = {
+				last_seen: (parseInt(v[1], 10) + parseFloat(v[3])) * 1e3,
+				grace: null,
+			};
+			log(v[1] + " " + v[2] + " " + v[3]);
+		} else {
+			log("VARNISHNCSA UNPARSEABLE LINE: " + line);
+		}
+	});
+	setInterval(function() {
+		var mtime = json.last_modified - 1000;  // Compensate for subsecond issues.
+		var now = (new Date).getTime();
+		var num_viewers = 0;
+
+		for (var unique in uniques) {
+			++num_viewers;
+			var last_seen = uniques[unique].last_seen;
+			if (now - last_seen <= 5000) {
+				// We've seen this user in the last five seconds;
+				// it's okay.
+				continue;
+			}
+			if (last_seen >= mtime) {
+				// This user has the latest version;
+				// they are probably just hanging.
+				continue;
+			}
+			if (uniques[unique].grace === null) {
+				// They have five seconds after a new JSON has been
+				// provided to get it, or they're out.
+				// We don't simply use mtime, since we don't want to
+				// reset the grace timer just because a new JSON is
+				// published.
+				uniques[unique].grace = mtime;
+			}
+			if (now - uniques[unique].grace > 5000) {
+				log("Timing out " + unique + " (last_seen=" + last_seen + ", now=" + now +
+					", mtime=" + mtime + ", grace=" + uniques[unique].grace + ")");
+				delete uniques[unique];
+				--num_viewers;
+			}
+		}
+
+		log(num_viewers + " entries in hash, mtime=" + mtime);
+		viewer_count_override = num_viewers;
+	}, 1000);
+}
 
 var server = http.createServer();
 server.on('request', function(request, response) {
@@ -248,12 +319,8 @@ server.on('request', function(request, response) {
        var ims = (u.query)['ims'];
        var unique = (u.query)['unique'];
 
-       console.log(((new Date).getTime()*1e-3).toFixed(3) + " " + request.url);
-       if (u.pathname === '/override-num-viewers') {
-               handle_viewer_override(request, u, response);
-               return;
-       }
-       if (u.pathname !== '/analysis.pl') {
+       log(request.url);
+       if (u.pathname !== serve_url) {
                // This is not the request you are looking for.
                send_404(response);
                return;
@@ -297,4 +364,5 @@ server.on('connection', function(socket) {
                }
        });
 });
-server.listen(5000);
+
+server.listen(port);