// node.js version of analysis.pl; hopefully scales a bit better
// for this specific kind of task.

var http = require('http');
var fs = require('fs');
var url = require('url');
var querystring = require('querystring');
var path = require('path');
var zlib = require('zlib');
var readline = require('readline');
var child_process = require('child_process');
var delta = require('../www/js/json_delta.js');
var hash_lookup = require('./hash-lookup.js');
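
// json_delta.js computes structural diffs between JSON documents (used for
// the ?ims= delta responses below); hash-lookup.js forwards hash-probe
// requests for a given FEN to the gRPC backends configured further down.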

var HISTORY_TO_KEEP = 5;
var MINIMUM_VERSION = null;
var COUNT_FROM_VARNISH_LOG = true;

var json_filename = '/srv/analysis.sesse.net/www/analysis.json';
if (process.argv.length >= 3) {
	json_filename = process.argv[2];
}

// Expected destination filenames.
var serve_url = '/analysis.pl';
var hash_serve_url = '/hash';
if (process.argv.length >= 4) {
	serve_url = process.argv[3];
}
if (process.argv.length >= 5) {
	hash_serve_url = process.argv[4];
}

// TCP port to listen on.
if (process.argv.length >= 6) {
	port = parseInt(process.argv[5]);
}

var grpc_backends = ["localhost:50051", "localhost:50052"];
if (process.argv.length >= 7) {
	grpc_backends = process.argv[6].split(",");
}
hash_lookup.init(grpc_backends);
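
// Example invocation (script name and values illustrative only):
//
//   node serve-analysis.js /srv/analysis.sesse.net/www/analysis.json /analysis.pl /hash 5000 localhost:50051,localhost:50052
//
// i.e., argv[2] = JSON file to watch, argv[3] = serve URL, argv[4] = hash URL,
// argv[5] = TCP port, argv[6] = comma-separated gRPC backends.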

// If set to 1, we are already processing a JSON update and should not
// start a new one. If set to 2, there is also another update waiting in the queue.
var json_lock = 0;
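// Lock transitions (see reread_file() and replace_json()): 0 -> 1 when a
// reread starts, 1 -> 2 if another change arrives while we are still busy,
// 2 -> 1 when the queued reread is picked up, and back to 0 once the new
// JSON (and its diffs) has been put into place.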

// The current contents of the file to hand out, and its last modified time.
var json = undefined;

// The last five timestamps, and diffs from them to the latest version.
var historic_json = [];
var diff_json = {};
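// diff_json maps an old last_modified timestamp to a precomputed (and
// pre-gzipped) JSON_delta diff from that version to the current one, so a
// client that reports its version via ?ims=<timestamp> can be sent just the
// delta instead of the full file (see send_json() below).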

// The list of clients that are waiting for new data to show up.
// Uniquely keyed by request_id so that we can take them out of
// the queue if they close the socket.
var sleeping_clients = {};
var request_id = 0;

// List of when clients were last seen, keyed by their unique ID.
// Used to show a viewer count to the user.
var last_seen_clients = {};

// The timer used to touch the file every 30 seconds if nobody
// else does it for us. This makes sure we don't have clients
// hanging indefinitely (which might have them return errors).
var touch_timer = undefined;
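// reread_file() re-arms this timer on every change; if nothing happens for
// 30 seconds, the utimes() touch makes the file watcher fire again, so
// long-polling clients get a response before intermediaries give up on them.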

// If we are behind Varnish, we can't count the number of clients
// ourselves, so we need to get it from parsing varnishncsa.
var viewer_count_override = undefined;

var replace_json = function(new_json_contents, mtime) {
	// Generate the list of diffs from the last five versions.
	if (json !== undefined) {
		// If two versions have the same mtime, clients could have either.
		// Note the fact, so that we never insert it into the history.
		if (json.last_modified == mtime) {
			json.invalid_base = true;
		}
		if (!json.invalid_base) {
			historic_json.push(json);
			if (historic_json.length > HISTORY_TO_KEEP) {
				historic_json.shift();
			}
		}
	}

	var parsed = JSON.parse(new_json_contents);

	if (parsed['internal']) {
		if (parsed['internal']['grpc_backends'] &&
		    hash_lookup.need_reinit(parsed['internal']['grpc_backends'])) {
			hash_lookup.init(parsed['internal']['grpc_backends']);
		}
		delete parsed['internal'];
		new_json_contents = JSON.stringify(parsed);
	}

	var new_json = {
		parsed: parsed,
		plain: new_json_contents,
		last_modified: mtime
	};
	create_json_historic_diff(new_json, historic_json.slice(0), {}, function(new_diff_json) {
		// gzip the new version (non-delta), and put it into place.
		zlib.gzip(new_json_contents, function(err, buffer) {
			new_json.gzip = buffer;
			json = new_json;
			diff_json = new_diff_json;
			json_lock = 0;

			// Finally, wake up any sleeping clients.
			possibly_wakeup_clients();
		});
	});
}

var create_json_historic_diff = function(new_json, history_left, new_diff_json, cb) {
	if (history_left.length == 0) {
		cb(new_diff_json);
		return;
	}

	var histobj = history_left.shift();
	var diff = delta.JSON_delta.diff(histobj.parsed, new_json.parsed);
	var diff_text = JSON.stringify(diff);
	zlib.gzip(diff_text, function(err, buffer) {
		new_diff_json[histobj.last_modified] = {
			plain: diff_text,
			gzip: buffer,
			last_modified: new_json.last_modified,
		};
		create_json_historic_diff(new_json, history_left, new_diff_json, cb);
	});
}

var reread_file = function(event, filename) {
	if (filename != path.basename(json_filename)) {
		return;
	}
	if (json_lock >= 2) {
		return;
	}
	if (json_lock == 1) {
		// Already processing; wait a bit.
		json_lock = 2;
		setTimeout(function() { if (json_lock == 2) json_lock = 1; reread_file(event, filename); }, 100);
		return;
	}
	json_lock = 1;

	console.log("Rereading " + json_filename);
	fs.open(json_filename, 'r', function(err, fd) {
		fs.fstat(fd, function(err, st) {
			var buffer = new Buffer(1048576);
			fs.read(fd, buffer, 0, 1048576, 0, function(err, bytesRead, buffer) {
				fs.close(fd, function() {
					var new_json_contents = buffer.toString('utf8', 0, bytesRead);
					replace_json(new_json_contents, st.mtime.getTime());
				});
			});
		});
	});

	if (touch_timer !== undefined) {
		clearTimeout(touch_timer);
	}
	touch_timer = setTimeout(function() {
		console.log("Touching analysis.json due to no other activity");
		var now = Date.now() / 1000;
		fs.utimes(json_filename, now, now);
	}, 30000);
}

var possibly_wakeup_clients = function() {
	var num_viewers = count_viewers();
	for (var i in sleeping_clients) {
		mark_recently_seen(sleeping_clients[i].unique);
		send_json(sleeping_clients[i].response,
		          sleeping_clients[i].ims,
		          sleeping_clients[i].accept_gzip,
		          num_viewers);
	}
	sleeping_clients = {};
}

var send_404 = function(response) {
	response.writeHead(404, {
		'Content-Type': 'text/plain',
	});
	response.write('Something went wrong. Sorry.');
	response.end();
}

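// Headers X-RGLM, X-RGNV and X-RGMV below carry, respectively, the
// last-modified timestamp of the JSON being sent, the current viewer count,
// and (only if MINIMUM_VERSION is set) the minimum client version; they are
// exposed to cross-origin clients via Access-Control-Expose-Headers.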
var send_json = function(response, ims, accept_gzip, num_viewers) {
	var this_json = diff_json[ims] || json;

	var headers = {
		'Content-Type': 'text/json',
		'X-RGLM': this_json.last_modified,
		'X-RGNV': num_viewers,
		'Access-Control-Expose-Headers': 'X-RGLM, X-RGNV, X-RGMV',
		'Vary': 'Accept-Encoding',
	};

	if (MINIMUM_VERSION) {
		headers['X-RGMV'] = MINIMUM_VERSION;
	}

	if (accept_gzip) {
		headers['Content-Length'] = this_json.gzip.length;
		headers['Content-Encoding'] = 'gzip';
		response.writeHead(200, headers);
		response.write(this_json.gzip);
	} else {
		headers['Content-Length'] = this_json.plain.length;
		response.writeHead(200, headers);
		response.write(this_json.plain);
	}
	response.end();
}

var mark_recently_seen = function(unique) {
	if (unique) {
		last_seen_clients[unique] = (new Date).getTime();
	}
}

var count_viewers = function() {
	if (viewer_count_override !== undefined) {
		return viewer_count_override;
	}
	var now = (new Date).getTime();

	// Go through and remove old viewers, and count them at the same time.
	var new_last_seen_clients = {};
	var num_viewers = 0;
	for (var unique in last_seen_clients) {
		if (now - last_seen_clients[unique] < 5000) {
			++num_viewers;
			new_last_seen_clients[unique] = last_seen_clients[unique];
		}
	}

	// Also add sleeping clients that we would otherwise assume timed out.
	for (var request_id in sleeping_clients) {
		var unique = sleeping_clients[request_id].unique;
		if (unique && !(unique in new_last_seen_clients)) {
			++num_viewers;
		}
	}

	last_seen_clients = new_last_seen_clients;
	return num_viewers;
}

var log = function(str) {
	console.log("[" + ((new Date).getTime()*1e-3).toFixed(3) + "] " + str);
}

// Set up a watcher to catch changes to the file, then do an initial read
// to make sure we have a copy.
fs.watch(path.dirname(json_filename), reread_file);
reread_file(null, path.basename(json_filename));

if (COUNT_FROM_VARNISH_LOG) {
	// Note: We abuse serve_url as a regex.
	var varnishncsa = child_process.spawn(
		'varnishncsa', ['-F', '%{%s}t %U %q tffb=%{Varnish:time_firstbyte}x',
		'-q', 'ReqURL ~ "^' + serve_url + '"']);
	var rl = readline.createInterface({
		input: varnishncsa.stdout,
		output: varnishncsa.stdin,
		terminal: false
	});

	var uniques = {};
	rl.on('line', function(line) {
		var v = line.match(/(\d+) .*\?ims=\d+&unique=(.*) tffb=(.*)/);
		if (v) {
			uniques[v[2]] = {
				last_seen: (parseInt(v[1]) + parseFloat(v[3])) * 1e3,
				grace: null,
			};
			log(v[1] + " " + v[2] + " " + v[3]);
		} else {
			log("VARNISHNCSA UNPARSEABLE LINE: " + line);
		}
	});

	setInterval(function() {
		var mtime = json.last_modified - 1000;  // Compensate for subsecond issues.
		var now = (new Date).getTime();

		var num_viewers = 0;
		for (var unique in uniques) {
			++num_viewers;
			var last_seen = uniques[unique].last_seen;
			if (now - last_seen <= 5000) {
				// We've seen this user in the last five seconds;
				// nothing to do.
				continue;
			}
			if (last_seen >= mtime) {
				// This user has the latest version;
				// they are probably just hanging.
				continue;
			}
			if (uniques[unique].grace === null) {
				// They have five seconds after a new JSON has been
				// provided to get it, or they're out.
				// We don't simply use mtime, since we don't want to
				// reset the grace timer just because a new JSON is
				// published.
				uniques[unique].grace = mtime;
			}
			if (now - uniques[unique].grace > 5000) {
				log("Timing out " + unique + " (last_seen=" + last_seen + ", now=" + now +
				    ", mtime=" + mtime + ", grace=" + uniques[unique].grace + ")");
				delete uniques[unique];
				--num_viewers;
			}
		}

		log(num_viewers + " entries in hash, mtime=" + mtime);
		viewer_count_override = num_viewers;
	}, 1000);
}

var server = http.createServer();
server.on('request', function(request, response) {
	var u = url.parse(request.url, true);
	var ims = (u.query)['ims'];
	var unique = (u.query)['unique'];

	if (u.pathname === hash_serve_url) {
		var fen = (u.query)['fen'];
		hash_lookup.handle_request(fen, response);
		return;
	}
	if (u.pathname !== serve_url) {
		// This is not the request you are looking for.
		send_404(response);
		return;
	}

	mark_recently_seen(unique);

	var accept_encoding = request.headers['accept-encoding'];
	var accept_gzip;
	if (accept_encoding !== undefined && accept_encoding.match(/\bgzip\b/)) {
		accept_gzip = true;
	} else {
		accept_gzip = false;
	}

	// If we already have something newer than what the user has,
	// just send it out and be done with it.
	if (json !== undefined && (!ims || json.last_modified > ims)) {
		send_json(response, ims, accept_gzip, count_viewers());
		return;
	}

	// OK, so we need to hang until we have something newer.
	// Put the user on the wait list.
	var client = {};
	client.response = response;
	client.request_id = request_id;
	client.accept_gzip = accept_gzip;
	client.unique = unique;
	client.ims = ims;
	sleeping_clients[request_id++] = client;

	request.socket.client = client;
});
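
// A well-behaved client polls serve_url with ?ims=<last X-RGLM it saw> and a
// stable &unique=<id>; if we already have newer JSON, it is answered at once
// (possibly with just a diff), otherwise it is parked in sleeping_clients
// until replace_json() wakes everybody up or the socket is closed.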
server.on('connection', function(socket) {
	socket.on('close', function() {
		var client = socket.client;
		if (client) {
			mark_recently_seen(client.unique);
			delete sleeping_clients[client.request_id];