X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=server%2Fserve-analysis.js;h=2e176cc9b211a89410dcdcc96af9da5941269dc7;hb=4b4dbe9808a47d7c61aedc7b70ad4608cd39b7a0;hp=0490e96f604a2639102aa62cc572320208b23fe7;hpb=83aa0538ddb9037d37295f7e6e68348773661d7a;p=remoteglot

diff --git a/server/serve-analysis.js b/server/serve-analysis.js
index 0490e96..2e176cc 100644
--- a/server/serve-analysis.js
+++ b/server/serve-analysis.js
@@ -40,6 +40,13 @@ if (process.argv.length >= 6) {
 	port = parseInt(process.argv[5]);
 }
 
+// gRPC backends.
+var grpc_backends = ["localhost:50051", "localhost:50052"];
+if (process.argv.length >= 7) {
+	grpc_backends = process.argv[6].split(",");
+}
+hash_lookup.init(grpc_backends);
+
 // If set to 1, we are already processing a JSON update and should not
 // start a new one. If set to 2, we are _also_ having one in the queue.
 var json_lock = 0;
@@ -86,8 +93,19 @@ var replace_json = function(new_json_contents, mtime) {
 		}
 	}
 
+	var parsed = JSON.parse(new_json_contents);
+
+	if (parsed['internal']) {
+		if (parsed['internal']['grpc_backends'] &&
+		    hash_lookup.need_reinit(parsed['internal']['grpc_backends'])) {
+			hash_lookup.init(parsed['internal']['grpc_backends']);
+		}
+		delete parsed['internal'];
+		new_json_contents = JSON.stringify(parsed);
+	}
+
 	var new_json = {
-		parsed: JSON.parse(new_json_contents),
+		parsed: parsed,
 		plain: new_json_contents,
 		last_modified: mtime
 	};
@@ -116,6 +134,17 @@ var create_json_historic_diff = function(new_json, history_left, new_diff_json,
 	var histobj = history_left.shift();
 	var diff = delta.JSON_delta.diff(histobj.parsed, new_json.parsed);
 	var diff_text = JSON.stringify(diff);
+
+	// Verify that the delta is correct
+	var base = JSON.parse(histobj.plain);
+	delta.JSON_delta.patch(base, diff);
+	var correct_pv = JSON.stringify(base['pv']);
+	var wrong_pv = JSON.stringify(new_json.parsed['pv']);
+	if (correct_pv !== wrong_pv) {
+		console.log("Patch went wrong:", histobj.plain, new_json.plain);
+		process.exit();
+	}
+
 	zlib.gzip(diff_text, function(err, buffer) {
 		if (err) throw err;
 		new_diff_json[histobj.last_modified] = {
@@ -128,6 +157,23 @@ var create_json_historic_diff = function(new_json, history_left, new_diff_json,
 	});
 }
 
+function read_entire_file(filename, callback) {
+	fs.open(filename, 'r', function(err, fd) {
+		if (err) throw err;
+		fs.fstat(fd, function(err, st) {
+			if (err) throw err;
+			var buffer = new Buffer(1048576);
+			fs.read(fd, buffer, 0, 1048576, 0, function(err, bytesRead, buffer) {
+				if (err) throw err;
+				fs.close(fd, function() {
+					var contents = buffer.toString('utf8', 0, bytesRead);
+					callback(contents, st.mtime.getTime());
+				});
+			});
+		});
+	});
+}
+
 var reread_file = function(event, filename) {
 	if (filename != path.basename(json_filename)) {
 		return;
@@ -138,25 +184,14 @@ var reread_file = function(event, filename) {
 	if (json_lock == 1) {
 		// Already processing; wait a bit.
json_lock = 2; - setTimeout(function() { json_lock = 1; reread_file(event, filename); }, 100); + setTimeout(function() { if (json_lock == 2) json_lock = 1; reread_file(event, filename); }, 100); return; } json_lock = 1; console.log("Rereading " + json_filename); - fs.open(json_filename, 'r', function(err, fd) { - if (err) throw err; - fs.fstat(fd, function(err, st) { - if (err) throw err; - var buffer = new Buffer(1048576); - fs.read(fd, buffer, 0, 1048576, 0, function(err, bytesRead, buffer) { - if (err) throw err; - fs.close(fd, function() { - var new_json_contents = buffer.toString('utf8', 0, bytesRead); - replace_json(new_json_contents, st.mtime.getTime()); - }); - }); - }); + read_entire_file(json_filename, function(new_json_contents, mtime) { + replace_json(new_json_contents, mtime); }); if (touch_timer !== undefined) { @@ -165,7 +200,7 @@ var reread_file = function(event, filename) { touch_timer = setTimeout(function() { console.log("Touching analysis.json due to no other activity"); var now = Date.now() / 1000; - fs.utimes(json_filename, now, now); + fs.utimes(json_filename, now, now, function() {}); }, 30000); } var possibly_wakeup_clients = function() {
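
Note: the verification block added in create_json_historic_diff() above round-trips each
computed delta: it re-applies the fresh diff to the previous document and checks that the
resulting 'pv' field matches the new document before the delta is gzipped and cached. Below
is a minimal standalone sketch of that round trip, assuming the same JSON_delta API the
server uses (delta.JSON_delta.diff / delta.JSON_delta.patch); the require path and the
sample documents are made up for illustration and are not part of the patch.

// Sketch: verify a JSON_delta diff by patching it back onto the old document.
// The module path is an assumption; remoteglot bundles its own json_delta.js.
var delta = require('./json_delta.js');

// Hypothetical old and new analysis documents (only the 'pv' field matters here).
var old_doc = { position: { fen: 'startpos' }, pv: ['e4', 'e5', 'Nf3'] };
var new_doc = { position: { fen: 'startpos' }, pv: ['e4', 'c5', 'Nf3', 'd6'] };

// Compute the delta from the old document to the new one, as
// create_json_historic_diff() does.
var diff = delta.JSON_delta.diff(old_doc, new_doc);

// Re-apply the delta to a fresh copy of the old document; the server calls
// patch() the same way and then reads the patched structure back out of its
// argument.
var base = JSON.parse(JSON.stringify(old_doc));
delta.JSON_delta.patch(base, diff);

// Compare only the 'pv' fields, mirroring the server-side check.
if (JSON.stringify(base['pv']) !== JSON.stringify(new_doc['pv'])) {
	console.log('Patch went wrong:', JSON.stringify(diff));
	process.exit(1);
}
console.log('delta verified OK');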