git.sesse.net Git - cubemap/blobdiff - main.cpp
Keep information about HLS downloads around for some time afterwards.
[cubemap] / main.cpp
index 33ed9dfba726bc8b5332d42a3fcae2e4ceca54f5..737f0c3573acea88fa7d9f0585aee69d95ec0838 100644 (file)
--- a/main.cpp
+++ b/main.cpp
@@ -15,6 +15,7 @@
 #include <map>
 #include <set>
 #include <string>
+#include <unordered_map>
 #include <utility>
 #include <vector>
 
@@ -36,8 +37,8 @@
 
 using namespace std;
 
-AccessLogThread *access_log = NULL;
-ServerPool *servers = NULL;
+AccessLogThread *access_log = nullptr;
+ServerPool *servers = nullptr;
 volatile bool hupped = false;
 volatile bool stopped = false;
 
@@ -97,15 +98,13 @@ CubemapStateProto collect_state(const timespec &serialize_start,
        CubemapStateProto state = servers->serialize();  // Fills streams() and clients().
        state.set_serialize_start_sec(serialize_start.tv_sec);
        state.set_serialize_start_usec(serialize_start.tv_nsec / 1000);
-       
-       for (size_t i = 0; i < acceptors.size(); ++i) {
-               state.add_acceptors()->MergeFrom(acceptors[i]->serialize());
+
+       for (Acceptor *acceptor : acceptors) {  
+               state.add_acceptors()->MergeFrom(acceptor->serialize());
        }
 
-       for (multimap<InputKey, InputWithRefcount>::const_iterator input_it = inputs.begin();
-            input_it != inputs.end();
-            ++input_it) {
-               state.add_inputs()->MergeFrom(input_it->second.input->serialize());
+       for (const auto &key_and_input_with_refcount : inputs) {
+               state.add_inputs()->MergeFrom(key_and_input_with_refcount.second.input->serialize());
        }
 
        return state;
@@ -117,11 +116,9 @@ vector<Acceptor *> create_acceptors(
        map<AcceptorConfig, Acceptor *, AcceptorConfigCompare> *deserialized_acceptors)
 {
        vector<Acceptor *> acceptors;
-       for (unsigned i = 0; i < config.acceptors.size(); ++i) {
-               const AcceptorConfig &acceptor_config = config.acceptors[i];
-               Acceptor *acceptor = NULL;
-               map<AcceptorConfig, Acceptor *, AcceptorConfigCompare>::iterator deserialized_acceptor_it =
-                       deserialized_acceptors->find(acceptor_config);
+       for (const AcceptorConfig &acceptor_config : config.acceptors) {
+               Acceptor *acceptor = nullptr;
+               const auto deserialized_acceptor_it = deserialized_acceptors->find(acceptor_config);
                if (deserialized_acceptor_it != deserialized_acceptors->end()) {
                        acceptor = deserialized_acceptor_it->second;
                        deserialized_acceptors->erase(deserialized_acceptor_it);
@@ -136,12 +133,9 @@ vector<Acceptor *> create_acceptors(
        }
 
        // Close all acceptors that are no longer in the configuration file.
-       for (map<AcceptorConfig, Acceptor *, AcceptorConfigCompare>::iterator
-                acceptor_it = deserialized_acceptors->begin();
-            acceptor_it != deserialized_acceptors->end();
-            ++acceptor_it) {
-               acceptor_it->second->close_socket();
-               delete acceptor_it->second;
+       for (auto &config_and_acceptor : *deserialized_acceptors) {
+               config_and_acceptor.second->close_socket();
+               delete config_and_acceptor.second;
        }
 
        return acceptors;
@@ -159,7 +153,7 @@ void create_config_input(const string &src, Input::Encoding encoding, multimap<I
 
        InputWithRefcount iwr;
        iwr.input = create_input(src, encoding);
-       if (iwr.input == NULL) {
+       if (iwr.input == nullptr) {
                log(ERROR, "did not understand URL '%s' or source encoding was invalid, clients will not get any data.",
                        src.c_str());
                return;
@@ -171,14 +165,14 @@ void create_config_input(const string &src, Input::Encoding encoding, multimap<I
 // Find all streams in the configuration file, and create inputs for them.
 void create_config_inputs(const Config &config, multimap<InputKey, InputWithRefcount> *inputs)
 {
-       for (unsigned i = 0; i < config.streams.size(); ++i) {
-               const StreamConfig &stream_config = config.streams[i];
-               if (stream_config.src != "delete") {
-                       create_config_input(stream_config.src, Input::Encoding(stream_config.src_encoding), inputs);
+       for (const StreamConfig &stream_config : config.streams) {
+               if (stream_config.src == "delete") {
+                       // Ignored for pre-1.4.0 configuration compatibility.
+                       continue;
                }
+               create_config_input(stream_config.src, Input::Encoding(stream_config.src_encoding), inputs);
        }
-       for (unsigned i = 0; i < config.udpstreams.size(); ++i) {
-               const UDPStreamConfig &udpstream_config = config.udpstreams[i];
+       for (const UDPStreamConfig &udpstream_config : config.udpstreams) {
                create_config_input(udpstream_config.src, Input::INPUT_ENCODING_RAW, inputs);
        }
 }
@@ -189,33 +183,41 @@ void create_streams(const Config &config,
 {
        // HTTP streams.
        set<string> expecting_urls = deserialized_urls;
-       for (unsigned i = 0; i < config.streams.size(); ++i) {
-               const StreamConfig &stream_config = config.streams[i];
+       for (const StreamConfig &stream_config : config.streams) {
                int stream_index;
 
                expecting_urls.erase(stream_config.url);
 
-               // Special-case deleted streams; they were never deserialized in the first place,
-               // so just ignore them.
                if (stream_config.src == "delete") {
+                       // Ignored for pre-1.4.0 configuration compatibility.
                        continue;
                }
 
                if (deserialized_urls.count(stream_config.url) == 0) {
                        stream_index = servers->add_stream(stream_config.url,
+                                                          stream_config.hls_url,
                                                           stream_config.backlog_size,
                                                           stream_config.prebuffering_bytes,
                                                           Stream::Encoding(stream_config.encoding),
-                                                          Stream::Encoding(stream_config.src_encoding));
+                                                          Stream::Encoding(stream_config.src_encoding),
+                                                          stream_config.hls_frag_duration,
+                                                          stream_config.hls_backlog_margin,
+                                                          stream_config.allow_origin);
                } else {
                        stream_index = servers->lookup_stream_by_url(stream_config.url);
                        assert(stream_index != -1);
                        servers->set_backlog_size(stream_index, stream_config.backlog_size);
+                       if (!stream_config.hls_url.empty()) {
+                               servers->register_hls_url(stream_index, stream_config.hls_url);
+                       }
                        servers->set_prebuffering_bytes(stream_index, stream_config.prebuffering_bytes);
                        servers->set_encoding(stream_index,
                                              Stream::Encoding(stream_config.encoding));
                        servers->set_src_encoding(stream_index,
                                                  Stream::Encoding(stream_config.src_encoding));
+                       servers->set_hls_frag_duration(stream_index, stream_config.hls_frag_duration);
+                       servers->set_hls_backlog_margin(stream_index, stream_config.hls_backlog_margin);
+                       servers->set_allow_origin(stream_index, stream_config.allow_origin);
                }
 
                servers->set_pacing_rate(stream_index, stream_config.pacing_rate);
@@ -223,7 +225,7 @@ void create_streams(const Config &config,
                string src = stream_config.src;
                Input::Encoding src_encoding = Input::Encoding(stream_config.src_encoding);
                if (!src.empty()) {
-                       multimap<InputKey, InputWithRefcount>::iterator input_it = inputs->find(make_pair(src, src_encoding));
+                       const auto input_it = inputs->find(make_pair(src, src_encoding));
                        if (input_it != inputs->end()) {
                                input_it->second.input->add_destination(stream_index);
                                ++input_it->second.refcount;
@@ -232,10 +234,7 @@ void create_streams(const Config &config,
        }
 
        // Warn about any streams servers we've lost.
-       for (set<string>::const_iterator stream_it = expecting_urls.begin();
-            stream_it != expecting_urls.end();
-            ++stream_it) {
-               string url = *stream_it;
+       for (const string &url : expecting_urls) {
                log(WARNING, "stream '%s' disappeared from the configuration file. "
                             "It will not be deleted, but clients will not get any new inputs. "
                             "If you really meant to delete it, set src=delete and reload.",
@@ -243,8 +242,7 @@ void create_streams(const Config &config,
        }
 
        // UDP streams.
-       for (unsigned i = 0; i < config.udpstreams.size(); ++i) {
-               const UDPStreamConfig &udpstream_config = config.udpstreams[i];
+       for (const UDPStreamConfig &udpstream_config : config.udpstreams) {
                int stream_index = servers->add_udpstream(
                        udpstream_config.dst,
                        udpstream_config.pacing_rate,
@@ -253,7 +251,7 @@ void create_streams(const Config &config,
 
                string src = udpstream_config.src;
                if (!src.empty()) {
-                       multimap<InputKey, InputWithRefcount>::iterator input_it = inputs->find(make_pair(src, Input::INPUT_ENCODING_RAW));
+                       auto input_it = inputs->find(make_pair(src, Input::INPUT_ENCODING_RAW));
                        assert(input_it != inputs->end());
                        input_it->second.input->add_destination(stream_index);
                        ++input_it->second.refcount;
@@ -261,20 +259,19 @@ void create_streams(const Config &config,
        }
 
        // HTTP gen204 endpoints.
-       for (unsigned i = 0; i < config.pings.size(); ++i) {
-               const Gen204Config &ping_config = config.pings[i];
+       for (const Gen204Config &ping_config : config.pings) {
                servers->add_gen204(ping_config.url, ping_config.allow_origin);
        }
 }
        
 void open_logs(const vector<LogConfig> &log_destinations)
 {
-       for (size_t i = 0; i < log_destinations.size(); ++i) {
-               if (log_destinations[i].type == LogConfig::LOG_TYPE_FILE) {
-                       add_log_destination_file(log_destinations[i].filename);
-               } else if (log_destinations[i].type == LogConfig::LOG_TYPE_CONSOLE) {
+       for (const LogConfig &log_destination : log_destinations) {
+               if (log_destination.type == LogConfig::LOG_TYPE_FILE) {
+                       add_log_destination_file(log_destination.filename);
+               } else if (log_destination.type == LogConfig::LOG_TYPE_CONSOLE) {
                        add_log_destination_console();
-               } else if (log_destinations[i].type == LogConfig::LOG_TYPE_SYSLOG) {
+               } else if (log_destination.type == LogConfig::LOG_TYPE_SYSLOG) {
                        add_log_destination_syslog();
                } else {
                        assert(false);
@@ -297,7 +294,7 @@ bool dry_run_config(const string &argv0, const string &config_filename)
                return false;
        case 0:
                // Child.
-               execlp(argv0_copy, argv0_copy, "--test-config", config_filename_copy, NULL);
+               execlp(argv0_copy, argv0_copy, "--test-config", config_filename_copy, nullptr);
                log_perror(argv0_copy);
                _exit(1);
        default:
@@ -322,13 +319,16 @@ bool dry_run_config(const string &argv0, const string &config_filename)
        return (WIFEXITED(status) && WEXITSTATUS(status) == 0);
 }
 
-void find_deleted_streams(const Config &config, set<string> *deleted_urls)
+void find_all_streams(const Config &config, set<string> *all_urls)
 {
-       for (unsigned i = 0; i < config.streams.size(); ++i) {
-               const StreamConfig &stream_config = config.streams[i];
+       for (const StreamConfig &stream_config : config.streams) {
                if (stream_config.src == "delete") {
-                       log(INFO, "Deleting stream '%s'.", stream_config.url.c_str());
-                       deleted_urls->insert(stream_config.url);
+                       log(WARNING, "stream '%s' has src=delete; ignoring it. Since Cubemap 1.4.0, you do not "
+                                    "need to set src=delete to delete streams anymore; just delete them from "
+                                    "the configuration file.",
+                                    stream_config.url.c_str());
+               } else {
+                       all_urls->insert(stream_config.url);
                }
        }
 }
@@ -379,11 +379,11 @@ int main(int argc, char **argv)
        char argv0_canon[PATH_MAX];
        char config_filename_canon[PATH_MAX];
 
-       if (realpath("/proc/self/exe", argv0_canon) == NULL) {
+       if (realpath("/proc/self/exe", argv0_canon) == nullptr) {
                log_perror(argv[0]);
                exit(1);
        }
-       if (realpath(config_filename.c_str(), config_filename_canon) == NULL) {
+       if (realpath(config_filename.c_str(), config_filename_canon) == nullptr) {
                log_perror(config_filename.c_str());
                exit(1);
        }
@@ -422,9 +422,9 @@ start:
 
        servers = new ServerPool(config.num_servers);
 
-       // Find all the streams that are to be deleted.
-       set<string> deleted_urls;
-       find_deleted_streams(config, &deleted_urls);
+       // Find all the streams that are to be kept.
+       set<string> all_urls;
+       find_all_streams(config, &all_urls);
 
        CubemapStateProto loaded_state;
        timespec serialize_start;
@@ -446,19 +446,18 @@ start:
                serialize_start.tv_nsec = loaded_state.serialize_start_usec() * 1000ull;
 
                // Deserialize the streams.
-               map<string, string> stream_headers_for_url;  // See below.
-               for (int i = 0; i < loaded_state.streams_size(); ++i) {
-                       const StreamProto &stream = loaded_state.streams(i);
-
-                       if (deleted_urls.count(stream.url()) != 0) {
+               unordered_map<string, string> stream_headers_for_url;  // See below.
+               for (const StreamProto &stream : loaded_state.streams()) {
+                       if (all_urls.count(stream.url()) == 0) {
                                // Delete the stream backlogs.
-                               for (int j = 0; j < stream.data_fds_size(); ++j) {
-                                       safe_close(stream.data_fds(j));
+                               log(INFO, "Deleting stream '%s'.", stream.url().c_str());
+                               for (const int fd : stream.data_fds()) {
+                                       safe_close(fd);
                                }
                        } else {
                                vector<int> data_fds;
-                               for (int j = 0; j < stream.data_fds_size(); ++j) {
-                                       data_fds.push_back(stream.data_fds(j));
+                               for (const int fd : stream.data_fds()) {
+                                       data_fds.push_back(fd);
                                }
 
                                servers->add_stream_from_serialized(stream, data_fds);
@@ -469,9 +468,7 @@ start:
                }
 
                // Deserialize the inputs. Note that we don't actually add them to any stream yet.
-               for (int i = 0; i < loaded_state.inputs_size(); ++i) {
-                       InputProto serialized_input = loaded_state.inputs(i);
-
+               for (const InputProto &serialized_input : loaded_state.inputs()) {
                        InputWithRefcount iwr;
                        iwr.input = create_input(serialized_input);
                        iwr.refcount = 0;
@@ -484,14 +481,14 @@ start:
                } 
 
                // Deserialize the acceptors.
-               for (int i = 0; i < loaded_state.acceptors_size(); ++i) {
+               for (const AcceptorProto &serialized_acceptor : loaded_state.acceptors()) {
                        AcceptorConfig config;
-                       config.addr = extract_address_from_acceptor_proto(loaded_state.acceptors(i));
-                       config.certificate_chain = loaded_state.acceptors(i).certificate_chain();
-                       config.private_key = loaded_state.acceptors(i).private_key();
+                       config.addr = extract_address_from_acceptor_proto(serialized_acceptor);
+                       config.certificate_chain = serialized_acceptor.certificate_chain();
+                       config.private_key = serialized_acceptor.private_key();
                        deserialized_acceptors.insert(make_pair(
                                config,
-                               new Acceptor(loaded_state.acceptors(i))));
+                               new Acceptor(serialized_acceptor)));
                }
 
                log(INFO, "Deserialization done.");
@@ -510,6 +507,12 @@ start:
                        servers->create_tls_context_for_acceptor(acceptor);
                }
        }
+
+       // Allocate strings for the short responses.
+       vector<shared_ptr<const string>> short_response_pool;
+       for (const ShortResponsePool &str : loaded_state.short_response_pool()) {
+               short_response_pool.emplace_back(new string(str.header_or_short_response()));
+       }
        
        // Put back the existing clients. It doesn't matter which server we
        // allocate them to, so just do round-robin. However, we need to sort them
@@ -518,18 +521,30 @@ start:
             loaded_state.mutable_clients()->end(),
             OrderByConnectionTime());
        for (int i = 0; i < loaded_state.clients_size(); ++i) {
-               if (deleted_urls.count(loaded_state.clients(i).url()) != 0) {
+               if (!loaded_state.clients(i).url().empty() &&
+                   all_urls.count(loaded_state.clients(i).url()) == 0) {
+                       // Belongs to a dead stream (not keepalive), so we just have to close.
                        safe_close(loaded_state.clients(i).sock());
                } else {
-                       servers->add_client_from_serialized(loaded_state.clients(i));
+                       servers->add_client_from_serialized(loaded_state.clients(i), short_response_pool);
                }
        }
        
+       short_response_pool.clear();  // No longer needed; the clients have their own refcounts now.
+
+       // Put back the HLS zombies. There's no really good allocation here
+       // except round-robin; it would be marginally more efficient to match it
+       // to the client (since that would have them deleted immediately when
+       // the client requests the next fragment, instead of being later weeded
+       // out during statistics collection), but it's not a big deal.
+       for (const HLSZombieProto &zombie_proto : loaded_state.hls_zombies()) {
+               servers->add_hls_zombie_from_serialized(zombie_proto);
+       }
+
        servers->run();
 
        // Now delete all inputs that are longer in use, and start the others.
-       for (multimap<InputKey, InputWithRefcount>::iterator input_it = inputs.begin();
-            input_it != inputs.end(); ) {
+       for (auto input_it = inputs.begin(); input_it != inputs.end(); ) {
                if (input_it->second.refcount == 0) {
                        if (input_it->first.second == Input::INPUT_ENCODING_RAW) {
                                log(WARNING, "Raw input '%s' no longer in use, closing.",
@@ -549,21 +564,20 @@ start:
        }
 
        // Start writing statistics.
-       StatsThread *stats_thread = NULL;
+       unique_ptr<StatsThread> stats_thread;
        if (!config.stats_file.empty()) {
-               stats_thread = new StatsThread(config.stats_file, config.stats_interval);
+               stats_thread.reset(new StatsThread(config.stats_file, config.stats_interval));
                stats_thread->run();
        }
 
-       InputStatsThread *input_stats_thread = NULL;
+       unique_ptr<InputStatsThread> input_stats_thread;
        if (!config.input_stats_file.empty()) {
                vector<Input*> inputs_no_refcount;
-               for (multimap<InputKey, InputWithRefcount>::iterator input_it = inputs.begin();
-                    input_it != inputs.end(); ++input_it) {
-                       inputs_no_refcount.push_back(input_it->second.input);
+               for (const auto &key_and_input_with_refcount : inputs) {
+                       inputs_no_refcount.push_back(key_and_input_with_refcount.second.input);
                }
 
-               input_stats_thread = new InputStatsThread(config.input_stats_file, config.input_stats_interval, inputs_no_refcount);
+               input_stats_thread.reset(new InputStatsThread(config.input_stats_file, config.input_stats_interval, inputs_no_refcount));
                input_stats_thread->run();
        }
 
@@ -595,21 +609,19 @@ start:
        err = clock_gettime(CLOCK_MONOTONIC, &serialize_start);
        assert(err != -1);
 
-       if (input_stats_thread != NULL) {
+       if (input_stats_thread != nullptr) {
                input_stats_thread->stop();
-               delete input_stats_thread;
+               input_stats_thread.reset();
        }
-       if (stats_thread != NULL) {
+       if (stats_thread != nullptr) {
                stats_thread->stop();
-               delete stats_thread;
+               stats_thread.reset();
        }
-       for (size_t i = 0; i < acceptors.size(); ++i) {
-               acceptors[i]->stop();
+       for (Acceptor *acceptor : acceptors) {
+               acceptor->stop();
        }
-       for (multimap<InputKey, InputWithRefcount>::iterator input_it = inputs.begin();
-            input_it != inputs.end();
-            ++input_it) {
-               input_it->second.input->stop();
+       for (const auto &key_and_input_with_refcount : inputs) {
+               key_and_input_with_refcount.second.input->stop();
        }
        servers->stop();
 
@@ -650,7 +662,7 @@ start:
        sprintf(buf, "%d", state_fd);
 
        for ( ;; ) {
-               execlp(argv0_canon, argv0_canon, config_filename_canon, "--state", buf, NULL);
+               execlp(argv0_canon, argv0_canon, config_filename_canon, "--state", buf, nullptr);
                open_logs(config.log_destinations);
                log_perror("execlp");
                log(ERROR, "re-exec of %s failed. Waiting 0.2 seconds and trying again...", argv0_canon);